config.name,config.backend.name,config.backend.version,config.backend._target_,config.backend.task,config.backend.library,config.backend.model_type,config.backend.model,config.backend.processor,config.backend.device,config.backend.device_ids,config.backend.seed,config.backend.inter_op_num_threads,config.backend.intra_op_num_threads,config.backend.model_kwargs.trust_remote_code,config.backend.no_weights,config.backend.device_map,config.backend.torch_dtype,config.backend.eval_mode,config.backend.to_bettertransformer,config.backend.low_cpu_mem_usage,config.backend.attn_implementation,config.backend.cache_implementation,config.backend.autocast_enabled,config.backend.autocast_dtype,config.backend.torch_compile,config.backend.torch_compile_target,config.backend.quantization_scheme,config.backend.quantization_config.bits,config.backend.quantization_config.version,config.backend.quantization_config.exllama_config.version,config.backend.quantization_config.exllama_config.max_input_len,config.backend.quantization_config.exllama_config.max_batch_size,config.backend.deepspeed_inference,config.backend.peft_type,config.scenario.name,config.scenario._target_,config.scenario.iterations,config.scenario.duration,config.scenario.warmup_runs,config.scenario.input_shapes.batch_size,config.scenario.input_shapes.num_choices,config.scenario.input_shapes.sequence_length,config.scenario.new_tokens,config.scenario.memory,config.scenario.latency,config.scenario.energy,config.scenario.generate_kwargs.max_new_tokens,config.scenario.generate_kwargs.min_new_tokens,config.launcher.name,config.launcher._target_,config.launcher.device_isolation,config.launcher.device_isolation_action,config.launcher.numactl,config.launcher.start_method,config.environment.cpu,config.environment.cpu_count,config.environment.cpu_ram_mb,config.environment.system,config.environment.machine,config.environment.platform,config.environment.processor,config.environment.python_version,config.environment.gpu,config.environment.gpu_count,config.environment.gpu_vram_mb,config.environment.optimum_benchmark_version,config.environment.optimum_benchmark_commit,config.environment.transformers_version,config.environment.transformers_commit,config.environment.accelerate_version,config.environment.accelerate_commit,config.environment.diffusers_version,config.environment.diffusers_commit,config.environment.optimum_version,config.environment.optimum_commit,config.environment.timm_version,config.environment.timm_commit,config.environment.peft_version,config.environment.peft_commit,report.load.memory.unit,report.load.memory.max_ram,report.load.memory.max_global_vram,report.load.memory.max_process_vram,report.load.memory.max_reserved,report.load.memory.max_allocated,report.load.latency.unit,report.load.latency.count,report.load.latency.total,report.load.latency.mean,report.load.latency.stdev,report.load.latency.p50,report.load.latency.p90,report.load.latency.p95,report.load.latency.p99,report.load.latency.values,report.load.throughput,report.load.energy.unit,report.load.energy.cpu,report.load.energy.ram,report.load.energy.gpu,report.load.energy.total,report.load.efficiency,report.prefill.memory.unit,report.prefill.memory.max_ram,report.prefill.memory.max_global_vram,report.prefill.memory.max_process_vram,report.prefill.memory.max_reserved,report.prefill.memory.max_allocated,report.prefill.latency.unit,report.prefill.latency.count,report.prefill.latency.total,report.prefill.latency.mean,report.prefill.latency.stdev,report.prefill.latency.p50,report.prefill.latency.p90,
report.prefill.latency.p95,report.prefill.latency.p99,report.prefill.latency.values,report.prefill.throughput.unit,report.prefill.throughput.value,report.prefill.energy.unit,report.prefill.energy.cpu,report.prefill.energy.ram,report.prefill.energy.gpu,report.prefill.energy.total,report.prefill.efficiency.unit,report.prefill.efficiency.value,report.decode.memory.unit,report.decode.memory.max_ram,report.decode.memory.max_global_vram,report.decode.memory.max_process_vram,report.decode.memory.max_reserved,report.decode.memory.max_allocated,report.decode.latency.unit,report.decode.latency.count,report.decode.latency.total,report.decode.latency.mean,report.decode.latency.stdev,report.decode.latency.p50,report.decode.latency.p90,report.decode.latency.p95,report.decode.latency.p99,report.decode.latency.values,report.decode.throughput.unit,report.decode.throughput.value,report.decode.energy.unit,report.decode.energy.cpu,report.decode.energy.ram,report.decode.energy.gpu,report.decode.energy.total,report.decode.efficiency.unit,report.decode.efficiency.value,report.per_token.memory,report.per_token.latency.unit,report.per_token.latency.count,report.per_token.latency.total,report.per_token.latency.mean,report.per_token.latency.stdev,report.per_token.latency.p50,report.per_token.latency.p90,report.per_token.latency.p95,report.per_token.latency.p99,report.per_token.latency.values,report.per_token.throughput.unit,report.per_token.throughput.value,report.per_token.energy,report.per_token.efficiency,report.traceback
4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,805.90848,576.585728,0.0,190.840832,172.878848,s,1,9.6677470703125,9.6677470703125,0.0,9.6677470703125,9.6677470703125,9.6677470703125,9.6677470703125,[9.6677470703125],,kWh,1.2146982633324417e-05,1.3295284910000473e-06,3.4505583159996633e-06,1.6927069440324126e-05,,MB,1280.774144,685.637632,0.0,270.532608,241.723904,s,27,0.2125899534225464,0.00787370197861283,0.00017251295326695587,0.007865856170654297,0.008026720237731934,0.008078514957427979,0.008435480556488037,"[0.008553824424743652, 0.008098655700683594, 0.0077506561279296875, 0.007865856170654297, 0.00786729621887207, 0.007907423973083496, 0.007889599800109863, 0.007921216011047363, 0.007902944087982177, 0.007899168014526368, 0.00783625602722168, 0.008023520469665528, 0.007960800170898438, 0.007738527774810791, 0.007924352169036865, 0.007593088150024414, 0.0077853121757507325, 0.007774655818939209, 0.007881919860839843, 0.007785215854644776, 0.007789152145385742, 0.008031519889831543, 0.007688064098358155, 0.007719168186187744, 0.007829823970794677, 0.00783945608139038, 
0.007732480049133301]",tokens/s,32513.295613088656,kWh,2.3230775440219274e-07,2.5619228690925995e-08,1.1819520248248746e-07,3.761221855756062e-07,tokens/kWh,680629885.227922,MB,1327.407104,698.220544,0.0,283.11552,241.726464,s,27,9.91493576049805,0.3672198429814092,0.006696256159797774,0.3693450927734375,0.3739226623535156,0.37650770263671873,0.37981774963378906,"[0.38061019897460935, 0.3704671325683594, 0.3738398132324219, 0.36969662475585935, 0.3693450927734375, 0.3700921325683594, 0.3637820129394531, 0.37404693603515626, 0.37244650268554685, 0.3720498962402344, 0.37103848266601563, 0.37063848876953126, 0.3675120544433594, 0.3644129333496094, 0.35702056884765626, 0.35551516723632814, 0.360065185546875, 0.37195132446289064, 0.37756231689453124, 0.3641151123046875, 0.3679649353027344, 0.3714158935546875, 0.3548122863769531, 0.36429095458984373, 0.3547358093261719, 0.364646240234375, 0.3608616638183594]",tokens/s,171.55935662003273,kWh,1.0534060715388937e-05,1.1617237802221817e-06,3.978613630517653e-06,1.5674398126128774e-05,tokens/kWh,4019293.0850072512,,s,1701,9.89941471624375,0.005819761737944588,0.0001886688976536225,0.005816959857940674,0.005996543884277344,0.0060546879768371585,0.006419456005096436,"[0.005668863773345947, 0.0060555520057678225, 0.006056320190429687, 0.006117280006408691, 0.006170720100402832, 0.006014976024627685, 0.00612502384185791, 0.005949024200439453, 0.0061075520515441895, 0.005990816116333008, 0.006059616088867187, 0.006058432102203369, 0.006017248153686524, 0.006116415977478027, 0.006003615856170655, 0.006039360046386719, 0.00604310417175293, 0.006029952049255371, 0.0059985918998718265, 0.00609222412109375, 0.006029888153076172, 0.006059328079223633, 0.005988255977630615, 0.005978911876678467, 0.005910751819610596, 0.006004511833190918, 0.0060037441253662106, 0.006054272174835205, 0.005959616184234619, 0.006013440132141113, 0.006152575969696045, 0.0060065598487854, 0.006008831977844238, 0.006032896041870117, 0.005994847774505615, 0.005908256053924561, 0.0059786558151245115, 0.006149983882904052, 0.005980000019073486, 0.00601859188079834, 0.006173151969909668, 0.006012544155120849, 0.005926976203918457, 0.006093567848205567, 0.0059649920463562016, 0.006038047790527344, 0.005996543884277344, 0.006170944213867187, 0.006047423839569092, 0.006034815788269043, 0.0060863041877746585, 0.0060917439460754395, 0.006309823989868164, 0.006012351989746094, 0.005980800151824952, 0.005963295936584473, 0.006019552230834961, 0.005892096042633056, 0.006010272026062012, 0.005889664173126221, 0.006323040008544922, 0.005989759922027588, 0.00603769588470459, 0.005718016147613526, 0.005996543884277344, 0.006046720027923584, 0.00600105619430542, 0.005937600135803222, 0.005967936038970947, 0.005871712207794189, 0.005976064205169678, 0.006090752124786377, 0.005967232227325439, 0.005868351936340332, 0.005977920055389404, 0.005857151985168457, 0.0059845118522644045, 0.005861472129821777, 0.005885791778564453, 0.006602880001068115, 0.006258495807647705, 0.006025216102600098, 0.005859583854675293, 0.005984000205993652, 0.0059572157859802246, 0.00588431978225708, 0.005860735893249511, 0.00581606388092041, 0.005878079891204834, 0.005794784069061279, 0.005902016162872314, 0.005803167819976807, 0.00589731216430664, 0.0058895998001098635, 0.006150335788726807, 0.005883808135986328, 0.005814271926879883, 0.005808127880096436, 0.00578326416015625, 0.005866047859191895, 0.005842879772186279, 0.005867136001586914, 0.005626016139984131, 0.005680319786071778, 0.00560752010345459, 0.00566710376739502, 
0.005756864070892334, 0.00563046407699585, 0.005700831890106201, 0.005518271923065186, 0.005833759784698487, 0.005675839900970459, 0.005680736064910889, 0.005665184020996094, 0.005597280025482178, 0.005785632133483887, 0.005743616104125977, 0.005750847816467285, 0.005833280086517334, 0.005983712196350098, 0.006265632152557373, 0.005979551792144776, 0.005874271869659424, 0.0058104958534240725, 0.005947167873382568, 0.005836544036865235, 0.005447711944580078, 0.005898623943328857, 0.005857312202453614, 0.005900063991546631, 0.005845215797424317, 0.005847040176391601, 0.005886079788208008, 0.005861248016357422, 0.005916543960571289, 0.005770431995391846, 0.005906720161437989, 0.005889823913574219, 0.006341248035430908, 0.006041600227355957, 0.006020544052124023, 0.00582700777053833, 0.005833312034606934, 0.0059060797691345215, 0.005929344177246094, 0.0058631677627563475, 0.005827583789825439, 0.005891359806060791, 0.005823391914367676, 0.00604857587814331, 0.006009920120239258, 0.005952288150787353, 0.005929471969604493, 0.005975552082061767, 0.005883520126342774, 0.006062975883483887, 0.005936511993408203, 0.00593126392364502, 0.0059205121994018554, 0.005945631980895996, 0.006012191772460937, 0.006045760154724121, 0.005957664012908936, 0.005976416110992431, 0.006111519813537598, 0.005997983932495117, 0.005959968090057373, 0.0060210561752319335, 0.005906816005706787, 0.005971615791320801, 0.005902080059051513, 0.006076960086822509, 0.006012928009033203, 0.00596998405456543, 0.005877888202667237, 0.005925824165344238, 0.00587667179107666, 0.006025216102600098, 0.005971072196960449, 0.0059073281288146975, 0.0058122239112854005, 0.005875423908233642, 0.00590447998046875, 0.005902207851409912, 0.00586579179763794, 0.005787231922149658, 0.005802400112152099, 0.005816319942474365, 0.006041120052337647, 0.005663455963134766, 0.005840896129608154, 0.005844192028045654, 0.0060546879768371585, 0.005937151908874512, 0.005848735809326172, 0.005749087810516357, 0.005858687877655029, 0.005849728107452393, 0.00588595199584961, 0.0058206400871276855, 0.005827424049377442, 0.005815231800079345, 0.005785344123840332, 0.005867775917053223, 0.005826848030090332, 0.0058542718887329105, 0.005905312061309814, 0.005944064140319824, 0.005979135990142822, 0.00593503999710083, 0.005974080085754395, 0.005966176033020019, 0.0059043197631835935, 0.005909279823303222, 0.005792352199554443, 0.005771615982055664, 0.0058429441452026365, 0.005801983833312988, 0.005808127880096436, 0.0059351038932800295, 0.005816192150115967, 0.00578163194656372, 0.00586572790145874, 0.006419456005096436, 0.005933760166168213, 0.005927199840545654, 0.005936927795410156, 0.005959648132324219, 0.005855167865753174, 0.005745823860168457, 0.006020287990570069, 0.005836544036865235, 0.005865471839904785, 0.005767199993133545, 0.00573641586303711, 0.005767295837402344, 0.0057916159629821775, 0.005713823795318603, 0.005847136020660401, 0.005824416160583496, 0.005775263786315918, 0.005912992000579834, 0.005791520118713379, 0.005862624168395996, 0.005853024005889893, 0.005931968212127685, 0.005840447902679444, 0.005870336055755615, 0.005736127853393554, 0.005752831935882568, 0.005727392196655273, 0.0058083200454711915, 0.005541247844696045, 0.005880064010620118, 0.00585862398147583, 0.005783775806427002, 0.005800576210021973, 0.005787199974060059, 0.005856768131256103, 0.005899360179901123, 0.005830527782440186, 0.005773312091827393, 0.005812160015106201, 0.005797952175140381, 0.005795839786529541, 0.005819712162017822, 0.005763360023498535, 
0.005894720077514648, 0.007110496044158935, 0.005941247940063477, 0.005803584098815918, 0.005831071853637695, 0.005809696197509766, 0.005822976112365722, 0.005744287967681885, 0.005960224151611328, 0.0058072319030761715, 0.0058429760932922365, 0.005923456192016602, 0.005777440071105957, 0.00576416015625, 0.005738751888275146, 0.00573686408996582, 0.0057285442352294925, 0.0058692159652709965, 0.005956992149353028, 0.006108128070831299, 0.005734399795532226, 0.005773312091827393, 0.005760735988616943, 0.0057244482040405276, 0.005780735969543457, 0.005815040111541748, 0.005808127880096436, 0.005770624160766602, 0.005831103801727295, 0.005720064163208007, 0.0057325439453125, 0.005800159931182861, 0.0059012799263000485, 0.005819200038909912, 0.005842016220092773, 0.005798719882965088, 0.005736735820770264, 0.005817344188690185, 0.005794623851776123, 0.005844992160797119, 0.006162367820739746, 0.006166592121124268, 0.006047455787658691, 0.005912864208221435, 0.0058059201240539555, 0.005857439994812011, 0.005889664173126221, 0.0058886399269104, 0.005576064109802246, 0.0058561921119689945, 0.005865151882171631, 0.005844992160797119, 0.005834752082824707, 0.005816095829010009, 0.0059556798934936525, 0.005879936218261719, 0.005862751960754395, 0.005827199935913086, 0.005867551803588867, 0.005863423824310303, 0.005904607772827149, 0.005914400100708008, 0.005980160236358643, 0.006047552108764649, 0.006082592010498047, 0.0059028158187866215, 0.005970848083496094, 0.005972767829895019, 0.0059985599517822265, 0.005861408233642578, 0.005926271915435791, 0.005792064189910889, 0.0058429760932922365, 0.005855519771575928, 0.006055647850036621, 0.005859615802764892, 0.005787295818328857, 0.005914912223815918, 0.006011040210723877, 0.005942912101745606, 0.005828288078308106, 0.005943903923034668, 0.005830656051635743, 0.005932672023773193, 0.005888383865356445, 0.005945663928985595, 0.005852863788604736, 0.005884064197540283, 0.005909887790679932, 0.0058371200561523435, 0.00594048023223877, 0.00582144021987915, 0.0060087361335754395, 0.0058221759796142575, 0.005851456165313721, 0.005856991767883301, 0.0059498238563537595, 0.005865439891815185, 0.005779359817504883, 0.005847104072570801, 0.005775519847869873, 0.005764895915985107, 0.005802207946777343, 0.00574886417388916, 0.0057439360618591305, 0.0057060480117797855, 0.005769023895263672, 0.005720287799835205, 0.0057794561386108395, 0.0056893758773803715, 0.005768383979797363, 0.005462175846099854, 0.005757952213287353, 0.005720032215118408, 0.005769919872283935, 0.005637311935424804, 0.005763904094696045, 0.0056415038108825685, 0.005689184188842774, 0.005729152202606201, 0.005825984001159668, 0.005780032157897949, 0.005750976085662842, 0.005891871929168701, 0.0056909117698669435, 0.005782112121582032, 0.005707520008087158, 0.0057140798568725585, 0.005719808101654053, 0.005695744037628174, 0.005728256225585937, 0.0056930241584777835, 0.005818560123443604, 0.005671135902404785, 0.005693439960479736, 0.005644447803497314, 0.005597280025482178, 0.005688543796539307, 0.005671648025512696, 0.00570681619644165, 0.005643040180206299, 0.005639359951019287, 0.005710624217987061, 0.006221824169158936, 0.005830304145812988, 0.00571011209487915, 0.005742656230926514, 0.005761023998260498, 0.005826560020446778, 0.005710015773773194, 0.005740352153778076, 0.005697535991668701, 0.005740223884582519, 0.0059556798934936525, 0.005833216190338135, 0.005879039764404297, 0.005839392185211182, 0.005875135898590088, 0.005753471851348877, 0.005844863891601562, 0.005818079948425293, 
0.005820703983306885, 0.005831808090209961, 0.00574883222579956, 0.005839935779571533, 0.0057955517768859864, 0.005907551765441894, 0.005773920059204102, 0.0058739838600158694, 0.005818367958068848, 0.005912576198577881, 0.005791999816894531, 0.005795872211456299, 0.005866943836212158, 0.005591040134429932, 0.006035295963287354, 0.006346911907196045, 0.005912479877471924, 0.005826464176177978, 0.005815648078918457, 0.005868447780609131, 0.005842624187469482, 0.005822624206542969, 0.00594979190826416, 0.005842144012451172, 0.0058754878044128414, 0.005803808212280274, 0.005888864040374756, 0.005803296089172363, 0.005978335857391357, 0.005825183868408203, 0.005846784114837646, 0.0058037757873535155, 0.005767776012420654, 0.0060189437866210935, 0.005849055767059326, 0.006010655879974365, 0.005982367992401123, 0.0059203200340270995, 0.005834911823272705, 0.00585152006149292, 0.005869919776916504, 0.005803679943084717, 0.005699584007263184, 0.0057794561386108395, 0.005832704067230224, 0.005791776180267334, 0.005899871826171875, 0.005828991889953613, 0.006364480018615722, 0.005956287860870361, 0.006086656093597412, 0.006076223850250244, 0.0060351681709289555, 0.005954336166381836, 0.005802688121795655, 0.005884928226470947, 0.005865471839904785, 0.0058566718101501465, 0.005914720058441162, 0.005959904193878174, 0.005971807956695557, 0.0060395197868347164, 0.0063836159706115725, 0.006332575798034668, 0.005898560047149658, 0.005914400100708008, 0.006017343997955322, 0.005946303844451904, 0.006036064147949219, 0.0059325118064880375, 0.006075583934783936, 0.0059452481269836426, 0.00595743989944458, 0.005976064205169678, 0.005901504039764405, 0.0059236478805542, 0.005585696220397949, 0.005925087928771972, 0.006000927925109863, 0.005905759811401367, 0.006041984081268311, 0.005914624214172363, 0.006039552211761475, 0.006098048210144043, 0.00594377613067627, 0.005886688232421875, 0.00588153600692749, 0.005895296096801758, 0.0058542399406433104, 0.005883168220520019, 0.005843520164489746, 0.005905759811401367, 0.005834464073181152, 0.005861536026000976, 0.005901088237762451, 0.005865471839904785, 0.005869120121002197, 0.005759424209594727, 0.005992447853088379, 0.005843296051025391, 0.00590780782699585, 0.005861023902893067, 0.005849760055541992, 0.006048799991607666, 0.005896736145019532, 0.005928864002227783, 0.00586953592300415, 0.005866047859191895, 0.005806272029876709, 0.005961023807525635, 0.005964000225067139, 0.005935391902923584, 0.005779679775238037, 0.005881984233856202, 0.005890783786773682, 0.005891007900238037, 0.005859263896942139, 0.005918432235717774, 0.005939551830291748, 0.0058429441452026365, 0.005867360115051269, 0.005783711910247803, 0.005838784217834472, 0.005832511901855469, 0.005904640197753906, 0.005887743949890137, 0.005894495964050293, 0.0060415358543396, 0.006033376216888427, 0.005946784019470215, 0.0059622077941894535, 0.005984384059906006, 0.005937503814697266, 0.005865119934082032, 0.005957632064819336, 0.005884064197540283, 0.005968832015991211, 0.005835680007934571, 0.005988351821899414, 0.005548895835876465, 0.0058466877937316895, 0.005892447948455811, 0.005812160015106201, 0.006039616107940674, 0.00598038387298584, 0.006002528190612793, 0.005920703887939453, 0.0059443840980529785, 0.005895103931427002, 0.005875296115875244, 0.005927328109741211, 0.006000639915466309, 0.005945343971252442, 0.00586137580871582, 0.005893951892852784, 0.005851327896118164, 0.005853055953979492, 0.005912992000579834, 0.005879424095153809, 0.005996863842010498, 0.00587718391418457, 
0.006054240226745606, 0.005920415878295899, 0.005939487934112549, 0.005902400016784668, 0.006087711811065674, 0.005872223854064941, 0.005872191905975342, 0.0059678077697753905, 0.005881408214569092, 0.006485919952392578, 0.005968287944793701, 0.005922815799713135, 0.00585968017578125, 0.005923871994018555, 0.005859968185424805, 0.005904223918914795, 0.005851136207580566, 0.005842527866363525, 0.005860191822052002, 0.005893856048583984, 0.006098944187164307, 0.005873568058013916, 0.006172607898712159, 0.00587340784072876, 0.005802559852600098, 0.005762432098388672, 0.005800415992736816, 0.005855231761932373, 0.0056986560821533205, 0.005780416011810303, 0.005725632190704346, 0.005798431873321533, 0.0059424638748168945, 0.005825344085693359, 0.005849088191986084, 0.005912576198577881, 0.005863423824310303, 0.0057504639625549315, 0.005881728172302246, 0.005740992069244385, 0.005834752082824707, 0.005564191818237305, 0.005837024211883545, 0.005891071796417236, 0.005876736164093017, 0.005922815799713135, 0.00588105583190918, 0.005897056102752685, 0.0058733119964599605, 0.005855519771575928, 0.005963776111602783, 0.005910528182983398, 0.0059045119285583494, 0.00587772798538208, 0.006205376148223877, 0.005916639804840088, 0.006105088233947754, 0.005891871929168701, 0.005797855854034424, 0.0058510079383850095, 0.005822976112365722, 0.005974175930023193, 0.005850431919097901, 0.005925280094146728, 0.00579801607131958, 0.005908351898193359, 0.005932767868041992, 0.005804224014282227, 0.005840991973876953, 0.005826560020446778, 0.006146240234375, 0.005860447883605957, 0.005946080207824707, 0.005808127880096436, 0.005968063831329346, 0.005818272113800049, 0.005899487972259521, 0.005995359897613526, 0.006057631969451904, 0.005906527996063233, 0.005916704177856445, 0.005892096042633056, 0.0059246401786804195, 0.006137951850891114, 0.006026463985443115, 0.005840095996856689, 0.005908224105834961, 0.005746687889099121, 0.005772319793701172, 0.005884768009185791, 0.0057338237762451175, 0.0058141441345214845, 0.00585641622543335, 0.005887648105621338, 0.005837152004241944, 0.005810175895690918, 0.00574019193649292, 0.005760992050170898, 0.005795008182525635, 0.00580022382736206, 0.005847008228302002, 0.005778016090393067, 0.005784863948822021, 0.00585209608078003, 0.005467552185058594, 0.005862175941467285, 0.0058982081413269044, 0.005857120037078858, 0.0059348797798156735, 0.005888127803802491, 0.005894400119781494, 0.00585862398147583, 0.005896895885467529, 0.005826367855072022, 0.005853375911712646, 0.00586956787109375, 0.0058585600852966305, 0.005909247875213623, 0.006024384021759034, 0.005963647842407226, 0.0058930559158325195, 0.0059351038932800295, 0.005850656032562256, 0.005865439891815185, 0.005864287853240967, 0.005885056018829346, 0.00583516788482666, 0.005895743846893311, 0.00601366376876831, 0.005848095893859863, 0.00574883222579956, 0.005734623908996582, 0.005734272003173828, 0.005885759830474854, 0.0058150081634521485, 0.005699679851531982, 0.005744639873504639, 0.005776576042175293, 0.005931839942932129, 0.005963776111602783, 0.005949088096618652, 0.005826784133911133, 0.005900288105010986, 0.005843135833740234, 0.005868480205535888, 0.005810336112976074, 0.005764256000518799, 0.005811071872711181, 0.005803103923797607, 0.005905632019042969, 0.0058046717643737795, 0.005936736106872558, 0.005849023818969726, 0.0058575358390808106, 0.005809567928314209, 0.005798719882965088, 0.0057875199317932126, 0.005745920181274414, 0.005951295852661133, 0.006171584129333496, 0.006076416015625, 
0.005985439777374268, 0.0060095682144165035, 0.005940512180328369, 0.006025760173797607, 0.006031199932098389, 0.0059818878173828124, 0.00570959997177124, 0.005943039894104004, 0.005871039867401123, 0.00595747184753418, 0.006007264137268067, 0.005971807956695557, 0.005959936141967773, 0.005962111949920654, 0.005918432235717774, 0.005883552074432373, 0.0058496642112731935, 0.005861087799072266, 0.005921152114868164, 0.005919007778167724, 0.005939167976379394, 0.005906176090240478, 0.005754432201385498, 0.005862016201019287, 0.005732287883758545, 0.00649126386642456, 0.005841824054718017, 0.0058141121864318844, 0.005678592205047607, 0.005714431762695312, 0.0057118721008300784, 0.005745696067810059, 0.005923423767089844, 0.005909887790679932, 0.0059361281394958495, 0.005865471839904785, 0.006023136138916016, 0.0057569599151611325, 0.005733568191528321, 0.005690176010131836, 0.005739744186401367, 0.0058007359504699705, 0.005848288059234619, 0.005782303810119629, 0.005761151790618897, 0.005775040149688721, 0.005750976085662842, 0.005908480167388916, 0.005840703964233398, 0.005785727977752685, 0.005695551872253418, 0.0059699201583862304, 0.005921951770782471, 0.005818399906158447, 0.005812928199768066, 0.005760672092437744, 0.005726272106170654, 0.0056665921211242675, 0.0056817917823791505, 0.005627903938293457, 0.005691487789154053, 0.0057153282165527345, 0.005708320140838623, 0.005695487976074219, 0.00582371187210083, 0.005653120040893554, 0.005766816139221191, 0.0057391037940979, 0.005726111888885498, 0.005390304088592529, 0.005633056163787842, 0.005581344127655029, 0.0056277761459350584, 0.0056143999099731445, 0.0056228160858154295, 0.005677792072296142, 0.005689343929290771, 0.005996096134185791, 0.005796031951904297, 0.005775775909423828, 0.006018911838531494, 0.00591212797164917, 0.005869440078735352, 0.005808703899383545, 0.005834752082824707, 0.005854688167572022, 0.0058783040046691895, 0.005781792163848877, 0.005865407943725586, 0.005809088230133056, 0.005777279853820801, 0.005665631771087647, 0.005680863857269287, 0.005758560180664062, 0.005707903861999512, 0.0057738561630249026, 0.005731616020202637, 0.005787839889526367, 0.005943999767303467, 0.005924863815307617, 0.005748576164245606, 0.005863584041595459, 0.005818496227264405, 0.005908256053924561, 0.005895808219909668, 0.005826367855072022, 0.005869503974914551, 0.006029920101165771, 0.005888063907623291, 0.005893887996673584, 0.005849247932434082, 0.005787231922149658, 0.005872191905975342, 0.005824512004852295, 0.005854976177215576, 0.005806111812591553, 0.005835008144378662, 0.005845248222351074, 0.005670239925384521, 0.005740928173065186, 0.005650432109832764, 0.005769216060638428, 0.005758975982666016, 0.005737599849700928, 0.005716288089752197, 0.005703455924987793, 0.005687808036804199, 0.005670559883117676, 0.005683839797973633, 0.005627903938293457, 0.005597343921661377, 0.005654623985290527, 0.005397984027862549, 0.005714560031890869, 0.0056679039001464844, 0.0056999998092651365, 0.005619647979736328, 0.005622208118438721, 0.005609087944030762, 0.0056770238876342775, 0.005790207862854004, 0.005730207920074463, 0.005791327953338623, 0.005740960121154785, 0.0057077760696411135, 0.0056629118919372555, 0.005629759788513183, 0.005644000053405761, 0.005716256141662598, 0.005900000095367432, 0.005679391860961914, 0.00584659194946289, 0.005746816158294678, 0.005685567855834961, 0.005686912059783935, 0.005613823890686035, 0.005714047908782959, 0.005700607776641845, 0.005690624237060547, 0.005682943820953369, 
0.005656576156616211, 0.005627903938293457, 0.005715839862823487, 0.005851168155670166, 0.005793056011199951, 0.0057842879295349125, 0.005939455986022949, 0.005828767776489258, 0.005809855937957764, 0.005765183925628662, 0.005810080051422119, 0.005708096027374268, 0.005740255832672119, 0.005652480125427246, 0.005603328227996827, 0.005605375766754151, 0.005588992118835449, 0.005584959983825684, 0.005484000205993652, 0.0055116481781005856, 0.0055289921760559085, 0.0054915518760681155, 0.005511168003082275, 0.005560063838958741, 0.00567193603515625, 0.0055203838348388675, 0.00549232006072998, 0.005491072177886963, 0.005447872161865234, 0.005617504119873047, 0.005658624172210694, 0.005535264015197754, 0.005560800075531006, 0.005513216018676758, 0.005467807769775391, 0.005358016014099121, 0.005595071792602539, 0.005544000148773194, 0.005533696174621582, 0.005526912212371826, 0.005479296207427978, 0.0056863040924072265, 0.0056200637817382815, 0.005596640110015869, 0.005555103778839112, 0.00552345609664917, 0.005646336078643799, 0.0057257599830627445, 0.0054601278305053715, 0.005431583881378174, 0.005443583965301513, 0.005480447769165039, 0.005486591815948487, 0.00548035192489624, 0.005570303916931152, 0.0056995201110839845, 0.005734816074371338, 0.005624095916748047, 0.0055604162216186526, 0.00566383981704712, 0.005581344127655029, 0.0055987839698791505, 0.005662144184112549, 0.005725183963775635, 0.005600736141204834, 0.00558739185333252, 0.005562751770019531, 0.005575776100158691, 0.005655168056488037, 0.005587039947509765, 0.005631968021392823, 0.0056728959083557125, 0.005660672187805176, 0.005699391841888428, 0.005593535900115967, 0.0055922880172729495, 0.00567903995513916, 0.005616223812103272, 0.005832704067230224, 0.005590400218963623, 0.005646975994110107, 0.005636159896850586, 0.005867040157318116, 0.0057348799705505375, 0.005757919788360596, 0.0057058238983154295, 0.005591936111450195, 0.0055784001350402835, 0.005935455799102783, 0.0056442880630493165, 0.0058464322090148926, 0.005704192161560059, 0.005779551982879639, 0.005732351779937744, 0.005836800098419189, 0.005749055862426758, 0.005756608009338379, 0.005745664119720459, 0.00539792013168335, 0.005757311820983887, 0.0056566400527954105, 0.00574294376373291, 0.005654335975646973, 0.005609087944030762, 0.00564467191696167, 0.005668863773345947, 0.005670911788940429, 0.005703680038452149, 0.005693439960479736, 0.005617248058319092, 0.005652959823608398, 0.005627488136291504, 0.005577055931091308, 0.005629248142242431, 0.005536448001861572, 0.005541920185089111, 0.005574624061584473, 0.005548351764678955, 0.005635295867919922, 0.005650911808013916, 0.005622015953063965, 0.005704544067382813, 0.005647264003753662, 0.005707007884979248, 0.005583295822143555, 0.005626175880432129, 0.005670207977294922, 0.0056629438400268555, 0.005669343948364258, 0.005588992118835449, 0.005693376064300537, 0.005774432182312011, 0.005722815990447998, 0.005708064079284668, 0.005678368091583252, 0.005736639976501465, 0.005863967895507812, 0.005828800201416015, 0.005947391986846923, 0.005789472103118897, 0.0058035202026367185, 0.005792416095733642, 0.005703616142272949, 0.005717951774597168, 0.00570959997177124, 0.005666431903839112, 0.005782112121582032, 0.005902175903320313, 0.005812384128570556, 0.005703551769256592, 0.005691808223724365, 0.005658559799194336, 0.005732128143310547, 0.005887904167175293, 0.005899903774261475, 0.005912831783294678, 0.005852384090423584, 0.005804160118103027, 0.005879968166351318, 0.005769567966461182, 0.005798431873321533, 
0.005467999935150147, 0.00580460786819458, 0.005826591968536377, 0.0058100161552429195, 0.005887807846069336, 0.0059658241271972655, 0.005840896129608154, 0.0058830718994140625, 0.005685184001922607, 0.005848991870880127, 0.005768256187438965, 0.005785727977752685, 0.005800864219665527, 0.005870463848114013, 0.005926112174987793, 0.005902175903320313, 0.0058336639404296875, 0.005848480224609375, 0.005985888004302978, 0.005929984092712402, 0.00588595199584961, 0.005815392017364502, 0.005850016117095947, 0.0060144000053405765, 0.005904960155487061, 0.005909632205963135, 0.005902783870697021, 0.005914815902709961, 0.005811456203460693, 0.005792191982269287, 0.0058026561737060545, 0.0058549118041992185, 0.005949632167816162, 0.005818079948425293, 0.005846975803375244, 0.005952159881591797, 0.00589961576461792, 0.006009215831756592, 0.005937151908874512, 0.005954720020294189, 0.005890944004058838, 0.005981535911560059, 0.005843584060668945, 0.005830048084259033, 0.0058059201240539555, 0.005855072021484375, 0.006120416164398193, 0.005857120037078858, 0.005806272029876709, 0.005829887866973877, 0.005804704189300537, 0.005768256187438965, 0.006780032157897949, 0.005895296096801758, 0.0059112319946289064, 0.005944992065429687, 0.005984416007995605, 0.006238207817077636, 0.005970111846923828, 0.00587775993347168, 0.006025216102600098, 0.00585433578491211, 0.005972767829895019, 0.005580383777618408, 0.005951903820037842, 0.005948927879333496, 0.005899040222167968, 0.005899839878082276, 0.0058471999168396, 0.00582860803604126, 0.00591871976852417, 0.005854847908020019, 0.005869952201843262, 0.005797152042388916, 0.005862112045288086, 0.005898367881774902, 0.005856351852416992, 0.005972383975982666, 0.005832704067230224, 0.005882239818572998, 0.005802271842956543, 0.005830368041992187, 0.0057118721008300784, 0.005745791912078858, 0.00571827220916748, 0.005759615898132325, 0.005834752082824707, 0.00591267204284668, 0.005814271926879883, 0.0058995518684387205, 0.005804224014282227, 0.005719808101654053, 0.005853824138641358, 0.005879871845245362, 0.0058939199447631836, 0.005894368171691895, 0.005897568225860596, 0.006126368045806884, 0.0067132158279418945, 0.005958655834197998, 0.006500736236572265, 0.0064293122291564945, 0.006507808208465576, 0.006011040210723877, 0.005826399803161621, 0.0058856639862060545, 0.00585152006149292, 0.006146080017089843, 0.006062687873840332, 0.006066271781921387, 0.005885983943939209, 0.005988224029541016, 0.0061584959030151366, 0.007539616107940674, 0.007209919929504395, 0.007191743850708008, 0.006025919914245606, 0.005933087825775146, 0.005809311866760254, 0.005782464027404785, 0.005869152069091797, 0.005738463878631592, 0.005814847946166993, 0.005817311763763428, 0.005786623954772949, 0.005691648006439209, 0.005445024013519287, 0.005770912170410156, 0.005803840160369873, 0.005773727893829346, 0.005753151893615723, 0.005701759815216065, 0.005689343929290771, 0.005756927967071533, 0.005761375904083252, 0.005750432014465332, 0.0056415038108825685, 0.005763904094696045, 0.00571721601486206, 0.00572822380065918, 0.005758975982666016, 0.005706463813781738, 0.005677055835723877, 0.005591040134429932, 0.005641215801239013, 0.005732800006866455, 0.005677631855010986, 0.00567414379119873, 0.005585536003112793, 0.005560544013977051, 0.005597184181213379, 0.005696576118469238, 0.005721183776855468, 0.005650271892547607, 0.005590047836303711, 0.005556287765502929, 0.00551958417892456, 0.005622687816619873, 0.005584415912628174, 0.005631840229034424, 0.005718431949615479, 
0.005742591857910156, 0.005854752063751221, 0.0059028158187866215, 0.005867392063140869, 0.005830880165100098, 0.005908383846282959, 0.005943103790283203, 0.006131872177124024, 0.005926623821258545, 0.00598252820968628, 0.005808127880096436, 0.005951648235321045, 0.0058997759819030765, 0.006032063961029053, 0.005940512180328369, 0.005888031959533691, 0.005816671848297119, 0.005783552169799804, 0.005894144058227539, 0.005881855964660645, 0.005951488018035888, 0.00586572790145874, 0.005895936012268067, 0.005814047813415527, 0.0058496642112731935, 0.005909632205963135, 0.005943840026855469, 0.00576639986038208, 0.005551008224487305, 0.005867487907409668, 0.005971519947052002, 0.0057708477973937985, 0.0057775359153747555, 0.005775584220886231, 0.005757120132446289, 0.005889855861663819, 0.005880576133728027, 0.005857151985168457, 0.005915679931640625, 0.005841792106628418, 0.00586137580871582, 0.005981855869293213, 0.005960031986236573, 0.005945343971252442, 0.005901504039764405, 0.005952544212341308, 0.005860383987426758, 0.0059500160217285155, 0.005887648105621338, 0.005835296154022217, 0.005763296127319336, 0.005756800174713135, 0.00574067211151123, 0.005789279937744141, 0.005769408226013183, 0.005734464168548584, 0.0057589120864868166, 0.00565235185623169, 0.0057366080284118656, 0.005799232006072998, 0.005804704189300537, 0.005756159782409668, 0.0059911680221557614, 0.005942399978637695, 0.005886847972869873, 0.0058853759765625, 0.00586195182800293, 0.005922815799713135, 0.005769216060638428, 0.0057160000801086425, 0.0057999038696289066, 0.005910816192626953, 0.005883615970611572, 0.005824416160583496, 0.005797311782836914, 0.005874335765838623, 0.005840479850769043, 0.005841567993164063, 0.005801439762115479, 0.0057387838363647465, 0.005751904010772705, 0.005724256038665772, 0.005775328159332275, 0.005816800117492676, 0.0057859840393066405, 0.0057794561386108395, 0.005791903972625733, 0.005809887886047364, 0.005808256149291992, 0.006022175788879394, 0.005933599948883057, 0.006048255920410156, 0.005962111949920654, 0.005977663993835449, 0.006057983875274659, 0.006000800132751465, 0.0059807682037353515, 0.005903552055358887, 0.005806655883789063, 0.005803967952728271, 0.005854527950286865, 0.0059658241271972655, 0.005822656154632569, 0.006043263912200928, 0.005882527828216553, 0.006792768001556397, 0.00710313606262207, 0.008067071914672852, 0.006776832103729248, 0.006852608203887939, 0.005759007930755615, 0.0057647361755371095, 0.00577132797241211, 0.005761312007904053, 0.005678431987762451, 0.005786272048950195, 0.005699007987976074, 0.005884479999542236, 0.005902527809143066, 0.005987775802612305, 0.005845600128173828, 0.005681215763092041, 0.005696512222290039, 0.005571296215057373, 0.005761375904083252, 0.005559743881225586, 0.0055584959983825686, 0.00556441593170166, 0.005949440002441406, 0.005623807907104492, 0.005582528114318848, 0.00559065580368042, 0.005704383850097656, 0.005846271991729737, 0.0058635520935058595, 0.005689311981201172, 0.0056533761024475096, 0.00556009578704834, 0.005744927883148193, 0.005883232116699218, 0.006031744003295899, 0.006100992202758789, 0.00590828800201416, 0.005787007808685303, 0.005725120067596436, 0.00567299222946167, 0.005592800140380859, 0.005679327964782715, 0.005592544078826905, 0.005808576107025147, 0.005608640193939209, 0.005534527778625488, 0.005629856109619141, 0.005603424072265625, 0.005320384025573731, 0.005567168235778809, 0.005494815826416016, 0.0055207362174987794, 0.005546656131744385, 0.005631999969482422, 0.005670911788940429, 
0.005701632022857666, 0.005787648200988769, 0.005703680038452149, 0.005674560070037842, 0.005711999893188477, 0.005658944129943847, 0.005619743824005127, 0.005520864009857178, 0.005472544193267822, 0.00547814416885376, 0.0054481601715087894, 0.005521535873413086, 0.0056431999206542965, 0.005526463985443115, 0.005478400230407715, 0.0054579200744628905, 0.005462016105651855, 0.005523392200469971, 0.005599296092987061, 0.0056146240234375, 0.0057002239227294926, 0.005633535861968994, 0.005757760047912597, 0.005634079933166504, 0.005697535991668701, 0.0056128320693969724, 0.00567574405670166, 0.0055970559120178225, 0.005607552051544189, 0.005873663902282715, 0.0057140798568725585, 0.005640160083770752, 0.0055595521926879886, 0.005626495838165283, 0.005652639865875244, 0.005685088157653809, 0.005626880168914795, 0.005530623912811279, 0.005553664207458496, 0.005575168132781983, 0.005554175853729248, 0.005603007793426514, 0.005499008178710937, 0.0054858880043029785, 0.005660927772521972, 0.005606016159057617, 0.005677311897277832, 0.005729663848876953, 0.00581056022644043, 0.005707647800445557, 0.00581440019607544, 0.005764287948608398, 0.00577785587310791, 0.005726592063903808, 0.005705567836761474, 0.005802271842956543, 0.005416512012481689, 0.005804096221923828, 0.00588265609741211, 0.005859039783477783, 0.0057411518096923825, 0.005654016017913818, 0.00568339204788208, 0.005730303764343261, 0.005946527957916259, 0.005750624179840088, 0.00573747205734253, 0.005675007820129394, 0.005699584007263184, 0.005693439960479736, 0.005658912181854248, 0.005807328224182129, 0.005644351959228516, 0.005724192142486572, 0.005629792213439942, 0.00565119981765747, 0.005828320026397705, 0.005897535800933838, 0.005938144207000733, 0.005730112075805664, 0.005752287864685059, 0.0056360321044921875, 0.005881728172302246, 0.005843679904937744, 0.005940383911132813, 0.005755743980407715, 0.005683199882507324, 0.0057448320388793946, 0.005795328140258789, 0.0057794561386108395, 0.005677279949188233, 0.005670080184936524, 0.005747615814208984, 0.005645631790161133, 0.005683360099792481, 0.006100800037384033, 0.006034016132354736, 0.006068607807159424, 0.005781248092651368, 0.005699264049530029, 0.005739840030670166, 0.005894559860229492, 0.006314591884613037, 0.005732384204864502, 0.005789696216583252, 0.005772543907165527, 0.00583139181137085, 0.0058410558700561525, 0.005734111785888672, 0.00579801607131958, 0.005715360164642334, 0.005840767860412597, 0.005777503967285156, 0.005630752086639404, 0.0056616001129150394, 0.005641151905059814, 0.005799136161804199, 0.005726975917816162, 0.005811647891998291, 0.0053002238273620605, 0.005553343772888184, 0.005764095783233642, 0.005551040172576904, 0.005547935962677002, 0.005501952171325684, 0.005525216102600098, 0.00556057596206665, 0.005511295795440674, 0.005565728187561035, 0.005601727962493896, 0.005669023990631104, 0.005695136070251465, 0.0056854720115661625, 0.0056936640739440915, 0.005623263835906982, 0.0055948481559753415, 0.0055847358703613285, 0.005516160011291504, 0.005490496158599854, 0.005508831977844238, 0.0056154241561889645, 0.005684959888458252, 0.005671040058135986, 0.005644608020782471, 0.0055764479637145994, 0.005546751976013183, 0.00552345609664917, 0.005529600143432617, 0.005730016231536865, 0.005562272071838379, 0.005524960041046143, 0.005501152038574219, 0.005493535995483399, 0.005597087860107422, 0.005660672187805176, 0.005564064025878906, 0.005671296119689941, 0.005619200229644775, 0.005700064182281494, 0.005651552200317383, 0.00559935998916626, 
0.0056195521354675294, 0.005587967872619629, 0.005607359886169434, 0.005562719821929932, 0.005584799766540528, 0.005641791820526123, 0.0056763520240783695, 0.005626976013183594, 0.005594816207885742, 0.005660223960876465, 0.005709856033325195, 0.005777919769287109, 0.005731711864471436, 0.0058618240356445315, 0.005783743858337402, 0.005704736232757569, 0.005763040065765381, 0.005692416191101074, 0.00566476821899414, 0.005775231838226319, 0.005819680213928223, 0.005619487762451172, 0.005882431983947754, 0.005881696224212646, 0.005788928031921387, 0.005827455997467041, 0.005795135974884034, 0.00579369592666626, 0.005711967945098877, 0.005818975925445557, 0.0058503680229187015, 0.005810368061065674, 0.005897024154663086, 0.0059064321517944334, 0.005811872005462647, 0.005742752075195313, 0.005825791835784912, 0.005700287818908692, 0.0057693119049072265, 0.005826687812805176, 0.005782815933227539, 0.005779967784881592, 0.0058757119178771975, 0.005816319942474365, 0.005789760112762451, 0.005785632133483887, 0.005751999855041504, 0.00578550386428833, 0.005729152202606201, 0.005754816055297852, 0.0058122239112854005, 0.005740543842315674, 0.005748960018157959, 0.00566044807434082, 0.005683199882507324, 0.005721695899963379, 0.005773344039916992, 0.005779839992523193, 0.0057712640762329105, 0.005767168045043946, 0.005808127880096436, 0.005895232200622559, 0.005816959857940674, 0.006074624061584473, 0.005791903972625733, 0.006352735996246338, 0.005817567825317383, 0.005841760158538818, 0.005740799903869629, 0.005758944034576416, 0.005725503921508789, 0.005732831954956055, 0.005681151866912842, 0.005593088150024414, 0.005623807907104492, 0.005568096160888672, 0.005630112171173095, 0.0056872000694274906, 0.005890399932861328, 0.005730303764343261, 0.005666848182678223, 0.00573033618927002, 0.005699647903442383, 0.005658495903015137, 0.005441472053527832, 0.00567299222946167, 0.005662816047668457, 0.005680736064910889, 0.005711520195007324, 0.005688191890716553, 0.005746560096740723, 0.005757279872894287, 0.005782656192779541, 0.005826911926269531, 0.005880000114440918, 0.0059023361206054685, 0.00586137580871582, 0.005849088191986084, 0.005803584098815918, 0.005751391887664795, 0.005727071762084961, 0.005638591766357422, 0.005640736103057861, 0.005623839855194092, 0.005734399795532226, 0.005711071968078614, 0.005819168090820313, 0.005786719799041748, 0.005685823917388916, 0.0056774082183837895, 0.00560975980758667, 0.005789408206939697, 0.005676320075988769, 0.0057842879295349125, 0.005833055973052979, 0.005809216022491455, 0.005740896224975586, 0.0056977920532226565, 0.005681151866912842, 0.005715968132019043, 0.0058525438308715825, 0.005769855976104736, 0.00571123218536377, 0.005911263942718506, 0.005738399982452393, 0.005768703937530518, 0.005861887931823731, 0.005879807949066162, 0.0058080959320068355, 0.005810207843780517, 0.0056910719871520994, 0.005701600074768066, 0.005718207836151123, 0.005639840126037598, 0.0055976958274841304, 0.00555401611328125, 0.005556384086608887, 0.005582848072052002, 0.00567193603515625, 0.005673376083374024, 0.005656864166259766, 0.005622079849243164, 0.005584767818450928, 0.00565388822555542, 0.005694208145141602, 0.00559830379486084, 0.005616543769836425]",tokens/s,171.82834023600046,,, 
4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,806.387712,576.585728,0.0,190.840832,172.878848,s,1,9.6614052734375,9.6614052734375,0.0,9.6614052734375,9.6614052734375,9.6614052734375,9.6614052734375,[9.6614052734375],,kWh,1.2431553020818834e-05,1.3636809631521454e-06,3.4458360900005425e-06,1.724107007397152e-05,,MB,1252.18816,685.637632,0.0,270.532608,241.723904,s,35,0.2239145593643188,0.006397558838980538,0.0001504179385619405,0.006366911888122558,0.006520096015930176,0.006579964637756347,0.006939281358718871,"[0.007065152168273926, 0.006236608028411865, 0.00631609582901001, 0.006390175819396973, 0.0062501120567321775, 0.006398687839508057, 0.006272928237915039, 0.0063628158569335935, 0.006299200057983398, 0.006287839889526367, 0.006243872165679932, 0.006289023876190186, 0.006272768020629883, 0.006396512031555176, 0.006366911888122558, 0.006302720069885254, 0.006528863906860352, 0.00635814380645752, 0.006345983982086182, 0.006435711860656738, 0.006269760131835937, 0.006356895923614502, 0.006408160209655762, 0.0063794879913330075, 0.006530687808990478, 0.006475456237792969, 0.006506944179534912, 0.006429599761962891, 0.006294239997863769, 0.0064644160270690915, 0.006694943904876709, 0.00646073579788208, 0.006344927787780762, 0.006456480026245117, 0.0064216961860656735]",tokens/s,40015.26307818906,kWh,1.8798616054015206e-07,2.073141897464673e-08,9.762446956571501e-08,3.063420490805138e-07,tokens/kWh,835667192.1741871,MB,1298.82112,698.220544,0.0,283.11552,241.726464,s,35,9.970541290283203,0.28487260829380584,0.006648401623498636,0.28378140258789064,0.2931452941894531,0.2951804534912109,0.30242426940917966,"[0.2848056640625, 0.28008538818359374, 0.282115234375, 0.2835010681152344, 0.28378140258789064, 0.2814277954101562, 0.278661376953125, 0.2758060607910156, 0.28392117309570314, 0.27432696533203127, 0.2787901611328125, 0.28644854736328124, 0.2792660217285156, 0.2895779113769531, 0.2753723449707031, 0.28627069091796875, 0.2829876403808594, 0.28311666870117186, 0.29240087890625, 0.2784743347167969, 0.27839773559570313, 0.28011248779296877, 0.28833932495117187, 0.29069744873046877, 0.2945782470703125, 0.29098028564453127, 0.28779605102539063, 0.29265740966796877, 0.29347055053710935, 0.2965856018066406, 0.3054320678710937, 0.28483493041992186, 0.28069558715820314, 0.2808915405273438, 0.2839346923828125]",tokens/s,221.1514837362826,kWh,8.364706559459716e-06,9.224799726305139e-07,3.305862380205732e-06,1.2593048912295963e-05,tokens/kWh,5002759.890695434,,s,2205,9.952019627094264,0.004513387585983796,0.00017606237080719243,0.004478240013122558,0.0046963647842407225,0.004799942398071289,0.0050637171363830565,"[0.005454559803009033, 0.004562943935394287, 0.0045281281471252445, 0.004503744125366211, 0.004503359794616699, 0.004605760097503662, 0.004509888172149658, 0.004581408023834228, 0.004498400211334228, 0.004500480175018311, 0.0044930558204650875, 
0.004478400230407715, 0.004442880153656006, 0.00454252815246582, 0.004463840007781983, 0.004459199905395508, 0.0044176321029663086, 0.00446668815612793, 0.004407296180725098, 0.004429823875427246, 0.004574783802032471, 0.0045138559341430665, 0.004462143898010254, 0.0044460477828979494, 0.004488031864166259, 0.004448768138885498, 0.004441120147705078, 0.004470655918121338, 0.00446127986907959, 0.004480031967163086, 0.004469727993011475, 0.0043928961753845214, 0.004372704029083252, 0.004425055980682373, 0.004386335849761963, 0.004455391883850098, 0.004456448078155518, 0.0045996479988098145, 0.004579487800598144, 0.004525568008422852, 0.004581600189208984, 0.004628767967224121, 0.004636672019958496, 0.004673535823822021, 0.004658432006835938, 0.0046350078582763675, 0.0045895037651062015, 0.004620736122131348, 0.004589920043945313, 0.004642111778259277, 0.0045796799659729, 0.00449126386642456, 0.004497407913208008, 0.004423744201660156, 0.00447276782989502, 0.004530176162719727, 0.004396416187286377, 0.004391520023345947, 0.004370463848114014, 0.004362239837646485, 0.0044932479858398435, 0.004378047943115235, 0.004379072189331055, 0.004078144073486328, 0.0044380159378051755, 0.00440934419631958, 0.004391104221343994, 0.004366208076477051, 0.004390719890594482, 0.004376224040985107, 0.004439583778381348, 0.004453311920166016, 0.004476863861083985, 0.004460896015167236, 0.004496384143829346, 0.0044551358222961425, 0.004662303924560547, 0.004688159942626953, 0.0046373758316040035, 0.004870495796203613, 0.004542111873626709, 0.004621632099151611, 0.004465343952178955, 0.004518976211547851, 0.004649663925170898, 0.004395264148712158, 0.004376575946807861, 0.0044646401405334475, 0.004693120002746582, 0.004468768119812012, 0.004344351768493652, 0.004358463764190674, 0.004361472129821777, 0.004351903915405273, 0.004367040157318115, 0.0044074559211730955, 0.0044479680061340334, 0.004397151947021484, 0.004344351768493652, 0.004327072143554688, 0.004403295993804931, 0.004353951930999756, 0.004412864208221436, 0.00437113618850708, 0.0043129601478576664, 0.004364287853240967, 0.00435814380645752, 0.004357600212097168, 0.004345471858978271, 0.004334496021270752, 0.00441539192199707, 0.004361440181732177, 0.004348671913146972, 0.004380799770355224, 0.004399104118347168, 0.004397056102752686, 0.004429823875427246, 0.004417535781860352, 0.0044091839790344235, 0.004522208213806152, 0.004497344017028808, 0.004493504047393799, 0.004551807880401612, 0.004458655834197998, 0.004459360122680664, 0.00442736005783081, 0.004151487827301025, 0.004462975978851319, 0.004424960136413574, 0.004486015796661377, 0.004425727844238282, 0.004468736171722412, 0.004413440227508545, 0.0043719358444213865, 0.004380512237548828, 0.00436294412612915, 0.004362239837646485, 0.00435814380645752, 0.0043788480758666995, 0.0043944640159606935, 0.0044217281341552735, 0.004532447814941406, 0.004470335960388184, 0.004440032005310059, 0.0045286078453063965, 0.004531551837921142, 0.004694591999053955, 0.004653151988983154, 0.004659327983856201, 0.004575104236602783, 0.0045359678268432615, 0.004614496231079101, 0.00467299222946167, 0.004512063980102539, 0.004435200214385987, 0.004432447910308838, 0.004423840045928955, 0.004426144123077393, 0.004454463958740234, 0.004496352195739746, 0.004499392032623291, 0.004457536220550537, 0.004409215927124024, 0.004402463912963867, 0.004421664237976074, 0.004405055999755859, 0.004372384071350098, 0.004395936012268066, 0.004441664218902588, 0.004429247856140137, 0.004469855785369873, 0.004517856121063232, 
0.00452780818939209, 0.004487199783325195, 0.004477248191833496, 0.004499008178710937, 0.004665696144104004, 0.004509696006774902, 0.004487455844879151, 0.004505311965942383, 0.004552703857421875, 0.004530367851257325, 0.004466495990753174, 0.004429279804229736, 0.0044633917808532715, 0.0043966398239135744, 0.004413599967956543, 0.004528255939483643, 0.004469855785369873, 0.004287040233612061, 0.004501279830932617, 0.004483456134796142, 0.004463903903961182, 0.004474912166595459, 0.004448895931243897, 0.00452569580078125, 0.004524352073669434, 0.0045155839920043945, 0.004472447872161865, 0.004377280235290527, 0.004359871864318848, 0.00435427188873291, 0.004394783973693848, 0.004468031883239746, 0.004476672172546387, 0.004387775897979737, 0.004360191822052002, 0.004356095790863037, 0.004354047775268555, 0.004485119819641113, 0.004388864040374756, 0.0044382081031799315, 0.004415296077728272, 0.004421055793762207, 0.004474847793579101, 0.004467296123504639, 0.00443942403793335, 0.004420576095581055, 0.00441923189163208, 0.004417568206787109, 0.005234464168548584, 0.0047064957618713375, 0.00465715217590332, 0.004609951972961426, 0.004618336200714111, 0.004642303943634033, 0.004499680042266846, 0.004479263782501221, 0.004619904041290283, 0.0044893441200256345, 0.004548863887786865, 0.004583712100982666, 0.00462614393234253, 0.004636672019958496, 0.004536320209503173, 0.004571135997772217, 0.004509856224060058, 0.0045136637687683105, 0.0044316802024841305, 0.004393216133117676, 0.0044249920845031735, 0.004446464061737061, 0.004438399791717529, 0.0045240321159362796, 0.004538656234741211, 0.004684671878814697, 0.004518367767333984, 0.004450687885284424, 0.004458079814910889, 0.004482624053955078, 0.00435859203338623, 0.004360703945159912, 0.004093952178955078, 0.004357247829437256, 0.004348991870880127, 0.004311071872711182, 0.004360064029693604, 0.004359263896942139, 0.004425951957702637, 0.00451635217666626, 0.004472256183624268, 0.004448927879333496, 0.004511744022369385, 0.004476928234100342, 0.004427680015563965, 0.004411263942718506, 0.004380896091461182, 0.004530176162719727, 0.004407296180725098, 0.004380671977996826, 0.0043727998733520505, 0.0044551677703857425, 0.004448768138885498, 0.004516287803649903, 0.004522175788879395, 0.004526912212371826, 0.004473696231842041, 0.004467967987060547, 0.004397151947021484, 0.004416319847106933, 0.004448224067687989, 0.004523200035095215, 0.0044325442314147945, 0.004393152236938477, 0.004356287956237793, 0.004517920017242431, 0.004499360084533692, 0.0045175042152404785, 0.004477183818817138, 0.004468736171722412, 0.004493311882019043, 0.004532224178314209, 0.004573184013366699, 0.0046300158500671386, 0.004551008224487305, 0.004583583831787109, 0.00450764799118042, 0.004467807769775391, 0.0045372481346130375, 0.0046592001914978025, 0.004552576065063476, 0.004515200138092041, 0.004492032051086426, 0.004558847904205322, 0.004523839950561524, 0.005703711986541748, 0.004667840003967285, 0.00462614393234253, 0.0045747518539428714, 0.0045221118927001955, 0.00455622386932373, 0.004473184108734131, 0.0044856958389282225, 0.0045006399154663085, 0.004500319957733154, 0.0042007360458374025, 0.004481503963470459, 0.004430560111999512, 0.0044039998054504394, 0.004401311874389648, 0.0043944640159606935, 0.004495903968811035, 0.004417535781860352, 0.004354047775268555, 0.004352000236511231, 0.0043536958694458, 0.00442300796508789, 0.004376704216003418, 0.0043508801460266115, 0.00438915205001831, 0.004417215824127197, 0.0044707517623901365, 0.00453763198852539, 
0.004551424026489258, 0.004579328060150147, 0.0044380159378051755, 0.004419167995452881, 0.004347519874572754, 0.0043439998626708986, 0.0043834238052368165, 0.004386720180511475, 0.004464960098266602, 0.0043712639808654785, 0.00457203197479248, 0.004485119819641113, 0.0044908800125122075, 0.004837759971618652, 0.005210336208343506, 0.004570911884307862, 0.00450764799118042, 0.0044421119689941405, 0.004407296180725098, 0.004408639907836914, 0.004602560043334961, 0.004460224151611328, 0.004440320014953613, 0.004478047847747803, 0.004436960220336914, 0.004415455818176269, 0.004395040035247802, 0.004376512050628662, 0.00448144006729126, 0.0044765758514404294, 0.0044234881401062015, 0.004409535884857178, 0.004493152141571045, 0.0044198079109191895, 0.004425055980682373, 0.004442719936370849, 0.004576511859893799, 0.004462431907653808, 0.00444220781326294, 0.004578112125396728, 0.004565120220184326, 0.004460031986236572, 0.004413023948669433, 0.004413216114044189, 0.0043705921173095704, 0.004064544200897217, 0.00443071985244751, 0.004450496196746826, 0.004397984027862549, 0.004359072208404541, 0.00435814380645752, 0.004347904205322266, 0.0043534722328186035, 0.0043443841934204105, 0.004397088050842285, 0.004373888015747071, 0.004365119934082031, 0.004383872032165528, 0.004385439872741699, 0.00436633586883545, 0.0045055999755859374, 0.004417535781860352, 0.004515007972717285, 0.004489376068115235, 0.004418367862701416, 0.004428768157958984, 0.004421919822692871, 0.004507743835449219, 0.004636864185333252, 0.004440288066864014, 0.004456607818603515, 0.004569024085998535, 0.004457568168640137, 0.004389791965484619, 0.004377664089202881, 0.004362656116485596, 0.004329152107238769, 0.004340479850769043, 0.004352096080780029, 0.004364287853240967, 0.004398848056793213, 0.004413599967956543, 0.004497504234313965, 0.004470367908477783, 0.004409311771392822, 0.004401599884033203, 0.004422815799713134, 0.004541279792785645, 0.0044577598571777345, 0.0043916478157043455, 0.004380671977996826, 0.0044011521339416505, 0.004370272159576416, 0.004372543811798095, 0.004361631870269775, 0.0043730239868164066, 0.0043621759414672855, 0.0043144960403442385, 0.004339744091033935, 0.004391744136810303, 0.004447616100311279, 0.004403840065002442, 0.004365664005279541, 0.004276895999908447, 0.004282368183135986, 0.004409247875213623, 0.004802527904510498, 0.004898943901062012, 0.0043438401222229, 0.004663424015045166, 0.0045911998748779295, 0.004503071784973145, 0.004462880134582519, 0.004411871910095215, 0.004346911907196045, 0.004317567825317383, 0.004315231800079346, 0.0043500161170959475, 0.004417952060699463, 0.004380671977996826, 0.004382719993591308, 0.004388927936553955, 0.004353888034820556, 0.004331615924835205, 0.004476384162902832, 0.004360735893249512, 0.004349440097808838, 0.004429664134979248, 0.004397727966308594, 0.004408480167388916, 0.0043712959289550785, 0.00437062406539917, 0.004349760055541992, 0.004345664024353027, 0.004310783863067627, 0.004403647899627686, 0.004331520080566406, 0.004284416198730469, 0.004302656173706055, 0.004392384052276611, 0.004315231800079346, 0.004305791854858398, 0.004343647956848145, 0.0043645758628845215, 0.004341504096984863, 0.0043879361152648925, 0.004391520023345947, 0.004317247867584228, 0.004311520099639893, 0.004314815998077393, 0.004312928199768066, 0.004278048038482666, 0.004295040130615235, 0.004399104118347168, 0.0042782721519470214, 0.004270080089569092, 0.004276447772979737, 0.004302815914154053, 0.004322879791259766, 0.004368512153625488, 
0.0043664641380310055, 0.004380799770355224, 0.004396480083465576, 0.0043786559104919435, 0.004408800125122071, 0.00445084810256958, 0.004443583965301513, 0.00437065601348877, 0.004402143955230713, 0.004384096145629883, 0.004376224040985107, 0.004202879905700683, 0.004374368190765381, 0.004474720001220703, 0.00447811222076416, 0.0045304961204528805, 0.004690048217773438, 0.004747680187225342, 0.004689919948577881, 0.004524191856384277, 0.004503104209899902, 0.004519552230834961, 0.004541344165802002, 0.004485951900482178, 0.004761919975280761, 0.0046741762161254885, 0.004490816116333008, 0.004469183921813965, 0.004466815948486328, 0.00443174409866333, 0.0044085440635681155, 0.004508255958557129, 0.004517536163330078, 0.0045081920623779295, 0.004569087982177734, 0.004505760192871093, 0.004575263977050781, 0.00452182388305664, 0.004455904006958008, 0.0045000319480896, 0.004720736026763916, 0.004758495807647705, 0.004621183872222901, 0.004616223812103272, 0.0046451201438903805, 0.00459884786605835, 0.004479487895965576, 0.004509664058685302, 0.004656352043151855, 0.00479091215133667, 0.004641151905059814, 0.004564479827880859, 0.00451859188079834, 0.004409120082855224, 0.004482175827026367, 0.004408192157745361, 0.004400608062744141, 0.004411935806274414, 0.004372479915618896, 0.004381792068481445, 0.004524960041046143, 0.004429408073425293, 0.004409759998321533, 0.004403200149536133, 0.004472832202911377, 0.0044495038986206055, 0.004365087985992432, 0.004511168003082275, 0.00432316780090332, 0.004283103942871094, 0.0042781119346618655, 0.004302720069885254, 0.00428879976272583, 0.004284416198730469, 0.004061984062194824, 0.004371840000152588, 0.00434819221496582, 0.004319583892822266, 0.004335360050201416, 0.0043168959617614746, 0.004434463977813721, 0.004321023941040039, 0.004312928199768066, 0.004298751831054688, 0.004324031829833984, 0.004329184055328369, 0.004357247829437256, 0.004311391830444336, 0.004336160182952881, 0.004308159828186035, 0.004291391849517822, 0.0043080000877380375, 0.004338655948638916, 0.004286464214324951, 0.004298655986785888, 0.004304992198944092, 0.004325024127960205, 0.004518239974975586, 0.004317024230957031, 0.004309152126312256, 0.004368639945983886, 0.004318687915802002, 0.004309279918670654, 0.004308095932006836, 0.004340479850769043, 0.004371871948242187, 0.004362400054931641, 0.004419904232025146, 0.004335872173309326, 0.004318751811981201, 0.004346015930175781, 0.004315680027008057, 0.004324992179870605, 0.004402431964874267, 0.004440991878509521, 0.0043664641380310055, 0.004349408149719239, 0.004315392017364502, 0.004412864208221436, 0.004378528118133545, 0.004284319877624512, 0.004354112148284912, 0.004485599994659424, 0.00434991979598999, 0.00448144006729126, 0.0043787841796875, 0.004353888034820556, 0.004333568096160889, 0.004332767963409424, 0.004309792041778565, 0.004405248165130615, 0.0044273920059204105, 0.004352384090423584, 0.004475200176239013, 0.004335103988647461, 0.0043740801811218265, 0.004317183971405029, 0.004099936008453369, 0.0043439998626708986, 0.004347936153411865, 0.004350207805633545, 0.004401504039764404, 0.0043373122215271, 0.00434335994720459, 0.004345344066619873, 0.004369728088378906, 0.004335231781005859, 0.004327040195465088, 0.00432371187210083, 0.004300064086914063, 0.004324063777923584, 0.004316512107849121, 0.004303520202636719, 0.004375616073608399, 0.004473087787628174, 0.0045162558555603026, 0.004371007919311523, 0.004695072174072266, 0.004426655769348145, 0.004429696083068847, 0.004424672126770019, 
0.004467648029327392, 0.004394176006317138, 0.004355936050415039, 0.0044059200286865234, 0.004411168098449707, 0.004405792236328125, 0.0044011521339416505, 0.005148672103881836, 0.004511744022369385, 0.004513696193695068, 0.004431968212127686, 0.00461843204498291, 0.004345536231994629, 0.004374656200408935, 0.004446208000183105, 0.004384768009185791, 0.004435840129852295, 0.004433951854705811, 0.0044992961883544924, 0.0045359678268432615, 0.004493984222412109, 0.004427968025207519, 0.004386688232421875, 0.004403071880340576, 0.004519936084747315, 0.004413440227508545, 0.004403200149536133, 0.004368480205535889, 0.004433536052703858, 0.004444255828857422, 0.0044291200637817385, 0.004370816230773926, 0.004357664108276367, 0.004349120140075683, 0.004339136123657226, 0.004346208095550537, 0.004374144077301026, 0.00448960018157959, 0.004513792037963867, 0.004056831836700439, 0.004393727779388428, 0.0043788480758666995, 0.004433311939239502, 0.004499839782714844, 0.0044380159378051755, 0.004474751949310303, 0.004468768119812012, 0.004461984157562256, 0.004507584095001221, 0.004522143840789795, 0.004460768222808838, 0.004440447807312012, 0.004449664115905761, 0.004468448162078857, 0.004449183940887451, 0.004457471847534179, 0.00451196813583374, 0.004520480155944824, 0.004501760005950928, 0.004470784187316895, 0.004517087936401367, 0.004546783924102783, 0.004622719764709473, 0.004462399959564209, 0.004498976230621338, 0.004449120044708252, 0.004416543960571289, 0.004417759895324707, 0.004550687789916992, 0.004718431949615479, 0.00464134407043457, 0.0046687998771667485, 0.004569727897644043, 0.004507296085357666, 0.004453375816345215, 0.004470240116119384, 0.00447321605682373, 0.004465888023376465, 0.004470911979675293, 0.004735231876373291, 0.0045998401641845705, 0.004607808113098144, 0.004768127918243408, 0.005356959819793701, 0.0050113282203674316, 0.005112544059753418, 0.00454041576385498, 0.004581376075744629, 0.004472832202911377, 0.00443120002746582, 0.004718463897705078, 0.004924191951751709, 0.0044505281448364254, 0.004419360160827637, 0.004433919906616211, 0.004472544193267822, 0.004467167854309082, 0.004631519794464111, 0.004541344165802002, 0.004427711963653564, 0.004410431861877441, 0.004544703960418701, 0.004314400196075439, 0.004528512001037597, 0.0043764481544494626, 0.004358176231384277, 0.0043740801811218265, 0.004399648189544678, 0.004337503910064698, 0.004432032108306885, 0.004354335784912109, 0.004425439834594726, 0.0043089919090271, 0.004314112186431885, 0.004338111877441406, 0.0043505277633667, 0.004333375930786133, 0.004332831859588623, 0.004338240146636963, 0.004430399894714355, 0.004312191963195801, 0.004403872013092041, 0.0044011521339416505, 0.0044207038879394535, 0.004406176090240479, 0.004490399837493896, 0.004799327850341797, 0.0046377601623535155, 0.005055391788482666, 0.004538400173187256, 0.004546495914459228, 0.004470848083496094, 0.004442399978637696, 0.004419295787811279, 0.004405248165130615, 0.004397056102752686, 0.004394112110137939, 0.0044486398696899415, 0.004350111961364746, 0.004321631908416748, 0.0044390721321105955, 0.004375520229339599, 0.00436633586883545, 0.00435811185836792, 0.004372735977172851, 0.004386591911315918, 0.004375872135162354, 0.004391615867614746, 0.004579103946685791, 0.004724607944488525, 0.004483424186706543, 0.004427135944366455, 0.004444096088409424, 0.004458655834197998, 0.004415840148925781, 0.004453855991363526, 0.0043836159706115724, 0.004391168117523193, 0.004388448238372803, 0.0043745279312133786, 0.0043786239624023435, 
0.004378496170043945, 0.004369791984558106, 0.004356863975524902, 0.004376575946807861, 0.004147071838378907, 0.004456960201263428, 0.004431807994842529, 0.004407872200012207, 0.004611167907714844, 0.004610976219177246, 0.004531744003295898, 0.004612607955932617, 0.004634592056274414, 0.004589568138122559, 0.0045706558227539065, 0.004666912078857422, 0.0044819841384887696, 0.004449984073638916, 0.004474847793579101, 0.004627007961273194, 0.004492640018463135, 0.004468160152435303, 0.004556000232696533, 0.004599199771881103, 0.00452623987197876, 0.00445030403137207, 0.004493535995483399, 0.004417535781860352, 0.004399104118347168, 0.0046852478981018065, 0.004593535900115967, 0.004530079841613769, 0.004554624080657959, 0.004594592094421387, 0.004581376075744629, 0.004615200042724609, 0.004539360046386718, 0.0045463361740112306, 0.00452016019821167, 0.004507808208465576, 0.004595647811889649, 0.004482175827026367, 0.004461343765258789, 0.00449510383605957, 0.0045550079345703124, 0.004632575988769531, 0.004511744022369385, 0.0045015039443969725, 0.004530399799346924, 0.00455452823638916, 0.005472256183624268, 0.005437407970428467, 0.005619935989379883, 0.004854944229125977, 0.004600800037384033, 0.0045073280334472655, 0.005171520233154297, 0.004470560073852539, 0.004437920093536377, 0.004433216094970703, 0.004546432018280029, 0.004520768165588379, 0.004456543922424316, 0.004396959781646729, 0.005103104114532471, 0.004383135795593262, 0.00438102388381958, 0.004060383796691895, 0.004380671977996826, 0.004383776187896728, 0.0043723201751708985, 0.004335519790649414, 0.004395008087158203, 0.004431104183197022, 0.004391359806060791, 0.004448927879333496, 0.004388288021087646, 0.004317408084869385, 0.0043110399246215824, 0.004335616111755371, 0.00429091215133667, 0.004485824108123779, 0.004416351795196533, 0.004339968204498291, 0.004310912132263184, 0.004304512023925781, 0.004394559860229492, 0.004312160015106201, 0.004316160202026367, 0.004320223808288574, 0.00436025619506836, 0.0043290557861328125, 0.00432758378982544, 0.004312607765197754, 0.0043361282348632815, 0.004472159862518311, 0.004419775962829589, 0.004387008190155029, 0.004583615779876709, 0.0043274879455566405, 0.004306879997253418, 0.004362432003021241, 0.004382527828216552, 0.004333439826965332, 0.004315104007720947, 0.004323103904724121, 0.004309088230133057, 0.004333792209625244, 0.0043376641273498535, 0.00435811185836792, 0.00433900785446167, 0.004309855937957764, 0.004287456035614014, 0.0043071041107177735, 0.004272960186004639, 0.0043745279312133786, 0.004355423927307129, 0.004373472213745117, 0.0043433279991149905, 0.004410943984985352, 0.004428383827209473, 0.004405248165130615, 0.004392960071563721, 0.004345759868621826, 0.004339807987213134, 0.004330592155456543, 0.004562047958374024, 0.004386591911315918, 0.004353248119354248, 0.004673600196838379, 0.004092735767364502, 0.004351840019226074, 0.004343616008758545, 0.0043208317756652835, 0.004337535858154297, 0.004433856010437011, 0.004710495948791504, 0.004790656089782715, 0.004473599910736084, 0.004386559963226318, 0.004872191905975342, 0.004820320129394531, 0.005053215980529785, 0.004425600051879883, 0.00435814380645752, 0.004433919906616211, 0.004390912055969238, 0.004378943920135498, 0.004382400035858154, 0.004462592124938965, 0.004355103969573975, 0.00435097599029541, 0.00434339189529419, 0.0043088321685791015, 0.0043170561790466305, 0.004643008232116699, 0.0044766077995300295, 0.004385759830474853, 0.004516895771026611, 0.004399648189544678, 0.004370687961578369, 
0.004372479915618896, 0.004356095790863037, 0.0043333439826965334, 0.004349215984344483, 0.005110720157623291, 0.005585951805114746, 0.006019392013549805, 0.004901535987854004, 0.004779232025146484, 0.004438591957092285, 0.004624256134033203, 0.004452703952789306, 0.004538368225097656, 0.004590911865234375, 0.004475584030151367, 0.004447840213775635, 0.004471199989318848, 0.004517600059509277, 0.004403488159179687, 0.0043439679145812985, 0.004409471988677979, 0.00455241584777832, 0.004478240013122558, 0.004460896015167236, 0.004513216018676758, 0.004526463985443116, 0.004559423923492432, 0.004531680107116699, 0.004483391761779785, 0.00452016019821167, 0.004630303859710693, 0.004650239944458008, 0.00434009599685669, 0.004681824207305908, 0.004693280220031739, 0.004616032123565674, 0.004586336135864258, 0.0046976318359375, 0.004704224109649658, 0.004674079895019531, 0.0046527361869812016, 0.004667712211608887, 0.004633984088897705, 0.004678271770477295, 0.004623871803283691, 0.004616447925567627, 0.004536736011505127, 0.004507487773895264, 0.004461728096008301, 0.004495520114898682, 0.00450867223739624, 0.00456876802444458, 0.004421631813049317, 0.004425983905792236, 0.004554495811462402, 0.004653056144714355, 0.004610047817230224, 0.004525248050689697, 0.00445849609375, 0.004444799900054932, 0.004419104099273681, 0.004403679847717285, 0.004421823978424072, 0.00438431978225708, 0.004350719928741455, 0.004353824138641357, 0.004296607971191406, 0.004313087940216064, 0.004384768009185791, 0.0045914239883422855, 0.004564896106719971, 0.004417056083679199, 0.004393727779388428, 0.004418752193450928, 0.004442944049835205, 0.00454041576385498, 0.004458303928375244, 0.004366655826568603, 0.004321152210235595, 0.0043786239624023435, 0.004484864234924316, 0.004345439910888672, 0.004358816146850586, 0.00434771203994751, 0.004356287956237793, 0.004419456005096436, 0.004484320163726806, 0.004498335838317871, 0.004449632167816162, 0.004426400184631348, 0.004466495990753174, 0.00439350414276123, 0.00437827205657959, 0.004401440143585205, 0.004412159919738769, 0.004135680198669433, 0.0044627199172973635, 0.0043951039314270015, 0.004406752109527588, 0.004438271999359131, 0.004421696186065673, 0.004466623783111572, 0.004497407913208008, 0.00449126386642456, 0.004395008087158203, 0.004349696159362793, 0.0043573760986328125, 0.004530367851257325, 0.004501471996307373, 0.004463488101959228, 0.004462560176849365, 0.0044380159378051755, 0.004456287860870362, 0.004423840045928955, 0.004506720066070557, 0.004569407939910889, 0.004567391872406006, 0.004513984203338623, 0.004501567840576172, 0.0044432001113891605, 0.004413536071777344, 0.004813663959503174, 0.004680960178375244, 0.004397823810577393, 0.00493558406829834, 0.004506783962249756, 0.00448419189453125, 0.004681568145751953, 0.004630527973175049, 0.004546048164367676, 0.00459830379486084, 0.004431104183197022, 0.004444479942321777, 0.004485439777374267, 0.004421216011047363, 0.004354368209838868, 0.004394527912139893, 0.004333248138427734, 0.004332096099853516, 0.004364704132080078, 0.004429823875427246, 0.0044596481323242185, 0.004475232124328613, 0.004411231994628906, 0.004403488159179687, 0.004460959911346435, 0.004526080131530762, 0.00455679988861084, 0.004569087982177734, 0.004476928234100342, 0.004470719814300537, 0.004438079833984375, 0.004490464210510254, 0.004436639785766602, 0.004604032039642334, 0.004507967948913574, 0.004612031936645508, 0.004789631843566894, 0.004273375988006592, 0.004576032161712647, 0.004569087982177734, 0.004620287895202637, 
0.004611839771270752, 0.004706655979156494, 0.004605631828308105, 0.00459552001953125, 0.004589983940124512, 0.004525760173797608, 0.004534592151641845, 0.0050768318176269535, 0.005087423801422119, 0.006850783824920654, 0.005402560234069824, 0.004760863780975341, 0.004604288101196289, 0.00452623987197876, 0.004704256057739258, 0.004698112010955811, 0.00505241584777832, 0.004515967845916748, 0.004482944011688233, 0.004489215850830078, 0.004616511821746826, 0.004497087955474853, 0.004686016082763672, 0.004568575859069824, 0.0045875201225280765, 0.004587903976440429, 0.004593599796295166, 0.004651008129119873, 0.004701375961303711, 0.0047255678176879885, 0.0046900157928466794, 0.004669343948364258, 0.004594816207885742, 0.004494207859039307, 0.004601856231689453, 0.004663296222686767, 0.0045195198059082035, 0.004498015880584716, 0.004487199783325195, 0.004437983989715576, 0.004422976016998291, 0.00462716817855835, 0.004439712047576904, 0.004444287776947021, 0.004394976139068604, 0.004400544166564942, 0.00459219217300415, 0.004464320182800293, 0.004443967819213867, 0.004465216159820556, 0.004472576141357422, 0.004403456211090088, 0.0044802241325378415, 0.004573984146118164, 0.004521471977233887, 0.004532351970672608, 0.004712704181671142, 0.004624512195587158, 0.004489215850830078, 0.004298336029052734, 0.004634431838989258, 0.004589568138122559, 0.0045922880172729495, 0.004451839923858642, 0.004468768119812012, 0.004494048118591308, 0.004413440227508545, 0.004415328025817871, 0.004497280120849609, 0.00434003210067749, 0.004403295993804931, 0.004443903923034668, 0.004431072235107422, 0.004393727779388428, 0.004373792171478271, 0.004348639965057373, 0.004342879772186279, 0.004436895847320556, 0.00436572790145874, 0.004302527904510498, 0.00451036787033081, 0.004353759765625, 0.004341760158538818, 0.004383520126342773, 0.0043589119911193845, 0.0043366398811340336, 0.004351071834564209, 0.004330207824707031, 0.004337855815887451, 0.0044644479751586915, 0.004343071937561035, 0.004427807807922363, 0.004361087799072265, 0.004306335926055908, 0.004309599876403809, 0.004362527847290039, 0.0043517122268676755, 0.004331520080566406, 0.004335616111755371, 0.004720640182495117, 0.004548351764678955, 0.004548096179962159, 0.004291327953338623, 0.0043448319435119625, 0.004434016227722168, 0.004373407840728759, 0.004347904205322266, 0.004343776226043701, 0.0043434882164001464, 0.004311391830444336, 0.004364287853240967, 0.004341760158538818, 0.0043905282020568846, 0.004393343925476074, 0.004447872161865234, 0.004413440227508545, 0.004581696033477784, 0.004473055839538574, 0.004714431762695312, 0.004480991840362549, 0.004470143795013428, 0.004362720012664795, 0.004268703937530517, 0.004425727844238282, 0.004352000236511231, 0.004347936153411865, 0.004618207931518555, 0.004423679828643799, 0.004427775859832763, 0.004563072204589844, 0.004409152030944824, 0.004370495796203613, 0.004304895877838135, 0.004352159976959228, 0.0043101758956909176, 0.004317440032958984, 0.004350048065185547, 0.004607359886169434, 0.004414591789245605, 0.004372416019439697, 0.004412960052490234, 0.00432371187210083, 0.004546624183654785, 0.004384895801544189, 0.004425536155700684, 0.004476928234100342, 0.004433919906616211, 0.004417535781860352, 0.004396671772003174, 0.0044076800346374515, 0.004395008087158203, 0.004421120166778565, 0.004409247875213623, 0.004359807968139649, 0.0043467521667480465, 0.004350048065185547, 0.004601856231689453, 0.004369696140289306, 0.004369120121002198, 0.004384768009185791, 0.004355711936950684, 
0.0044148478507995605, 0.004475903987884521, 0.004376575946807861, 0.0043797121047973635, 0.004406208038330078, 0.004628704071044922, 0.004420832157135009, 0.004401887893676758, 0.004371551990509033, 0.004348544120788575, 0.004302976131439209, 0.004431424140930176, 0.00437497615814209, 0.0045643200874328615, 0.004430496215820313, 0.004349664211273193, 0.004337952136993408, 0.005064064025878906, 0.0043999037742614745, 0.004405183792114258, 0.004312895774841309, 0.004300896167755127, 0.0043089919090271, 0.0042902398109436034, 0.004355711936950684, 0.004364799976348877, 0.0043925118446350095, 0.004417888164520264, 0.004376768112182617, 0.004388544082641601, 0.004380224227905273, 0.004348671913146972, 0.004360191822052002, 0.004354047775268555, 0.004389088153839111, 0.00434768009185791, 0.0043705921173095704, 0.004312928199768066, 0.004345983982086182, 0.004381760120391846, 0.004407936096191406, 0.004419775962829589, 0.004395008087158203, 0.004395008087158203, 0.004456352233886718, 0.00437667179107666, 0.004455615997314453, 0.004471199989318848, 0.0044828481674194336, 0.00438483190536499, 0.004442624092102051, 0.004368383884429932, 0.004564032077789306, 0.004455103874206543, 0.00449289608001709, 0.004492288112640381, 0.004486879825592041, 0.004490303993225098, 0.0044328317642211915, 0.004392960071563721, 0.004423679828643799, 0.004472064018249512, 0.004463679790496826, 0.004448256015777588, 0.004398335933685302, 0.0044241280555725095, 0.00446889591217041, 0.0045073280334472655, 0.004520095825195312, 0.0044646401405334475, 0.004449440002441406, 0.004447072029113769, 0.00445030403137207, 0.0044234881401062015, 0.004448480129241943, 0.004486176013946533, 0.004529248237609864, 0.004493152141571045, 0.004798495769500732, 0.004523935794830322, 0.004467103958129883, 0.004433599948883057, 0.004447455883026123, 0.004440832138061524, 0.004470784187316895, 0.0044421439170837405, 0.004452320098876953, 0.0042377920150756835, 0.00455404806137085, 0.004580160140991211, 0.004523903846740723, 0.004558847904205322, 0.004636672019958496, 0.004571135997772217, 0.0045640959739685055, 0.004510591983795166, 0.004534399986267089, 0.004460383892059326, 0.004460447788238525, 0.004513696193695068, 0.004518112182617187, 0.004513792037963867, 0.004565216064453125, 0.0045353279113769535, 0.004579552173614502, 0.00459007978439331, 0.004581088066101074, 0.004653215885162354, 0.004700064182281494, 0.004792895793914795, 0.004648640155792237, 0.004593632221221924, 0.004526080131530762, 0.004522016048431396, 0.004519264221191407, 0.004504191875457764, 0.004489247798919678, 0.004429823875427246, 0.004511744022369385, 0.004516191959381103, 0.0045903677940368655, 0.004690464019775391, 0.004632927894592285, 0.004549727916717529, 0.004546656131744385, 0.004635456085205078, 0.004609663963317871, 0.004632959842681885, 0.004589471817016601, 0.004550559997558594, 0.004710591793060302, 0.0046284799575805665, 0.004534272193908692, 0.00453772783279419, 0.004594304084777832, 0.0045994877815246586, 0.004612415790557861, 0.004661248207092285, 0.004662784099578857, 0.004603616237640381, 0.004625184059143066, 0.0046481919288635255, 0.0046434240341186525, 0.00458351993560791, 0.004566143989562988, 0.004479936122894287, 0.004456448078155518, 0.0045055999755859374, 0.004507904052734375, 0.004615231990814209, 0.004342080116271973, 0.004624063968658447, 0.00459980821609497, 0.004516032218933106, 0.00453772783279419, 0.004553152084350586, 0.0045281281471252445, 0.004516064167022705, 0.0045437440872192385, 0.004554368019104004, 0.0045632319450378414, 
0.004670176029205322, 0.004666431903839112, 0.004635488033294678, 0.004634624004364014, 0.00467471981048584, 0.0046293439865112305, 0.004622335910797119, 0.004614143848419189, 0.004535583972930908, 0.004510047912597656, 0.004494719982147217, 0.004495808124542236, 0.004554463863372803, 0.004563295841217041, 0.004604415893554687, 0.004509696006774902, 0.0045051522254943846, 0.004559296131134033, 0.004597760200500488, 0.004575232028961182, 0.004633920192718506, 0.004751840114593506, 0.004597983837127686, 0.00462611198425293, 0.004840832233428955, 0.00469052791595459, 0.004608128070831299, 0.00457747220993042, 0.004539743900299072, 0.004561344146728516, 0.004528384208679199, 0.004546559810638427, 0.004576799869537354, 0.00455728006362915, 0.004620287895202637, 0.00454041576385498, 0.00449126386642456, 0.004468736171722412, 0.004532224178314209, 0.004665440082550049, 0.00472979211807251, 0.004740128040313721, 0.004696191787719727, 0.004736832141876221, 0.004765151977539062, 0.004796127796173096, 0.0048765759468078615, 0.004712575912475586, 0.004710944175720215, 0.004600895881652832, 0.004643487930297852, 0.004608160018920899, 0.004522816181182861, 0.004743167877197266, 0.0047288317680358885, 0.004684927940368652, 0.004727200031280517, 0.004792799949645996, 0.00475705623626709, 0.004716832160949707, 0.004610208034515381, 0.0045937919616699215, 0.004611968040466308, 0.004679488182067871, 0.004735167980194092, 0.004672544002532959, 0.0046274237632751465, 0.00463478422164917, 0.004592895984649658, 0.0045873279571533206, 0.004667808055877685, 0.004698495864868164, 0.0049351038932800295, 0.0046392960548400876, 0.004630752086639404, 0.004615295886993409, 0.004573855876922607, 0.004655456066131592, 0.004572671890258789, 0.004704160213470459, 0.004630784034729004, 0.004692255973815918, 0.004886240005493164, 0.0047513599395751956, 0.004714015960693359, 0.004647391796112061, 0.004630784034729004, 0.0046691842079162596, 0.0046943678855895996, 0.004696800231933594, 0.004645823955535889, 0.004728288173675537, 0.004657663822174072, 0.004689727783203125, 0.0046546878814697264, 0.004645408153533936, 0.004578591823577881, 0.004555295944213867, 0.004530240058898926, 0.004630752086639404, 0.004531680107116699, 0.004580031871795654, 0.004609888076782227, 0.004656896114349365, 0.004686079978942871, 0.004638879776000976, 0.00469814395904541, 0.004687104225158691, 0.004651584148406982, 0.004827136039733886, 0.0046837120056152344, 0.0047719039916992185, 0.004648704051971436, 0.004659488201141358, 0.004632544040679931, 0.004440000057220459, 0.004782527923583984, 0.004943007946014404, 0.004862656116485595, 0.0047218561172485355, 0.004783071994781494, 0.0046835198402404785, 0.0046410241127014164, 0.004630527973175049, 0.00467964792251587, 0.004651040077209473, 0.004758528232574463, 0.004661375999450683, 0.004662144184112549, 0.0046284799575805665, 0.004651040077209473, 0.004601920127868652, 0.0046276159286499025, 0.004565760135650635, 0.0046059517860412595, 0.004618239879608154, 0.004616191864013672, 0.004624000072479248, 0.0046936640739440915, 0.004817920207977295, 0.004716256141662598, 0.004653056144714355, 0.004675263881683349, 0.004583136081695556, 0.004614751815795898, 0.004636672019958496, 0.0045649919509887695, 0.0045706558227539065, 0.004585311889648437, 0.004628799915313721, 0.004716576099395752, 0.004579391956329346, 0.004464288234710693, 0.004465407848358154, 0.004441311836242676, 0.004418144226074219, 0.004424736022949218, 0.00445743989944458, 0.004421664237976074, 0.004427743911743164, 0.004475103855133057, 
0.004640192031860352, 0.004542816162109375, 0.004627488136291504, 0.004555647850036621, 0.004610144138336182, 0.004544511795043945, 0.004573184013366699, 0.0045240321159362796, 0.004562975883483887, 0.004550335884094238, 0.004581632137298584, 0.004595071792602539, 0.004616703987121582, 0.004620448112487793, 0.0046059517860412595, 0.0045957121849060055, 0.004581696033477784, 0.004216959953308105, 0.004562528133392334, 0.004630943775177002, 0.004673535823822021, 0.004664480209350586, 0.004625247955322266, 0.004619967937469482, 0.004647232055664062, 0.004685823917388916, 0.004696063995361328, 0.004634528160095215, 0.004591360092163086, 0.004645376205444336, 0.004648799896240234, 0.004640575885772705, 0.0046936640739440915, 0.004627039909362793, 0.004654111862182617, 0.004783008098602295, 0.004636672019958496, 0.004695648193359375, 0.004638271808624268, 0.004580095767974853, 0.004757599830627441, 0.004615392208099365, 0.004563551902770996, 0.00456928014755249, 0.0045875201225280765, 0.0045382399559021, 0.004550784111022949, 0.004531424045562744, 0.004539167881011963, 0.00447488021850586, 0.004493184089660644, 0.004462751865386963, 0.004642784118652344, 0.004513023853302002, 0.0045862398147583005, 0.004571135997772217, 0.004614240169525147, 0.004556704044342041, 0.0045632638931274415, 0.004556479930877685, 0.004526080131530762, 0.004534272193908692, 0.004476672172546387, 0.004450047969818115, 0.004413663864135742, 0.004441567897796631, 0.0044347519874572755, 0.0044562239646911625, 0.0046061758995056155, 0.0045240321159362796, 0.004461696147918701, 0.004484288215637207, 0.004501183986663818, 0.004458720207214355, 0.00439689588546753, 0.004394944190979004, 0.004433919906616211, 0.004476319789886474, 0.004510303974151611, 0.00450764799118042, 0.004290272235870361, 0.004638271808624268, 0.004620736122131348, 0.004589568138122559, 0.004589727878570557, 0.0046590399742126466, 0.0045649919509887695, 0.004566527843475342, 0.0045075521469116215, 0.004477119922637939, 0.004494016170501709, 0.004466400146484375, 0.00452623987197876, 0.00457916784286499, 0.00456825590133667, 0.004595935821533203, 0.004539040088653565, 0.0046035838127136235, 0.004601247787475586, 0.004647776126861573, 0.004575232028961182, 0.004599391937255859, 0.004643360137939453, 0.0046304001808166505, 0.00466534423828125, 0.004694272041320801, 0.004714208126068115, 0.004651040077209473, 0.004655104160308838, 0.004592800140380859, 0.004638751983642578, 0.004870975971221924, 0.004667391777038574, 0.004627711772918701, 0.004641215801239013, 0.004593023777008057, 0.004604191780090332, 0.004608384132385254, 0.005832992076873779, 0.004837088108062744, 0.004691232204437256, 0.00462659215927124, 0.004600671768188477, 0.004653056144714355, 0.004654496192932129, 0.004630144119262695, 0.004666048049926758, 0.004671775817871094, 0.004640768051147461, 0.00463808012008667, 0.004688384056091309, 0.004655231952667236, 0.004619391918182373, 0.004598656177520752, 0.004638912200927735, 0.004627488136291504, 0.004682112216949463, 0.0046097922325134275, 0.0045797438621520995, 0.004585728168487548, 0.004723008155822754, 0.004590943813323974, 0.004532576084136963, 0.004191936016082764, 0.004519807815551758, 0.004606912136077881, 0.00467958402633667, 0.004626495838165283, 0.004616032123565674, 0.004554687976837158, 0.004654367923736572, 0.004623231887817383, 0.004708479881286621, 0.004642816066741944, 0.004663167953491211, 0.00465715217590332, 0.004663296222686767, 0.004650400161743164, 0.004715104103088379, 0.0047288317680358885, 0.004682015895843506, 
0.004695775985717773, 0.004778143882751465, 0.004667232036590576, 0.004663296222686767, 0.004607168197631836, 0.004649792194366455, 0.004610047817230224, 0.0045790400505065915, 0.004819551944732666, 0.0047325439453125, 0.004632639884948731, 0.004638336181640625, 0.004610176086425781, 0.004652639865875244, 0.004583744049072266, 0.004639071941375732, 0.0045853757858276365, 0.004687071800231934, 0.00477788782119751, 0.0047923197746276855, 0.004700287818908692, 0.004649824142456055, 0.0045853757858276365, 0.004675712108612061, 0.004676703929901123, 0.004635615825653076, 0.0045751361846923825, 0.004628575801849365, 0.004596704006195068, 0.004656032085418701, 0.0046508159637451175, 0.0046737599372863765, 0.004832608222961426, 0.00472051191329956, 0.004642687797546387, 0.0046191678047180175, 0.005017792224884033, 0.0048000960350036625, 0.004712480068206787, 0.00459769582748413, 0.004644703865051269, 0.00455683183670044, 0.004526463985443116, 0.004483200073242187, 0.004464704036712647, 0.004267072200775147, 0.004563936233520508, 0.004599999904632568, 0.0046005439758300785, 0.004624192237854004, 0.004708672046661377, 0.0047283201217651364, 0.004761983871459961, 0.004653056144714355, 0.004598048210144043, 0.0046118078231811525, 0.004669280052185059, 0.004691487789154053, 0.004733888149261475, 0.004632160186767578, 0.004681600093841553, 0.004518112182617187, 0.004522016048431396, 0.004517856121063232, 0.004550655841827392, 0.004745215892791748, 0.004685823917388916, 0.004587679862976074, 0.004683616161346435, 0.004689919948577881, 0.004621503829956054, 0.004612927913665772, 0.004648736000061035, 0.004597311973571778, 0.004597504138946534, 0.004528672218322754, 0.004519455909729004, 0.004586527824401856, 0.004633567810058594, 0.0047820801734924315, 0.004624415874481201, 0.004547359943389892, 0.004575263977050781, 0.004694015979766846, 0.004865632057189941, 0.004804831981658935, 0.004740384101867676, 0.004754335880279541, 0.004898655891418457, 0.004802720069885254, 0.004859039783477783, 0.004715360164642334, 0.004769055843353271, 0.004815040111541748, 0.0047619519233703615, 0.004873856067657471, 0.004807136058807373, 0.004851808071136474, 0.0048089599609375, 0.004806240081787109, 0.0049237761497497555, 0.00498803186416626, 0.004819776058197021, 0.004813695907592773, 0.004818079948425293, 0.004816864013671875, 0.004789184093475342, 0.004852608203887939, 0.004677472114562989, 0.004936863899230957, 0.004893119812011719, 0.004837247848510742, 0.004808608055114746, 0.00484006404876709, 0.00481279993057251, 0.0048455681800842285, 0.004859903812408447, 0.004812128067016601, 0.004804768085479736, 0.004799071788787842, 0.004863903999328613, 0.004847455978393555, 0.004761760234832764, 0.004835455894470215, 0.004898560047149658, 0.004860032081604004, 0.004783135890960693, 0.004818111896514893, 0.004854752063751221, 0.004821919918060303, 0.004783135890960693, 0.004871039867401123, 0.004838912010192871, 0.004788064002990723, 0.0047868480682373045, 0.0048148479461669925, 0.004777279853820801, 0.004909759998321534, 0.004837376117706299, 0.004851071834564209, 0.0048808960914611815, 0.005097599983215332, 0.004909023761749267, 0.004898848056793213, 0.004812736034393311, 0.004788256168365478, 0.004883615970611572, 0.004921951770782471, 0.004872032165527344, 0.004827583789825439, 0.004841343879699707, 0.00476585578918457, 0.004767104148864746, 0.0047744960784912105, 0.004708352088928222, 0.004761600017547608, 0.0046917757987976075, 0.0046962881088256836, 0.0046910400390625, 0.004749983787536621, 0.0047513279914855955, 
0.004714752197265625, 0.004810751914978028, 0.004816832065582276, 0.004853824138641358, 0.004788224220275879, 0.005971968173980713, 0.004882431983947754, 0.004704256057739258, 0.004935679912567138, 0.004767199993133545, 0.004259840011596679, 0.004576896190643311, 0.004549407958984375, 0.004470367908477783, 0.004566688060760498, 0.0045593600273132326, 0.004551040172576904, 0.004597631931304932, 0.0046119999885559084, 0.004681568145751953, 0.0046609601974487306, 0.004644927978515625, 0.0046555838584899905, 0.004687488079071045, 0.004711071968078614, 0.004663008213043213, 0.00471395206451416, 0.004624927997589111, 0.004632575988769531, 0.004622335910797119, 0.004578783988952637, 0.004625216007232666, 0.004562111854553222, 0.0046434240341186525, 0.004561920166015625, 0.0045409598350524905, 0.004528543949127197, 0.004552703857421875, 0.0046059517860412595, 0.004547872066497803, 0.004545567989349365, 0.004462272167205811, 0.004433919906616211, 0.004402431964874267, 0.004400224208831787, 0.004406047821044922, 0.004479519844055176, 0.004450655937194824, 0.004407296180725098, 0.004449344158172607, 0.004383679866790772, 0.004370431900024414, 0.004382656097412109, 0.0044135041236877445, 0.004413440227508545, 0.004665631771087647, 0.004482783794403076, 0.0044720001220703125, 0.00449129581451416, 0.004469183921813965, 0.004471392154693604, 0.004472576141357422, 0.00445849609375, 0.004441984176635742, 0.004417888164520264, 0.004396063804626465, 0.0043546237945556645, 0.004360159873962402, 0.004579552173614502, 0.004441792011260986, 0.004407072067260742, 0.004403295993804931, 0.004354496002197266, 0.004087552070617676, 0.0044037442207336425, 0.004431871891021728, 0.004398176193237305, 0.004452608108520508, 0.004464511871337891, 0.004494463920593262, 0.004503200054168701, 0.004433919906616211, 0.0044586238861083986, 0.004466400146484375, 0.004718751907348633, 0.004571135997772217, 0.00446230411529541, 0.004493311882019043, 0.004501791954040527, 0.004411392211914063, 0.004429823875427246, 0.004417535781860352, 0.004421472072601318, 0.004403359889984131, 0.004409696102142334, 0.004379456043243408, 0.0044035520553588866, 0.004456992149353027, 0.004404704093933106, 0.004397535800933838, 0.0044234561920166015, 0.004470208168029785, 0.00448799991607666, 0.004558847904205322, 0.004848959922790527, 0.00484991979598999, 0.0046143999099731445, 0.004509888172149658, 0.004462592124938965, 0.004407296180725098, 0.004407296180725098, 0.004357952117919922, 0.004586944103240967, 0.004389632225036621, 0.004381951808929444, 0.004354976177215576, 0.0044048638343811035, 0.0043934078216552735, 0.004379839897155762, 0.004354656219482422, 0.004400479793548584, 0.004391232013702393, 0.0043628478050231935, 0.0043714561462402345, 0.004388768196105957, 0.004377471923828125, 0.004368351936340332, 0.004415103912353516, 0.004438399791717529, 0.00445359992980957, 0.004477151870727539, 0.004437983989715576, 0.004423520088195801, 0.004438784122467041, 0.0044644479751586915, 0.004443935871124268, 0.004198624134063721, 0.004537568092346191, 0.0044980478286743165, 0.004472832202911377, 0.004461823940277099, 0.004503615856170654, 0.004485824108123779, 0.004506944179534912, 0.0045075521469116215, 0.004696415901184082, 0.004536767959594726, 0.00454041576385498, 0.004535840034484863, 0.004467167854309082, 0.00443990421295166, 0.00443612813949585, 0.004377888202667236, 0.004381343841552734, 0.004384736061096192, 0.004348000049591064, 0.004372000217437744, 0.0043302721977233885, 0.0043170881271362305, 0.004658304214477539, 0.004387584209442139, 
0.004461887836456299, 0.004508639812469482, 0.004756383895874024, 0.004860640048980713, 0.004443615913391113, 0.00440169620513916, 0.004435328006744385, 0.0043812160491943355, 0.004460608005523682, 0.00468175983428955, 0.004456448078155518, 0.004388991832733154, 0.004419456005096436, 0.004423136234283447, 0.004493984222412109, 0.004441984176635742, 0.004485119819641113, 0.004404255867004395, 0.004406239986419678, 0.004448287963867188, 0.004433631896972656, 0.00441923189163208, 0.0044011201858520504, 0.004389503955841065, 0.004490719795227051, 0.004413472175598144, 0.004419328212738037, 0.004375296115875244, 0.004369631767272949, 0.004425983905792236, 0.004381120204925537, 0.004374623775482178, 0.004377664089202881, 0.004401663780212403, 0.004407743930816651, 0.0043907837867736815, 0.004415743827819825, 0.004366208076477051, 0.004119679927825927, 0.004529024124145508, 0.004532032012939453, 0.004547776222229004, 0.0045352959632873535, 0.004521984100341797, 0.004544511795043945, 0.004548192024230957, 0.004456639766693115, 0.004491648197174072, 0.004460351943969727, 0.004456480026245117, 0.004423679828643799, 0.004405344009399414, 0.00440719985961914, 0.00451584005355835, 0.004559936046600342, 0.004611008167266846, 0.00454860782623291, 0.004589056015014649, 0.004591968059539795, 0.004559008121490479, 0.004548416137695312, 0.004586719989776612, 0.0046087679862976075, 0.004514016151428222, 0.004490367889404297, 0.004449151992797852, 0.0045875201225280765, 0.004464479923248291, 0.004419360160827637, 0.004376959800720215, 0.004421631813049317, 0.00444323205947876, 0.004535200119018555, 0.004575232028961182, 0.00460364818572998, 0.0046080322265625, 0.004574463844299316, 0.0046212801933288574, 0.004642816066741944, 0.004644063949584961, 0.004592544078826905, 0.004624256134033203, 0.0045875201225280765, 0.004547743797302246, 0.0045875201225280765, 0.0045392317771911625, 0.0045015039443969725, 0.004469088077545166, 0.0044356160163879395, 0.0044011521339416505, 0.004427840232849121, 0.00438643217086792, 0.004616511821746826, 0.004419007778167725, 0.004416063785552978, 0.00437391996383667, 0.004356704235076904, 0.004385824203491211, 0.004340703964233398, 0.004380671977996826, 0.004374495983123779]",tokens/s,221.56306786181474,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,813.699072,576.585728,0.0,190.840832,172.878848,s,1,9.3332021484375,9.3332021484375,0.0,9.3332021484375,9.3332021484375,9.3332021484375,9.3332021484375,[9.3332021484375],,kWh,1.2189367720717806e-05,1.3372359172244237e-06,3.5461139479830983e-06,1.7072717585925327e-05,,MB,1280.692224,685.637632,0.0,270.532608,241.723904,s,29,0.21743510389328,0.007497762203216553,0.0001853077227930064,0.007449855804443359,0.007626003074645996,0.007762188911437987,0.008151646919250487,"[0.008269408226013183, 0.007323967933654785, 0.0073619518280029295, 0.007355264186859131, 0.007848832130432129, 0.007394336223602295, 
0.007343584060668945, 0.007420224189758301, 0.007529727935791016, 0.007609632015228272, 0.0073628158569335935, 0.007460383892059326, 0.0075198721885681155, 0.007449855804443359, 0.007414271831512451, 0.007487103939056396, 0.00746281623840332, 0.007476255893707275, 0.007624447822570801, 0.007369855880737305, 0.007387936115264893, 0.007603583812713623, 0.007632224082946777, 0.007386943817138672, 0.00748038387298584, 0.007617440223693848, 0.007434207916259766, 0.007385280132293701, 0.007422495841979981]",tokens/s,34143.520834813295,kWh,2.145692144423559e-07,2.3663131938502173e-08,1.1153603694116613e-07,3.497683833220242e-07,tokens/kWh,731912923.5426244,MB,1327.325184,698.220544,0.0,283.11552,241.726464,s,29,9.8227380065918,0.33871510367557933,0.003675645605556092,0.3374329528808594,0.3418878356933594,0.34528271484375,0.3504019677734375,"[0.3426405944824219, 0.33557217407226564, 0.3358611145019531, 0.341079833984375, 0.33707977294921876, 0.35170779418945314, 0.3373736877441406, 0.33729129028320315, 0.3360444030761719, 0.3470441284179687, 0.3351929321289063, 0.33778363037109377, 0.3383873596191406, 0.33508853149414064, 0.33480087280273435, 0.3403803405761719, 0.3396883850097656, 0.3378749389648438, 0.34147976684570314, 0.3374329528808594, 0.3372602233886719, 0.34169964599609376, 0.3407630615234375, 0.34024227905273435, 0.338167236328125, 0.33727340698242186, 0.33524423217773436, 0.3364345397949219, 0.335848876953125]",tokens/s,185.9970202578899,kWh,9.662392972922115e-06,1.0654803234619126e-06,3.756550390859325e-06,1.448442368724335e-05,tokens/kWh,4349499.942858275,,s,1827,9.808547007560737,0.005368662839387373,0.00019553138590628757,0.0053309440612792965,0.0054725055694580076,0.005550355148315429,0.006162006893157959,"[0.005159232139587402, 0.005422272205352783, 0.0054100480079650876, 0.005490272045135498, 0.005410687923431396, 0.005467455863952636, 0.005460832118988037, 0.005400576114654541, 0.005397727966308594, 0.005332799911499023, 0.005323935985565185, 0.005395552158355713, 0.00541974401473999, 0.005486591815948487, 0.005645311832427978, 0.005540863990783691, 0.005559455871582031, 0.005513535976409912, 0.00540499210357666, 0.005416863918304443, 0.005386655807495117, 0.005394336223602295, 0.005598879814147949, 0.005594783782958984, 0.005456575870513916, 0.0058772478103637695, 0.005350207805633545, 0.0055289921760559085, 0.005293439865112304, 0.005383071899414063, 0.005505184173583985, 0.005431136131286621, 0.005406335830688476, 0.005463903903961182, 0.005331488132476807, 0.005332799911499023, 0.005346879959106445, 0.005397119998931885, 0.0053229122161865235, 0.00533625602722168, 0.0053910079002380375, 0.005600736141204834, 0.005405471801757812, 0.005435135841369629, 0.005619711875915527, 0.005452928066253662, 0.005312704086303711, 0.0055509119033813475, 0.005775231838226319, 0.005476223945617676, 0.005509247779846191, 0.0053300800323486325, 0.005349567890167236, 0.005319007873535156, 0.005417280197143555, 0.0054150080680847165, 0.005255072116851806, 0.0052633600234985355, 0.005384543895721436, 0.005368991851806641, 0.005484960079193115, 0.0053640961647033695, 0.005309728145599366, 0.005016479969024658, 0.005301727771759033, 0.0052717761993408205, 0.005374271869659424, 0.005452032089233398, 0.005279488086700439, 0.0052650561332702635, 0.005308159828186035, 0.005270463943481445, 0.005269152164459228, 0.005351424217224121, 0.00528166389465332, 0.005322879791259766, 0.00536575984954834, 0.0053630080223083494, 0.005284671783447266, 0.005312384128570557, 0.005261248111724854, 0.00525932788848877, 
0.005278848171234131, 0.005311264038085938, 0.0053654079437255855, 0.005360383987426758, 0.005334784030914306, 0.005238719940185547, 0.005353439807891846, 0.005426784038543701, 0.0052614398002624515, 0.005261631965637207, 0.0053658242225646975, 0.005345151901245117, 0.00532313585281372, 0.005340864181518554, 0.00530841588973999, 0.0053002238273620605, 0.005324063777923584, 0.0052715840339660645, 0.005309375762939453, 0.005316160202026367, 0.005359807968139649, 0.005345280170440674, 0.005388319969177246, 0.005271103858947754, 0.005259712219238281, 0.0052919998168945315, 0.005252831935882568, 0.005248544216156006, 0.005472064018249512, 0.005460192203521728, 0.005763807773590088, 0.005396480083465576, 0.005341184139251709, 0.005326591968536377, 0.005294239997863769, 0.005252416133880615, 0.005249375820159912, 0.005338848114013672, 0.00535859203338623, 0.0053155198097229, 0.005302720069885254, 0.005292384147644043, 0.005246975898742676, 0.0052787518501281735, 0.005062848091125488, 0.005289792060852051, 0.005472256183624268, 0.005404736042022705, 0.005269440174102783, 0.005309599876403809, 0.00525439977645874, 0.005302944183349609, 0.005325088024139404, 0.005300640106201172, 0.005333375930786133, 0.005428991794586182, 0.005671040058135986, 0.005311744213104248, 0.0053350720405578615, 0.005292384147644043, 0.005243584156036377, 0.005295807838439941, 0.005281599998474121, 0.005294335842132568, 0.0054330239295959475, 0.00534339189529419, 0.0053207998275756835, 0.005302559852600097, 0.0052507839202880855, 0.005271647930145264, 0.005302175998687744, 0.0052715840339660645, 0.005313536167144775, 0.0054093761444091795, 0.005311872005462646, 0.005389311790466309, 0.005307871818542481, 0.00536243200302124, 0.005250879764556885, 0.005257023811340332, 0.005299776077270508, 0.005323359966278076, 0.005381184101104736, 0.005319615840911865, 0.005298175811767578, 0.005283840179443359, 0.00527558422088623, 0.005266975879669189, 0.005305151939392089, 0.005319839954376221, 0.005372831821441651, 0.005440671920776367, 0.005335008144378662, 0.0054068160057067875, 0.005314015865325928, 0.005295072078704834, 0.005312160015106201, 0.005294432163238525, 0.005277311801910401, 0.005308800220489502, 0.005511168003082275, 0.005327936172485352, 0.005262112140655518, 0.0054061121940612794, 0.005259583950042725, 0.0052904319763183594, 0.005311744213104248, 0.005275712013244629, 0.005500864028930664, 0.005369855880737305, 0.00556387186050415, 0.005452672004699707, 0.005531007766723633, 0.005339424133300782, 0.005320672035217285, 0.005383359909057617, 0.005409632205963135, 0.005425312042236328, 0.005332608222961426, 0.005324416160583496, 0.005350143909454346, 0.005402431964874267, 0.005347519874572754, 0.005307263851165771, 0.005390495777130127, 0.006177408218383789, 0.005434656143188476, 0.00530841588973999, 0.005322847843170166, 0.00534611177444458, 0.005514400005340576, 0.005337600231170654, 0.005320767879486084, 0.005269792079925537, 0.005351232051849365, 0.005388480186462402, 0.0052919678688049315, 0.005290048122406006, 0.005445631980895996, 0.005355807781219482, 0.005310175895690918, 0.005342304229736328, 0.005329919815063477, 0.005400479793548584, 0.005389791965484619, 0.005288479804992675, 0.005322239875793457, 0.005287839889526367, 0.005245696067810059, 0.005961567878723145, 0.005322463989257812, 0.005465695858001709, 0.005444287776947021, 0.0053414077758789065, 0.005371615886688232, 0.005472127914428711, 0.00529033613204956, 0.005321599960327148, 0.005317599773406983, 0.005285888195037842, 0.00526470422744751, 
0.005367712020874023, 0.005333280086517334, 0.005274112224578857, 0.005285280227661133, 0.0052594561576843266, 0.0052590398788452146, 0.0056715521812438965, 0.005273439884185791, 0.006688799858093262, 0.0053992319107055665, 0.005312511920928955, 0.0053266239166259765, 0.005278207778930664, 0.00528764820098877, 0.005326720237731933, 0.005282144069671631, 0.005340576171875, 0.00535590410232544, 0.00530947208404541, 0.005263967990875244, 0.005316991806030274, 0.005263455867767334, 0.005269343852996826, 0.00530460786819458, 0.005275424003601074, 0.006420544147491455, 0.005490623950958252, 0.0052995200157165525, 0.00531935977935791, 0.005234208106994629, 0.005239264011383056, 0.00528764820098877, 0.005289728164672851, 0.005254015922546386, 0.005371551990509033, 0.005302271842956543, 0.0054265279769897465, 0.005319295883178711, 0.005281087875366211, 0.00527945613861084, 0.005398528099060058, 0.005286975860595703, 0.0052798080444335935, 0.005425151824951172, 0.005321951866149902, 0.005276639938354492, 0.005291903972625732, 0.005269311904907227, 0.0052644162178039555, 0.005472544193267822, 0.005321407794952392, 0.005306367874145508, 0.005601280212402344, 0.005332064151763916, 0.0052991042137146, 0.005318304061889648, 0.005290463924407959, 0.005273471832275391, 0.005292031764984131, 0.005286303997039795, 0.005340767860412598, 0.00537395191192627, 0.005398464202880859, 0.005799935817718506, 0.005338624000549317, 0.005242688179016113, 0.005262080192565918, 0.005278719902038574, 0.005256192207336426, 0.005332640171051025, 0.005368159770965576, 0.005294335842132568, 0.005002751827239991, 0.0052761597633361815, 0.005310175895690918, 0.005249311923980713, 0.006174719810485839, 0.00752188777923584, 0.007108607769012451, 0.00543990421295166, 0.005299424171447754, 0.005343808174133301, 0.006045631885528565, 0.006430784225463867, 0.006571584224700928, 0.006779551982879639, 0.008880352020263672, 0.006938176155090332, 0.005302048206329345, 0.005348000049591064, 0.005357344150543213, 0.005399712085723877, 0.005355999946594238, 0.005355775833129883, 0.0052902398109436035, 0.006314176082611084, 0.005292960166931152, 0.005257728099822998, 0.005323040008544922, 0.005249023914337158, 0.005359615802764893, 0.0053448319435119625, 0.00532313585281372, 0.005450016021728516, 0.005316415786743164, 0.005420608043670654, 0.005273695945739746, 0.005279679775238037, 0.0052800321578979495, 0.005283167839050293, 0.005351295948028564, 0.005359903812408447, 0.0052494401931762694, 0.005269120216369629, 0.005296192169189453, 0.0052328000068664554, 0.005271903991699219, 0.0052633600234985355, 0.00530847978591919, 0.005395423889160156, 0.005284832000732422, 0.005304575920104981, 0.005409984111785888, 0.005280447959899902, 0.005257472038269043, 0.005283455848693847, 0.005279744148254394, 0.005258240222930908, 0.0052995519638061525, 0.005341119766235351, 0.005283872127532959, 0.005995552062988282, 0.00550108814239502, 0.005458176136016846, 0.005437695980072021, 0.00514572811126709, 0.005404607772827148, 0.005400896072387695, 0.00533673620223999, 0.00539574384689331, 0.005351103782653808, 0.005288288116455078, 0.005295104026794434, 0.00538047981262207, 0.005736735820770264, 0.005416672229766845, 0.005345568180084228, 0.0053647360801696775, 0.005264736175537109, 0.005311456203460694, 0.00529472017288208, 0.005306335926055908, 0.005273695945739746, 0.005354976177215576, 0.005311007976531982, 0.005333216190338135, 0.005280576229095459, 0.0052623038291931155, 0.0052954239845275876, 0.005288159847259522, 0.005261760234832763, 
0.005516704082489014, 0.0053500161170959475, 0.005342336177825928, 0.005344128131866455, 0.00526259183883667, 0.005292799949645996, 0.0053002238273620605, 0.005938655853271484, 0.005454112052917481, 0.005352992057800293, 0.005411744117736816, 0.005408576011657715, 0.005321728229522705, 0.005315680027008057, 0.005336991786956787, 0.005341184139251709, 0.005310463905334473, 0.005384191989898681, 0.005325151920318604, 0.005371551990509033, 0.00532480001449585, 0.0053084478378295896, 0.005251071929931641, 0.005291039943695068, 0.005458047866821289, 0.00534611177444458, 0.005416959762573242, 0.005336448192596436, 0.005342944145202637, 0.005514463901519775, 0.005297855854034424, 0.005292031764984131, 0.005284895896911621, 0.005242911815643311, 0.005237472057342529, 0.005291840076446533, 0.005286303997039795, 0.005034687995910644, 0.005318848133087159, 0.005301536083221436, 0.005255871772766113, 0.005234784126281739, 0.005287871837615967, 0.00523414421081543, 0.0053572158813476566, 0.005358496189117432, 0.005273151874542237, 0.005274015903472901, 0.0053012480735778805, 0.005274144172668457, 0.005247456073760986, 0.005296160221099853, 0.005277440071105957, 0.005361023902893066, 0.005561471939086914, 0.005294847965240479, 0.005256159782409668, 0.005306528091430664, 0.0052696638107299805, 0.005268928050994873, 0.005312287807464599, 0.005275424003601074, 0.005305247783660889, 0.006254176139831543, 0.005293407917022705, 0.005263391971588135, 0.005313344001770019, 0.005617663860321045, 0.005369056224822998, 0.005312640190124512, 0.005370528221130371, 0.005504032135009765, 0.005373151779174805, 0.005318496227264404, 0.005284832000732422, 0.005364672183990478, 0.005257215976715088, 0.0052674560546875, 0.005255392074584961, 0.005445407867431641, 0.005449535846710205, 0.005915135860443115, 0.005432064056396484, 0.005399487972259522, 0.005337088108062744, 0.005310175895690918, 0.00535206413269043, 0.005297183990478516, 0.005277599811553955, 0.0054074559211730955, 0.00532480001449585, 0.00530841588973999, 0.005312704086303711, 0.005277503967285156, 0.005244927883148193, 0.005326111793518066, 0.005292223930358887, 0.0052865281105041505, 0.005521312236785889, 0.005341279983520508, 0.005038303852081299, 0.0053138241767883305, 0.005284351825714111, 0.005301887989044189, 0.005321055889129639, 0.005281824111938477, 0.005378047943115235, 0.005474143981933593, 0.00532700777053833, 0.005244927883148193, 0.005349279880523682, 0.005268832206726074, 0.00530508804321289, 0.005285888195037842, 0.005298175811767578, 0.005331999778747558, 0.005377056121826172, 0.005267392158508301, 0.005299680233001709, 0.005278463840484619, 0.0052687997817993165, 0.005261184215545655, 0.0053348479270935055, 0.005294464111328125, 0.0053456959724426266, 0.0053862080574035645, 0.005349343776702881, 0.005291071891784668, 0.005282847881317138, 0.005281439781188965, 0.0052508158683776856, 0.00534991979598999, 0.005302303791046142, 0.005351168155670166, 0.005583104133605957, 0.005639296054840088, 0.005331679821014405, 0.005328735828399658, 0.005243199825286865, 0.005290207862854004, 0.005289216041564942, 0.005318975925445556, 0.00538646411895752, 0.00535484790802002, 0.005387135982513428, 0.005355296134948731, 0.005381343841552734, 0.005278495788574219, 0.005315968036651611, 0.005313375949859619, 0.005373695850372315, 0.0053760638236999515, 0.005429376125335694, 0.005285696029663086, 0.005329184055328369, 0.005359583854675293, 0.005283008098602295, 0.005266016006469727, 0.0053515520095825195, 0.005279935836791992, 0.0054137282371521, 
0.005356575965881348, 0.005300191879272461, 0.00501196813583374, 0.005385727882385254, 0.005468639850616455, 0.00551852798461914, 0.005575583934783935, 0.005403808116912842, 0.0053656001091003415, 0.00537497615814209, 0.005412032127380371, 0.0053747200965881346, 0.005343008041381836, 0.005239007949829101, 0.005328896045684814, 0.0054150080680847165, 0.005648287773132324, 0.00545356798171997, 0.0060574722290039065, 0.007842336177825928, 0.0070965437889099125, 0.006875135898590088, 0.005455872058868408, 0.005468031883239746, 0.005431424140930176, 0.00535097599029541, 0.005291808128356934, 0.0056462721824646, 0.005642879962921143, 0.0053125758171081545, 0.005386623859405517, 0.005407423973083496, 0.00543228816986084, 0.005402080059051514, 0.005356063842773437, 0.005389952182769776, 0.005341311931610107, 0.005304575920104981, 0.005361279964447021, 0.005357952117919922, 0.0054570560455322265, 0.005489503860473633, 0.005364927768707275, 0.0052969598770141605, 0.005314559936523438, 0.005285888195037842, 0.0053043198585510255, 0.00652396821975708, 0.005362559795379638, 0.005412320137023926, 0.005367455959320068, 0.00529091215133667, 0.005304351806640625, 0.0052938880920410155, 0.005529280185699463, 0.00548095989227295, 0.005351039886474609, 0.005327231884002685, 0.005430592060089111, 0.005412543773651123, 0.005319231986999511, 0.0053233919143676756, 0.005267295837402344, 0.005261119842529297, 0.005283999919891358, 0.005088479995727539, 0.005325632095336914, 0.005316415786743164, 0.0052696638107299805, 0.005255167961120606, 0.005324543952941895, 0.005310912132263184, 0.00527126407623291, 0.0053023681640625, 0.0053043198585510255, 0.005492544174194336, 0.005352704048156738, 0.0052715840339660645, 0.005258048057556152, 0.005264992237091064, 0.0052986879348754885, 0.005271615982055664, 0.005300159931182861, 0.005341184139251709, 0.005382175922393799, 0.005265376091003418, 0.005298175811767578, 0.005257215976715088, 0.005253344058990478, 0.005314112186431885, 0.005257440090179443, 0.005298175811767578, 0.005361631870269775, 0.005369887828826904, 0.005318816184997559, 0.005314239978790283, 0.005271711826324463, 0.005251167774200439, 0.00532803201675415, 0.00523744010925293, 0.005252416133880615, 0.00562886381149292, 0.005483839988708496, 0.005294816017150879, 0.005316512107849121, 0.00531987190246582, 0.005247680187225342, 0.005324960231781006, 0.0052939200401306155, 0.005464191913604736, 0.005427296161651611, 0.005387519836425781, 0.005350143909454346, 0.005351071834564209, 0.005259583950042725, 0.00528934383392334, 0.005316768169403076, 0.005267551898956299, 0.005230783939361572, 0.005468192100524902, 0.005367584228515625, 0.005299392223358154, 0.005313727855682373, 0.005261119842529297, 0.005242271900177002, 0.0052824001312255855, 0.005246975898742676, 0.005336639881134034, 0.005144320011138916, 0.005313151836395263, 0.0053005762100219725, 0.005303616046905517, 0.005300704002380371, 0.005281888008117676, 0.005298304080963135, 0.005279744148254394, 0.00535756778717041, 0.0053571839332580565, 0.005308703899383545, 0.005261151790618896, 0.0052964801788330075, 0.005281983852386474, 0.005279551982879638, 0.005275775909423828, 0.005305215835571289, 0.005333920001983642, 0.00532480001449585, 0.00531660795211792, 0.005258848190307617, 0.00524124813079834, 0.005275648117065429, 0.005251071929931641, 0.005249023914337158, 0.005338592052459717, 0.005419551849365234, 0.005361824035644531, 0.005342847824096679, 0.0052820158004760745, 0.005271327972412109, 0.005318463802337647, 0.005296544075012207, 
0.005287775993347168, 0.005338816165924072, 0.005405471801757812, 0.005363359928131104, 0.005527584075927734, 0.00535587215423584, 0.005297760009765625, 0.005335103988647461, 0.0053003840446472165, 0.005299680233001709, 0.00685913610458374, 0.005566239833831787, 0.0054356160163879395, 0.0053534722328186036, 0.005375840187072754, 0.005308576107025146, 0.005252096176147461, 0.005315487861633301, 0.005320864200592041, 0.005322688102722168, 0.005408768177032471, 0.00536575984954834, 0.00532480001449585, 0.0053122239112854, 0.005309887886047363, 0.005281824111938477, 0.00560211181640625, 0.005365312099456787, 0.0053888001441955566, 0.00548144006729126, 0.0052081279754638676, 0.0054271998405456545, 0.0053794879913330075, 0.0053233919143676756, 0.0053268160820007324, 0.00546611213684082, 0.005495007991790772, 0.005457695960998535, 0.005410175800323486, 0.005315199851989746, 0.0052964801788330075, 0.005384895801544189, 0.005337791919708252, 0.00530460786819458, 0.005359615802764893, 0.005421055793762207, 0.005369664192199707, 0.005340447902679443, 0.005292128086090088, 0.0057923197746276855, 0.005460224151611328, 0.005287968158721924, 0.005316512107849121, 0.005316671848297119, 0.005355519771575928, 0.005392384052276611, 0.005342688083648682, 0.005304863929748535, 0.00532480001449585, 0.005576511859893799, 0.005383679866790772, 0.0053664641380310055, 0.005322463989257812, 0.005400352001190186, 0.005455391883850098, 0.0053463678359985355, 0.005345183849334717, 0.005369855880737305, 0.005314623832702637, 0.005292255878448487, 0.005366687774658203, 0.005478655815124512, 0.005380000114440918, 0.0056733121871948245, 0.005296544075012207, 0.005294271945953369, 0.005307680130004883, 0.0052740478515625, 0.005254144191741943, 0.005315839767456055, 0.005291520118713379, 0.005372352123260498, 0.005369440078735351, 0.0052657279968261715, 0.005390272140502929, 0.005337120056152344, 0.005260479927062988, 0.005287871837615967, 0.005348159790039063, 0.0053043198585510255, 0.005328767776489258, 0.005400703907012939, 0.0053350720405578615, 0.005022848129272461, 0.005311359882354736, 0.005294079780578613, 0.00528550386428833, 0.005395167827606201, 0.005373600006103515, 0.005322751998901368, 0.005332992076873779, 0.005281407833099366, 0.005249407768249512, 0.005354656219482422, 0.005263840198516846, 0.005261343955993652, 0.005288288116455078, 0.005350751876831055, 0.005325664043426514, 0.0052973442077636716, 0.005298431873321533, 0.0052470722198486325, 0.005271967887878418, 0.005255040168762207, 0.005234687805175781, 0.005328095912933349, 0.005454432010650634, 0.0053004159927368165, 0.00529747200012207, 0.0052800321578979495, 0.0052494401931762694, 0.00525875186920166, 0.005304255962371826, 0.00570630407333374, 0.005331232070922852, 0.005381855964660644, 0.0052791681289672855, 0.005290559768676758, 0.005265215873718262, 0.005236959934234619, 0.005252607822418213, 0.005261023998260498, 0.005253151893615722, 0.005265151977539063, 0.005643104076385498, 0.005341343879699707, 0.005307680130004883, 0.005299039840698242, 0.005273439884185791, 0.005238592147827148, 0.005306623935699463, 0.005467999935150147, 0.00530953598022461, 0.0054609918594360355, 0.0053350400924682614, 0.005322751998901368, 0.005293536186218262, 0.005257823944091797, 0.005248960018157959, 0.005282112121582031, 0.005256415843963623, 0.005345856189727783, 0.005388224124908447, 0.0053636798858642575, 0.005283584117889404, 0.005288576126098633, 0.0050265278816223145, 0.005280352115631103, 0.0053825597763061525, 0.005463424205780029, 0.005380735874176025, 
0.005377344131469727, 0.005296000003814697, 0.005302495956420898, 0.005370463848114014, 0.005259263992309571, 0.005261504173278809, 0.005365568161010742, 0.005354911804199219, 0.005296544075012207, 0.005408959865570068, 0.0053002238273620605, 0.005434879779815674, 0.005312992095947266, 0.005275360107421875, 0.005441343784332276, 0.005325439929962158, 0.005355391979217529, 0.005312511920928955, 0.005301568031311035, 0.005255519866943359, 0.005249184131622314, 0.005257408142089844, 0.0052674560546875, 0.005219744205474853, 0.005274208068847657, 0.005322080135345459, 0.005318751811981201, 0.005304768085479737, 0.0053331198692321774, 0.005257215976715088, 0.0052501440048217775, 0.005264639854431152, 0.005246399879455566, 0.005464288234710693, 0.005414912223815918, 0.005339136123657226, 0.005320159912109375, 0.0052939200401306155, 0.0052470722198486325, 0.00528223991394043, 0.005318143844604492, 0.005251743793487549, 0.005244927883148193, 0.005396480083465576, 0.005320479869842529, 0.005263584136962891, 0.005303359985351562, 0.005303232192993164, 0.005250688076019287, 0.005300992012023926, 0.005254784107208252, 0.0052715520858764645, 0.00536575984954834, 0.005337088108062744, 0.005274623870849609, 0.005319392204284668, 0.00524726390838623, 0.005247136116027832, 0.005161087989807129, 0.00555238389968872, 0.005587039947509765, 0.005533599853515625, 0.005406400203704834, 0.00533955192565918, 0.00538972806930542, 0.0054134721755981444, 0.005336927890777588, 0.0053229122161865235, 0.0053534722328186036, 0.0054926080703735355, 0.005357888221740723, 0.005266975879669189, 0.006033696174621582, 0.005292287826538086, 0.005297279834747314, 0.005325439929962158, 0.0053043198585510255, 0.005373536109924316, 0.005539296150207519, 0.005387199878692627, 0.005385824203491211, 0.0053673281669616695, 0.005319551944732666, 0.005371903896331787, 0.0053637118339538575, 0.0053283519744873045, 0.005374623775482178, 0.005470143795013428, 0.005519296169281006, 0.005419295787811279, 0.005368735790252686, 0.005281951904296875, 0.00534607982635498, 0.005326943874359131, 0.005320735931396484, 0.0054026880264282226, 0.005435296058654785, 0.00538915205001831, 0.0053482880592346195, 0.005323840141296387, 0.00530508804321289, 0.005320703983306885, 0.005398655891418457, 0.005296192169189453, 0.005316256046295166, 0.005380320072174072, 0.005368288040161133, 0.005364672183990478, 0.005315296173095703, 0.005285888195037842, 0.005314400196075439, 0.005320864200592041, 0.005345280170440674, 0.005371871948242187, 0.005396512031555176, 0.00551852798461914, 0.005433407783508301, 0.00531279993057251, 0.005296607971191406, 0.005328832149505615, 0.006336927890777588, 0.005255167961120606, 0.00545577621459961, 0.005539936065673828, 0.005404255867004395, 0.005302720069885254, 0.005287903785705567, 0.00543123197555542, 0.005367008209228516, 0.005419040203094482, 0.0055751361846923825, 0.005474944114685059, 0.00542464017868042, 0.005367263793945313, 0.0053089919090271, 0.005365952014923096, 0.005326528072357178, 0.005340479850769043, 0.005403359889984131, 0.005406720161437988, 0.005302239894866943, 0.005306687831878662, 0.0052676801681518556, 0.0053712639808654785, 0.005325247764587402, 0.005778751850128174, 0.005309088230133057, 0.005461056232452393, 0.005380064010620117, 0.005313055992126465, 0.005337535858154297, 0.005254271984100342, 0.005290880203247071, 0.005328896045684814, 0.0052778878211975095, 0.005339231967926026, 0.005496448040008545, 0.005459328174591064, 0.005339488029479981, 0.005433728218078613, 0.005298111915588379, 
0.005261375904083252, 0.005488639831542969, 0.0054906878471374515, 0.0055175042152404786, 0.005531712055206299, 0.005477151870727539, 0.005430240154266358, 0.0053534722328186036, 0.005332223892211914, 0.005352384090423584, 0.005293856143951416, 0.0053012800216674805, 0.005460415840148926, 0.005501599788665772, 0.0054345598220825195, 0.00542742395401001, 0.005296768188476563, 0.005285024166107178, 0.005319392204284668, 0.005408768177032471, 0.0053002238273620605, 0.005365344047546386, 0.005427616119384766, 0.005044415950775146, 0.0053517122268676755, 0.00529807996749878, 0.005279488086700439, 0.005338784217834473, 0.005310463905334473, 0.005332608222961426, 0.005468575954437256, 0.005376319885253906, 0.005324128150939941, 0.005337823867797851, 0.005283775806427002, 0.005405792236328125, 0.005311391830444336, 0.005298111915588379, 0.005336959838867187, 0.005494688034057617, 0.00538643217086792, 0.005332704067230225, 0.0053471360206604, 0.005253632068634034, 0.0052486081123352055, 0.005319136142730713, 0.00526262378692627, 0.005260287761688232, 0.005431168079376221, 0.005327040195465088, 0.0052995200157165525, 0.005312928199768066, 0.005260928153991699, 0.005261631965637207, 0.005287424087524414, 0.005270016193389892, 0.005299488067626953, 0.005426015853881836, 0.005352704048156738, 0.005325439929962158, 0.00536678409576416, 0.005295392036437988, 0.005435103893280029, 0.00533846378326416, 0.005304992198944092, 0.005286208152770996, 0.005439104080200196, 0.0053731842041015625, 0.0053292479515075685, 0.005755360126495361, 0.005318655967712403, 0.0053043198585510255, 0.005348896026611328, 0.00532534408569336, 0.005406079769134521, 0.005570112228393554, 0.005501952171325684, 0.005414912223815918, 0.005381855964660644, 0.00531606388092041, 0.005517375946044922, 0.005309375762939453, 0.005326176166534424, 0.0054440641403198245, 0.005525504112243652, 0.0055105919837951664, 0.005308800220489502, 0.005367519855499267, 0.005322624206542969, 0.005390367984771729, 0.0053534722328186036, 0.005532991886138916, 0.005472959995269776, 0.006086912155151367, 0.005428991794586182, 0.005328735828399658, 0.005283455848693847, 0.005355967998504639, 0.005339168071746826, 0.0054133119583129885, 0.005613440036773681, 0.005413760185241699, 0.005403711795806885, 0.005346208095550537, 0.005356448173522949, 0.005332992076873779, 0.005314623832702637, 0.005375807762145996, 0.005539904117584229, 0.005468255996704101, 0.005381792068481445, 0.005408383846282959, 0.005290688037872315, 0.005285888195037842, 0.005347328186035156, 0.005311999797821045, 0.005394944190979004, 0.005478400230407715, 0.0055903677940368655, 0.005349440097808838, 0.005374368190765381, 0.005290175914764404, 0.005298175811767578, 0.005473696231842041, 0.005283648014068603, 0.00540342378616333, 0.005549056053161621, 0.005417984008789062, 0.005388288021087646, 0.005340832233428955, 0.0053089599609375, 0.005448927879333496, 0.005317215919494629, 0.005313759803771973, 0.005343808174133301, 0.005441376209259033, 0.0053827519416809085, 0.005383200168609619, 0.005351808071136475, 0.005268928050994873, 0.005393695831298828, 0.005352191925048828, 0.005730400085449219, 0.005534495830535889, 0.005454048156738282, 0.005494592189788819, 0.005531424045562744, 0.005355743885040283, 0.005769152164459229, 0.005055039882659912, 0.005325439929962158, 0.005389855861663818, 0.005435679912567139, 0.005337088108062744, 0.005369184017181397, 0.005316480159759522, 0.0053422718048095705, 0.005420767784118653, 0.005328896045684814, 0.0053729920387268065, 0.0054563841819763184, 
0.00548089599609375, 0.005965888023376465, 0.0053818879127502444, 0.005294271945953369, 0.005307424068450928, 0.005290495872497559, 0.005259744167327881, 0.005296127796173096, 0.005371647834777832, 0.00536191987991333, 0.005381311893463135, 0.00540940809249878, 0.005330815792083741, 0.005341504096984863, 0.0053433279991149905, 0.005319744110107422, 0.005300320148468017, 0.005452703952789306, 0.005323935985565185, 0.005366079807281494, 0.005286272048950195, 0.005221727848052978, 0.00554860782623291, 0.005263455867767334, 0.005251071929931641, 0.0052787518501281735, 0.005357952117919922, 0.005326720237731933, 0.0053071041107177735, 0.005283840179443359, 0.00526038408279419, 0.005272416114807129, 0.005269567966461181, 0.005251071929931641, 0.005306528091430664, 0.00551632022857666, 0.005433440208435059, 0.0055119037628173825, 0.005357632160186768, 0.0052706880569458004, 0.005298975944519043, 0.005269504070281982, 0.005346496105194092, 0.005329919815063477, 0.005377151966094971, 0.005395135879516602, 0.005441535949707031, 0.005293568134307861, 0.005268256187438965, 0.005322144031524658, 0.005325119972229004, 0.005044352054595947, 0.005618879795074463, 0.00561356782913208, 0.005317440032958984, 0.005360896110534668, 0.005274367809295654, 0.005261312007904053, 0.00532697582244873, 0.0052930240631103515, 0.005323679924011231, 0.00536575984954834, 0.005316287994384766, 0.005302591800689697, 0.005284927845001221, 0.0052789759635925295, 0.005273280143737793, 0.005338624000549317, 0.005258944034576416, 0.005269824028015136, 0.005378047943115235, 0.005337600231170654, 0.005353151798248291, 0.0052817602157592776, 0.005276000022888184, 0.005280992031097412, 0.005466911792755127, 0.005289919853210449, 0.0053125758171081545, 0.0054570879936218265, 0.005411168098449707, 0.005315072059631347, 0.005324031829833984, 0.00527843189239502, 0.005274879932403565, 0.005302879810333252, 0.0052919678688049315, 0.005322976112365723, 0.005430943965911865, 0.005325151920318604, 0.005509471893310547, 0.005407423973083496, 0.005450719833374023, 0.00531660795211792, 0.005296224117279052, 0.00527452802658081, 0.005288959980010987, 0.005357024192810059, 0.005306399822235107, 0.00527843189239502, 0.005387775897979737, 0.005378335952758789, 0.005369855880737305, 0.005475679874420166, 0.0054336957931518555, 0.005428607940673828, 0.005578720092773438, 0.005401408195495606, 0.005451519966125488, 0.005427008152008057, 0.0052650561332702635, 0.005281856060028076, 0.005292287826538086, 0.005298816204071045, 0.0059886078834533694, 0.005504767894744873, 0.00532419204711914, 0.005341792106628418, 0.005300511837005615, 0.0053407039642333985, 0.005398719787597656, 0.005375999927520752, 0.0054471039772033695, 0.005425727844238281, 0.005318655967712403, 0.0053060798645019535, 0.0054152002334594725, 0.005261312007904053, 0.005329184055328369, 0.005332704067230225, 0.005299776077270508, 0.0053578557968139645, 0.005429408073425293, 0.005308095932006836, 0.005273920059204101, 0.00532480001449585, 0.0052717761993408205, 0.005304384231567383, 0.005347040176391602, 0.0053350400924682614, 0.0054373440742492675, 0.005417056083679199, 0.005355519771575928, 0.005337152004241943, 0.00533296012878418, 0.0058141121864318844, 0.005693568229675293, 0.00531987190246582, 0.005527999877929688, 0.005529024124145508, 0.0054068160057067875, 0.005992959976196289, 0.005352896213531494, 0.005333280086517334, 0.0053441600799560545, 0.005334720134735107, 0.005353119850158692, 0.0054644479751586915, 0.005465663909912109, 0.0053803520202636715, 0.005365952014923096, 
0.00535756778717041, 0.0054268798828125, 0.005375967979431152, 0.005462399959564209, 0.005358560085296631, 0.005460608005523682, 0.00541103982925415, 0.005386271953582764, 0.005392384052276611, 0.005328512191772461, 0.005365920066833496, 0.005345632076263427, 0.005455071926116943, 0.0055931200981140135, 0.005657343864440918, 0.005543935775756836, 0.00514902400970459, 0.005399968147277832, 0.005371744155883789, 0.005393311977386474, 0.005601088047027588, 0.005453824043273926, 0.0054724798202514646, 0.005421023845672607, 0.005414912223815918, 0.005650432109832764, 0.005437439918518067, 0.005506048202514649, 0.0054293122291564945, 0.005479008197784424, 0.005517663955688476, 0.005531455993652344, 0.005462207794189453, 0.005380320072174072, 0.005340960025787354, 0.0053610877990722654, 0.005336927890777588, 0.005310175895690918, 0.005401599884033203, 0.005416959762573242, 0.00546943998336792, 0.005464831829071045, 0.005407904148101807, 0.005770080089569092, 0.005350751876831055, 0.0053582401275634765, 0.005348383903503418, 0.005362400054931641, 0.005396543979644775, 0.0054234881401062016, 0.005334368228912353, 0.005329376220703125, 0.005333183765411377, 0.005295104026794434, 0.005302559852600097, 0.005312096118927002, 0.0053155198097229, 0.005471456050872803, 0.005411424160003662, 0.005318592071533203, 0.005301599979400635, 0.005325119972229004, 0.005281407833099366, 0.005276031970977783, 0.0053373122215271, 0.005368192195892334, 0.005472256183624268, 0.005655935764312744, 0.00541318416595459, 0.00539680004119873, 0.005406720161437988, 0.005343232154846191, 0.005314591884613037, 0.005351359844207764, 0.005343264102935791, 0.005546080112457275, 0.005410975933074951, 0.005360671997070313, 0.005360352039337158, 0.005054368019104004, 0.005298175811767578, 0.005339168071746826, 0.005453792095184326, 0.005389408111572265, 0.005354400157928467, 0.0053942399024963375, 0.005351615905761719, 0.005361663818359375, 0.0053350400924682614, 0.005316512107849121, 0.005414688110351563, 0.0054716157913208005, 0.005352575778961182, 0.005440512180328369, 0.005339168071746826, 0.005304768085479737, 0.005308767795562744, 0.0053630399703979495, 0.005311135768890381, 0.0053820481300354, 0.005428768157958984, 0.005390912055969238, 0.005359615802764893, 0.005303872108459473, 0.005323200225830078, 0.005357279777526856, 0.0053108158111572264, 0.005300159931182861, 0.0053864002227783205, 0.00571123218536377, 0.005378528118133545, 0.005392255783081054, 0.005474559783935547, 0.005474336147308349, 0.005352640151977539, 0.00530511999130249, 0.005371776103973389, 0.005500959873199463, 0.005408736228942871, 0.005378047943115235, 0.0053393921852111816, 0.0052938880920410155, 0.005543583869934082, 0.005345568180084228, 0.005416384220123291, 0.00551584005355835, 0.005500895977020264, 0.00548576021194458, 0.005485407829284668, 0.005387680053710937, 0.00535587215423584, 0.0053474240303039555, 0.005338304042816162, 0.005348127841949463, 0.00536191987991333, 0.005455391883850098, 0.006125823974609375, 0.005380256175994873, 0.00531385612487793, 0.005433440208435059, 0.005347936153411865, 0.0053637118339538575, 0.0052408638000488285, 0.005492959976196289, 0.005527679920196533, 0.0053792958259582515, 0.005317408084869385, 0.005326848030090332, 0.005326176166534424, 0.005352447986602784, 0.00538588809967041, 0.005445536136627197, 0.005402944087982178, 0.005404448032379151, 0.005383872032165528, 0.005343039989471435, 0.005368319988250733, 0.0053309440612792965, 0.0053142719268798825, 0.005372191905975342, 0.00540067195892334, 
0.00543507194519043, 0.0054170241355896, 0.005345344066619873, 0.005322175979614258, 0.005360671997070313, 0.005321760177612305, 0.005313119888305664, 0.00534768009185791, 0.005389984130859375, 0.005388288021087646, 0.005375999927520752, 0.005314559936523438, 0.005291872024536133, 0.0053331518173217775, 0.005275519847869873, 0.0052921600341796875, 0.005355040073394776, 0.005597663879394531, 0.005422368049621582, 0.0053975038528442385, 0.005332128047943115, 0.005330687999725342, 0.005333888053894043, 0.005283775806427002, 0.005316671848297119, 0.005388319969177246, 0.005425024032592774, 0.0054150400161743165, 0.005365664005279541, 0.005327040195465088, 0.005353439807891846, 0.005316448211669922, 0.0053002238273620605, 0.00531660795211792, 0.005320703983306885, 0.0053637118339538575, 0.005470016002655029, 0.0053537278175354005, 0.005307871818542481, 0.0053621759414672855, 0.005312479972839355, 0.005290304183959961, 0.005325984001159668, 0.005357088088989258, 0.005126848220825196, 0.005430655956268311, 0.005386847972869873, 0.005350624084472656, 0.005364511966705322, 0.005322432041168213, 0.005388607978820801, 0.005414912223815918, 0.005471456050872803, 0.005402912139892578, 0.005407264232635498, 0.005330912113189697, 0.005345536231994629, 0.005334112167358398, 0.005296800136566162, 0.005326047897338867, 0.005444352149963379, 0.005396512031555176, 0.005375487804412842, 0.005373983860015869, 0.005309919834136963, 0.005305344104766845, 0.0053309440612792965, 0.005287327766418457, 0.005325632095336914, 0.005440544128417968, 0.005596127986907959, 0.005446464061737061, 0.005437856197357178, 0.005517888069152832, 0.005365664005279541, 0.005317791938781739, 0.005286848068237305, 0.005310463905334473, 0.005377344131469727, 0.005332704067230225, 0.0053339838981628414, 0.005287936210632324, 0.005285888195037842, 0.005283840179443359, 0.005279104232788086, 0.0052865281105041505, 0.005328896045684814, 0.005398431777954102, 0.005335135936737061, 0.005380000114440918, 0.00533462381362915, 0.005281280040740967, 0.00528223991394043, 0.005276224136352539, 0.005284927845001221, 0.005305471897125244, 0.005391327857971191, 0.005313375949859619, 0.005479743957519531, 0.005333439826965332, 0.005269760131835937, 0.005300159931182861, 0.005281856060028076, 0.005252768039703369, 0.005259967803955078, 0.00536950397491455, 0.005306367874145508, 0.004997119903564453, 0.005300543785095215, 0.005258656024932861, 0.005279295921325684, 0.00533577585220337, 0.005310463905334473, 0.005373311996459961, 0.005358208179473877, 0.005273727893829345, 0.005261184215545655, 0.0053043198585510255, 0.005277376174926757, 0.005300543785095215, 0.005316351890563965, 0.00532908821105957, 0.005331007957458496, 0.005326303958892822, 0.005310304164886474, 0.0052722558975219725, 0.005281343936920166, 0.005269951820373535, 0.0052531838417053224, 0.005259520053863526, 0.005300127983093262, 0.005414688110351563, 0.005338848114013672, 0.00529033613204956, 0.0052427520751953125, 0.00523203182220459, 0.005268223762512207, 0.005268608093261719, 0.005245471954345703, 0.005304575920104981, 0.005341023921966553, 0.00528604793548584, 0.005315936088562011, 0.00526307201385498, 0.005256224155426026, 0.005287839889526367, 0.005246047973632813, 0.005255072116851806, 0.005364128112792969, 0.00540227222442627, 0.005329728126525879, 0.005318304061889648, 0.005296127796173096, 0.0054481601715087894, 0.0053344001770019535, 0.00529472017288208, 0.005272960186004639, 0.005312511920928955, 0.005374591827392578, 0.005315968036651611, 0.005297952175140381, 
0.005639008045196533, 0.00555788803100586, 0.005402207851409912, 0.005269536018371582, 0.005285888195037842, 0.005346047878265381, 0.005378047943115235, 0.005352799892425537, 0.005444352149963379, 0.005017568111419677, 0.005288000106811523, 0.0053398399353027344, 0.005279488086700439, 0.005353504180908203, 0.005402783870697022, 0.005326367855072021, 0.005302783966064453, 0.005318143844604492, 0.005254559993743897, 0.005255231857299805, 0.0052921600341796875, 0.005290719985961914, 0.005275680065155029, 0.005374112129211426, 0.005356575965881348, 0.005270304203033448, 0.005298175811767578, 0.005279039859771729, 0.005260992050170899, 0.005299392223358154, 0.005263455867767334, 0.005266623973846436, 0.005381728172302246, 0.005308703899383545, 0.0052576642036437984, 0.005239007949829101, 0.005289696216583252, 0.005231103897094727, 0.0052470722198486325, 0.005266560077667236, 0.005249023914337158, 0.005324736118316651, 0.005318304061889648, 0.005529823780059814, 0.005241600036621094, 0.005329055786132813, 0.00524832010269165, 0.005715648174285889, 0.005530464172363281, 0.005388383865356445, 0.005560224056243897, 0.005419007778167725, 0.005396255970001221, 0.005367904186248779, 0.00531059217453003, 0.005296031951904297, 0.005335296154022217, 0.005318016052246094, 0.00548038387298584, 0.005501344203948975, 0.005371647834777832, 0.005320864200592041, 0.005343711853027343, 0.00529094409942627, 0.005287968158721924, 0.00532969617843628, 0.005390336036682129, 0.005447296142578125, 0.005472799777984619, 0.00534716796875, 0.005296127796173096, 0.005328415870666504, 0.005041088104248047, 0.005297120094299316, 0.00536950397491455, 0.005364319801330566, 0.005330368041992188, 0.005365312099456787, 0.005297920227050781, 0.00531763219833374, 0.00533519983291626, 0.005281599998474121, 0.00528326416015625, 0.005327328205108643, 0.005406847953796387, 0.0053864002227783205, 0.005414752006530762, 0.005350751876831055, 0.005328959941864013, 0.005327455997467041, 0.005279744148254394, 0.005266687870025635, 0.005335807800292969, 0.00536575984954834, 0.005337024211883545, 0.005361023902893066, 0.005297120094299316, 0.005268864154815674, 0.00527510404586792, 0.005237567901611328, 0.005285759925842285, 0.005315008163452148, 0.005363455772399902, 0.0055961918830871584, 0.005440671920776367, 0.005375807762145996, 0.005359583854675293, 0.005322783946990967, 0.005284224033355713, 0.005284512042999267, 0.005362624168395996, 0.005373983860015869, 0.005337088108062744, 0.005332352161407471, 0.005288032054901123, 0.0052698559761047365, 0.005298367977142334, 0.005255167961120606, 0.005248928070068359, 0.005323200225830078, 0.005360320091247559, 0.005333055973052978, 0.005323679924011231, 0.005285888195037842, 0.0052779197692871095, 0.00528934383392334, 0.005255616188049316, 0.005268864154815674, 0.005300640106201172, 0.005379936218261719, 0.00533135986328125, 0.005448736190795898, 0.0053441600799560545, 0.005295584201812744, 0.005309023857116699]",tokens/s,186.26612061824164,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,805.515264,576.585728,0.0,190.840832,172.878848,s,1,9.5104287109375,9.5104287109375,0.0,9.5104287109375,9.5104287109375,9.5104287109375,9.5104287109375,[9.5104287109375],,kWh,1.2579599724991415e-05,1.3802828618390297e-06,3.4558360979958236e-06,1.741571868482627e-05,,MB,1273.921536,685.637632,0.0,270.532608,241.723904,s,35,0.22017107152938845,0.006290602043696812,0.00013731028919921774,0.006258048057556152,0.006463942432403565,0.00662011523246765,0.006700048828125,"[0.006389823913574219, 0.006318719863891602, 0.006411104202270508, 0.00667952013015747, 0.006315199851989746, 0.006303679943084717, 0.006271743774414062, 0.006310111999511718, 0.006223519802093506, 0.006258143901824951, 0.006263008117675781, 0.006710624217987061, 0.006257887840270996, 0.006230751991271973, 0.006202079772949219, 0.0065946559906005855, 0.006258048057556152, 0.00622054386138916, 0.006219071865081787, 0.006185344219207764, 0.006265312194824219, 0.006222047805786133, 0.006293536186218262, 0.006233215808868408, 0.006142623901367187, 0.006197855949401855, 0.006116256237030029, 0.006349664211273193, 0.006228096008300781, 0.006187935829162597, 0.006202559947967529, 0.006188608169555664, 0.006139200210571289, 0.006499167919158936, 0.006281407833099365]",tokens/s,40695.62789407608,kWh,1.8711536730234296e-07,2.063549207889079e-08,9.80063105987234e-08,3.0575716997995715e-07,tokens/kWh,837265729.5878987,MB,1320.28416,698.220544,0.0,283.11552,241.726464,s,35,9.793362945556643,0.2798103698730469,0.00596094249310523,0.27801028442382814,0.288182470703125,0.29094169616699217,0.2964447705078125,"[0.29079360961914064, 0.2912872314453125, 0.2891829833984375, 0.28545770263671877, 0.28107666015625, 0.2991016845703125, 0.2818441467285156, 0.28154583740234373, 0.2866817016601563, 0.28283731079101565, 0.2795537414550781, 0.2752619934082031, 0.27801028442382814, 0.2792362670898437, 0.2782505187988281, 0.2778215637207031, 0.2767486572265625, 0.2749921264648437, 0.2754400329589844, 0.27714694213867186, 0.2760839233398438, 0.27533221435546873, 0.27788467407226564, 0.276215576171875, 0.28140277099609373, 0.27710183715820313, 0.27355404663085936, 0.2750039978027344, 0.2720769348144531, 0.27320477294921874, 0.2730509643554688, 0.2728078308105469, 0.28157400512695313, 0.28383163452148436, 0.28196676635742185]",tokens/s,225.15248462229545,kWh,8.103193838715785e-06,8.936040062878324e-07,3.2455534630122085e-06,1.2242351308015826e-05,tokens/kWh,5146070.261743755,,s,2205,9.77497245287896,0.004433094082938301,0.00017680592479283382,0.004410079956054688,0.004582419204711914,0.0046626368522644035,0.00508764295578003,"[0.004267360210418701, 0.004478879928588867, 0.004521056175231934, 0.004572959899902343, 0.004523039817810059, 0.004542431831359863, 0.004676479816436768, 0.004648799896240234, 0.004610559940338135, 0.004494815826416016, 0.004531519889831543, 0.00467852783203125, 0.00447603178024292, 0.004465536117553711, 0.004362271785736084, 0.004369855880737305, 0.004571839809417725, 0.004454239845275879, 0.0044457921981811525, 0.0043727998733520505, 0.004343616008758545, 0.0054848318099975585, 0.005037856101989746, 0.004489535808563232, 0.004527488231658936, 0.004503551959991455, 0.004501599788665772, 0.004692192077636719, 0.004512032032012939, 0.004513088226318359, 0.004536608219146729, 0.004538943767547607, 0.004532000064849853, 0.0045055999755859374, 0.004500864028930664, 
0.004455359935760498, 0.004695680141448975, 0.004605088233947754, 0.004887455940246582, 0.005026112079620362, 0.004633632183074951, 0.004643295764923096, 0.004680928230285645, 0.004696544170379639, 0.0047448320388793945, 0.00471347188949585, 0.004765120029449463, 0.004714144229888916, 0.004725535869598388, 0.004785408020019531, 0.004948512077331543, 0.004597983837127686, 0.00521830415725708, 0.004542143821716309, 0.004520256042480469, 0.004486720085144043, 0.004525504112243652, 0.004738175868988037, 0.00452185583114624, 0.004489088058471679, 0.004581503868103028, 0.004530144214630127, 0.0045073280334472655, 0.004101183891296387, 0.00439404821395874, 0.0045536961555480955, 0.00446284818649292, 0.004504223823547363, 0.0051357121467590335, 0.004549248218536377, 0.004509471893310547, 0.004611423969268799, 0.00454691219329834, 0.004497983932495117, 0.004495359897613525, 0.00445849609375, 0.0045015039443969725, 0.0045447998046875, 0.0045463042259216305, 0.004616159915924073, 0.00455238389968872, 0.004766016006469726, 0.004691840171813965, 0.00464089584350586, 0.004868095874786377, 0.004733248233795166, 0.00477132797241211, 0.004704288005828857, 0.004669600009918213, 0.004662784099578857, 0.004616703987121582, 0.004569439888000488, 0.004515456199645996, 0.004569119930267334, 0.0044399361610412596, 0.004385119915008545, 0.004423168182373047, 0.004491072177886963, 0.004635104179382324, 0.004593599796295166, 0.004546624183654785, 0.004562943935394287, 0.004572319984436035, 0.0045392317771911625, 0.004506815910339355, 0.004516672134399414, 0.004448256015777588, 0.00444159984588623, 0.004481823921203614, 0.004476416110992432, 0.004534560203552246, 0.004581503868103028, 0.0045444159507751465, 0.004726528167724609, 0.004789535999298096, 0.00480083179473877, 0.004624896049499512, 0.004585536003112793, 0.004673535823822021, 0.005051519870758057, 0.004483871936798096, 0.0045446081161499025, 0.005179423809051514, 0.005082911968231201, 0.005519552230834961, 0.004585472106933594, 0.004296224117279052, 0.004521503925323487, 0.00453331184387207, 0.004801663875579834, 0.00448963212966919, 0.004541888236999512, 0.004716608047485351, 0.004721216201782227, 0.004702527999877929, 0.004560863971710205, 0.004573279857635498, 0.004668896198272705, 0.004834047794342041, 0.004742847919464111, 0.0046694397926330565, 0.004687776088714599, 0.004712096214294434, 0.004661695957183838, 0.004597472190856934, 0.0046926078796386715, 0.004662015914916992, 0.004653984069824218, 0.004605440139770508, 0.004628191947937012, 0.004688992023468018, 0.004623648166656494, 0.004508063793182373, 0.0044423041343688964, 0.004497056007385254, 0.004640384197235108, 0.0046310720443725585, 0.004519936084747315, 0.004460544109344483, 0.004464352130889892, 0.004486527919769287, 0.004602431774139404, 0.004567679882049561, 0.004556672096252441, 0.004462431907653808, 0.004448256015777588, 0.004546559810638427, 0.0045281281471252445, 0.0045526399612426756, 0.004603231906890869, 0.00454150390625, 0.0046538558006286625, 0.004602655887603759, 0.004609344005584717, 0.004581503868103028, 0.004588191986083985, 0.004543647766113281, 0.0046560959815979, 0.00459276819229126, 0.004387584209442139, 0.004507967948913574, 0.004366015911102295, 0.004339488029479981, 0.004421440124511719, 0.004487872123718262, 0.004515007972717285, 0.004824704170227051, 0.004463103771209717, 0.004768447875976562, 0.004463071823120117, 0.004615200042724609, 0.004573344230651855, 0.00453715181350708, 0.0045424637794494625, 0.00451091194152832, 0.004430208206176758, 0.004386943817138672, 
0.004369791984558106, 0.004504511833190918, 0.004509696006774902, 0.004405248165130615, 0.004453887939453125, 0.004493824005126953, 0.004566559791564942, 0.004393439769744873, 0.004417151927947998, 0.004376383781433105, 0.004315711975097656, 0.0043823041915893556, 0.00443228816986084, 0.004708096027374268, 0.00450764799118042, 0.004501247882843017, 0.004594175815582275, 0.004538271903991699, 0.00459171199798584, 0.004483071804046631, 0.004511744022369385, 0.0059060797691345215, 0.004585824012756348, 0.004562943935394287, 0.004709824085235596, 0.004667871952056884, 0.004581632137298584, 0.004554431915283203, 0.00447708797454834, 0.004380671977996826, 0.0043110399246215824, 0.00435584020614624, 0.004437952041625976, 0.004440576076507568, 0.004533152103424073, 0.004449183940887451, 0.0044421119689941405, 0.004492767810821533, 0.00445414400100708, 0.004449056148529053, 0.00445849609375, 0.004472832202911377, 0.00471449613571167, 0.004623936176300049, 0.004508096218109131, 0.004874239921569825, 0.004599232196807862, 0.004513408184051514, 0.004505760192871093, 0.004490272045135498, 0.004459392070770263, 0.004426688194274902, 0.004386079788208008, 0.004423520088195801, 0.004565279960632324, 0.004154623985290528, 0.004534656047821045, 0.0044727039337158206, 0.004430016040802002, 0.004460864067077637, 0.004386975765228272, 0.004376416206359863, 0.004357312202453613, 0.004383135795593262, 0.00491542387008667, 0.004424032211303711, 0.004435711860656738, 0.004427872180938721, 0.004460544109344483, 0.004475200176239013, 0.004463935852050781, 0.004455840110778809, 0.004411839962005616, 0.004428607940673828, 0.004398975849151611, 0.004559872150421143, 0.004499584197998047, 0.0044655041694641115, 0.004466559886932373, 0.004501215934753418, 0.004428160190582275, 0.00439081621170044, 0.004429696083068847, 0.004363999843597412, 0.004401567935943603, 0.004425727844238282, 0.004372640132904053, 0.0044132800102233884, 0.004551680088043213, 0.004605023860931396, 0.004489215850830078, 0.004538432121276855, 0.004453695774078369, 0.004559743881225586, 0.004478623867034912, 0.004406879901885987, 0.004398752212524414, 0.004453184127807618, 0.004382016181945801, 0.004405888080596923, 0.0045220799446105955, 0.00447379207611084, 0.00451478385925293, 0.004431871891021728, 0.0043927359580993655, 0.004423903942108154, 0.004449535846710205, 0.004427775859832763, 0.004383711814880371, 0.0044584641456604, 0.004486976146697998, 0.004499008178710937, 0.004469151973724365, 0.0045080318450927736, 0.0044633917808532715, 0.004469535827636719, 0.004477119922637939, 0.004402239799499511, 0.004131296157836914, 0.0045378880500793455, 0.004499231815338135, 0.004514624118804931, 0.00455072021484375, 0.004539360046386718, 0.0044469118118286135, 0.004497951984405517, 0.004474527835845947, 0.004455584049224854, 0.0046212477684021, 0.0045281281471252445, 0.004413055896759033, 0.0044585919380187985, 0.004708288192749023, 0.004600192070007324, 0.004586495876312256, 0.005390624046325684, 0.00543174409866333, 0.006201920032501221, 0.006816671848297119, 0.005518112182617187, 0.005616767883300781, 0.005968575954437256, 0.00464467191696167, 0.004616576194763184, 0.004548128128051758, 0.004551424026489258, 0.00480617618560791, 0.0046278080940246585, 0.004512256145477295, 0.004560256004333496, 0.0045166401863098145, 0.004505695819854737, 0.0047283520698547365, 0.004651264190673828, 0.004560704231262207, 0.00448473596572876, 0.0044371519088745115, 0.004429471969604492, 0.004543839931488037, 0.004442912101745605, 0.004498784065246582, 
0.004503839969635009, 0.004468287944793701, 0.004473599910736084, 0.004394495964050293, 0.004382527828216552, 0.004462624073028564, 0.005135263919830322, 0.005473472118377685, 0.005179999828338623, 0.005389344215393066, 0.006076416015625, 0.0045404801368713376, 0.004506847858428955, 0.00451251220703125, 0.0044980478286743165, 0.004465216159820556, 0.004429535865783691, 0.0044486079216003414, 0.004556159973144531, 0.004491424083709717, 0.004149856090545655, 0.004429823875427246, 0.0044646081924438474, 0.0044661760330200195, 0.004503615856170654, 0.004517632007598877, 0.004489535808563232, 0.004446591854095459, 0.00441926383972168, 0.004401408195495606, 0.004406655788421631, 0.004416224002838135, 0.004462592124938965, 0.004570432186126709, 0.004581151962280273, 0.004506591796875, 0.004579135894775391, 0.004460671901702881, 0.004390912055969238, 0.004392159938812256, 0.004462624073028564, 0.004517856121063232, 0.004436895847320556, 0.004384352207183838, 0.004354335784912109, 0.0043573760986328125, 0.00439686393737793, 0.004533023834228516, 0.004455711841583252, 0.004415808200836181, 0.004454112052917481, 0.004414336204528808, 0.0044338879585266115, 0.004396416187286377, 0.004405695915222168, 0.004440159797668457, 0.004416800022125244, 0.004423520088195801, 0.004432032108306885, 0.00447756814956665, 0.004552896022796631, 0.004503488063812256, 0.004439807891845703, 0.004422175884246826, 0.0044562239646911625, 0.0044215679168701175, 0.004462656021118164, 0.0045015039443969725, 0.004493216037750244, 0.004523583889007568, 0.004561024188995361, 0.004528512001037597, 0.004679711818695068, 0.00452511978149414, 0.00454252815246582, 0.004641119956970215, 0.004514336109161377, 0.004517888069152832, 0.00451091194152832, 0.004567967891693115, 0.004429247856140137, 0.004418079853057861, 0.004441728115081787, 0.004120927810668945, 0.0044581441879272465, 0.004480800151824951, 0.004424255847930908, 0.004388512134552002, 0.00442406415939331, 0.0043985600471496585, 0.004319392204284668, 0.0043010878562927245, 0.004349088191986084, 0.004403520107269287, 0.0043975038528442385, 0.00437337589263916, 0.004338624000549317, 0.004370431900024414, 0.004431615829467774, 0.004448512077331543, 0.004440063953399658, 0.00445468807220459, 0.004474368095397949, 0.004440288066864014, 0.0045313601493835445, 0.004590432167053223, 0.004476928234100342, 0.004511744022369385, 0.004480000019073486, 0.004475903987884521, 0.004630527973175049, 0.004519455909729004, 0.0044282560348510745, 0.004374112129211426, 0.004444575786590576, 0.00445849609375, 0.004456672191619873, 0.004429344177246094, 0.004513440132141114, 0.0050878400802612305, 0.004511744022369385, 0.004546207904815674, 0.004872255802154541, 0.0045090880393981935, 0.0046674561500549315, 0.0045218877792358395, 0.004499392032623291, 0.0045550079345703124, 0.004496096134185791, 0.004451583862304687, 0.004425792217254638, 0.00441155195236206, 0.004471327781677246, 0.004390912055969238, 0.004360191822052002, 0.004373760223388672, 0.004436736106872559, 0.0044124479293823245, 0.004375167846679687, 0.00436572790145874, 0.00440553617477417, 0.004436927795410156, 0.004591296195983887, 0.00440227222442627, 0.004458975791931152, 0.00442416000366211, 0.00416153621673584, 0.00450764799118042, 0.0045558719635009765, 0.004522016048431396, 0.004509888172149658, 0.004555456161499024, 0.00449126386642456, 0.0044997758865356445, 0.00442742395401001, 0.004466720104217529, 0.004470367908477783, 0.004495776176452636, 0.0045056319236755375, 0.004525407791137696, 0.004491615772247315, 
0.004600031852722168, 0.004525631904602051, 0.004448575973510742, 0.004433407783508301, 0.004420063972473145, 0.004481376171112061, 0.004482944011688233, 0.00492742395401001, 0.004634655952453613, 0.00471452808380127, 0.004737023830413818, 0.005195775985717774, 0.004529600143432617, 0.004465343952178955, 0.004978559970855713, 0.004767744064331054, 0.004886271953582763, 0.004475327968597412, 0.0044746241569519046, 0.004499135971069336, 0.004421184062957764, 0.004444992065429688, 0.0045437440872192385, 0.004467455863952636, 0.004476160049438477, 0.004551040172576904, 0.004549248218536377, 0.004602975845336914, 0.004647583961486817, 0.004609888076782227, 0.004575391769409179, 0.0045829439163208, 0.004649439811706543, 0.004587808132171631, 0.004519552230834961, 0.0045322561264038086, 0.004531680107116699, 0.004466784000396728, 0.004440351963043213, 0.0044577598571777345, 0.004530399799346924, 0.004473887920379639, 0.004531904220581055, 0.004394368171691894, 0.004388927936553955, 0.004416063785552978, 0.0044421119689941405, 0.004444223880767822, 0.004308127880096436, 0.004346720218658447, 0.004351967811584472, 0.004318304061889648, 0.004369344234466553, 0.004353439807891846, 0.004530784130096436, 0.004388288021087646, 0.004343616008758545, 0.004399968147277832, 0.004480607986450195, 0.004495232105255127, 0.004428383827209473, 0.004470623970031738, 0.004434080123901367, 0.004470623970031738, 0.004517632007598877, 0.004436223983764648, 0.005900288105010986, 0.004563039779663086, 0.004498559951782227, 0.0045801281929016115, 0.004665567874908448, 0.004533279895782471, 0.0045186557769775395, 0.0045008001327514644, 0.004518303871154785, 0.004456736087799072, 0.004652063846588134, 0.004529119968414307, 0.004437664031982422, 0.004413248062133789, 0.004387360095977783, 0.004577023983001709, 0.004448512077331543, 0.004470784187316895, 0.004468736171722412, 0.00452019214630127, 0.004484864234924316, 0.00456928014755249, 0.004519872188568115, 0.004473983764648437, 0.0045001602172851565, 0.004419328212738037, 0.004553023815155029, 0.004521120071411133, 0.004416351795196533, 0.004388415813446045, 0.0043831682205200196, 0.00439024019241333, 0.00437663984298706, 0.004602496147155762, 0.004381728172302246, 0.004350624084472656, 0.004530464172363281, 0.004341248035430908, 0.004358463764190674, 0.004479328155517578, 0.004417600154876709, 0.00441926383972168, 0.004425951957702637, 0.004459712028503418, 0.004434239864349365, 0.00416761589050293, 0.004488800048828125, 0.004590047836303711, 0.004591328144073486, 0.004380640029907227, 0.0044362878799438475, 0.0043151359558105465, 0.004330848217010498, 0.004312928199768066, 0.004318016052246094, 0.004380671977996826, 0.0044011521339416505, 0.004380832195281983, 0.004329376220703125, 0.004294623851776123, 0.004321280002593994, 0.00425980806350708, 0.004272160053253174, 0.004285823822021484, 0.0042891201972961425, 0.004335616111755371, 0.004360032081604004, 0.00437443208694458, 0.0045409278869628904, 0.004335360050201416, 0.0044728641510009765, 0.004560863971710205, 0.004390912055969238, 0.004360191822052002, 0.004403200149536133, 0.004386816024780273, 0.004415296077728272, 0.004426976203918457, 0.004371327877044678, 0.004355616092681884, 0.004698112010955811, 0.004370719909667969, 0.00437007999420166, 0.0043910079002380375, 0.004432415962219239, 0.004434175968170166, 0.004428991794586182, 0.004446784019470215, 0.004460544109344483, 0.0047288317680358885, 0.004689919948577881, 0.0045725440979003905, 0.004529056072235108, 0.004806111812591553, 0.0045749440193176266, 
0.004671648025512696, 0.0044787201881408695, 0.004506048202514649, 0.004501376152038574, 0.00446665620803833, 0.004411871910095215, 0.004384640216827392, 0.004376863956451416, 0.004349664211273193, 0.004376575946807861, 0.004372704029083252, 0.0045606718063354494, 0.004423679828643799, 0.0041262397766113285, 0.004432352066040039, 0.004972064018249512, 0.004501984119415283, 0.00445468807220459, 0.004419295787811279, 0.0045015039443969725, 0.00440934419631958, 0.00446668815612793, 0.004463615894317627, 0.004430111885070801, 0.00442851209640503, 0.004412735939025879, 0.004377280235290527, 0.004368639945983886, 0.00435584020614624, 0.004425727844238282, 0.004538368225097656, 0.004405248165130615, 0.004329472064971924, 0.004331520080566406, 0.0043151359558105465, 0.004321599960327148, 0.004311935901641846, 0.004324031829833984, 0.00434611177444458, 0.004306367874145508, 0.00430847978591919, 0.004301983833312989, 0.004435872077941895, 0.004261760234832763, 0.004257791996002197, 0.004275712013244629, 0.004379136085510254, 0.004281919956207275, 0.004256192207336426, 0.004257791996002197, 0.00425164794921875, 0.004275296211242676, 0.004239903926849365, 0.004280863761901855, 0.004341919898986816, 0.004544191837310791, 0.004320767879486084, 0.00435814380645752, 0.004358367919921875, 0.004327424049377441, 0.004380703926086426, 0.004297311782836914, 0.004255392074584961, 0.004251232147216797, 0.004274816036224365, 0.004640351772308349, 0.004318719863891602, 0.00435475206375122, 0.004311391830444336, 0.004291935920715332, 0.0042707200050354005, 0.004282048225402832, 0.004354047775268555, 0.0043745279312133786, 0.0043534722328186035, 0.004354047775268555, 0.004294655799865723, 0.004419583797454834, 0.004420928001403809, 0.004334271907806397, 0.004347424030303955, 0.004313024044036865, 0.004315680027008057, 0.0043374719619750975, 0.004339424133300782, 0.00438918399810791, 0.004466047763824463, 0.00454531192779541, 0.004509696006774902, 0.004413440227508545, 0.004376575946807861, 0.004354335784912109, 0.004344736099243164, 0.00430182409286499, 0.0043006081581115725, 0.004351808071136475, 0.004260032176971435, 0.004272128105163574, 0.004247231960296631, 0.00426796817779541, 0.004305280208587646, 0.004279359817504883, 0.004258336067199707, 0.00424399995803833, 0.004294528007507324, 0.004261248111724854, 0.004506239891052246, 0.0044624958038330075, 0.0043910079002380375, 0.004353504180908203, 0.0044856958389282225, 0.004394976139068604, 0.004392191886901855, 0.004360064029693604, 0.004400000095367431, 0.004353631973266601, 0.0043413758277893065, 0.0043364157676696775, 0.004345856189727783, 0.004354047775268555, 0.004452479839324951, 0.0045138239860534664, 0.0045979199409484865, 0.0046169919967651366, 0.004540512084960938, 0.0045493760108947755, 0.00460364818572998, 0.004547103881835938, 0.0045487041473388674, 0.004461696147918701, 0.0044345917701721195, 0.0044637761116027835, 0.004415679931640625, 0.00453878402709961, 0.0045569281578063964, 0.004576288223266602, 0.004498432159423828, 0.004517888069152832, 0.004386720180511475, 0.004126048088073731, 0.00443884801864624, 0.004422815799713134, 0.0043563518524169925, 0.00446233606338501, 0.004380959987640381, 0.004372767925262451, 0.004377183914184571, 0.004453375816345215, 0.0044856958389282225, 0.004519936084747315, 0.004466335773468017, 0.004428383827209473, 0.004417376041412353, 0.004381951808929444, 0.004407423973083496, 0.004440383911132812, 0.004438720226287842, 0.0044089918136596675, 0.004410624027252197, 0.004612576007843017, 0.0043686718940734864, 
0.0044399361610412596, 0.004348031997680664, 0.004535647869110107, 0.004416160106658936, 0.004360191822052002, 0.004360191822052002, 0.004350175857543945, 0.004436831951141357, 0.0044100480079650875, 0.004394944190979004, 0.0044254717826843265, 0.004348480224609375, 0.004360191822052002, 0.004415487766265869, 0.0043992319107055665, 0.004454368114471435, 0.004511583805084229, 0.004431072235107422, 0.004352000236511231, 0.004483551979064942, 0.004357503890991211, 0.004309152126312256, 0.004305759906768799, 0.00436633586883545, 0.004409696102142334, 0.004365983963012695, 0.004334688186645508, 0.004315423965454102, 0.004338304042816162, 0.004468095779418945, 0.004411839962005616, 0.004403391838073731, 0.004365344047546387, 0.004520448207855224, 0.004719423770904541, 0.004492959976196289, 0.004417535781860352, 0.0043311681747436525, 0.0047207679748535155, 0.004948512077331543, 0.0044184961318969726, 0.003973504066467285, 0.004276480197906494, 0.004251391887664795, 0.004245503902435303, 0.0042761597633361815, 0.0043086719512939456, 0.004305280208587646, 0.004292384147644043, 0.004285759925842285, 0.004333824157714844, 0.004325984001159668, 0.00434115219116211, 0.004291200160980225, 0.00429468822479248, 0.004403135776519775, 0.004327104091644287, 0.004376959800720215, 0.004377823829650879, 0.004445055961608887, 0.004456511974334717, 0.0043456959724426265, 0.0043496317863464355, 0.004321343898773193, 0.004348159790039063, 0.0043431038856506345, 0.004546527862548828, 0.00440342378616333, 0.004366847991943359, 0.004417280197143555, 0.004375135898590088, 0.004345632076263427, 0.004341472148895264, 0.004421792030334473, 0.0043534722328186035, 0.004395008087158203, 0.004467264175415039, 0.004607840061187744, 0.0045218877792358395, 0.004887135982513427, 0.004588479995727539, 0.00448095989227295, 0.004662047863006591, 0.0044646401405334475, 0.004472832202911377, 0.004402719974517823, 0.004414271831512451, 0.004376128196716309, 0.0043541440963745116, 0.004389120101928711, 0.004345759868621826, 0.004318624019622803, 0.004391359806060791, 0.0043905282020568846, 0.004389247894287109, 0.004728576183319092, 0.00447705602645874, 0.004457824230194092, 0.004451456069946289, 0.004409088134765625, 0.005023647785186767, 0.004493663787841797, 0.004466591835021972, 0.004427519798278809, 0.004134912014007569, 0.004419167995452881, 0.004440480232238769, 0.004464735984802246, 0.004535583972930908, 0.0045359997749328616, 0.004489215850830078, 0.004513760089874267, 0.004469727993011475, 0.004462528228759766, 0.004437695980072021, 0.004419968128204345, 0.004472991943359375, 0.004526048183441162, 0.004676576137542725, 0.0045327038764953614, 0.004466784000396728, 0.004389215946197509, 0.004339168071746826, 0.0043545918464660644, 0.004388160228729248, 0.004444640159606933, 0.00461027193069458, 0.0044208641052246095, 0.004432640075683594, 0.004375807762145996, 0.004387519836425781, 0.004532288074493409, 0.004352000236511231, 0.004313087940216064, 0.004589920043945313, 0.004318528175354004, 0.004435391902923584, 0.004340640068054199, 0.0043439679145812985, 0.004281824111938477, 0.004252128124237061, 0.004294496059417724, 0.004287839889526367, 0.004285151958465576, 0.0043069438934326175, 0.004296256065368652, 0.004368832111358642, 0.004376607894897461, 0.0044596481323242185, 0.004416351795196533, 0.004388671875, 0.004319424152374267, 0.004353119850158692, 0.004379295825958252, 0.004402847766876221, 0.0043894720077514646, 0.004396063804626465, 0.0043632321357727055, 0.004311391830444336, 0.004273824214935303, 
0.0042490558624267575, 0.00441155195236206, 0.004544447898864746, 0.004375072002410889, 0.004376480102539063, 0.004340767860412598, 0.004421984195709229, 0.004124671936035156, 0.004452223777770996, 0.004475232124328613, 0.004521984100341797, 0.004431647777557373, 0.004415487766265869, 0.004402527809143067, 0.0044059200286865234, 0.004353087902069092, 0.004352640151977539, 0.004356704235076904, 0.004398816108703614, 0.0043823041915893556, 0.004342207908630371, 0.004456416130065918, 0.00440934419631958, 0.004345856189727783, 0.0043069438934326175, 0.0043574080467224125, 0.004335807800292969, 0.004345632076263427, 0.0044122557640075685, 0.004335519790649414, 0.004337376117706299, 0.004495264053344727, 0.004413248062133789, 0.004409920215606689, 0.004359807968139649, 0.004438399791717529, 0.004433568000793457, 0.0044488320350646975, 0.004390719890594482, 0.004356160163879395, 0.004343167781829834, 0.004397632122039795, 0.004454368114471435, 0.004397056102752686, 0.004393184185028076, 0.00445417594909668, 0.0043089919090271, 0.004339712142944336, 0.0043719677925109866, 0.0044365119934082035, 0.00441542387008667, 0.004427296161651611, 0.004411712169647217, 0.004335872173309326, 0.004480415821075439, 0.004388383865356445, 0.004367680072784424, 0.004385503768920898, 0.004428800106048584, 0.0044436798095703126, 0.004426047801971435, 0.004573247909545899, 0.004360127925872803, 0.004361408233642578, 0.004295839786529541, 0.004345632076263427, 0.004302688121795654, 0.0042804799079895015, 0.004273952007293701, 0.00426361608505249, 0.0040219521522521975, 0.004307263851165771, 0.004319168090820312, 0.0043438720703125, 0.004335040092468261, 0.004338240146636963, 0.004320767879486084, 0.004393472194671631, 0.004427775859832763, 0.004411392211914063, 0.004374623775482178, 0.004404255867004395, 0.0043854079246521, 0.004328735828399658, 0.004325632095336914, 0.004324063777923584, 0.0043019518852233886, 0.004275072097778321, 0.004286464214324951, 0.004460544109344483, 0.00436633586883545, 0.004308703899383545, 0.004294400215148926, 0.004278816223144531, 0.004329472064971924, 0.004272128105163574, 0.004340991973876953, 0.004315487861633301, 0.004307360172271729, 0.004319456100463867, 0.004532000064849853, 0.004380512237548828, 0.0042722878456115726, 0.00425600004196167, 0.004245088100433349, 0.004283999919891358, 0.0043235840797424315, 0.004528160095214844, 0.004394720077514649, 0.004390560150146485, 0.004376832008361816, 0.004427680015563965, 0.0044059200286865234, 0.004352096080780029, 0.004345376014709473, 0.0043504319190979, 0.00436787223815918, 0.004368896007537842, 0.004481023788452149, 0.004428832054138183, 0.004400191783905029, 0.004401055812835694, 0.004376128196716309, 0.004475327968597412, 0.004340896129608154, 0.004385824203491211, 0.004466495990753174, 0.004345632076263427, 0.004386623859405517, 0.004404672145843506, 0.004407936096191406, 0.0043887038230896, 0.00437334394454956, 0.003998016119003296, 0.004245408058166504, 0.004260511875152588, 0.004259840011596679, 0.0042880640029907225, 0.00428707218170166, 0.004328671932220459, 0.006004799842834472, 0.004376992225646973, 0.004318719863891602, 0.004287168025970459, 0.004298719882965088, 0.004285696029663086, 0.0042954239845275875, 0.004306816101074219, 0.004253824234008789, 0.004257791996002197, 0.004253344058990478, 0.004346399784088135, 0.004287327766418457, 0.004305888175964356, 0.004327424049377441, 0.00429260778427124, 0.0042724800109863285, 0.004325024127960205, 0.004305088043212891, 0.004308544158935547, 0.004319647789001465, 
0.004263775825500488, 0.0044926080703735355, 0.004638495922088623, 0.004323775768280029, 0.004544320106506347, 0.004379295825958252, 0.004322336196899414, 0.004355040073394776, 0.004318848133087159, 0.004301184177398681, 0.0042475519180297855, 0.0042824001312255855, 0.004299903869628906, 0.00432419204711914, 0.004278016090393066, 0.004286848068237305, 0.004269951820373535, 0.004282368183135986, 0.004356095790863037, 0.004349952220916748, 0.004345439910888672, 0.0043435201644897465, 0.004357920169830322, 0.0045203838348388675, 0.0043639039993286135, 0.00441155195236206, 0.004498112201690674, 0.0044380159378051755, 0.0044011521339416505, 0.004552192211151123, 0.0043854079246521, 0.004372352123260498, 0.004347904205322266, 0.004343743801116943, 0.0045998401641845705, 0.004128704071044922, 0.004483359813690186, 0.004324160099029541, 0.004409984111785888, 0.004393119812011719, 0.0043545918464660644, 0.004343455791473389, 0.004357151985168457, 0.004314080238342285, 0.004278592109680176, 0.004251327991485596, 0.0043143038749694825, 0.004262720108032227, 0.004284063816070557, 0.004354207992553711, 0.004336991786956787, 0.004280608177185058, 0.004317696094512939, 0.004345632076263427, 0.0043318080902099605, 0.004298399925231933, 0.0043134398460388185, 0.004280416011810303, 0.0043376960754394536, 0.004360064029693604, 0.004285952091217041, 0.004491775989532471, 0.004419583797454834, 0.004367904186248779, 0.0043647680282592775, 0.004372543811798095, 0.004384704113006592, 0.004490560054779053, 0.004367040157318115, 0.004429408073425293, 0.004386528015136719, 0.0043929281234741215, 0.004442848205566406, 0.004520031929016113, 0.004450208187103272, 0.004497407913208008, 0.004478975772857666, 0.004482944011688233, 0.004449600219726562, 0.004416319847106933, 0.004370304107666015, 0.004407423973083496, 0.0043721280097961425, 0.004374368190765381, 0.00437286376953125, 0.004372928142547607, 0.0045994877815246586, 0.004503551959991455, 0.004472832202911377, 0.0044646401405334475, 0.00449945592880249, 0.004546720027923584, 0.0044538240432739256, 0.004442527770996094, 0.0044577598571777345, 0.00446947193145752, 0.004462592124938965, 0.004411680221557617, 0.004089344024658203, 0.004399456024169922, 0.004405471801757812, 0.004417471885681153, 0.004426015853881836, 0.004323040008544922, 0.004292704105377197, 0.004304831981658936, 0.004292736053466797, 0.004288352012634277, 0.00429257583618164, 0.004304959774017334, 0.004337440013885498, 0.004335807800292969, 0.004315008163452148, 0.0043236160278320316, 0.004274015903472901, 0.004324960231781006, 0.0043851838111877445, 0.004400415897369385, 0.004389823913574219, 0.004634560108184815, 0.004513631820678711, 0.004487167835235595, 0.004497056007385254, 0.004665696144104004, 0.004435935974121094, 0.004460576057434082, 0.0044421119689941405, 0.0044239358901977536, 0.004427040100097656, 0.00435862398147583, 0.004345119953155518, 0.004344672203063965, 0.0043764481544494626, 0.004372479915618896, 0.004324639797210693, 0.0043731842041015625, 0.004323679924011231, 0.0043322877883911134, 0.0044860801696777345, 0.004383776187896728, 0.004395999908447266, 0.004333407878875733, 0.004344031810760498, 0.004298848152160644, 0.004385983943939209, 0.004391136169433593, 0.004375167846679687, 0.004403007984161377, 0.004419583797454834, 0.004368383884429932, 0.0043721599578857425, 0.004368480205535889, 0.004303071975708008, 0.004331615924835205, 0.004318655967712403, 0.004461023807525635, 0.004362239837646485, 0.00435814380645752, 0.004390912055969238, 0.004339327812194824, 
0.004333951950073242, 0.004534880161285401, 0.004411424160003662, 0.00448633623123169, 0.004410143852233887, 0.00446668815612793, 0.004441887855529785, 0.004469056129455567, 0.004595615863800049, 0.0045651521682739254, 0.00448473596572876, 0.004448480129241943, 0.004407296180725098, 0.00441161584854126, 0.004478400230407715, 0.00442303991317749, 0.004465760231018067, 0.004467807769775391, 0.004463520050048828, 0.0044819841384887696, 0.004346816062927246, 0.004323328018188476, 0.004290559768676758, 0.0043376641273498535, 0.004357888221740723, 0.004331456184387207, 0.004436160087585449, 0.004302976131439209, 0.0042782721519470214, 0.00427785587310791, 0.004302976131439209, 0.0043582720756530765, 0.004315584182739258, 0.004281824111938477, 0.004295263767242432, 0.004484767913818359, 0.004331840038299561, 0.004263008117675781, 0.004257952213287354, 0.004261695861816407, 0.004289152145385743, 0.004255072116851806, 0.004285088062286377, 0.004249599933624268, 0.004614143848419189, 0.004317311763763427, 0.0043272957801818845, 0.004300032138824463, 0.00434003210067749, 0.004360896110534668, 0.004343552112579346, 0.004397280216217041, 0.004331295967102051, 0.004298751831054688, 0.004280064105987549, 0.0042741761207580565, 0.004282112121582031, 0.004305408000946045, 0.00428217601776123, 0.004265503883361816, 0.004295328140258789, 0.004288383960723877, 0.004254079818725586, 0.004241151809692383, 0.004064799785614014, 0.0043238081932067875, 0.0043110399246215824, 0.004415487766265869, 0.0043067197799682615, 0.004344064235687256, 0.004288608074188232, 0.004385824203491211, 0.004391776084899902, 0.004446239948272705, 0.004445343971252441, 0.004416319847106933, 0.00445849609375, 0.0045136637687683105, 0.004549920082092285, 0.004459360122680664, 0.004474112033843994, 0.0044633598327636715, 0.004459936141967774, 0.004421919822692871, 0.0043928961753845214, 0.004386847972869873, 0.004335391998291016, 0.0043937602043151855, 0.004352128028869629, 0.004347551822662354, 0.0043393921852111815, 0.004396416187286377, 0.004598527908325195, 0.004436160087585449, 0.00440934419631958, 0.004410592079162598, 0.004409247875213623, 0.004362304210662842, 0.004426559925079346, 0.004362080097198486, 0.004376736164093018, 0.004374623775482178, 0.004363840103149414, 0.004356448173522949, 0.004384064197540284, 0.0043834238052368165, 0.004607999801635742, 0.0046059517860412595, 0.004538623809814453, 0.004564640045166016, 0.004517600059509277, 0.004505055904388428, 0.004486048221588134, 0.004441792011260986, 0.0043768959045410154, 0.004337920188903809, 0.00437334394454956, 0.004340640068054199, 0.004513792037963867, 0.004330848217010498, 0.004391392230987549, 0.004380864143371582, 0.00430617618560791, 0.004329567909240723, 0.004292543888092041, 0.004278048038482666, 0.004262656211853027, 0.004085824012756347, 0.0043745279312133786, 0.0043638720512390134, 0.004377183914184571, 0.004429632186889648, 0.00430291223526001, 0.004315072059631347, 0.004325376033782959, 0.004515071868896484, 0.004347936153411865, 0.004354335784912109, 0.004424448013305664, 0.004375616073608399, 0.004347519874572754, 0.004375520229339599, 0.004376607894897461, 0.0043376641273498535, 0.004347104072570801, 0.00435484790802002, 0.004321248054504395, 0.004331552028656006, 0.0043292479515075685, 0.004497632026672364, 0.00442080020904541, 0.004430208206176758, 0.0044572482109069824, 0.004435103893280029, 0.004437600135803223, 0.004438240051269532, 0.0044776320457458494, 0.004429823875427246, 0.004406271934509277, 0.004512928009033203, 0.0043887038230896, 
0.004418975830078125, 0.0043628478050231935, 0.004364287853240967, 0.004319231986999511, 0.0043069438934326175, 0.004288000106811523, 0.004303520202636719, 0.004265823841094971, 0.004276224136352539, 0.004431392192840576, 0.004368576049804687, 0.004323935985565185, 0.004304575920104981, 0.004313087940216064, 0.004255743980407714, 0.004375807762145996, 0.00438483190536499, 0.004511551856994629, 0.0043281598091125485, 0.004313248157501221, 0.0042782721519470214, 0.004302847862243653, 0.004335231781005859, 0.004325759887695312, 0.004270080089569092, 0.004301152229309082, 0.004904607772827149, 0.004621920108795166, 0.004469151973724365, 0.004040703773498535, 0.004319231986999511, 0.004349952220916748, 0.004350111961364746, 0.004372416019439697, 0.004364255905151367, 0.004364223957061768, 0.004462399959564209, 0.004587168216705322, 0.004499616146087647, 0.004460927963256836, 0.00441974401473999, 0.004488768100738526, 0.004435679912567139, 0.004432447910308838, 0.004347904205322266, 0.004376863956451416, 0.00460975980758667, 0.004478975772857666, 0.004427775859832763, 0.004384768009185791, 0.004370463848114014, 0.004400896072387695, 0.00447273588180542, 0.004453951835632324, 0.004457215785980225, 0.004500991821289062, 0.0044364800453186035, 0.004511744022369385, 0.004481023788452149, 0.004491456031799316, 0.0044234881401062015, 0.004529407978057861, 0.00440396785736084, 0.004474368095397949, 0.004435679912567139, 0.004440288066864014, 0.0044263038635253904, 0.004417535781860352, 0.004509696006774902, 0.004560895919799805, 0.004603519916534424, 0.004550943851470948, 0.004581183910369873, 0.004552127838134766, 0.004536799907684326, 0.004618624210357666, 0.004601856231689453, 0.004558144092559814, 0.004522784233093262, 0.004534175872802734, 0.004482240200042725, 0.004456672191619873, 0.00443891191482544, 0.004369664192199707, 0.004364992141723633, 0.004466464042663574, 0.004540736198425293, 0.004437312126159668, 0.004678304195404053, 0.004505023956298828, 0.004372767925262451, 0.004353536128997803, 0.004076640129089356, 0.004381599903106689, 0.004388288021087646, 0.004456639766693115, 0.004678016185760498, 0.0045015039443969725, 0.004448256015777588, 0.004413216114044189, 0.00441161584854126, 0.004458655834197998, 0.004506688117980957, 0.0045650238990783696, 0.0045903358459472654, 0.004597568035125733, 0.004569024085998535, 0.004525856018066406, 0.004505951881408691, 0.004517759799957275, 0.004447648048400879, 0.0044284482002258305, 0.004380191802978516, 0.004391136169433593, 0.004421279907226562, 0.004371327877044678, 0.004351903915405273, 0.004380127906799316, 0.004349855899810791, 0.0043854079246521, 0.004446368217468261, 0.004471648216247559, 0.004754432201385498, 0.004446208000183105, 0.00445033597946167, 0.004488736152648926, 0.004397632122039795, 0.004358016014099121, 0.004392960071563721, 0.0043686718940734864, 0.004398047924041748, 0.004334368228912353, 0.004362207889556885, 0.004326720237731934, 0.004343808174133301, 0.004364992141723633, 0.00436633586883545, 0.0043151359558105465, 0.004281919956207275, 0.004282688140869141, 0.004274240016937256, 0.004290624141693115, 0.004257791996002197, 0.004245503902435303, 0.004231167793273926, 0.004231167793273926, 0.004233344078063965, 0.004255807876586914, 0.004278016090393066, 0.004411456108093262, 0.004271615982055664, 0.004303359985351562, 0.0042865281105041505, 0.004335552215576172, 0.004263936042785644, 0.003969183921813965, 0.004282783985137939, 0.004296703815460205, 0.004323328018188476, 0.004284416198730469, 0.004255743980407714, 
0.004245440006256104, 0.004233280181884766, 0.0042863678932189946, 0.004300896167755127, 0.004302495956420898, 0.004333312034606933, 0.0042706880569458, 0.00425164794921875, 0.004276127815246582, 0.004337440013885498, 0.004274015903472901, 0.00426470422744751, 0.00431279993057251, 0.00434339189529419, 0.004321792125701904, 0.004327328205108643, 0.0043274879455566405, 0.004356031894683838, 0.004489535808563232, 0.0043842878341674805, 0.004372255802154541, 0.004414080142974853, 0.004318975925445557, 0.0043088321685791015, 0.004331679821014405, 0.00433516788482666, 0.004305344104766845, 0.004276415824890137, 0.004362400054931641, 0.0044148478507995605, 0.004380959987640381, 0.0043640961647033695, 0.004477119922637939, 0.00429366397857666, 0.00433190393447876, 0.004428383827209473, 0.004427519798278809, 0.004326911926269531, 0.00431609582901001, 0.004304895877838135, 0.004646719932556153, 0.004370431900024414, 0.004462592124938965, 0.004445536136627197, 0.004393439769744873, 0.004339903831481934, 0.004352000236511231, 0.004329472064971924, 0.004290143966674805, 0.004310848236083985, 0.0042648959159851075, 0.004285280227661133, 0.004277088165283203, 0.00434607982635498, 0.004429567813873291, 0.004405151844024658, 0.004358016014099121, 0.004122528076171875, 0.004411007881164551, 0.004360671997070313, 0.0043266239166259764, 0.004291679859161377, 0.004269760131835937, 0.00428656005859375, 0.004287903785705567, 0.004280128002166748, 0.004281023979187012, 0.004337535858154297, 0.004312895774841309, 0.004288095951080323, 0.004276639938354492, 0.004285888195037842, 0.004258111953735351, 0.004245855808258057, 0.004239583969116211, 0.004417535781860352, 0.004260928153991699, 0.004340352058410645, 0.004260159969329834, 0.004297056198120117, 0.004447455883026123, 0.004431327819824219, 0.004438047885894776, 0.004424736022949218, 0.004352992057800293, 0.004395264148712158, 0.0044325761795043946, 0.004417695999145508, 0.004589407920837402, 0.004399104118347168, 0.00436633586883545, 0.004348063945770263, 0.0044783358573913575, 0.004334047794342041, 0.004317503929138183, 0.004349952220916748, 0.00435916805267334, 0.004308735847473145, 0.004289567947387695, 0.004275199890136719, 0.004307456016540527, 0.004323679924011231, 0.004371583938598633, 0.004361408233642578, 0.004508831977844238, 0.004424287796020508, 0.004390912055969238, 0.004354047775268555, 0.004583199977874756, 0.004407519817352295, 0.004429823875427246, 0.004490335941314697, 0.004482175827026367, 0.0044495038986206055, 0.0044263038635253904, 0.0044124479293823245, 0.004385024070739746, 0.004322015762329102, 0.004291808128356934, 0.004278175830841065, 0.004043136119842529, 0.004398655891418457, 0.004342207908630371, 0.00429856014251709, 0.00428985595703125, 0.0044102401733398435, 0.0043086400032043455, 0.004338016033172607, 0.004251455783843994, 0.004319424152374267, 0.0043122239112854, 0.004305759906768799, 0.004325056076049804, 0.004329792022705078, 0.004328896045684814, 0.004339615821838379, 0.004330143928527832, 0.00429260778427124, 0.004290559768676758, 0.0042837438583374025, 0.004300960063934326, 0.0042988801002502445, 0.00429036808013916, 0.004283040046691894, 0.0044069762229919434, 0.0043153600692749025, 0.004310848236083985, 0.00431328010559082, 0.004287744045257568, 0.00426470422744751, 0.004270175933837891, 0.004283679962158203, 0.004547232151031494, 0.00435814380645752, 0.004479040145874023, 0.004323232173919678, 0.004316864013671875, 0.0043498239517211915, 0.004345632076263427, 0.0043463678359985354, 0.00432089614868164, 
0.004333856105804443, 0.004318655967712403, 0.004290527820587158, 0.004272992134094238, 0.004284224033355713, 0.004241600036621094, 0.004321280002593994, 0.004517888069152832, 0.0042568321228027345, 0.0042362241744995114, 0.004231167793273926, 0.004249728202819824, 0.004269951820373535, 0.004226624011993408, 0.004222591876983642, 0.004340672016143799, 0.004287424087524414, 0.004273087978363037, 0.0042486081123352054, 0.004249631881713867, 0.004453472137451172, 0.004304768085479737, 0.004069151878356933, 0.004292672157287598, 0.004337823867797851, 0.004317183971405029, 0.004296895980834961, 0.004299808025360108, 0.004366623878479004, 0.0042984957695007325, 0.004276991844177246, 0.004270080089569092, 0.004276224136352539, 0.004306399822235107, 0.004334112167358398, 0.004367775917053222, 0.004328192234039307, 0.004328896045684814, 0.004288415908813476, 0.0043097281455993655, 0.004312863826751709, 0.004327199935913086, 0.00430134391784668, 0.004310719966888428, 0.004286208152770996, 0.0042592639923095706, 0.004253695964813233, 0.004260640144348144, 0.004468768119812012, 0.004313087940216064, 0.004405280113220215, 0.004384448051452636, 0.0043136320114135745, 0.004286208152770996, 0.004280320167541504, 0.0043376641273498535, 0.004331679821014405, 0.004312640190124512, 0.004309343814849854, 0.004333280086517334, 0.004360415935516357, 0.004351647853851318, 0.004389215946197509, 0.004276063919067383, 0.004421408176422119, 0.004328991889953613, 0.004340576171875, 0.004296703815460205, 0.0043089919090271, 0.004271999835968017, 0.004306816101074219, 0.0044089918136596675, 0.0046369280815124515, 0.004473472118377685, 0.004496287822723388, 0.004391744136810303, 0.004396383762359619, 0.004370975971221924, 0.004317311763763427, 0.0042863359451293945, 0.0042947521209716795, 0.0042557759284973145, 0.004325376033782959, 0.004305215835571289, 0.004333248138427734, 0.004100736141204834, 0.004267231941223144, 0.004261568069458008, 0.0042668161392211915, 0.0043019518852233886, 0.004467584133148193, 0.004257728099822998, 0.0042650880813598635, 0.004252448081970215, 0.004280320167541504, 0.004286943912506103, 0.0043290238380432125, 0.0042800002098083495, 0.004322815895080567, 0.00435430383682251, 0.004321728229522705, 0.004299007892608643, 0.004313055992126465, 0.004298783779144287, 0.0043110399246215824, 0.004484479904174805, 0.0043342080116271975, 0.004335616111755371, 0.004300032138824463, 0.0042969598770141604, 0.004303199768066406, 0.004275775909423828, 0.004274400234222412, 0.004254079818725586, 0.0042475519180297855, 0.004269120216369629, 0.004277184009552002, 0.00433187198638916, 0.0043784317970275875, 0.004413216114044189, 0.004346303939819336, 0.004292223930358887, 0.0043089919090271, 0.004283616065979004, 0.004478975772857666, 0.004295743942260742, 0.0042960958480834964, 0.004288832187652588, 0.004362559795379638, 0.004288191795349121, 0.004282368183135986, 0.004285888195037842, 0.0042848000526428225, 0.004290272235870361, 0.004331488132476807, 0.00429318380355835, 0.004321375846862793, 0.004300000190734864, 0.004274816036224365, 0.004261087894439697, 0.0042767682075500486, 0.0044355840682983394, 0.004430431842803955, 0.00535097599029541, 0.004383488178253174, 0.004312416076660156, 0.0043005762100219724, 0.0042932162284851074, 0.004024864196777344, 0.004295008182525635, 0.004285600185394287, 0.0043096961975097655, 0.004485311985015869, 0.004296576023101807, 0.004325471878051758, 0.004298655986785888, 0.004302527904510498, 0.004339871883392334, 0.004364448070526123, 0.00439737606048584, 
0.004664063930511475, 0.004461503982543945, 0.004362239837646485, 0.004339712142944336, 0.004330495834350586, 0.00431001615524292, 0.004277503967285156, 0.004305632114410401, 0.004386528015136719, 0.004355519771575928, 0.004460415840148926, 0.004348447799682617, 0.004336095809936523, 0.004330527782440185, 0.004323296070098877, 0.004344287872314453, 0.004388991832733154, 0.004327839851379395, 0.004331744194030761, 0.004341536045074463, 0.004550687789916992, 0.004370719909667969, 0.0042759041786193846, 0.00425164794921875, 0.004255743980407714, 0.0042408638000488285, 0.004270624160766601, 0.004323103904724121, 0.004275839805603027, 0.004305503845214844, 0.004330495834350586, 0.004283391952514649, 0.004298719882965088, 0.004300831794738769, 0.004282368183135986, 0.004282368183135986, 0.004270080089569092, 0.004265984058380127, 0.004270080089569092, 0.00427558422088623, 0.004262080192565918, 0.0042676801681518555, 0.004294655799865723, 0.004281472206115722, 0.004243360042572021, 0.0042391037940979, 0.004245503902435303, 0.004269152164459228, 0.004244383811950684, 0.004419456005096436, 0.004474143981933593, 0.004015488147735596, 0.004285247802734375, 0.004286464214324951, 0.004299935817718506, 0.004245759963989258, 0.004339456081390381, 0.004291200160980225, 0.004364352226257324, 0.004405248165130615, 0.004491424083709717, 0.004495584011077881, 0.004577055931091308, 0.004581024169921875, 0.004577631950378418, 0.004562943935394287, 0.004550271987915039, 0.0046063361167907714, 0.0045281281471252445, 0.004427648067474365, 0.0043719358444213865, 0.004385439872741699, 0.0043292160034179685, 0.004307295799255371, 0.004378528118133545, 0.004322879791259766, 0.004315584182739258, 0.00450764799118042, 0.004429696083068847, 0.004331647872924805, 0.004325568199157715, 0.004320608139038086, 0.004311776161193848, 0.004291647911071777, 0.004340415954589844, 0.004343616008758545, 0.004383200168609619, 0.004427487850189209, 0.004467743873596192, 0.00447321605682373, 0.004424287796020508, 0.0044067840576171875, 0.00450105619430542, 0.0045181760787963866, 0.004635295867919922, 0.004515456199645996, 0.004520319938659668, 0.004728000164031982, 0.004580160140991211, 0.004601503849029541, 0.0045714879035949705, 0.004671775817871094, 0.004468448162078857, 0.004512063980102539, 0.004652448177337646, 0.004579616069793701, 0.004667295932769776, 0.004613376140594482, 0.004698976039886475, 0.004577280044555664, 0.004636159896850586, 0.00456060791015625, 0.0045535039901733396, 0.00452617597579956, 0.004350272178649903, 0.004633952140808106, 0.004590303897857666, 0.004558144092559814, 0.004598688125610351, 0.0045504322052001955, 0.0045361599922180175, 0.004535520076751709, 0.0045454721450805665, 0.004570879936218262, 0.00456934404373169, 0.0045281281471252445, 0.004547776222229004, 0.0044882879257202146, 0.004517600059509277, 0.0044683837890625, 0.004508096218109131, 0.004464863777160644, 0.004515135765075683, 0.004506207942962646, 0.004628255844116211, 0.004452032089233398, 0.004484416007995606, 0.004453375816345215, 0.004480576038360596, 0.004559296131134033, 0.00454041576385498, 0.004583392143249512, 0.004554880142211914, 0.0044850239753723145, 0.004457856178283691, 0.00448576021194458, 0.00455238389968872, 0.004537919998168946, 0.004469183921813965, 0.004452672004699707, 0.004441952228546143, 0.004431295871734619, 0.004491040229797363, 0.004686240196228027, 0.004602399826049805, 0.004549952030181885, 0.004480000019073486, 0.004448063850402832, 0.0044009599685668945, 0.004438079833984375, 0.004405248165130615, 
0.004577280044555664, 0.004572735786437988, 0.004488736152648926, 0.004443039894104004, 0.0043821120262146, 0.004435647964477539, 0.004395936012268066, 0.004433440208435059, 0.004468704223632812, 0.004562911987304688, 0.004489984035491943, 0.0044581441879272465, 0.00437056016921997, 0.004336863994598389, 0.004380928039550781, 0.004372735977172851, 0.004126463890075684, 0.004396927833557129, 0.004366943836212158, 0.004427552223205566, 0.004443615913391113, 0.004493631839752197, 0.004516064167022705, 0.004543871879577637, 0.004539008140563965, 0.004546559810638427, 0.004585247993469238, 0.0045361919403076175, 0.004612448215484619, 0.004556159973144531, 0.004503168106079101, 0.0045660161972045895, 0.004578720092773438, 0.004591648101806641, 0.004551231861114502, 0.004572991847991943, 0.004537568092346191, 0.004533376216888428, 0.004470304012298584, 0.004426047801971435, 0.004425759792327881, 0.004528096199035644, 0.004417888164520264, 0.004423391819000244, 0.004456511974334717, 0.004450496196746826, 0.004464320182800293, 0.004487071990966797, 0.004444255828857422, 0.004616415977478027, 0.0045463361740112306, 0.004445727825164795, 0.004585055828094482, 0.004410079956054688, 0.0043434882164001464, 0.00437724781036377, 0.004426720142364502, 0.004522784233093262, 0.004536384105682373, 0.00445849609375, 0.0044109120368957516, 0.00439958381652832, 0.004359744071960449, 0.004402815818786621, 0.004404160022735595, 0.004390048027038574, 0.004346591949462891, 0.004468095779418945, 0.004452991962432862, 0.004468031883239746, 0.004518815994262695, 0.00450054407119751, 0.004426368236541748, 0.004363743782043457, 0.004362783908843994, 0.004388671875, 0.004407584190368652, 0.004477280139923096, 0.004484896183013916]",tokens/s,225.5760832707592,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,812.417024,576.585728,0.0,190.840832,172.878848,s,1,9.7195107421875,9.7195107421875,0.0,9.7195107421875,9.7195107421875,9.7195107421875,9.7195107421875,[9.7195107421875],,kWh,1.3925303029172179e-05,1.528792350615618e-06,3.4488916479946674e-06,1.8902987027782463e-05,,MB,1251.237888,685.637632,0.0,270.532608,241.723904,s,35,0.22436684989929195,0.006410481425694056,0.00015726450846672405,0.0064225921630859375,0.0065786751747131346,0.006708223915100097,0.006802901821136475,"[0.006672607898712158, 0.0062979841232299804, 0.006808864116668701, 0.006336544036865234, 0.006332543849945069, 0.006482016086578369, 0.006207680225372315, 0.00643779182434082, 0.0064225921630859375, 0.006485792160034179, 0.006474463939666748, 0.006236480236053467, 0.006602943897247315, 0.006542272090911865, 0.006497632026672364, 0.006791327953338623, 0.0064362878799438476, 0.006366208076477051, 0.006439328193664551, 0.006253695964813232, 0.006504288196563721, 0.006161632061004639, 0.006247136116027832, 0.006415328025817871, 0.006216415882110596, 0.006377920150756836, 0.006272672176361084, 
0.006294239997863769, 0.006533408164978027, 0.006472703933715821, 0.006300896167755127, 0.0063887038230896, 0.00611952018737793, 0.006457632064819336, 0.006479296207427978]",tokens/s,39934.59819942979,kWh,1.8859709564478682e-07,2.079875801910911e-08,9.764246840335915e-08,3.0703832206725503e-07,tokens/kWh,833772143.7388673,MB,1297.870848,698.220544,0.0,283.11552,241.726464,s,35,10.03211410522461,0.28663183157784605,0.0064343015655475636,0.28498422241210936,0.294206640625,0.29722244262695313,0.302714873046875,"[0.3041597900390625, 0.28978662109375, 0.29154510498046876, 0.2863673095703125, 0.2894788513183594, 0.288564208984375, 0.2835123291015625, 0.28461639404296873, 0.288713134765625, 0.29245318603515624, 0.2917049560546875, 0.283035400390625, 0.29051785278320313, 0.2999100341796875, 0.29409710693359375, 0.29607061767578124, 0.2942796630859375, 0.28897314453125, 0.2763087158203125, 0.28328314208984373, 0.28248574829101564, 0.28139749145507814, 0.2809055480957031, 0.2809966735839844, 0.28452545166015625, 0.28498422241210936, 0.28285491943359375, 0.283226806640625, 0.27728070068359373, 0.2733273010253906, 0.2822798461914062, 0.283328857421875, 0.2800694580078125, 0.2881258544921875, 0.2889476623535156]",tokens/s,219.7941507515012,kWh,8.116389713614129e-06,8.950905377034099e-07,3.2367506312078507e-06,1.224823088252539e-05,tokens/kWh,5143599.970007293,,s,2205,10.012760695219024,0.004540934555654894,0.00018691483847583528,0.004527840137481689,0.00472468490600586,0.004787935924530029,0.005212925319671631,"[0.004507455825805664, 0.0048230719566345216, 0.004821184158325195, 0.004761375904083252, 0.004845280170440674, 0.004804895877838135, 0.0050728960037231445, 0.004764031887054443, 0.0047643837928771976, 0.004811679840087891, 0.0047820801734924315, 0.004806687831878662, 0.004704224109649658, 0.004836351871490479, 0.004750207901000976, 0.004960031986236573, 0.004777632236480713, 0.004735648155212402, 0.00476582384109497, 0.0047635197639465336, 0.004868063926696777, 0.005177408218383789, 0.005263679981231689, 0.005287680149078369, 0.005356959819793701, 0.005311007976531982, 0.005307616233825684, 0.00526416015625, 0.005084799766540527, 0.0047676157951354985, 0.0048072638511657715, 0.004691359996795655, 0.005447391986846924, 0.004727359771728516, 0.004722911834716797, 0.004781119823455811, 0.004711103916168213, 0.004628896236419678, 0.004702079772949219, 0.004642784118652344, 0.004597343921661377, 0.004583136081695556, 0.004657631874084473, 0.0046962881088256836, 0.004728640079498291, 0.004663487911224365, 0.004638720035552979, 0.005406335830688476, 0.006642047882080078, 0.004718016147613526, 0.004594175815582275, 0.004515903949737549, 0.004620384216308593, 0.004595263957977295, 0.0045361599922180175, 0.004532735824584961, 0.004540607929229736, 0.004518752098083496, 0.004582047939300537, 0.004454400062561035, 0.004579840183258056, 0.0044356160163879395, 0.004403647899627686, 0.004033664226531983, 0.0044263038635253904, 0.004569407939910889, 0.004566336154937744, 0.0044735360145568845, 0.004549727916717529, 0.004590496063232422, 0.004612095832824707, 0.004505087852478027, 0.004479487895965576, 0.004546559810638427, 0.00456822395324707, 0.00465331220626831, 0.0047276802062988284, 0.004460256099700927, 0.004597760200500488, 0.004568960189819336, 0.004421823978424072, 0.004532159805297852, 0.004486847877502442, 0.0046531839370727536, 0.004612383842468262, 0.004546495914459228, 0.004419551849365234, 0.004435840129852295, 0.004442240238189697, 0.00464796781539917, 0.005131231784820557, 0.004415487766265869, 
0.004390912055969238, 0.0043604478836059574, 0.004365375995635986, 0.004407743930816651, 0.004491424083709717, 0.004709887981414795, 0.004407904148101807, 0.004436063766479492, 0.004486879825592041, 0.00449126386642456, 0.004635935783386231, 0.004718944072723389, 0.0046919360160827634, 0.004778207778930664, 0.0046179518699646, 0.0046189122200012206, 0.004607423782348633, 0.004648736000061035, 0.004642655849456787, 0.004672800064086914, 0.004574399948120117, 0.004608479976654053, 0.004607999801635742, 0.004616479873657226, 0.0048121919631958, 0.004814752101898193, 0.004731296062469482, 0.004798463821411133, 0.004732384204864502, 0.004763423919677734, 0.004696832180023193, 0.004816224098205566, 0.004841216087341308, 0.00487283182144165, 0.004508351802825928, 0.0048640317916870115, 0.004867487907409668, 0.004850240230560303, 0.004730879783630371, 0.005356639862060547, 0.004726816177368164, 0.004911680221557618, 0.004871520042419433, 0.004766848087310791, 0.004730879783630371, 0.004746655941009522, 0.0048091521263122555, 0.004756608009338379, 0.004717440128326416, 0.004642687797546387, 0.004643008232116699, 0.005068352222442627, 0.004644415855407715, 0.00465388822555542, 0.004612095832824707, 0.004681727886199951, 0.004646912097930909, 0.004632192134857178, 0.0045650238990783696, 0.004626783847808838, 0.00467145586013794, 0.0048642239570617675, 0.0046815361976623535, 0.004556608200073242, 0.004516032218933106, 0.004539936065673828, 0.004553184032440185, 0.004579328060150147, 0.004633600234985351, 0.0046622719764709475, 0.004721920013427735, 0.0046128640174865725, 0.004583424091339112, 0.004548575878143311, 0.0045304961204528805, 0.00449721622467041, 0.004513696193695068, 0.004503520011901856, 0.004464767932891846, 0.004521728038787842, 0.004481184005737305, 0.004431871891021728, 0.004501632213592529, 0.004495232105255127, 0.00447488021850586, 0.004544511795043945, 0.004415487766265869, 0.0045855998992919925, 0.004455615997314453, 0.004393663883209229, 0.004372416019439697, 0.004413792133331299, 0.004411359786987304, 0.00439680004119873, 0.004430111885070801, 0.0043597760200500485, 0.004454527854919434, 0.004429408073425293, 0.00449286413192749, 0.004508448123931885, 0.004531936168670654, 0.00454691219329834, 0.004493311882019043, 0.004490303993225098, 0.004643680095672608, 0.004632607936859131, 0.004525568008422852, 0.004596288204193115, 0.004539519786834717, 0.004582272052764892, 0.004571072101593018, 0.004511807918548584, 0.00456713581085205, 0.004597248077392578, 0.0045797438621520995, 0.004560768127441406, 0.004658847808837891, 0.004639264106750488, 0.004612063884735108, 0.004696032047271728, 0.004583040237426758, 0.004561279773712158, 0.004512928009033203, 0.00451584005355835, 0.0045801281929016115, 0.004622079849243164, 0.004528384208679199, 0.00455072021484375, 0.004507840156555176, 0.004531328201293945, 0.004548768043518067, 0.004567200183868408, 0.004471168041229248, 0.004470784187316895, 0.004402656078338623, 0.0044362878799438475, 0.00445462417602539, 0.004456287860870362, 0.00447708797454834, 0.004607359886169434, 0.004463263988494873, 0.0044932799339294435, 0.004517824172973633, 0.004501791954040527, 0.00453388786315918, 0.0045730881690979, 0.00460748815536499, 0.004850751876831054, 0.0045667200088500975, 0.0045277118682861325, 0.004536736011505127, 0.004470047950744629, 0.004465375900268555, 0.004562560081481933, 0.0044469118118286135, 0.004398911952972412, 0.004489088058471679, 0.004360223770141601, 0.004542431831359863, 0.004530208110809326, 0.0042475519180297855, 
0.004476160049438477, 0.004475359916687012, 0.004449408054351806, 0.004402048110961914, 0.004444159984588623, 0.004429471969604492, 0.004554080009460449, 0.004488096237182617, 0.004589663982391357, 0.004588768005371093, 0.004561696052551269, 0.004618336200714111, 0.004562848091125489, 0.004597087860107422, 0.004598432064056397, 0.004589824199676513, 0.00456879997253418, 0.004636608123779297, 0.00461840009689331, 0.004583360195159912, 0.00464185619354248, 0.004604320049285889, 0.004611775875091552, 0.0046284799575805665, 0.00457747220993042, 0.004557472229003906, 0.00450764799118042, 0.004581056118011475, 0.004575551986694336, 0.004553023815155029, 0.004619520187377929, 0.004755199909210205, 0.004616831779479981, 0.004627999782562256, 0.004823584079742432, 0.00455679988861084, 0.004546559810638427, 0.00464079999923706, 0.004580639839172364, 0.004624735832214355, 0.004665472030639648, 0.004622560024261474, 0.004610047817230224, 0.004584479808807373, 0.0046861438751220705, 0.004511551856994629, 0.0045064640045166015, 0.00447273588180542, 0.004597856044769287, 0.00459552001953125, 0.004614336013793945, 0.0046583361625671385, 0.004583456039428711, 0.004688096046447754, 0.0046080641746520995, 0.00459449577331543, 0.004574975967407227, 0.004637951850891113, 0.004813792228698731, 0.004722400188446045, 0.004721888065338135, 0.004543327808380127, 0.004353087902069092, 0.004688864231109619, 0.004632607936859131, 0.004647200107574463, 0.00460975980758667, 0.00464086389541626, 0.004589600086212158, 0.00459552001953125, 0.004569056034088134, 0.004563136100769043, 0.004650080204010009, 0.004641536235809326, 0.004622335910797119, 0.004648128032684326, 0.004586304187774658, 0.00460368013381958, 0.004728096008300781, 0.0046499199867248535, 0.004598080158233642, 0.004601535797119141, 0.004465663909912109, 0.004396031856536866, 0.004392191886901855, 0.004366623878479004, 0.004434336185455323, 0.004422751903533936, 0.004453343868255615, 0.004462592124938965, 0.004497024059295654, 0.004573311805725098, 0.00535753583908081, 0.004731232166290283, 0.004577216148376465, 0.004567039966583252, 0.004490303993225098, 0.004531295776367188, 0.004560959815979004, 0.004506432056427002, 0.004518208026885987, 0.004538527965545654, 0.004575744152069092, 0.00449945592880249, 0.004556479930877685, 0.004761919975280761, 0.00454860782623291, 0.0045771517753601075, 0.004544288158416748, 0.004682079792022705, 0.004630144119262695, 0.004595327854156494, 0.004581952095031738, 0.0046159358024597166, 0.004569119930267334, 0.004591807842254638, 0.0045898880958557126, 0.004568992137908935, 0.004545536041259766, 0.004677951812744141, 0.004526783943176269, 0.004482207775115966, 0.004410208225250244, 0.004453919887542725, 0.004401919841766358, 0.004045248031616211, 0.004364640235900879, 0.004430912017822266, 0.004383679866790772, 0.004451456069946289, 0.0044263038635253904, 0.004415808200836181, 0.0045240321159362796, 0.004737023830413818, 0.004622335910797119, 0.004597760200500488, 0.00451145601272583, 0.004555071830749512, 0.004514111995697022, 0.0044924159049987795, 0.004498208045959472, 0.004472576141357422, 0.004423776149749756, 0.00442790412902832, 0.004384704113006592, 0.004502912044525146, 0.004460256099700927, 0.004412479877471924, 0.004594912052154541, 0.004503935813903809, 0.0044811201095581055, 0.004631616115570068, 0.004446176052093506, 0.004393311977386474, 0.0044243202209472655, 0.004469791889190674, 0.004424672126770019, 0.004461984157562256, 0.00442793607711792, 0.0043871040344238285, 0.004434080123901367, 
0.004405248165130615, 0.004394688129425049, 0.0043787522315979, 0.004413631916046143, 0.0044380159378051755, 0.004440063953399658, 0.004440063953399658, 0.00435590410232544, 0.004464831829071045, 0.004704351902008057, 0.004619584083557129, 0.004552351951599121, 0.004600768089294434, 0.004622719764709473, 0.004605631828308105, 0.0047021441459655765, 0.004654943943023682, 0.00462659215927124, 0.004619935989379883, 0.004591839790344238, 0.004589951992034912, 0.004525824069976807, 0.004549888134002686, 0.004528223991394043, 0.004469247817993164, 0.004441887855529785, 0.00448902416229248, 0.004315904140472412, 0.004578720092773438, 0.004616799831390381, 0.004581120014190674, 0.004548255920410156, 0.0047060160636901854, 0.004608895778656006, 0.00459980821609497, 0.004624383926391602, 0.004601856231689453, 0.0044967360496521, 0.0044980478286743165, 0.004553952217102051, 0.004467520236968994, 0.004398655891418457, 0.004452799797058106, 0.004485119819641113, 0.004546463966369629, 0.004612512111663818, 0.004611775875091552, 0.004526080131530762, 0.004487167835235595, 0.004495359897613525, 0.004446112155914307, 0.004480512142181397, 0.0044160962104797365, 0.004428127765655518, 0.004456160068511963, 0.004459775924682617, 0.004482975959777832, 0.004470719814300537, 0.004502336025238037, 0.00451196813583374, 0.004513055801391602, 0.004483615875244141, 0.004484416007995606, 0.004501855850219726, 0.00471289587020874, 0.004695968151092529, 0.004517216205596924, 0.0046230401992797855, 0.0046029119491577146, 0.004545567989349365, 0.004509215831756592, 0.004514175891876221, 0.004449567794799805, 0.004522848129272461, 0.00452185583114624, 0.004542431831359863, 0.004476960182189941, 0.004518144130706787, 0.004498976230621338, 0.004509856224060058, 0.004525504112243652, 0.004452991962432862, 0.004402944087982178, 0.004460800170898438, 0.0043522238731384276, 0.004369791984558106, 0.004473440170288086, 0.004417503833770752, 0.0044009919166564946, 0.004427231788635254, 0.004220928192138672, 0.004438111782073975, 0.0044583997726440425, 0.004492928028106689, 0.004458879947662354, 0.004488192081451416, 0.004463007926940918, 0.004440256118774414, 0.0045482878684997554, 0.004461535930633545, 0.004416895866394043, 0.004421120166778565, 0.004463744163513183, 0.004524928092956543, 0.004425983905792236, 0.004415135860443115, 0.004385759830474853, 0.004461919784545899, 0.004413792133331299, 0.004413887977600097, 0.004410751819610596, 0.004429440021514893, 0.0046473278999328614, 0.0045327038764953614, 0.00455452823638916, 0.00460316801071167, 0.004561855792999268, 0.00456444787979126, 0.0045409598350524905, 0.00452627182006836, 0.004482880115509034, 0.004720320224761963, 0.00466710376739502, 0.004591839790344238, 0.004651743888854981, 0.00474076795578003, 0.004687871932983399, 0.005177472114562988, 0.004679935932159424, 0.004660736083984375, 0.004654560089111328, 0.004671232223510742, 0.004631296157836914, 0.004690080165863037, 0.004616256237030029, 0.0045874881744384765, 0.004556992053985596, 0.0045751361846923825, 0.004589151859283447, 0.004579616069793701, 0.004590943813323974, 0.004651135921478271, 0.004700607776641845, 0.004750559806823731, 0.004703264236450195, 0.004619135856628418, 0.0045905599594116215, 0.004571135997772217, 0.004655168056488037, 0.004630752086639404, 0.0049164481163024906, 0.004653567790985107, 0.004689919948577881, 0.0045032958984375, 0.004862207889556885, 0.004797984123229981, 0.0047231678962707515, 0.004758848190307617, 0.005075551986694336, 0.004673791885375977, 0.004697951793670654, 
0.0046592001914978025, 0.004704256057739258, 0.004585472106933594, 0.004620287895202637, 0.004612351894378662, 0.004632319927215576, 0.004617343902587891, 0.004583360195159912, 0.00455731201171875, 0.00468012809753418, 0.004632832050323487, 0.004630591869354248, 0.004573184013366699, 0.004558527946472168, 0.00455075216293335, 0.0045632319450378414, 0.004577055931091308, 0.004683616161346435, 0.004567039966583252, 0.004571135997772217, 0.004556640148162842, 0.004568352222442627, 0.0046286401748657224, 0.004664127826690674, 0.004562848091125489, 0.004533984184265137, 0.004537951946258545, 0.004569407939910889, 0.00462886381149292, 0.004560895919799805, 0.00455679988861084, 0.004554751873016357, 0.0045734081268310545, 0.0046139202117919925, 0.004635807991027832, 0.004620448112487793, 0.004573599815368652, 0.004625919818878174, 0.004681663990020752, 0.004593952178955078, 0.00453436803817749, 0.004733471870422363, 0.004651103973388672, 0.004605792045593262, 0.004644927978515625, 0.004587456226348877, 0.00455679988861084, 0.0046629438400268555, 0.004626719951629638, 0.004749311923980713, 0.0046406078338623044, 0.004654848098754883, 0.004645472049713135, 0.0048000960350036625, 0.004657440185546875, 0.0043318080902099605, 0.004622111797332764, 0.0047002239227294925, 0.004634463787078857, 0.004629983901977539, 0.0047049598693847654, 0.004700096130371094, 0.00467903995513916, 0.005206719875335693, 0.004720287799835205, 0.004769824028015137, 0.00482096004486084, 0.005474016189575195, 0.0050941438674926755, 0.004884575843811035, 0.004636320114135742, 0.004584767818450928, 0.004625247955322266, 0.004570335865020752, 0.0046143999099731445, 0.004971007823944092, 0.004759552001953125, 0.0046592001914978025, 0.0046100797653198244, 0.00461030387878418, 0.004566751956939698, 0.0045313920974731445, 0.00459449577331543, 0.0044802560806274415, 0.004864768028259277, 0.004586783885955811, 0.0045370559692382816, 0.004560927867889405, 0.0045948481559753415, 0.004582240104675293, 0.004532192230224609, 0.0045545601844787596, 0.004532415866851807, 0.004640768051147461, 0.004538527965545654, 0.004521120071411133, 0.004512447834014893, 0.004567200183868408, 0.004617695808410645, 0.0045715198516845705, 0.004616191864013672, 0.0045866560935974125, 0.004546624183654785, 0.004555583953857422, 0.004532192230224609, 0.004516928195953369, 0.004446559906005859, 0.004434879779815674, 0.004439871788024902, 0.004376416206359863, 0.004609375953674316, 0.00455951976776123, 0.004483071804046631, 0.004481247901916504, 0.00453718376159668, 0.00445356798171997, 0.004470016002655029, 0.004448768138885498, 0.004058239936828613, 0.004325215816497803, 0.004360191822052002, 0.004412831783294678, 0.004445119857788086, 0.0045034241676330565, 0.004567935943603516, 0.004569183826446533, 0.004549439907073975, 0.0044421119689941405, 0.0045033278465271, 0.004464863777160644, 0.004472352027893066, 0.004540895938873291, 0.004427775859832763, 0.0044239358901977536, 0.0044460477828979494, 0.00439081621170044, 0.004423232078552246, 0.004452415943145752, 0.004495744228363037, 0.004515232086181641, 0.004509664058685302, 0.004463200092315674, 0.004443456172943115, 0.00448412799835205, 0.004433055877685547, 0.004445024013519287, 0.004421311855316162, 0.004433760166168213, 0.004417695999145508, 0.004378399848937988, 0.004442399978637696, 0.004566400051116943, 0.004517920017242431, 0.004940000057220459, 0.005059232234954834, 0.0044617919921875, 0.004401599884033203, 0.004411392211914063, 0.004414944171905517, 0.004360544204711914, 0.004354239940643311, 
0.004342112064361572, 0.0044165759086608886, 0.004469344139099121, 0.004530176162719727, 0.004759552001953125, 0.004502655982971192, 0.004563839912414551, 0.00454041576385498, 0.004459904193878174, 0.004440703868865967, 0.004419136047363281, 0.004581823825836182, 0.00449724817276001, 0.004464863777160644, 0.004499392032623291, 0.004423232078552246, 0.004493023872375488, 0.00460259199142456, 0.004558527946472168, 0.004749824047088623, 0.004372288227081298, 0.005252352237701416, 0.004641536235809326, 0.004584544181823731, 0.004617119789123535, 0.004685823917388916, 0.004687871932983399, 0.00466534423828125, 0.004628159999847412, 0.004585247993469238, 0.004555071830749512, 0.004518112182617187, 0.004517216205596924, 0.004536992073059082, 0.004879903793334961, 0.004504032135009765, 0.004489215850830078, 0.004478911876678467, 0.004509984016418457, 0.0045658559799194335, 0.0045088639259338375, 0.004413343906402588, 0.0044336957931518555, 0.0044503679275512695, 0.004450272083282471, 0.004554783821105957, 0.004591904163360596, 0.004923264026641846, 0.0046794238090515135, 0.0046350078582763675, 0.004628191947937012, 0.004614143848419189, 0.0046284799575805665, 0.0045830078125, 0.0046739521026611325, 0.004589568138122559, 0.004639904022216797, 0.004553567886352539, 0.00451584005355835, 0.004513792037963867, 0.004583424091339112, 0.004533247947692871, 0.004480127811431885, 0.0045034241676330565, 0.004525087833404541, 0.004604896068572998, 0.004651008129119873, 0.00464896011352539, 0.0046239042282104495, 0.004584224224090576, 0.004636191844940186, 0.004569536209106446, 0.0047214717864990235, 0.004592735767364502, 0.00464467191696167, 0.004597951889038086, 0.004667391777038574, 0.004593472003936768, 0.004567039966583252, 0.004640768051147461, 0.004583424091339112, 0.004734975814819336, 0.004777984142303467, 0.004427775859832763, 0.004781343936920166, 0.004737728118896485, 0.004802591800689697, 0.0047861762046813965, 0.004699552059173584, 0.004763904094696045, 0.004669792175292968, 0.004730239868164062, 0.004731008052825928, 0.004718463897705078, 0.004717184066772461, 0.004773888111114502, 0.004726367950439453, 0.004780223846435547, 0.004720255851745605, 0.0046434240341186525, 0.004698112010955811, 0.004652671813964844, 0.004671872138977051, 0.0046694397926330565, 0.004855552196502686, 0.004692224025726319, 0.0048148479461669925, 0.00486790418624878, 0.004835455894470215, 0.004718656063079834, 0.004642047882080078, 0.004720704078674316, 0.004776351928710938, 0.004657663822174072, 0.004676511764526367, 0.004661439895629883, 0.005213183879852295, 0.0047223038673400875, 0.005318528175354004, 0.005204160213470459, 0.005074880123138428, 0.005858751773834228, 0.0047291841506958004, 0.004744480133056641, 0.004965087890625, 0.0047019200325012205, 0.004690495967864991, 0.004734015941619873, 0.004657792091369629, 0.004706975936889649, 0.004735680103302002, 0.0047092800140380855, 0.004763904094696045, 0.00470201587677002, 0.00472873592376709, 0.00464086389541626, 0.004724736213684082, 0.004578911781311036, 0.0045749440193176266, 0.004539296150207519, 0.004545792102813721, 0.004606751918792724, 0.004627552032470703, 0.004590240001678467, 0.004530176162719727, 0.004617824077606201, 0.004279583930969238, 0.004594719886779785, 0.004570943832397461, 0.004532000064849853, 0.004536543846130371, 0.004474751949310303, 0.004518335819244385, 0.004513599872589111, 0.004511360168457031, 0.004507296085357666, 0.004418144226074219, 0.004489215850830078, 0.004593503952026367, 0.004501664161682129, 0.004511744022369385, 
0.00452627182006836, 0.004523839950561524, 0.004564032077789306, 0.00451036787033081, 0.004633120059967041, 0.004622079849243164, 0.004532288074493409, 0.0045526399612426756, 0.004538400173187256, 0.004532447814941406, 0.00449724817276001, 0.004508927822113037, 0.00451855993270874, 0.004468160152435303, 0.004487232208251953, 0.004456960201263428, 0.004537471771240234, 0.004512639999389648, 0.00464896011352539, 0.004487167835235595, 0.004499360084533692, 0.004533728122711181, 0.0046146559715271, 0.0045979199409484865, 0.004567008018493652, 0.004540128231048584, 0.004626719951629638, 0.004611680030822754, 0.004659776210784912, 0.004632415771484375, 0.004644192218780517, 0.004780704021453857, 0.004749184131622315, 0.0047075519561767575, 0.0046497278213500975, 0.0046880321502685545, 0.004714047908782959, 0.00466758394241333, 0.00466758394241333, 0.00468179178237915, 0.004756768226623535, 0.00476643180847168, 0.00638156795501709, 0.006092927932739258, 0.006149312019348144, 0.004788095951080322, 0.0047049918174743655, 0.0048213119506835935, 0.004490272045135498, 0.004823359966278076, 0.0048109121322631836, 0.00487497615814209, 0.004763296127319336, 0.004685311794281006, 0.004661888122558594, 0.004737023830413818, 0.004667391777038574, 0.004633664131164551, 0.0046273918151855465, 0.004663551807403564, 0.004687615871429443, 0.004742688179016114, 0.0047149438858032225, 0.004696415901184082, 0.004648640155792237, 0.004605504035949707, 0.00459446382522583, 0.004742176055908203, 0.0046612157821655275, 0.0048464322090148925, 0.004779007911682129, 0.004702911853790283, 0.00475926399230957, 0.004690336227416992, 0.0047185921669006346, 0.004871967792510986, 0.00465123176574707, 0.004608223915100098, 0.0047199358940124515, 0.004649343967437744, 0.00459990406036377, 0.004627999782562256, 0.00472873592376709, 0.004742976188659668, 0.004768640041351318, 0.004661056041717529, 0.004652383804321289, 0.004716959953308106, 0.00464518404006958, 0.004740320205688477, 0.0046878399848937985, 0.004655935764312744, 0.00464896011352539, 0.004626175880432129, 0.004569536209106446, 0.004654399871826172, 0.0045574078559875485, 0.004626368045806885, 0.0049049282073974606, 0.004636672019958496, 0.0047636480331420894, 0.004748799800872803, 0.004755743980407715, 0.004657120227813721, 0.004675936222076416, 0.004601151943206787, 0.004551360130310059, 0.00472054386138916, 0.004577280044555664, 0.004665023803710937, 0.004788544178009033, 0.004354879856109619, 0.00472492790222168, 0.004698751926422119, 0.004604000091552735, 0.0046341118812561035, 0.004571775913238526, 0.004620351791381836, 0.004613952159881592, 0.004603551864624023, 0.004585824012756348, 0.004517375946044922, 0.00454911994934082, 0.0045443840026855465, 0.0045153918266296385, 0.0045082240104675295, 0.004499263763427734, 0.004525760173797608, 0.004540448188781738, 0.004660863876342774, 0.004572256088256836, 0.004658880233764648, 0.004757503986358643, 0.004662399768829346, 0.004623295783996582, 0.004654176235198974, 0.004633312225341797, 0.004796351909637451, 0.004598015785217285, 0.004732704162597657, 0.004630623817443847, 0.004814080238342285, 0.005005568027496338, 0.004664159774780273, 0.004730591773986817, 0.005093440055847168, 0.004699935913085937, 0.004593887805938721, 0.004534272193908692, 0.005256224155426026, 0.005000415802001953, 0.004660895824432373, 0.004695295810699463, 0.004812992095947265, 0.004718400001525879, 0.004643680095672608, 0.0047002239227294925, 0.004667327880859375, 0.004620543956756592, 0.0046014080047607426, 0.004594143867492676, 
0.004609280109405518, 0.004609888076782227, 0.004643680095672608, 0.004621439933776855, 0.004677504062652588, 0.004565983772277832, 0.004538368225097656, 0.004609312057495117, 0.004687903881072998, 0.004649472236633301, 0.004841663837432861, 0.004678463935852051, 0.004676608085632325, 0.004671264171600342, 0.004595967769622802, 0.004524159908294678, 0.0045240321159362796, 0.004487199783325195, 0.004506688117980957, 0.004496287822723388, 0.004546879768371582, 0.004527872085571289, 0.004761536121368409, 0.004661248207092285, 0.004640511989593506, 0.004593215942382813, 0.004645567893981933, 0.004569087982177734, 0.004581439971923828, 0.004497344017028808, 0.004499135971069336, 0.00445907211303711, 0.00440828800201416, 0.004561247825622558, 0.004432320117950439, 0.0043006081581115725, 0.004333888053894043, 0.004379583835601807, 0.004383679866790772, 0.004427775859832763, 0.004420928001403809, 0.004424384117126465, 0.00448528003692627, 0.0045668802261352535, 0.0044926080703735355, 0.004457151889801026, 0.004500959873199463, 0.004736735820770264, 0.004567200183868408, 0.004631008148193359, 0.00456928014755249, 0.0045255360603332516, 0.004646783828735352, 0.004647583961486817, 0.004650144100189209, 0.004755936145782471, 0.004683360099792481, 0.004757408142089844, 0.00467852783203125, 0.004745312213897705, 0.004787295818328857, 0.0046633281707763674, 0.004696512222290039, 0.0047578558921813965, 0.004667391777038574, 0.004673247814178467, 0.004669727802276612, 0.004702208042144776, 0.004738143920898438, 0.004727200031280517, 0.004637087821960449, 0.004619711875915527, 0.004548960208892823, 0.004536640167236328, 0.004542143821716309, 0.004504928112030029, 0.004226719856262207, 0.004472928047180176, 0.004438111782073975, 0.004501215934753418, 0.004399136066436767, 0.004390431880950928, 0.004456639766693115, 0.004429664134979248, 0.004447135925292969, 0.004449759960174561, 0.0044111042022705075, 0.0043787522315979, 0.004366847991943359, 0.004319200038909912, 0.004317183971405029, 0.004314943790435791, 0.0043545918464660644, 0.00432908821105957, 0.004390944004058838, 0.004331776142120361, 0.004347648143768311, 0.004368383884429932, 0.004429823875427246, 0.004302847862243653, 0.004329472064971924, 0.0042434558868408205, 0.0045825281143188476, 0.0046739840507507326, 0.004315584182739258, 0.0045424637794494625, 0.004454207897186279, 0.0043851838111877445, 0.0044234561920166015, 0.004467775821685791, 0.004411712169647217, 0.004389376163482666, 0.004463935852050781, 0.0044941439628601075, 0.004503007888793945, 0.0045181760787963866, 0.004569087982177734, 0.00449561595916748, 0.004397056102752686, 0.004388864040374756, 0.004415487766265869, 0.0043961601257324215, 0.004323647975921631, 0.004274752140045166, 0.004259840011596679, 0.0042386560440063475, 0.00422163200378418, 0.004222911834716797, 0.004241055965423584, 0.004284863948822022, 0.004267392158508301, 0.004440671920776367, 0.00429033613204956, 0.004311264038085938, 0.004275392055511475, 0.004238143920898437, 0.0042449598312377925, 0.004297247886657715, 0.004323647975921631, 0.004149055957794189, 0.004368639945983886, 0.004382495880126953, 0.004398591995239258, 0.004425792217254638, 0.004302944183349609, 0.00425161600112915, 0.004243807792663574, 0.0042854719161987305, 0.004283360004425049, 0.004298592090606689, 0.004466752052307129, 0.0044998078346252446, 0.004372223854064942, 0.004452352046966553, 0.004425727844238282, 0.004431583881378174, 0.004425151824951172, 0.004447231769561768, 0.004405439853668213, 0.004453184127807618, 0.0045064640045166015, 
0.00449948787689209, 0.0046856322288513184, 0.004527552127838135, 0.004610144138336182, 0.004966879844665527, 0.0049419841766357425, 0.004415647983551025, 0.004417535781860352, 0.0044354238510131835, 0.004448287963867188, 0.004567552089691162, 0.004497439861297608, 0.004456511974334717, 0.004553791999816895, 0.004503839969635009, 0.004565599918365478, 0.004540256023406982, 0.004465695858001709, 0.0045864319801330565, 0.004575232028961182, 0.004654176235198974, 0.004524960041046143, 0.004548384189605713, 0.004789792060852051, 0.004760255813598633, 0.004624383926391602, 0.004685823917388916, 0.004628704071044922, 0.00454041576385498, 0.004531295776367188, 0.004501791954040527, 0.004497280120849609, 0.004432000160217285, 0.004387423992156982, 0.004443488121032715, 0.004380832195281983, 0.004402719974517823, 0.004499584197998047, 0.004387807846069336, 0.0044271678924560545, 0.004529983997344971, 0.004271232128143311, 0.0046089601516723635, 0.004651487827301026, 0.004683360099792481, 0.004610367774963379, 0.00484988784790039, 0.0046657600402832036, 0.004709343910217285, 0.00452182388305664, 0.0045363521575927735, 0.004453536033630371, 0.004445695877075195, 0.004450592041015625, 0.004456480026245117, 0.0045097279548645015, 0.00465119981765747, 0.004830976009368896, 0.004848896026611328, 0.004696832180023193, 0.004657120227813721, 0.00455683183670044, 0.004577280044555664, 0.0047916479110717775, 0.004585792064666748, 0.00443123197555542, 0.00443612813949585, 0.004462656021118164, 0.004367104053497314, 0.004422880172729492, 0.004291039943695068, 0.004305215835571289, 0.004276224136352539, 0.004265984058380127, 0.004421088218688965, 0.0043730239868164066, 0.004360064029693604, 0.004476096153259277, 0.004561855792999268, 0.0044273920059204105, 0.004536704063415527, 0.00449289608001709, 0.004427584171295166, 0.0044438400268554685, 0.0044102721214294436, 0.0043604478836059574, 0.004316351890563965, 0.004315423965454102, 0.004332992076873779, 0.004451168060302734, 0.00439247989654541, 0.004339168071746826, 0.004459519863128662, 0.004403200149536133, 0.004321280002593994, 0.00436950397491455, 0.0043935999870300295, 0.004364160060882569, 0.0043515520095825195, 0.004356959819793701, 0.004401343822479248, 0.004415296077728272, 0.00446230411529541, 0.004339968204498291, 0.004370816230773926, 0.004382719993591308, 0.004390719890594482, 0.004358335971832276, 0.004302847862243653, 0.004298655986785888, 0.004284512042999267, 0.004325376033782959, 0.004460544109344483, 0.004523744106292724, 0.00449286413192749, 0.004493631839752197, 0.004540768146514893, 0.004617599964141845, 0.00451148796081543, 0.00443833589553833, 0.004387135982513428, 0.004409664154052735, 0.004349760055541992, 0.004464831829071045, 0.004538368225097656, 0.004566239833831787, 0.004538271903991699, 0.0046212477684021, 0.00461407995223999, 0.004586751937866211, 0.004498176097869873, 0.004500864028930664, 0.004549248218536377, 0.004472415924072265, 0.004477375984191894, 0.004709568023681641, 0.004405024051666259, 0.004400032043457031, 0.004447487831115723, 0.004371615886688232, 0.004329376220703125, 0.004324575901031494, 0.0043812479972839356, 0.004360191822052002, 0.0045584640502929685, 0.004379039764404297, 0.004429855823516846, 0.004476223945617676, 0.004478655815124512, 0.004393919944763184, 0.0043823041915893556, 0.004473504066467285, 0.004415359973907471, 0.004370304107666015, 0.004753503799438476, 0.004442016124725342, 0.004634624004364014, 0.004404672145843506, 0.00441209602355957, 0.004475999832153321, 0.0044102401733398435, 
0.004408383846282959, 0.004434720039367675, 0.004620351791381836, 0.00452732801437378, 0.004456480026245117, 0.004401919841766358, 0.0047636480331420894, 0.004449632167816162, 0.004366176128387451, 0.0043812479972839356, 0.004311295986175537, 0.004345856189727783, 0.00440934419631958, 0.0044646401405334475, 0.004558752059936524, 0.004546271800994873, 0.004473567962646485, 0.004515488147735596, 0.0045606718063354494, 0.00453001594543457, 0.004438399791717529, 0.004405248165130615, 0.004773344039916992, 0.0045307202339172365, 0.004491072177886963, 0.004483263969421386, 0.004465727806091308, 0.00442464017868042, 0.004412960052490234, 0.004423679828643799, 0.004448224067687989, 0.004620800018310547, 0.004470784187316895, 0.004470399856567383, 0.004457183837890625, 0.004398752212524414, 0.004407296180725098, 0.004477983951568604, 0.004524543762207032, 0.004635424137115479, 0.004680895805358886, 0.00459827184677124, 0.004518080234527588, 0.004503392219543457, 0.004468416213989258, 0.004501791954040527, 0.004554656028747559, 0.004857952117919922, 0.004488800048828125, 0.004417439937591553, 0.004358655929565429, 0.0043415999412536625, 0.004316927909851075, 0.004305088043212891, 0.004475103855133057, 0.0043151359558105465, 0.004287519931793213, 0.004272384166717529, 0.004276351928710937, 0.004276832103729248, 0.004257791996002197, 0.004288512229919434, 0.004293856143951416, 0.004272992134094238, 0.004273471832275391, 0.004284639835357666, 0.004405248165130615, 0.004471199989318848, 0.004537504196166992, 0.004124832153320312, 0.004426752090454102, 0.004398111820220947, 0.004407104015350342, 0.004388768196105957, 0.0044176321029663086, 0.004451968193054199, 0.004417215824127197, 0.004400896072387695, 0.004334527969360352, 0.0043089919090271, 0.0043290238380432125, 0.004399839878082275, 0.004463935852050781, 0.0044766077995300295, 0.0044308481216430665, 0.0044419198036193845, 0.004445312023162842, 0.004403232097625732, 0.004528287887573242, 0.004364640235900879, 0.004419839859008789, 0.004421408176422119, 0.004345503807067871, 0.004332096099853516, 0.004386847972869873, 0.004532192230224609, 0.004550655841827392, 0.004575232028961182, 0.004536320209503173, 0.004538368225097656, 0.004478975772857666, 0.004536223888397217, 0.004487264156341553, 0.004470880031585693, 0.004490303993225098, 0.004516704082489014, 0.00462553596496582, 0.004487328052520752, 0.0044513921737670895, 0.004490079879760742, 0.004524127960205078, 0.004442848205566406, 0.004468736171722412, 0.0045257282257080075, 0.004569536209106446, 0.004497536182403564, 0.004514592170715332, 0.004516863822937011, 0.004570784091949463, 0.00455241584777832, 0.004525951862335205, 0.004495999813079834, 0.0046631360054016115, 0.004534399986267089, 0.004442272186279297, 0.004386816024780273, 0.004386816024780273, 0.004333568096160889, 0.004313087940216064, 0.004472832202911377, 0.00436633586883545, 0.004345856189727783, 0.004004479885101319, 0.004349952220916748, 0.0043599681854248045, 0.004380447864532471, 0.004339360237121582, 0.004327231884002685, 0.00434006404876709, 0.004338304042816162, 0.00432534408569336, 0.004375936031341553, 0.004386943817138672, 0.004441855907440185, 0.004462431907653808, 0.004385727882385254, 0.004341760158538818, 0.004347904205322266, 0.004341023921966553, 0.0043138241767883305, 0.004321280002593994, 0.004353792190551758, 0.004389120101928711, 0.004331488132476807, 0.0043151679039001465, 0.004390912055969238, 0.004417535781860352, 0.0044432001113891605, 0.0046191678047180175, 0.0046633281707763674, 0.00444163179397583, 
0.004878655910491943, 0.00451151990890503, 0.004478655815124512, 0.004737728118896485, 0.0045071358680725095, 0.004523839950561524, 0.004731743812561035, 0.004513023853302002, 0.004487775802612305, 0.0045424637794494625, 0.0045129919052124025, 0.004512608051300049, 0.004532415866851807, 0.0046080322265625, 0.004613855838775635, 0.00454041576385498, 0.004560895919799805, 0.004546847820281983, 0.0046609601974487306, 0.0045847358703613285, 0.004598432064056397, 0.004711904048919678, 0.004661056041717529, 0.004637472152709961, 0.004654272079467774, 0.00465388822555542, 0.004577280044555664, 0.004791296005249023, 0.004703231811523437, 0.004651008129119873, 0.00464896011352539, 0.0046835517883300785, 0.004624608039855957, 0.0049071359634399415, 0.004272128105163574, 0.004523007869720459, 0.004496575832366944, 0.004501984119415283, 0.0045409278869628904, 0.0045740799903869625, 0.0046039681434631346, 0.004505663871765137, 0.004555168151855468, 0.00448905611038208, 0.0044488320350646975, 0.004506976127624512, 0.004524223804473877, 0.004487264156341553, 0.004452735900878906, 0.004493216037750244, 0.0045749440193176266, 0.004521952152252197, 0.0048661761283874515, 0.0045428800582885745, 0.004736447811126709, 0.004511295795440674, 0.004475520133972168, 0.004460256099700927, 0.00470684814453125, 0.004493311882019043, 0.004618239879608154, 0.004486303806304931, 0.004399072170257568, 0.004448895931243897, 0.00449561595916748, 0.004469855785369873, 0.004457119941711426, 0.004471039772033692, 0.004562848091125489, 0.004647007942199707, 0.0045929279327392575, 0.004504000186920166, 0.004546976089477539, 0.004537504196166992, 0.0045042881965637206, 0.0045148801803588864, 0.004518943786621094, 0.004532383918762207, 0.0044950079917907716, 0.004483168125152588, 0.004464223861694336, 0.004454815864562988, 0.004444159984588623, 0.004452640056610107, 0.004406208038330078, 0.00464192008972168, 0.004535391807556153, 0.00447983980178833, 0.004476672172546387, 0.004442080020904541, 0.004493311882019043, 0.004515552043914795, 0.00449724817276001, 0.004532671928405761, 0.004488671779632568, 0.004508096218109131, 0.004464735984802246, 0.0041146240234375, 0.00446454381942749, 0.004461696147918701, 0.004394144058227539, 0.0043935680389404295, 0.004361472129821777, 0.004367008209228516, 0.004581471920013428, 0.005598879814147949, 0.004527616024017334, 0.004455264091491699, 0.0044501757621765135, 0.004443583965301513, 0.004376319885253907, 0.004379583835601807, 0.004375872135162354, 0.004332575798034668, 0.0043086400032043455, 0.004317247867584228, 0.0043396477699279784, 0.004317183971405029, 0.00449945592880249, 0.004341887950897217, 0.004470655918121338, 0.004415584087371826, 0.004394400119781494, 0.004394752025604248, 0.004386943817138672, 0.00442790412902832, 0.004530687808990478, 0.004454400062561035, 0.004412511825561524, 0.004374911785125732, 0.0043515520095825195, 0.004371647834777832, 0.004388639926910401, 0.0043235840797424315, 0.004351776123046875, 0.004300672054290772, 0.004380415916442871, 0.00450377607345581, 0.0044627199172973635, 0.004579103946685791, 0.004566239833831787, 0.004584447860717773, 0.004571455955505371, 0.004661087989807129, 0.004583263874053955, 0.004656896114349365, 0.004761856079101562, 0.004570464134216309, 0.00456771183013916, 0.0045055999755859374, 0.0045240321159362796, 0.004502719879150391, 0.004547616004943847, 0.004515615940093995, 0.004524223804473877, 0.004761407852172852, 0.004769536018371582, 0.004605855941772461, 0.00455459213256836, 0.004492959976196289, 0.004155648231506348, 
0.004425407886505127, 0.00443833589553833, 0.004421088218688965, 0.004446752071380615, 0.00446668815612793, 0.004425280094146729, 0.004374815940856934, 0.00445251178741455, 0.0044215679168701175, 0.0044913277626037595, 0.004597760200500488, 0.004580704212188721, 0.00458409595489502, 0.004546559810638427, 0.004554751873016357, 0.004671487808227539, 0.004611264228820801, 0.004639552116394043, 0.004532159805297852, 0.004529920101165772, 0.0045090880393981935, 0.004496287822723388, 0.0045116481781005855, 0.004460639953613281, 0.004579584121704102, 0.004517248153686523, 0.004515423774719239, 0.004498464107513428, 0.004524127960205078, 0.004555424213409424, 0.004781119823455811, 0.004624063968658447, 0.004611904144287109, 0.004906784057617188, 0.004576223850250244, 0.0045792641639709475, 0.004523647785186768, 0.004398623943328858, 0.00439574384689331, 0.004343679904937744, 0.004357439994812012, 0.00429318380355835, 0.004257184028625488, 0.004274911880493164, 0.004295775890350342, 0.004321216106414795, 0.004414048194885254, 0.004483456134796142, 0.004417535781860352, 0.004410783767700196, 0.004428383827209473, 0.004417088031768799, 0.004425792217254638, 0.004473087787628174, 0.004539936065673828, 0.004477536201477051, 0.004527616024017334, 0.004481503963470459, 0.004532447814941406, 0.004527040004730224, 0.004504000186920166, 0.0044988799095153804, 0.004244639873504639, 0.0045186557769775395, 0.004501664161682129, 0.004509888172149658, 0.004534016132354737, 0.004528192043304444, 0.004534272193908692, 0.004571135997772217, 0.004495584011077881, 0.004477727890014649, 0.0044349441528320314, 0.004482431888580322, 0.004444543838500977, 0.004362495899200439, 0.004335616111755371, 0.004342847824096679, 0.004415775775909424, 0.0044059200286865234, 0.004433119773864746, 0.00430944013595581, 0.004268383979797364, 0.004320256233215332, 0.004412415981292724, 0.004445631980895996, 0.004440480232238769, 0.004486815929412842, 0.004477536201477051, 0.004466303825378418, 0.004674943923950195, 0.004449088096618653, 0.004452799797058106, 0.004398752212524414, 0.004344128131866455, 0.0043721599578857425, 0.004397119998931885, 0.004359295845031738, 0.004311264038085938, 0.0042991042137146, 0.004367712020874023, 0.004363167762756348, 0.004335616111755371, 0.004347296237945557, 0.0043218879699707035, 0.004319231986999511, 0.00430079984664917, 0.004294911861419678, 0.004251391887664795, 0.004259744167327881, 0.004241504192352295, 0.004224959850311279, 0.0043359360694885255, 0.004424511909484863, 0.004272319793701172, 0.00440396785736084, 0.004409664154052735, 0.00452780818939209, 0.004318655967712403, 0.0042912960052490234, 0.004322591781616211, 0.004322944164276123, 0.004355008125305176, 0.004433375835418701, 0.0044498882293701175, 0.004046495914459229, 0.0043916478157043455, 0.004332704067230225, 0.0043361282348632815, 0.004314655780792236, 0.004276832103729248, 0.0042492480278015134, 0.004274623870849609, 0.004292223930358887, 0.004280352115631104, 0.004278207778930664, 0.004259391784667969, 0.004234208106994629, 0.004245183944702149, 0.004278719902038574, 0.0043170561790466305, 0.004273248195648193, 0.004268959999084472, 0.004249728202819824, 0.004236480236053467, 0.004235328197479248, 0.004233503818511963, 0.0044629120826721195, 0.004249631881713867, 0.004242688179016113, 0.004221695899963379, 0.0042106881141662595, 0.004210015773773193, 0.0042288641929626464, 0.004250463962554932, 0.004282368183135986, 0.004359551906585693, 0.004272672176361084, 0.004261184215545655, 0.004249887943267822, 0.0042436480522155765, 
0.004232895851135254, 0.0042657599449157715, 0.004240287780761719, 0.004521984100341797, 0.004262207984924316, 0.004239359855651856, 0.004249375820159912, 0.004580287933349609, 0.004344607830047607, 0.004440608024597168, 0.004439167976379395, 0.004465184211730957, 0.0045313601493835445, 0.004457503795623779, 0.004447711944580078, 0.0044076480865478515, 0.004394112110137939, 0.004355296134948731, 0.004417183876037597, 0.004377823829650879, 0.004360576152801513, 0.004360608100891113, 0.004544511795043945, 0.004518144130706787, 0.004509439945220947, 0.004485119819641113, 0.0046566400527954105, 0.004266111850738525, 0.004456480026245117, 0.004421599864959717, 0.004392640113830566, 0.004379168033599854, 0.004492127895355224, 0.004780992031097412, 0.004688159942626953, 0.004619999885559082, 0.004606016159057617, 0.004485055923461914, 0.004481023788452149, 0.004511744022369385, 0.00440499210357666, 0.004333824157714844, 0.004315231800079346, 0.004360095977783203, 0.004773888111114502, 0.004546720027923584, 0.004501376152038574, 0.004532192230224609, 0.004362239837646485, 0.0042781119346618655, 0.004319392204284668, 0.004316927909851075, 0.004301055908203125, 0.004263936042785644, 0.004296703815460205, 0.0043089919090271, 0.004314976215362549, 0.0042594242095947265, 0.004262112140655518, 0.004274432182312012, 0.004554719924926758, 0.004340960025787354, 0.0043478717803955074, 0.004380703926086426, 0.0046683201789855956, 0.0043441920280456545, 0.004361631870269775, 0.004382912158966064, 0.004565279960632324, 0.004743040084838867, 0.004649024009704589, 0.004758399963378906, 0.004555263996124268, 0.004458943843841553, 0.004407296180725098, 0.00454860782623291, 0.004560959815979004, 0.004513792037963867, 0.004569024085998535, 0.0044843521118164064, 0.004410336017608643, 0.004425504207611084, 0.004464096069335938, 0.004420127868652344, 0.004423136234283447, 0.005654431819915771, 0.004501952171325684, 0.004341951847076416, 0.00447049617767334, 0.00450812816619873, 0.004208640098571777, 0.004583424091339112, 0.004515903949737549, 0.004546175956726075, 0.004489408016204834, 0.004548543930053711, 0.004515679836273193, 0.004471360206604004, 0.004558623790740967, 0.004489215850830078, 0.004544511795043945, 0.004573184013366699, 0.004573184013366699, 0.004611680030822754, 0.004587071895599365, 0.004674335956573486, 0.00456441593170166, 0.004557119846343994, 0.004583744049072266, 0.004527840137481689, 0.004730432033538818, 0.004831967830657959, 0.0046343040466308594, 0.004585792064666748, 0.0046384320259094235, 0.004597152233123779, 0.0045361919403076175, 0.004471327781677246, 0.004448768138885498, 0.004746687889099121, 0.004525919914245605, 0.004463295936584472, 0.004505343914031982, 0.004546815872192383, 0.004581376075744629, 0.004594816207885742, 0.0046163201332092285, 0.004578048229217529, 0.00464086389541626, 0.004557727813720703, 0.0044709758758544925, 0.004440896034240723, 0.004562335968017578, 0.004526976108551025, 0.0045708479881286625, 0.004302847862243653, 0.004304224014282226, 0.004297376155853272, 0.004261919975280762, 0.004290847778320312, 0.004257791996002197, 0.004277760028839111, 0.004304255962371826, 0.00431606388092041, 0.004311168193817138, 0.004321055889129639, 0.00445849609375, 0.004312928199768066, 0.004347360134124756, 0.0043463678359985354, 0.00435158395767212, 0.004364895820617676, 0.004288224220275879, 0.003915424108505249, 0.004207007884979248, 0.00425977611541748, 0.004200448036193847, 0.004191840171813965, 0.004196767807006836, 0.004192255973815918, 0.004185247898101807, 
0.00418287992477417, 0.004225024223327637, 0.004204768180847168, 0.004220608234405517, 0.004224480152130127, 0.004231103897094727, 0.0042230401039123535, 0.00423964786529541, 0.004192607879638672, 0.004244991779327393, 0.004229631900787354, 0.004216256141662597, 0.004222815990447998, 0.004256159782409668, 0.004618144035339356, 0.004563295841217041, 0.004816959857940674, 0.004524288177490235, 0.0044889597892761235, 0.004472959995269776, 0.004471712112426758, 0.004446176052093506, 0.004489312171936035, 0.0045147199630737305, 0.004466944217681885, 0.004472576141357422, 0.004609119892120361, 0.004426784038543701, 0.004386911869049073, 0.004364352226257324, 0.004407135963439941, 0.004462463855743408, 0.004413440227508545, 0.004411327838897705, 0.004407360076904297, 0.0045240321159362796, 0.0046614079475402834, 0.004871007919311524, 0.0047149438858032225, 0.004663648128509522, 0.004671711921691895, 0.004636640071868896, 0.004653088092803955, 0.004677760124206543, 0.005328512191772461, 0.004571584224700928, 0.004652544021606446, 0.004583807945251464, 0.00458131217956543, 0.004486976146697998, 0.004503200054168701, 0.004493504047393799, 0.0044998078346252446, 0.004574528217315674, 0.004481696128845215, 0.0041682558059692384, 0.004482016086578369, 0.004702752113342285, 0.004663936138153076, 0.0046665921211242675, 0.004666079998016357, 0.004722176074981689, 0.00473740816116333, 0.00464086389541626, 0.004666687965393067, 0.004623295783996582, 0.0046293120384216305, 0.004635488033294678, 0.004687871932983399, 0.004734784126281738, 0.004710783958435058, 0.004610911846160888, 0.0046212801933288574, 0.004755712032318115, 0.004638495922088623, 0.004609920024871826, 0.004752960205078125, 0.0046310720443725585, 0.004614143848419189, 0.004642079830169678, 0.004604544162750244, 0.004604063987731934, 0.00465225601196289, 0.00451804780960083, 0.004595776081085205, 0.004534815788269043, 0.004491231918334961, 0.0045015039443969725, 0.00446892786026001, 0.004605599880218506, 0.004513951778411865, 0.004487103939056396, 0.004478687763214111, 0.004502016067504883, 0.004525919914245605, 0.004521984100341797, 0.004505087852478027, 0.004517407894134522, 0.004442368030548096, 0.00442406415939331, 0.004409503936767578, 0.004413536071777344, 0.0043951039314270015, 0.004374271869659424, 0.00453056001663208, 0.004476831912994385, 0.0043755841255187986, 0.004409311771392822, 0.004497471809387207, 0.0045782079696655275, 0.004666431903839112, 0.004593855857849121, 0.004565279960632324, 0.004544703960418701, 0.004663584232330323, 0.004567039966583252, 0.004489215850830078, 0.00451142406463623, 0.004331424236297608, 0.004567039966583252, 0.004460544109344483, 0.004526080131530762, 0.00445849609375, 0.004655392169952393, 0.00466710376739502, 0.004694015979766846, 0.00468390417098999, 0.004724607944488525, 0.004629600048065186, 0.004630655765533447, 0.00465503978729248, 0.004635488033294678, 0.004642879962921143, 0.00462227201461792, 0.004656288146972656, 0.00471724796295166, 0.0047669439315795896, 0.0046499199867248535, 0.004583424091339112, 0.00463647985458374, 0.004666560173034668, 0.005053791999816894, 0.0046876478195190426, 0.004674528121948242, 0.004676447868347168, 0.004672800064086914, 0.004574111938476563, 0.004651167869567871, 0.004562016010284424, 0.004768191814422607, 0.004499648094177246, 0.004526080131530762, 0.004526144027709961, 0.004493120193481445, 0.004451744079589844, 0.004432608127593994, 0.0045008320808410645, 0.004616511821746826, 0.004632224082946777, 0.004700575828552246, 0.004629055976867676, 
0.004712063789367676, 0.0046382398605346676, 0.004542816162109375, 0.004603712081909179, 0.004577439785003662, 0.004495552062988281, 0.004445536136627197, 0.00443990421295166, 0.004432799816131591, 0.004435935974121094, 0.004525087833404541, 0.0044720001220703125, 0.00439305591583252, 0.004398784160614014, 0.004361536026000977, 0.004338367938995362, 0.0045378880500793455, 0.004510176181793213, 0.004474944114685059, 0.0044867520332336425]",tokens/s,220.218985264759,,, 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,813.289472,576.585728,0.0,190.840832,172.878848,s,1,9.8279833984375,9.8279833984375,0.0,9.8279833984375,9.8279833984375,9.8279833984375,9.8279833984375,[9.8279833984375],,kWh,1.3139808354158806e-05,1.442007503746257e-06,4.282225648008509e-06,1.8864041505913572e-05,,MB,1317.23264,685.637632,0.0,270.532608,241.723904,s,28,0.21744515228271488,0.007765898295811244,0.00016092491112766377,0.007761264085769653,0.007894255876541138,0.008081566405296324,0.008273715734481811,"[0.008181280136108398, 0.007771584033966064, 0.007614848136901856, 0.007653952121734619, 0.00789638376235962, 0.0077627840042114255, 0.0077597441673278806, 0.0076769919395446775, 0.007770783901214599, 0.007768799781799316, 0.007893343925476075, 0.007765632152557373, 0.007744351863861084, 0.007706783771514893, 0.007827807903289795, 0.0076286401748657225, 0.007621856212615967, 0.0077060480117797855, 0.008307904243469237, 0.007773632049560547, 0.007601119995117187, 0.007705632209777832, 0.007875167846679687, 0.007656735897064209, 0.007824800014495849, 0.007784768104553223, 0.007532095909118653, 0.007631680011749267]",tokens/s,32964.634643500394,kWh,2.2353848647588007e-07,2.465206942280595e-08,1.1456874913323003e-07,3.6275930503191606e-07,tokens/kWh,705702090.7498893,MB,1363.8656,698.220544,0.0,283.11552,241.726464,s,28,10.099601104736328,0.36070003945486884,0.0061831607404840925,0.3609567108154297,0.36589821472167966,0.37035649719238284,0.3764014306640625,"[0.37796200561523435, 0.3581861572265625, 0.357117431640625, 0.3654405517578125, 0.3640215759277344, 0.36210568237304686, 0.3638113403320313, 0.3639907531738281, 0.36003750610351565, 0.36185623168945313, 0.36696609497070315, 0.36472119140625, 0.3721820983886719, 0.3593215637207031, 0.35937741088867187, 0.3577214050292969, 0.3565188903808594, 0.3530903015136719, 0.36005718994140623, 0.3581804504394531, 0.35412307739257814, 0.3647520751953125, 0.36209042358398436, 0.36289410400390626, 0.3619071350097656, 0.35423434448242186, 0.3499942626953125, 0.3469398498535156]",tokens/s,174.6603634843312,kWh,1.0038366858638888e-05,1.1070565535310679e-06,3.844237099901329e-06,1.4989660512071282e-05,tokens/kWh,4202897.053556726,,s,1764,10.083379935741414,0.005716201777631193,0.0001948931429875152,0.005708719968795776,0.005855231761932373,0.005926224040985108,0.006348974881172176,"[0.005363167762756348, 0.005822463989257813, 0.0057738561630249026, 0.005904384136199951, 0.005644383907318115, 0.005681056022644043, 0.005863520145416259, 0.005745728015899659, 0.005659647941589355, 0.005614496231079101, 0.005614240169525147, 0.005717440128326416, 0.005684063911437988, 0.005664735794067383, 0.005610623836517334, 0.008348416328430176, 0.007155807971954345, 0.006796768188476562, 0.005865407943725586, 0.005787648200988769, 0.00588153600692749, 0.0058418879508972165, 0.005717120170593262, 0.005796735763549805, 
0.005763072013854981, 0.005846848011016845, 0.005696991920471192, 0.00565334415435791, 0.0057875199317932126, 0.005890048027038574, 0.006004735946655273, 0.005740255832672119, 0.005830944061279297, 0.0059269118309021, 0.005736447811126709, 0.005844480037689209, 0.005806655883789063, 0.005861311912536621, 0.005703680038452149, 0.005895679950714112, 0.005638336181640625, 0.0057422080039978025, 0.005798463821411133, 0.006015103816986084, 0.005813663959503174, 0.0057740159034729, 0.005812128067016601, 0.005687327861785889, 0.005746655941009522, 0.005722015857696533, 0.00589251184463501, 0.0071363520622253415, 0.0074973759651184085, 0.007583968162536621, 0.007523295879364014, 0.007686048030853271, 0.006280735969543457, 0.0057627840042114254, 0.005818975925445557, 0.005638144016265869, 0.00581004810333252, 0.005767295837402344, 0.005694975852966308, 0.005511136054992676, 0.005929791927337646, 0.005690815925598144, 0.005662591934204102, 0.00568012809753418, 0.005717696189880371, 0.0056945600509643556, 0.005744768142700195, 0.00603545618057251, 0.005845280170440674, 0.005718368053436279, 0.005732511997222901, 0.0057283201217651365, 0.0056769919395446775, 0.005750783920288086, 0.00572438383102417, 0.005737792015075684, 0.005698016166687012, 0.005750400066375732, 0.005718400001525879, 0.005629759788513183, 0.0057223038673400875, 0.005670911788940429, 0.005728096008300781, 0.005705056190490723, 0.005696320056915284, 0.005695648193359375, 0.005689280033111572, 0.005715583801269531, 0.005740543842315674, 0.005927199840545654, 0.005711391925811767, 0.005742527961730957, 0.005657120227813721, 0.005660672187805176, 0.005633024215698243, 0.0056126718521118165, 0.005717055797576904, 0.005566592216491699, 0.005591968059539795, 0.0056005120277404785, 0.0055138239860534665, 0.0055763840675354, 0.005521632194519043, 0.005533152103424073, 0.005562943935394287, 0.0056130561828613285, 0.005789504051208496, 0.005700287818908692, 0.005609439849853516, 0.005607456207275391, 0.005574656009674072, 0.005605120182037354, 0.0056282877922058105, 0.005625728130340576, 0.005576831817626953, 0.005610911846160888, 0.00579417610168457, 0.0055740799903869626, 0.00558787202835083, 0.005637343883514404, 0.0055874881744384765, 0.005640192031860352, 0.005306208133697509, 0.005599391937255859, 0.005612895965576172, 0.0056612157821655275, 0.005726592063903808, 0.0056399679183959965, 0.00561897611618042, 0.005642943859100342, 0.005550303936004639, 0.005558015823364258, 0.005477791786193848, 0.005565343856811523, 0.005574368000030518, 0.0056341118812561035, 0.005722367763519287, 0.005637248039245605, 0.0055875201225280766, 0.005643807888031006, 0.005546080112457275, 0.005572192192077637, 0.0055140161514282224, 0.005582848072052002, 0.005619711875915527, 0.005761216163635254, 0.005723616123199463, 0.005703167915344238, 0.005770080089569092, 0.005642240047454834, 0.005696800231933594, 0.0056650562286376955, 0.00557919979095459, 0.005578752040863037, 0.005609504222869873, 0.005683167934417725, 0.005680255889892578, 0.005715839862823487, 0.005730720043182373, 0.005725056171417236, 0.005799808025360107, 0.005840864181518555, 0.005670783996582031, 0.005660384178161621, 0.005624095916748047, 0.005765376091003418, 0.005687039852142334, 0.00567907190322876, 0.005640416145324707, 0.005713535785675049, 0.00567519998550415, 0.005761023998260498, 0.005759007930755615, 0.005697504043579101, 0.00572435188293457, 0.005609280109405518, 0.005666463851928711, 0.00568998384475708, 0.005695199966430664, 0.0056769919395446775, 0.005630015850067139, 
0.005611008167266846, 0.005679967880249023, 0.005877344131469726, 0.00586348819732666, 0.0055047359466552735, 0.005681920051574707, 0.00574399995803833, 0.005840799808502197, 0.005730144023895264, 0.005692063808441162, 0.005643807888031006, 0.0057021121978759765, 0.005840447902679444, 0.005818848133087158, 0.00584496021270752, 0.005726240158081055, 0.005861343860626221, 0.005814271926879883, 0.005701024055480957, 0.005783167839050293, 0.005790688037872314, 0.005850207805633545, 0.0058748478889465335, 0.005779263973236084, 0.005748191833496094, 0.005817056179046631, 0.005749855995178223, 0.005763743877410889, 0.005772543907165527, 0.0057699837684631345, 0.005926464080810547, 0.00581385612487793, 0.005827424049377442, 0.0058074240684509275, 0.005785920143127441, 0.005683584213256836, 0.005819392204284668, 0.005760000228881836, 0.005645440101623535, 0.0057021121978759765, 0.005650720119476318, 0.005666944026947021, 0.005685056209564209, 0.005841311931610108, 0.00621340799331665, 0.005961984157562256, 0.005829919815063476, 0.005745408058166504, 0.005802847862243652, 0.005733248233795166, 0.005705376148223877, 0.005697887897491455, 0.005709568023681641, 0.005721504211425781, 0.0056778559684753415, 0.005777472019195556, 0.005818143844604492, 0.005790207862854004, 0.006606560230255127, 0.00601638412475586, 0.005825151920318603, 0.005792799949645996, 0.005747615814208984, 0.0058000640869140625, 0.005828544139862061, 0.005766176223754883, 0.005790143966674805, 0.005511168003082275, 0.005699584007263184, 0.00586956787109375, 0.0057029762268066405, 0.005696191787719727, 0.0057602238655090334, 0.005755680084228516, 0.005705728054046631, 0.005654079914093018, 0.005660416126251221, 0.005701375961303711, 0.005642208099365234, 0.005655360221862793, 0.005609632015228272, 0.0057223038673400875, 0.00555788803100586, 0.005613408088684082, 0.005654751777648926, 0.005671040058135986, 0.005736447811126709, 0.005761023998260498, 0.005824512004852295, 0.005927999973297119, 0.005727136135101318, 0.005666848182678223, 0.005666463851928711, 0.005716608047485351, 0.0056275839805603025, 0.005715839862823487, 0.005605535984039307, 0.005612607955932617, 0.005602367877960205, 0.0057608962059021, 0.005713920116424561, 0.005576863765716553, 0.0056230401992797855, 0.005839456081390381, 0.005888000011444092, 0.005740543842315674, 0.005710976123809814, 0.00575929594039917, 0.005716544151306152, 0.00582860803604126, 0.005734399795532226, 0.005795839786529541, 0.005885280132293701, 0.005768127918243408, 0.0057918081283569334, 0.005820064067840576, 0.005898240089416504, 0.005869631767272949, 0.005870975971221924, 0.0056929922103881835, 0.005837920188903808, 0.006780831813812256, 0.005881855964660645, 0.006253983974456787, 0.005821087837219238, 0.0059575681686401365, 0.005961728096008301, 0.005793791770935058, 0.005992447853088379, 0.005800032138824463, 0.005485375881195068, 0.005795743942260742, 0.005752511978149414, 0.005775775909423828, 0.005796063899993897, 0.005768991947174072, 0.005758975982666016, 0.005736095905303955, 0.005664351940155029, 0.00577510404586792, 0.005796000003814698, 0.005731167793273925, 0.0056804800033569336, 0.005642911911010742, 0.005715424060821533, 0.005660511970520019, 0.005661151885986328, 0.005839072227478028, 0.005806240081787109, 0.0058999361991882325, 0.005788159847259521, 0.005778143882751465, 0.0056475200653076174, 0.005676864147186279, 0.005600639820098877, 0.005825151920318603, 0.005789984226226807, 0.005707488059997558, 0.005674208164215088, 0.005663072109222412, 0.0057084159851074215, 
0.005654592037200928, 0.005596447944641114, 0.005698016166687012, 0.005715072154998779, 0.0057597441673278805, 0.005687679767608643, 0.005772928237915039, 0.005636352062225342, 0.00564415979385376, 0.005944608211517334, 0.0058334717750549315, 0.0057346558570861815, 0.0056685757637023925, 0.0057118721008300784, 0.005711135864257813, 0.005970176219940186, 0.005781983852386475, 0.005805600166320801, 0.005720672130584717, 0.005736256122589111, 0.005697760105133056, 0.005697375774383545, 0.00572211217880249, 0.005761023998260498, 0.005891232013702392, 0.005804895877838135, 0.005756927967071533, 0.005758111953735352, 0.005762944221496582, 0.0057415361404418944, 0.005726367950439453, 0.0058141121864318844, 0.005404575824737549, 0.005677152156829834, 0.0056910719871520994, 0.005708064079284668, 0.00573033618927002, 0.005720064163208007, 0.005740543842315674, 0.005752960205078125, 0.005769087791442871, 0.005761023998260498, 0.005687327861785889, 0.005642047882080078, 0.005609407901763916, 0.005705952167510986, 0.005629631996154785, 0.005695903778076172, 0.005756896018981933, 0.005771200180053711, 0.005871935844421387, 0.0059040641784667966, 0.005836800098419189, 0.00581990385055542, 0.005949984073638916, 0.005717760086059571, 0.005789023876190185, 0.0056856322288513185, 0.005761536121368408, 0.0057794561386108395, 0.005758944034576416, 0.005738687992095947, 0.005722047805786133, 0.005824416160583496, 0.005697311878204346, 0.00587120008468628, 0.006078271865844726, 0.006076672077178955, 0.00574121618270874, 0.005704639911651611, 0.00583139181137085, 0.005869823932647705, 0.0057890558242797855, 0.005763711929321289, 0.006029407978057861, 0.005840320110321045, 0.005864960193634033, 0.0057374401092529295, 0.005787648200988769, 0.005793791770935058, 0.005761023998260498, 0.005715551853179931, 0.005788064002990723, 0.005812160015106201, 0.005710080146789551, 0.005766496181488037, 0.005751264095306396, 0.005711935997009278, 0.005767231941223145, 0.005784832000732422, 0.005776000022888183, 0.0057552638053894045, 0.005706463813781738, 0.005647071838378906, 0.005653984069824218, 0.005469727993011475, 0.005664512157440186, 0.005782015800476074, 0.005771135807037354, 0.00581440019607544, 0.005797088146209717, 0.00579369592666626, 0.005862271785736084, 0.005797887802124023, 0.005763072013854981, 0.005744895935058594, 0.005752575874328613, 0.0057571840286254885, 0.005752575874328613, 0.005660672187805176, 0.005782976150512696, 0.005717567920684815, 0.005767519950866699, 0.005769887924194336, 0.005758975982666016, 0.005812064170837402, 0.006074528217315674, 0.005924863815307617, 0.005695487976074219, 0.005855231761932373, 0.005784575939178467, 0.005788735866546631, 0.005713535785675049, 0.0058014078140258786, 0.005769248008728027, 0.005765984058380127, 0.0057118721008300784, 0.005709824085235596, 0.005819712162017822, 0.005876416206359863, 0.005675007820129394, 0.005767168045043946, 0.00584281587600708, 0.0059369921684265135, 0.005689631938934326, 0.005675007820129394, 0.005676640033721924, 0.005799967765808106, 0.005744448184967041, 0.005775360107421875, 0.005775936126708984, 0.00578268814086914, 0.005784416198730468, 0.005743616104125977, 0.005759679794311524, 0.0057736320495605465, 0.005803264141082763, 0.005767712116241455, 0.005750815868377685, 0.005763232231140137, 0.0057458882331848146, 0.005765952110290527, 0.005799935817718506, 0.005773024082183838, 0.005730112075805664, 0.0057770881652832035, 0.005711711883544922, 0.005708735942840576, 0.005533408164978027, 0.00578380823135376, 0.005711840152740478, 
0.005668000221252441, 0.005708703994750977, 0.00566476821899414, 0.005711647987365722, 0.00570304012298584, 0.005698112010955811, 0.005740831851959229, 0.005652480125427246, 0.0057489280700683595, 0.005721920013427735, 0.005732031822204589, 0.005732831954956055, 0.005685088157653809, 0.005672736167907715, 0.005640351772308349, 0.005814208030700684, 0.005705056190490723, 0.005741439819335938, 0.005744095802307129, 0.005687359809875488, 0.00569382381439209, 0.00564243221282959, 0.0056934719085693355, 0.0057710399627685545, 0.005683199882507324, 0.005666816234588623, 0.005617663860321045, 0.005606431961059571, 0.005639359951019287, 0.005656352043151855, 0.005666304111480713, 0.00567955207824707, 0.005855648040771484, 0.005824160099029541, 0.00586240005493164, 0.00570470380783081, 0.005769087791442871, 0.005685376167297363, 0.005772480010986328, 0.00579859209060669, 0.00576639986038208, 0.005737696170806885, 0.005660192012786865, 0.0056501121520996095, 0.0055604162216186526, 0.00559548807144165, 0.00561356782913208, 0.005774847984313965, 0.00571673583984375, 0.0057259521484375, 0.005786719799041748, 0.005739712238311768, 0.005701344013214111, 0.005629951953887939, 0.005697152137756348, 0.005749472141265869, 0.00574019193649292, 0.005731647968292237, 0.005655231952667236, 0.005699584007263184, 0.005382400035858154, 0.00582425594329834, 0.00571123218536377, 0.005734079837799072, 0.005757887840270996, 0.005632160186767578, 0.005740575790405274, 0.005762271881103516, 0.0058005437850952146, 0.005713920116424561, 0.005705376148223877, 0.00578172779083252, 0.005764416217803955, 0.0057391681671142575, 0.005685408115386963, 0.005686880111694336, 0.005701280117034912, 0.005723199844360352, 0.00571779203414917, 0.005715871810913086, 0.005734272003173828, 0.005738624095916748, 0.0057621440887451175, 0.005784480094909668, 0.005677055835723877, 0.005668863773345947, 0.005631999969482422, 0.00574019193649292, 0.005775455951690674, 0.005699840068817139, 0.00571782398223877, 0.005699168205261231, 0.005729119777679443, 0.005889664173126221, 0.005783679962158203, 0.005770751953125, 0.005732704162597657, 0.005743840217590332, 0.005725120067596436, 0.005699295997619629, 0.005697824001312256, 0.005817632198333741, 0.0056902079582214354, 0.005850783824920654, 0.005771488189697266, 0.005742591857910156, 0.0056975679397583, 0.005711840152740478, 0.005718016147613526, 0.005727839946746826, 0.0056975679397583, 0.0056928319931030275, 0.005728960037231445, 0.005716256141662598, 0.005695168018341064, 0.005743231773376465, 0.005647424221038818, 0.005902304172515869, 0.0057693438529968265, 0.0058347201347351075, 0.005833280086517334, 0.005799071788787842, 0.005802591800689697, 0.00551526403427124, 0.005851136207580566, 0.0058798398971557616, 0.005801951885223389, 0.006086656093597412, 0.005973951816558838, 0.005899456024169922, 0.005878015995025635, 0.0058579201698303225, 0.005958879947662353, 0.005860127925872803, 0.005941247940063477, 0.005777503967285156, 0.005842495918273926, 0.005865632057189941, 0.005851456165313721, 0.005803904056549072, 0.005738495826721191, 0.005741631984710694, 0.005725120067596436, 0.005816224098205566, 0.005732160091400146, 0.005776800155639648, 0.006029759883880615, 0.005737088203430176, 0.005830463886260987, 0.005834464073181152, 0.005843071937561035, 0.005898399829864502, 0.005730303764343261, 0.005718111991882324, 0.005898143768310547, 0.005826560020446778, 0.005804031848907471, 0.0058204479217529295, 0.005795839786529541, 0.0060210881233215335, 0.00572544002532959, 0.0057699837684631345, 
0.005772416114807129, 0.0057844481468200684, 0.005736480236053467, 0.005803647994995117, 0.005843296051025391, 0.005767168045043946, 0.0057875199317932126, 0.005781280040740967, 0.00585968017578125, 0.005795775890350342, 0.005850592136383057, 0.005768832206726074, 0.005839839935302734, 0.005799935817718506, 0.005806272029876709, 0.005750495910644532, 0.005793856143951416, 0.0058204479217529295, 0.005780608177185059, 0.0058090238571167, 0.005789631843566894, 0.00576313591003418, 0.005683199882507324, 0.005744639873504639, 0.0054926080703735355, 0.00583897590637207, 0.005803199768066406, 0.005839680194854736, 0.005799424171447754, 0.005845503807067871, 0.005761023998260498, 0.005713920116424561, 0.005736447811126709, 0.005689343929290771, 0.00570579195022583, 0.0056929922103881835, 0.005750879764556884, 0.005813632011413574, 0.005669568061828613, 0.0056724162101745605, 0.005643008232116699, 0.005618783950805664, 0.005667744159698486, 0.005744192123413086, 0.005677696228027344, 0.005648096084594726, 0.0056648640632629395, 0.005730432033538818, 0.005691455841064453, 0.005784512042999268, 0.0057496318817138676, 0.005793824195861817, 0.005887968063354492, 0.005783552169799804, 0.005686751842498779, 0.0058618240356445315, 0.005808224201202392, 0.0058776321411132815, 0.0057777280807495114, 0.005779263973236084, 0.0057775678634643555, 0.005693280220031739, 0.005720384120941162, 0.005705408096313477, 0.00576697587966919, 0.0058347201347351075, 0.005763360023498535, 0.005809247970581055, 0.005853248119354248, 0.005810272216796875, 0.0058149762153625485, 0.005816480159759521, 0.005809216022491455, 0.006040448188781738, 0.0057563199996948245, 0.005792384147644043, 0.005821824073791504, 0.005855743885040283, 0.0058364481925964355, 0.005990752220153809, 0.005844768047332764, 0.005775584220886231, 0.005881472110748291, 0.005824895858764648, 0.006186463832855225, 0.005811840057373047, 0.005841279983520508, 0.005581183910369873, 0.005904831886291504, 0.005787263870239258, 0.005875135898590088, 0.005774271965026855, 0.005826720237731934, 0.005789535999298096, 0.00577129602432251, 0.005822432041168213, 0.005795839786529541, 0.005806079864501953, 0.005752960205078125, 0.005866559982299804, 0.005785696029663086, 0.005800672054290771, 0.005829664230346679, 0.005891071796417236, 0.005891808032989502, 0.005820896148681641, 0.007003232002258301, 0.0060310401916503905, 0.006370304107666015, 0.006371327877044678, 0.005864704132080078, 0.005845151901245117, 0.0057944002151489255, 0.005951488018035888, 0.00588595199584961, 0.0059881601333618166, 0.005914080142974854, 0.005904384136199951, 0.005817056179046631, 0.005801152229309082, 0.005792799949645996, 0.0058458237648010255, 0.005764063835144043, 0.005819871902465821, 0.005770976066589355, 0.005729087829589844, 0.005951488018035888, 0.005762368202209473, 0.005862080097198487, 0.005793791770935058, 0.005800992012023926, 0.0057374401092529295, 0.0058154878616333006, 0.005800704002380371, 0.006069888114929199, 0.005826591968536377, 0.007065631866455078, 0.006196896076202393, 0.006114367961883545, 0.0061641278266906735, 0.006383039951324463, 0.005730879783630371, 0.0058624639511108394, 0.005784512042999268, 0.005682496070861817, 0.005847968101501465, 0.005705247879028321, 0.005703904151916504, 0.005688992023468018, 0.005652544021606446, 0.005308576107025146, 0.005795680046081543, 0.0056852478981018065, 0.00575705623626709, 0.005699456214904785, 0.00561356782913208, 0.005600992202758789, 0.005556511878967285, 0.005601280212402344, 0.0055289921760559085, 
0.005575263977050781, 0.005691391944885254, 0.005554272174835205, 0.005545887947082519, 0.005508639812469482, 0.005648928165435791, 0.005625792026519775, 0.005752831935882568, 0.005631807804107666, 0.00558409595489502, 0.005651423931121826, 0.005658815860748291, 0.005635903835296631, 0.005649663925170898, 0.0056471037864685054, 0.005656576156616211, 0.005592800140380859, 0.005595424175262451, 0.005676415920257569, 0.005726848125457764, 0.005675007820129394, 0.005690847873687744, 0.005636000156402588, 0.005716063976287842, 0.005693984031677246, 0.005687039852142334, 0.005728511810302734, 0.005893280029296875, 0.00582700777053833, 0.005714335918426514, 0.005726304054260254, 0.005682496070861817, 0.005697216033935547, 0.005868447780609131, 0.00584716796875, 0.005832032203674316, 0.005709568023681641, 0.0057300801277160645, 0.00567193603515625, 0.005720064163208007, 0.005627647876739502, 0.00572870397567749, 0.005768767833709717, 0.005841087818145752, 0.005840960025787353, 0.005804031848907471, 0.005862592220306396, 0.005780096054077148, 0.0058124160766601565, 0.0057868800163269046, 0.00585756778717041, 0.005750368118286133, 0.00573689603805542, 0.005390655994415283, 0.0057873277664184574, 0.005748896121978759, 0.005703135967254639, 0.005798272132873535, 0.005691391944885254, 0.005775008201599121, 0.005766560077667236, 0.0057654080390930175, 0.005716415882110595, 0.005804224014282227, 0.005807487964630127, 0.005837471961975098, 0.005989376068115234, 0.005823808193206787, 0.005891647815704346, 0.005734399795532226, 0.005711071968078614, 0.005696415901184082, 0.0059064321517944334, 0.005767168045043946, 0.0057396478652954105, 0.005726687908172608, 0.005658944129943847, 0.005724415779113769, 0.005664927959442139, 0.005879007816314698, 0.005783103942871094, 0.005716896057128907, 0.00590828800201416, 0.005797920227050782, 0.005793951988220215, 0.005775680065155029, 0.005788671970367432, 0.005673600196838379, 0.005723328113555908, 0.005673855781555176, 0.005666560173034668, 0.005692895889282227, 0.005585696220397949, 0.005631008148193359, 0.005640927791595459, 0.005678431987762451, 0.005643167972564697, 0.005576704025268555, 0.005537792205810547, 0.005519360065460205, 0.005533696174621582, 0.005511168003082275, 0.005554495811462402, 0.005766848087310791, 0.005701632022857666, 0.0056234879493713376, 0.005564735889434814, 0.0056993598937988285, 0.0056301760673522945, 0.005656383991241455, 0.0055484800338745114, 0.005488383769989014, 0.005504831790924072, 0.005512415885925293, 0.005625951766967774, 0.005585855960845947, 0.005251359939575195, 0.005586656093597412, 0.005561759948730469, 0.005630144119262695, 0.005683584213256836, 0.005651872158050537, 0.00567574405670166, 0.005590943813323974, 0.005594399929046631, 0.005535456180572509, 0.005531007766723633, 0.005602943897247315, 0.005638144016265869, 0.005676703929901123, 0.005628255844116211, 0.005652607917785645, 0.00559500789642334, 0.005619711875915527, 0.005683199882507324, 0.005596960067749023, 0.005611743927001953, 0.005855231761932373, 0.005675007820129394, 0.005578303813934326, 0.00565715217590332, 0.005633312225341797, 0.005558432102203369, 0.005679327964782715, 0.005648767948150635, 0.0059491519927978515, 0.005783679962158203, 0.006090432167053223, 0.005771520137786865, 0.006058047771453858, 0.00628111982345581, 0.006674528121948242, 0.005672959804534912, 0.005593088150024414, 0.005629951953887939, 0.005640384197235108, 0.005609280109405518, 0.005534848213195801, 0.005610496044158936, 0.005662591934204102, 0.00557260799407959, 
0.005586880207061768, 0.0055071358680725095, 0.00551043176651001, 0.005842912197113037, 0.005494783878326416, 0.005540607929229736, 0.0054939198493957515, 0.005571424007415771, 0.005530848026275635, 0.005548831939697266, 0.005763360023498535, 0.005597184181213379, 0.005734111785888672, 0.005758975982666016, 0.005746496200561523, 0.00568339204788208, 0.005599232196807862, 0.005643680095672608, 0.0052789759635925295, 0.005661312103271484, 0.0057051839828491215, 0.005630623817443847, 0.005695712089538574, 0.0056044478416442875, 0.005634751796722412, 0.005713280200958252, 0.005674752235412597, 0.005634943962097168, 0.005631711959838867, 0.0056462721824646, 0.00566102409362793, 0.0056415038108825685, 0.005706463813781738, 0.005654079914093018, 0.005685696125030518, 0.005513216018676758, 0.005595136165618897, 0.005623807907104492, 0.005701888084411621, 0.005738207817077637, 0.005680768013000488, 0.005673024177551269, 0.005697216033935547, 0.0056341438293457035, 0.0055989761352539065, 0.005641024112701416, 0.005691391944885254, 0.005681151866912842, 0.005686367988586426, 0.005625792026519775, 0.005708703994750977, 0.0056975998878479005, 0.005658624172210694, 0.005632256031036377, 0.005661439895629883, 0.00567193603515625, 0.005979455947875976, 0.005789696216583252, 0.005726943969726563, 0.005681280136108398, 0.00575705623626709, 0.005666816234588623, 0.005598400115966797, 0.005614175796508789, 0.0056053118705749515, 0.005601280212402344, 0.0055658559799194336, 0.005619520187377929, 0.005624608039855957, 0.005578688144683838, 0.005603392124176026, 0.005598879814147949, 0.005630303859710693, 0.005703680038452149, 0.005647744178771973, 0.005659296035766602, 0.005586912155151367, 0.005601280212402344, 0.005624127864837646, 0.0055840320587158205, 0.005653024196624756, 0.00541539192199707, 0.005766623973846435, 0.005779615879058838, 0.005686848163604736, 0.005654592037200928, 0.005609536170959472, 0.005669760227203369, 0.005651743888854981, 0.00566326379776001, 0.005918528079986572, 0.005608672142028808, 0.005591616153717041, 0.005576223850250244, 0.005516160011291504, 0.005615808010101318, 0.005586656093597412, 0.005573984146118164, 0.005599999904632568, 0.005658271789550781, 0.005658976078033447, 0.00557040023803711, 0.005600927829742431, 0.005490975856781006, 0.005998816013336182, 0.005589248180389405, 0.005555967807769776, 0.005534751892089844, 0.005470687866210937, 0.005489312171936035, 0.005443488121032715, 0.0055725440979003905, 0.005494175910949707, 0.005608255863189697, 0.005631775856018067, 0.005603328227996827, 0.005586944103240967, 0.0056212477684021, 0.005560832023620605, 0.005578752040863037, 0.005578720092773438, 0.005613152027130127, 0.005630752086639404, 0.00556441593170166, 0.005600927829742431, 0.005629280090332031, 0.005580480098724365, 0.005602528095245361, 0.005566207885742187, 0.00558460807800293, 0.005582687854766846, 0.005471903800964356, 0.005532447814941406, 0.005476128101348877, 0.005460192203521728, 0.005431295871734619, 0.0054971518516540524, 0.00548140811920166, 0.005704448223114013, 0.00551151990890503, 0.005528416156768799, 0.005524288177490235, 0.006071743965148926, 0.005527999877929688, 0.005290080070495606, 0.005603871822357178, 0.005486847877502442, 0.0063034558296203615, 0.005990687847137451, 0.005637887954711914, 0.00552780818939209, 0.005529888153076172, 0.005551839828491211, 0.005463935852050781, 0.00550105619430542, 0.005489727973937988, 0.005683231830596924, 0.0056200318336486815, 0.00554863977432251, 0.005673024177551269, 0.005610496044158936, 
0.00569974422454834, 0.005773119926452637, 0.005743584156036377, 0.005764128208160401, 0.005686240196228027, 0.005653952121734619, 0.005802559852600098, 0.005766848087310791, 0.005809951782226563, 0.005716512203216553, 0.0057502717971801755, 0.005712575912475586, 0.005918528079986572, 0.005713056087493897, 0.005749472141265869, 0.0058043842315673826, 0.0056911039352416995, 0.005697919845581055, 0.005871295928955078, 0.005746240139007568, 0.005632351875305175, 0.00576259183883667, 0.005646912097930909, 0.005694655895233154, 0.00582476806640625, 0.005791391849517822, 0.005752863883972168, 0.00580294418334961, 0.005754816055297852, 0.0056555838584899905, 0.005739583969116211, 0.005715744018554687, 0.005705887794494629, 0.005881824016571045, 0.005750783920288086, 0.005683231830596924, 0.005631968021392823, 0.0056769919395446775, 0.005646399974822998, 0.005650688171386719, 0.005762080192565918, 0.005623744010925293, 0.005749536037445068, 0.005670623779296875, 0.005755392074584961, 0.005891392230987549, 0.005349408149719239, 0.005750879764556884, 0.005731967926025391, 0.005730591773986817, 0.005695807933807373, 0.0056275839805603025, 0.005651999950408935, 0.0057021121978759765, 0.00578547191619873, 0.005710015773773194, 0.005742496013641358, 0.005687327861785889, 0.005705728054046631, 0.005756608009338379, 0.005685567855834961, 0.00570140790939331, 0.005679327964782715, 0.005717599868774414, 0.005658048152923584, 0.005739488124847412, 0.005691391944885254, 0.005641727924346924, 0.00567142391204834, 0.005545983791351319, 0.005582496166229248, 0.0056260800361633305, 0.005646368026733398, 0.005711967945098877, 0.005687295913696289, 0.005849088191986084, 0.0056442880630493165, 0.0056824002265930176, 0.005722847938537598, 0.005658688068389893, 0.005683040142059326, 0.005717343807220459, 0.005679935932159424, 0.005699584007263184, 0.00569379186630249, 0.0056072001457214355, 0.0055437440872192385, 0.005736800193786621, 0.005545599937438965, 0.005639391899108887, 0.005589888095855713, 0.005704832077026367, 0.005624639987945557, 0.005555552005767822, 0.005571296215057373, 0.005668863773345947, 0.0059489917755126956, 0.005763552188873291, 0.005660639762878418, 0.005708032131195069, 0.00561740779876709, 0.005820608139038086, 0.0058447041511535645, 0.006038815975189209, 0.005628704071044922, 0.005559679985046387, 0.005532063961029053, 0.005554687976837158, 0.005515007972717285, 0.005373119831085205, 0.005500703811645508, 0.005885439872741699, 0.005507584095001221, 0.005484543800354004, 0.005469600200653076, 0.005474815845489502, 0.0055419840812683105, 0.005521503925323487, 0.005570015907287598, 0.005545599937438965, 0.0055140480995178225, 0.0055316481590271, 0.005549791812896728, 0.005564703941345215, 0.005535744190216065, 0.0055214080810546875, 0.00551526403427124, 0.00556441593170166, 0.005517119884490967, 0.0056202239990234375, 0.0055559039115905765, 0.005491775989532471, 0.005489535808563232, 0.005959743976593017, 0.006100992202758789, 0.005500991821289062, 0.00548857593536377, 0.005557695865631104, 0.005539680004119873, 0.005564767837524414, 0.005494175910949707, 0.005536736011505127, 0.005774847984313965, 0.005628416061401367, 0.005687327861785889, 0.005486976146697998, 0.005506847858428955, 0.00551423978805542, 0.005522111892700196, 0.005595263957977295, 0.005535359859466553, 0.005754687786102295, 0.005661119937896728, 0.005607391834259033, 0.005658783912658691, 0.005617280006408692, 0.005612256050109864, 0.005666463851928711, 0.005678207874298096, 0.005706751823425293, 0.005706783771514893, 
0.005721248149871826, 0.005907360076904297, 0.0057307519912719726, 0.005837152004241944, 0.0056887040138244626, 0.005632639884948731, 0.005596640110015869, 0.005643807888031006, 0.0057290239334106445, 0.005615871906280517, 0.005877600193023681, 0.005796127796173096, 0.005780543804168701, 0.0058149762153625485, 0.005822463989257813, 0.005733471870422363, 0.005778336048126221, 0.005715680122375488, 0.005824863910675049, 0.005769408226013183, 0.005797855854034424, 0.0057381119728088376, 0.005702079772949219, 0.005739871978759766, 0.00570201587677002, 0.0057589120864868166, 0.005751999855041504, 0.00567519998550415, 0.005624703884124756, 0.005697343826293945, 0.0061123518943786625, 0.005643328189849853, 0.005672800064086914, 0.005668704032897949, 0.005619904041290283, 0.005617119789123535, 0.00559276819229126, 0.005608255863189697, 0.005618815898895264, 0.005686079978942871, 0.005767327785491943, 0.005771423816680908, 0.0057075839042663576, 0.00567903995513916, 0.005760416030883789, 0.00568995189666748, 0.0056483840942382815, 0.00571289587020874, 0.005706655979156494, 0.005750879764556884, 0.005703616142272949, 0.005810239791870117, 0.005808127880096436, 0.005799680233001709, 0.0057346558570861815, 0.005786623954772949, 0.005841695785522461, 0.005873087882995605, 0.0058288640975952145, 0.00578000020980835, 0.005963776111602783, 0.005720064163208007, 0.006107135772705078, 0.0060059518814086915, 0.005880671977996826, 0.005915743827819824, 0.005952383995056152, 0.005834239959716797, 0.006050303936004638, 0.005849215984344482, 0.005950431823730468, 0.00596614408493042, 0.005898848056793213, 0.0058215041160583495, 0.005396704196929932, 0.0057712640762329105, 0.005730112075805664, 0.005841087818145752, 0.00590172815322876, 0.005896800041198731, 0.005932864189147949, 0.005910719871520996, 0.005703680038452149, 0.005803936004638672, 0.0056976318359375, 0.0057257599830627445, 0.005714719772338867, 0.005655360221862793, 0.005704256057739257, 0.005820703983306885, 0.005773312091827393, 0.005685503959655762, 0.005796703815460205, 0.005787903785705566, 0.0057840957641601565, 0.005824319839477539, 0.005802303791046143, 0.005834879875183105, 0.005672063827514649, 0.005761824131011963, 0.005627359867095947, 0.005715839862823487, 0.0058336639404296875, 0.005740223884582519, 0.005715871810913086, 0.0056911039352416995, 0.005767551898956299, 0.005746175765991211, 0.005746592044830323, 0.005763167858123779, 0.005689472198486328, 0.005655200004577637, 0.005587935924530029, 0.0057064957618713375, 0.005827648162841797, 0.005542623996734619, 0.005841119766235352, 0.005748288154602051, 0.005823967933654785, 0.005684192180633545, 0.0056908798217773435, 0.005676959991455078, 0.005638751983642578, 0.005668863773345947, 0.005766367912292481, 0.00572870397567749, 0.005760672092437744, 0.005683040142059326, 0.005666719913482666, 0.005742656230926514, 0.005737343788146973, 0.005772319793701172, 0.00581935977935791, 0.005728256225585937, 0.005732351779937744, 0.005630303859710693, 0.0056705279350280765, 0.005398655891418457, 0.005712704181671142, 0.005783520221710205, 0.005771423816680908, 0.005769023895263672, 0.005845056056976318, 0.005850751876831054, 0.005787775993347168, 0.005750976085662842, 0.005764512062072754, 0.005825119972229004, 0.005822688102722168, 0.005806047916412354, 0.005998176097869873, 0.00588150405883789, 0.005712480068206787, 0.005750751972198486, 0.005712992191314698, 0.005612703800201416, 0.005709727764129638, 0.005693280220031739, 0.005598944187164306, 0.005699872016906738, 0.0056483840942382815, 
0.005654816150665283, 0.005730016231536865, 0.005679135799407959, 0.005662752151489258, 0.005607359886169434, 0.005742591857910156, 0.005646336078643799, 0.005702847957611084, 0.005757567882537842, 0.005732800006866455, 0.005811967849731445, 0.005982495784759521, 0.005694719791412353, 0.005720416069030762, 0.005759103775024414, 0.0058009600639343266, 0.0058971519470214845, 0.005891168117523193, 0.005796288013458252, 0.005783360004425048, 0.005824863910675049, 0.00576966381072998, 0.005791296005249023, 0.005894879817962647, 0.005770912170410156, 0.005765088081359863, 0.005830527782440186, 0.005783455848693848, 0.006082464218139649, 0.005738656044006348, 0.005686912059783935, 0.0055775041580200194, 0.0057032318115234374, 0.005674304008483887, 0.005780384063720703, 0.005730432033538818, 0.005649759769439698, 0.005649312019348145, 0.0056455998420715334, 0.005455872058868408, 0.005772736072540283, 0.005935616016387939, 0.005748960018157959, 0.005678880214691162, 0.005716032028198243, 0.005762368202209473, 0.005771967887878418, 0.005729951858520508, 0.005730656147003174, 0.005723455905914307, 0.00584281587600708, 0.005761856079101562, 0.005660704135894775, 0.005756896018981933, 0.005711584091186523, 0.005667263984680176, 0.005752831935882568, 0.005790688037872314, 0.005779551982879639, 0.005599487781524659, 0.005632287979125976, 0.005638400077819824, 0.005670048236846924, 0.005634079933166504, 0.005778495788574219, 0.005686880111694336, 0.0056854400634765625, 0.00583676815032959, 0.0056442880630493165, 0.005718080043792725, 0.005568064212799072, 0.005749311923980713, 0.00575878381729126, 0.005754879951477051, 0.005756927967071533, 0.005738527774810791, 0.0057775359153747555, 0.005738336086273194, 0.005730303764343261, 0.0057643518447875975, 0.005646719932556153, 0.005662496089935303, 0.0056735677719116214, 0.005826560020446778, 0.005708000183105469, 0.005642271995544433, 0.005708672046661377, 0.005686143875122071, 0.005725599765777588, 0.005739264011383057, 0.005731552124023437, 0.005735328197479248, 0.005920639991760254, 0.005818208217620849, 0.005797887802124023, 0.005785600185394287, 0.005774720191955567, 0.005816959857940674, 0.005799935817718506, 0.005795839786529541, 0.005865471839904785, 0.005846208095550537, 0.005549600124359131, 0.005777919769287109, 0.005736000061035156, 0.005646592140197754, 0.005677599906921387, 0.005676511764526367, 0.005752927780151367, 0.005669312000274658, 0.005666816234588623, 0.005692512035369873, 0.005659552097320557, 0.005623807907104492, 0.005600480079650879, 0.005576704025268555, 0.005647136211395264, 0.005654592037200928, 0.005637919902801514, 0.005629663944244385, 0.005683487892150879, 0.005638304233551025, 0.005664512157440186, 0.0057502717971801755, 0.005523903846740723, 0.005506815910339355, 0.006626175880432129, 0.005527359962463379, 0.005506944179534912, 0.005537792205810547, 0.0055114560127258305, 0.005525216102600098, 0.005584928035736084, 0.005527520179748535, 0.005482528209686279, 0.005409952163696289, 0.0053827519416809085, 0.005384416103363037, 0.005427231788635254, 0.005355743885040283, 0.005429279804229736, 0.005403552055358887, 0.005372543811798095, 0.005525023937225342, 0.005544991970062256, 0.005587679862976074, 0.006669087886810303, 0.006209184169769287, 0.005677152156829834, 0.005651072025299072, 0.005603072166442871, 0.005545055866241455, 0.005548031806945801, 0.0055848960876464845, 0.0055361919403076176, 0.005599103927612305, 0.005581408023834228, 0.005578591823577881, 0.005580575942993164, 0.005669504165649414, 0.005476096153259277, 
0.005466239929199219, 0.005549439907073975, 0.005496448040008545, 0.005407616138458252, 0.0051773438453674315, 0.005440767765045166, 0.005409535884857178, 0.005654016017913818, 0.005808640003204346, 0.005548031806945801, 0.0054579520225524905, 0.005367775917053222, 0.005395552158355713, 0.0054971837997436525, 0.005489215850830078, 0.005521183967590332, 0.0054635839462280275, 0.005444223880767822, 0.005451839923858642, 0.0054869441986083985, 0.005703328132629394, 0.00552345609664917, 0.005462016105651855, 0.005430560111999511, 0.0053994240760803225, 0.005516416072845459, 0.005617760181427002, 0.005769855976104736, 0.005990399837493897, 0.005553823947906494, 0.005533696174621582, 0.005588831901550293, 0.005720640182495117, 0.0055577921867370605, 0.005795968055725098, 0.005503263950347901, 0.005492703914642334, 0.005453887939453125, 0.005496799945831299, 0.00544982385635376, 0.005490592002868652, 0.005486591815948487, 0.005695487976074219, 0.005467807769775391, 0.005461599826812744, 0.005447840213775635, 0.005546592235565186, 0.005592671871185303, 0.005710239887237549, 0.005601280212402344, 0.005549952030181885, 0.005761375904083252, 0.005582623958587647, 0.005576704025268555, 0.005597184181213379, 0.005560480117797852, 0.005578495979309082, 0.005521503925323487, 0.005595200061798096, 0.005640448093414307, 0.005693120002746582, 0.0056310720443725586, 0.005532639980316162, 0.005558207988739013, 0.005459712028503418, 0.0054438400268554685, 0.005477759838104248, 0.005207104206085205, 0.005481311798095703, 0.005525599956512451, 0.005462016105651855, 0.005461567878723144, 0.005487232208251953, 0.00545363187789917, 0.0054988799095153805, 0.00550707197189331, 0.005502975940704345, 0.005531551837921142, 0.005460063934326172, 0.0054579200744628905, 0.005443520069122314, 0.005382207870483399, 0.0053844799995422365, 0.005467199802398682, 0.005425695896148682, 0.0054766077995300295, 0.005477375984191894, 0.005571167945861816, 0.005632192134857178, 0.005582464218139648, 0.0055751361846923825, 0.005514400005340576, 0.005483136177062988, 0.005503200054168701, 0.005472256183624268, 0.00547430419921875, 0.005439455986022949, 0.0055862398147583005, 0.005503744125366211, 0.006336448192596436, 0.00551251220703125, 0.005832992076873779, 0.005501376152038574, 0.005547488212585449, 0.005579296112060547, 0.005478400230407715, 0.005494783878326416, 0.005486720085144043, 0.005482367992401123, 0.005474143981933593, 0.005463903903961182, 0.005489280223846435, 0.0054531521797180176, 0.005433119773864746, 0.005450431823730469, 0.005451968193054199, 0.0054167361259460445, 0.005494688034057617, 0.005402624130249023, 0.005396480083465576, 0.005419007778167725, 0.005434400081634522, 0.005460447788238525, 0.00543177604675293, 0.005548064231872559, 0.005447423934936523, 0.005449600219726562, 0.0055437440872192385, 0.005570975780487061, 0.005478208065032959]",tokens/s,174.9413402293161,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,813.858816,576.585728,0.0,190.840832,172.878848,s,1,9.890080078125,9.890080078125,0.0,9.890080078125,9.890080078125,9.890080078125,9.890080078125,[9.890080078125],,kWh,1.441145162916655e-05,1.5823626253446684e-06,4.317225676002401e-06,2.0311039930513617e-05,,MB,1273.942016,685.637632,0.0,270.532608,241.723904,s,27,0.21110083198547364,0.00781854933279532,0.00010508465436072272,0.007810880184173584,0.007981830501556396,0.007991340732574462,0.008008021793365478,"[0.007984576225280761, 0.007823232173919678, 0.007856736183166504, 0.007924831867218018, 0.007980000019073487, 0.007844351768493652, 0.0077701759338378905, 0.007810880184173584, 0.007871391773223877, 0.007902080059051514, 0.007871424198150635, 0.007994239807128906, 0.007720255851745605, 0.00783238410949707, 0.007646495819091797, 0.0077478399276733395, 0.007697279930114746, 0.007718751907348633, 0.008012864112854004, 0.007801695823669434, 0.007906144142150878, 0.007751840114593506, 0.007787199974060058, 0.007780608177185059, 0.007776607990264892, 0.007658944129943847, 0.007627999782562256]",tokens/s,32742.64689054201,kWh,2.2971747066614734e-07,2.5333718605106384e-08,1.1756544067348122e-07,3.72616629944735e-07,tokens/kWh,687033211.6899047,MB,1320.845312,698.220544,0.0,283.11552,241.726464,s,27,9.978897064208985,0.3695887801558883,0.008063366634083096,0.37001638793945313,0.37936072387695313,0.3822731872558594,0.3908531640625,"[0.39378143310546876, 0.37032635498046873, 0.37780123901367185, 0.37131307983398437, 0.38251885986328127, 0.37755438232421873, 0.36874411010742186, 0.370843017578125, 0.381699951171875, 0.37229141235351565, 0.372091552734375, 0.36995538330078126, 0.37319244384765626, 0.36286627197265625, 0.3593752746582031, 0.3623958740234375, 0.36077423095703126, 0.3665907287597656, 0.37041094970703126, 0.37001638793945313, 0.3715799255371094, 0.36525271606445314, 0.363974853515625, 0.3647017822265625, 0.3553302307128906, 0.36166067504882815, 0.36185394287109374]",tokens/s,170.4597200527227,kWh,1.0411038211625422e-05,1.148162526768724e-06,3.918340565826452e-06,1.5477541304220595e-05,tokens/kWh,4070413.9476481597,,s,1701,9.963394266605386,0.005857374642331203,0.00024768873241902504,0.0058397121429443356,0.006029439926147461,0.006111519813537598,0.006956160068511963,"[0.005646336078643799, 0.005971327781677246, 0.0059889922142028805, 0.0059699201583862304, 0.005916607856750488, 0.006029248237609863, 0.005924992084503174, 0.005979584217071533, 0.005915200233459473, 0.006045695781707764, 0.006022719860076904, 0.006132160186767578, 0.005947391986846923, 0.006015359878540039, 0.005957248210906982, 0.006141664028167725, 0.006533408164978027, 0.0061337599754333495, 0.006165760040283203, 0.006068992137908936, 0.006157983779907226, 0.005907904148101806, 0.006058207988739014, 0.005903039932250977, 0.005955584049224853, 0.005928192138671875, 0.005974783897399903, 0.006078464031219482, 0.006160384178161621, 0.006367231845855713, 0.007347551822662354, 0.007451295852661133, 0.0072846078872680665, 0.007182464122772216, 0.006459392070770263, 0.006074368000030517, 0.006012159824371338, 0.005974688053131103, 0.00589737606048584, 0.0059560642242431645, 0.005879551887512207, 0.005984992027282715, 0.005982207775115967, 0.0059303040504455565, 0.005908415794372558, 0.005870336055755615, 0.006111231803894043, 0.006151999950408936, 0.00648198413848877, 0.006170752048492431, 0.007618559837341309, 0.0077472000122070315, 0.007811423778533936, 0.007744703769683838, 0.007723872184753418, 
0.005955552101135254, 0.006024608135223389, 0.005974656105041504, 0.005804031848907471, 0.005804031848907471, 0.006043647766113281, 0.005958911895751953, 0.005866367816925049, 0.005568831920623779, 0.0058464322090148926, 0.005931935787200928, 0.005859200000762939, 0.005895999908447266, 0.00586137580871582, 0.00582860803604126, 0.005931007862091065, 0.0059391999244689945, 0.005875423908233642, 0.005883647918701172, 0.005915103912353515, 0.005834112167358399, 0.005886911869049072, 0.005805823802947998, 0.005864799976348877, 0.005882431983947754, 0.005797728061676026, 0.005817984104156494, 0.005931647777557373, 0.005860415935516358, 0.0058269438743591305, 0.0058048319816589354, 0.005895967960357666, 0.005888095855712891, 0.005918176174163819, 0.005867616176605224, 0.005784192085266113, 0.0058652482032775875, 0.005768544197082519, 0.00589024019241333, 0.005908031940460205, 0.005923168182373047, 0.005941535949707031, 0.005978335857391357, 0.005810239791870117, 0.0058504638671875, 0.00586732816696167, 0.005810527801513672, 0.006015423774719238, 0.0058429441452026365, 0.005951488018035888, 0.00584716796875, 0.0059185919761657714, 0.005976064205169678, 0.0059351038932800295, 0.005963744163513184, 0.005948736190795899, 0.005753312110900879, 0.005751039981842041, 0.005731520175933838, 0.005868351936340332, 0.005797887802124023, 0.005842400074005127, 0.0058475837707519535, 0.006073535919189453, 0.005878592014312744, 0.00581331205368042, 0.005888832092285156, 0.005887104034423828, 0.005850111961364746, 0.005854879856109619, 0.005886303901672363, 0.005523712158203125, 0.0059539518356323245, 0.005881247997283935, 0.005908895969390869, 0.006170944213867187, 0.005883903980255127, 0.005801983833312988, 0.005890048027038574, 0.005811264038085937, 0.005940224170684814, 0.006110688209533691, 0.006228447914123535, 0.0076984319686889645, 0.00638976001739502, 0.006956160068511963, 0.006147103786468506, 0.005916351795196533, 0.005910783767700195, 0.005869472026824951, 0.005795711994171142, 0.005886079788208008, 0.005896192073822021, 0.005863423824310303, 0.005830399990081787, 0.005894464015960693, 0.0057914237976074215, 0.005875072002410889, 0.005921599864959717, 0.00590774393081665, 0.005912831783294678, 0.00585481595993042, 0.005841695785522461, 0.006042848110198975, 0.005940159797668457, 0.005974271774291992, 0.00610643196105957, 0.005930784225463867, 0.005958303928375244, 0.005840320110321045, 0.005907008171081543, 0.005994495868682862, 0.005988255977630615, 0.005934495925903321, 0.006010848045349121, 0.006017600059509277, 0.005992544174194336, 0.005908671855926514, 0.006008416175842285, 0.00595417594909668, 0.005918367862701416, 0.005930528163909912, 0.00598473596572876, 0.006112736225128174, 0.00614243221282959, 0.006099008083343506, 0.006053567886352539, 0.005928991794586181, 0.005912864208221435, 0.005836832046508789, 0.005933119773864746, 0.005887487888336182, 0.005924863815307617, 0.005762688159942627, 0.005695072174072266, 0.005941696166992187, 0.005873663902282715, 0.005838175773620605, 0.005984928131103516, 0.005879680156707764, 0.00590342378616333, 0.005802688121795655, 0.00586572790145874, 0.005906655788421631, 0.005897920131683349, 0.005890336036682129, 0.00580515193939209, 0.005843679904937744, 0.005846240043640137, 0.0058735361099243166, 0.005792031764984131, 0.005826720237731934, 0.005808608055114746, 0.005969247817993164, 0.005851808071136474, 0.0058726720809936525, 0.005907551765441894, 0.005789408206939697, 0.0059253120422363284, 0.005865151882171631, 0.005821631908416748, 
0.005818975925445557, 0.005867551803588867, 0.005775584220886231, 0.00582860803604126, 0.005783775806427002, 0.005826335906982422, 0.0057428798675537105, 0.005818079948425293, 0.006080383777618408, 0.005957759857177734, 0.005959680080413818, 0.0059055681228637694, 0.006039455890655517, 0.005845088005065918, 0.005862239837646484, 0.005880095958709717, 0.005887775897979736, 0.005848544120788574, 0.005835008144378662, 0.005863647937774658, 0.005947711944580078, 0.006161664009094238, 0.005973728179931641, 0.005914720058441162, 0.005911263942718506, 0.005847040176391601, 0.005867424011230468, 0.005826911926269531, 0.005962495803833008, 0.00601097583770752, 0.005946176052093506, 0.005832159996032715, 0.005857823848724365, 0.00585536003112793, 0.006148159980773926, 0.0059547839164733884, 0.005616384029388428, 0.005951039791107178, 0.005974207878112793, 0.006013216018676758, 0.005946656227111816, 0.005971968173980713, 0.0059987521171569825, 0.005988895893096924, 0.005986303806304932, 0.006086656093597412, 0.006000703811645508, 0.006004288196563721, 0.0060236802101135255, 0.00611520004272461, 0.0060928001403808595, 0.006041376113891601, 0.006099167823791504, 0.005990399837493897, 0.006039552211761475, 0.0060026879310607914, 0.005998623847961426, 0.005959551811218261, 0.006056064128875732, 0.005973983764648438, 0.006078144073486328, 0.0059944639205932616, 0.006017343997955322, 0.005953567981719971, 0.00606822395324707, 0.005957183837890625, 0.0062687678337097165, 0.005866079807281494, 0.006792640209197998, 0.005935679912567138, 0.006409791946411133, 0.006285759925842285, 0.006182655811309814, 0.0060412797927856445, 0.00674675178527832, 0.00634444808959961, 0.006012864112854004, 0.0063368000984191895, 0.0060395197868347164, 0.006049791812896729, 0.005944608211517334, 0.006039807796478272, 0.006039135932922364, 0.006076479911804199, 0.006039807796478272, 0.006037888050079346, 0.005951295852661133, 0.005947775840759277, 0.005967616081237793, 0.005918432235717774, 0.005912447929382324, 0.005894303798675537, 0.005933279991149902, 0.00596611213684082, 0.0071142082214355465, 0.00591107177734375, 0.00595904016494751, 0.00586406421661377, 0.006086656093597412, 0.005578656196594239, 0.006043456077575684, 0.005927648067474365, 0.005923935890197754, 0.00596611213684082, 0.006061728000640869, 0.0060018239021301266, 0.006102848052978516, 0.005980160236358643, 0.005994495868682862, 0.006040736198425293, 0.006067168235778808, 0.006084479808807373, 0.0060928001403808595, 0.005928095817565918, 0.0060709438323974605, 0.005898560047149658, 0.005932928085327149, 0.005969471931457519, 0.00609225606918335, 0.0059127678871154785, 0.006075168132781982, 0.005947264194488525, 0.006029439926147461, 0.00587775993347168, 0.005988351821899414, 0.005995840072631836, 0.006175168037414551, 0.006005343914031983, 0.006087776184082031, 0.005937727928161621, 0.006094816207885742, 0.006006624221801758, 0.006344511985778809, 0.006121856212615967, 0.006092639923095703, 0.006187168121337891, 0.006017151832580566, 0.005971104145050049, 0.005988480091094971, 0.0058906559944152835, 0.005963776111602783, 0.005924863815307617, 0.0060661759376525876, 0.005979360103607178, 0.005956607818603516, 0.005920479774475098, 0.0058932480812072755, 0.005846079826354981, 0.005909503936767578, 0.005796095848083496, 0.0059192957878112795, 0.005830207824707031, 0.0059079999923706055, 0.006191232204437256, 0.005943583965301514, 0.0057964158058166505, 0.005875743865966797, 0.00578275203704834, 0.005923583984375, 0.005957727909088135, 0.006178719997406006, 
0.005903711795806885, 0.005472127914428711, 0.00584441614151001, 0.00593171215057373, 0.005908480167388916, 0.005967872142791748, 0.0059697279930114745, 0.005925087928771972, 0.005963744163513184, 0.0059283838272094725, 0.005854112148284912, 0.00580787181854248, 0.005783455848693848, 0.005799935817718506, 0.006096896171569824, 0.0058388481140136715, 0.005937151908874512, 0.005855552196502686, 0.005861055850982666, 0.005816512107849121, 0.005779263973236084, 0.005776447772979736, 0.005813183784484863, 0.005804031848907471, 0.005713920116424561, 0.005889056205749511, 0.005872576236724853, 0.005888031959533691, 0.005804031848907471, 0.005836703777313232, 0.005908576011657715, 0.006086016178131103, 0.005977727890014648, 0.005833727836608887, 0.005888000011444092, 0.005752799987792968, 0.00572822380065918, 0.00570905590057373, 0.005727039813995361, 0.005736447811126709, 0.005758080005645752, 0.005753568172454834, 0.0057281599044799805, 0.00576032018661499, 0.005782464027404785, 0.0058217282295227055, 0.0057740478515625, 0.005790847778320313, 0.005908607959747314, 0.005866335868835449, 0.006003680229187011, 0.005741312026977539, 0.0057981438636779785, 0.005830592155456543, 0.005797887802124023, 0.005908127784729004, 0.005907872200012207, 0.005911551952362061, 0.005889247894287109, 0.0059723520278930665, 0.005878431797027588, 0.0057770881652832035, 0.005850207805633545, 0.00584496021270752, 0.0055231680870056156, 0.005851295948028565, 0.005736480236053467, 0.005770527839660645, 0.005681503772735596, 0.006018496036529541, 0.005682112216949463, 0.005732351779937744, 0.005652480125427246, 0.005719359874725342, 0.00565503978729248, 0.005676479816436768, 0.005742720127105713, 0.0056665921211242675, 0.005745503902435303, 0.005695583820343017, 0.005808032035827636, 0.005713183879852295, 0.005808671951293945, 0.005841087818145752, 0.00580998420715332, 0.005791935920715332, 0.005849088191986084, 0.005890143871307373, 0.005858304023742676, 0.005936031818389893, 0.005779232025146484, 0.00593942403793335, 0.005854432106018066, 0.005904831886291504, 0.00589356803894043, 0.005839263916015625, 0.005962240219116211, 0.005860896110534668, 0.0059560642242431645, 0.005882016181945801, 0.006016511917114258, 0.005887487888336182, 0.006064991950988769, 0.005894144058227539, 0.005908544063568115, 0.006182015895843506, 0.006058656215667725, 0.005975520133972168, 0.005951519966125488, 0.006015647888183593, 0.005914368152618408, 0.006002943992614746, 0.006001760005950927, 0.005964704036712646, 0.006045375823974609, 0.005927231788635254, 0.005986527919769287, 0.005991231918334961, 0.006068287849426269, 0.005988575935363769, 0.006027103900909424, 0.005915232181549072, 0.006084864139556885, 0.005959616184234619, 0.005965248107910156, 0.005884352207183838, 0.0058566718101501465, 0.00555456018447876, 0.006032800197601319, 0.006000639915466309, 0.005996575832366943, 0.005979775905609131, 0.005979199886322021, 0.0059443840980529785, 0.00606063985824585, 0.005964000225067139, 0.006096799850463867, 0.00595363187789917, 0.006048031806945801, 0.005914336204528809, 0.005928959846496582, 0.006060160160064697, 0.006009888172149658, 0.006080543994903565, 0.006003520011901855, 0.006092063903808594, 0.00602185583114624, 0.006113279819488526, 0.00605398416519165, 0.005971871852874756, 0.005903711795806885, 0.005976736068725586, 0.006011199951171875, 0.005976960182189941, 0.006085375785827637, 0.00593228816986084, 0.006009696006774902, 0.005963871955871582, 0.00604966402053833, 0.0059333758354187015, 0.0060368962287902835, 
0.006196703910827637, 0.00600761604309082, 0.005970975875854492, 0.005966176033020019, 0.005951424121856689, 0.005974751949310303, 0.0059508800506591795, 0.005956255912780762, 0.005988448143005371, 0.0058397121429443356, 0.007746431827545166, 0.009476287841796875, 0.005888192176818847, 0.00582422399520874, 0.005949440002441406, 0.0059361600875854495, 0.005917407989501953, 0.005894752025604248, 0.00587059211730957, 0.005963776111602783, 0.0058989119529724125, 0.005900063991546631, 0.006059584140777588, 0.005879968166351318, 0.005865983963012696, 0.005862656116485595, 0.0059348797798156735, 0.005835743904113769, 0.005844639778137207, 0.005540575981140137, 0.005889056205749511, 0.005934016227722168, 0.005824512004852295, 0.005924863815307617, 0.00584281587600708, 0.0059205121994018554, 0.005853568077087402, 0.005827744007110596, 0.005872479915618897, 0.006139039993286133, 0.005939583778381347, 0.005836895942687988, 0.005934783935546875, 0.005935808181762695, 0.005975743770599365, 0.00590880012512207, 0.005965280055999756, 0.00586191987991333, 0.005916287899017334, 0.00587772798538208, 0.005939104080200195, 0.005833216190338135, 0.005827775955200195, 0.00586630392074585, 0.0058856959342956545, 0.0058982081413269044, 0.006111519813537598, 0.0059740481376647945, 0.005926623821258545, 0.005818016052246093, 0.005888607978820801, 0.005937151908874512, 0.006033631801605224, 0.005931903839111328, 0.005896448135375976, 0.005892767906188965, 0.00597324800491333, 0.0059517440795898435, 0.005882271766662598, 0.005903584003448486, 0.005862271785736084, 0.005912543773651123, 0.005910496234893799, 0.005910560131072998, 0.006166560173034668, 0.005865471839904785, 0.005804031848907471, 0.005864799976348877, 0.005756864070892334, 0.005774367809295655, 0.0059081602096557615, 0.00584659194946289, 0.005968319892883301, 0.0058716158866882326, 0.005885248184204102, 0.005844992160797119, 0.0060003199577331546, 0.005866496086120605, 0.005916672229766846, 0.0058648958206176756, 0.005894239902496338, 0.00589462423324585, 0.005632768154144287, 0.0059012799263000485, 0.005929503917694092, 0.005910848140716553, 0.005996543884277344, 0.0059862079620361325, 0.005999936103820801, 0.005826560020446778, 0.005968512058258057, 0.005894303798675537, 0.0059344000816345215, 0.005925407886505127, 0.005963615894317627, 0.005933152198791504, 0.0059918718338012695, 0.005888607978820801, 0.005851456165313721, 0.005875584125518799, 0.005821792125701905, 0.00609503984451294, 0.0058700480461120605, 0.005907648086547852, 0.005888959884643555, 0.005957503795623779, 0.005918432235717774, 0.005912864208221435, 0.005887231826782227, 0.005970208168029785, 0.006029632091522217, 0.005982367992401123, 0.005928607940673828, 0.00597046422958374, 0.005928703784942627, 0.005954944133758545, 0.005837024211883545, 0.006307456016540527, 0.00603766393661499, 0.005978816032409668, 0.005952799797058105, 0.006009664058685303, 0.0058219838142395015, 0.005939487934112549, 0.005909759998321534, 0.005972959995269775, 0.0058587841987609865, 0.005813920021057129, 0.005788415908813477, 0.005712192058563232, 0.005758656024932861, 0.005703455924987793, 0.0057325758934021, 0.005808127880096436, 0.005744480133056641, 0.005724319934844971, 0.005851456165313721, 0.0057914237976074215, 0.005754720211029052, 0.0058083200454711915, 0.005791711807250977, 0.005799168109893798, 0.005849152088165283, 0.006002431869506836, 0.005999551773071289, 0.005666495800018311, 0.005963808059692383, 0.006019487857818604, 0.005879519939422607, 0.005851424217224121, 0.0057909760475158695, 
0.005792128086090088, 0.0058023362159729, 0.0058072319030761715, 0.005708703994750977, 0.005654016017913818, 0.005704063892364502, 0.005690720081329346, 0.005788320064544677, 0.005797599792480469, 0.005912864208221435, 0.0058056640625, 0.005908031940460205, 0.0058306241035461425, 0.005773727893829346, 0.005808608055114746, 0.005785408020019531, 0.005886144161224365, 0.0058791360855102535, 0.005790592193603516, 0.006393983840942383, 0.005860576152801514, 0.0060104641914367676, 0.0057495999336242675, 0.005729472160339356, 0.005794623851776123, 0.0058759040832519534, 0.005785247802734375, 0.005791007995605469, 0.005677792072296142, 0.0058362560272216795, 0.006025248050689698, 0.005919392108917236, 0.005863423824310303, 0.005850111961364746, 0.005825535774230957, 0.0057487359046936035, 0.005697535991668701, 0.005769375801086426, 0.005768320083618164, 0.006787968158721924, 0.007197696208953858, 0.005925727844238281, 0.005854432106018066, 0.005927743911743164, 0.0057725758552551265, 0.005815072059631348, 0.00580947208404541, 0.005818111896514893, 0.005720736026763916, 0.005777599811553955, 0.0057712640762329105, 0.005890048027038574, 0.005843296051025391, 0.005756639957427979, 0.005807807922363281, 0.005806335926055908, 0.005837024211883545, 0.005562431812286377, 0.005777408123016357, 0.005826560020446778, 0.005928192138671875, 0.005990911960601806, 0.005958208084106445, 0.005811647891998291, 0.0058382081985473636, 0.005809088230133056, 0.005808063983917236, 0.0059517440795898435, 0.007071487903594971, 0.006021120071411133, 0.00588105583190918, 0.005720863819122315, 0.006127615928649902, 0.00606822395324707, 0.00585097599029541, 0.0058648958206176756, 0.005997504234313965, 0.00581334400177002, 0.005966432094573974, 0.0059229121208190915, 0.006691840171813965, 0.005856256008148194, 0.0060026879310607914, 0.005918015956878662, 0.006094816207885742, 0.005959807872772217, 0.005966432094573974, 0.005908480167388916, 0.005893311977386475, 0.005854015827178955, 0.005801087856292725, 0.005890719890594483, 0.005821760177612305, 0.00589740800857544, 0.005846752166748047, 0.006409791946411133, 0.00590828800201416, 0.0058681597709655765, 0.005842207908630371, 0.0059006080627441405, 0.005961696147918701, 0.005889664173126221, 0.005780576229095459, 0.005867231845855713, 0.005856607913970947, 0.005917344093322754, 0.0057560958862304685, 0.0057413759231567385, 0.005796895980834961, 0.00571619176864624, 0.005856063842773438, 0.0057998719215393065, 0.00586300802230835, 0.005878335952758789, 0.005863135814666748, 0.005857408046722412, 0.005955584049224853, 0.005795328140258789, 0.005814720153808594, 0.0057543997764587405, 0.005416895866394043, 0.005890111923217773, 0.005836800098419189, 0.005810175895690918, 0.005793791770935058, 0.005895391941070556, 0.005916800022125244, 0.0058718080520629885, 0.00581440019607544, 0.005874015808105469, 0.00582860803604126, 0.005850175857543946, 0.005751743793487549, 0.005762752056121826, 0.005835072040557861, 0.005756447792053223, 0.005711328029632568, 0.005626880168914795, 0.005720287799835205, 0.005715263843536377, 0.005695968151092529, 0.005816319942474365, 0.005738431930541992, 0.005686367988586426, 0.0057794880867004395, 0.005703936100006104, 0.005694208145141602, 0.0056665921211242675, 0.005688831806182861, 0.005659296035766602, 0.005648640155792237, 0.005633279800415039, 0.005677055835723877, 0.005659135818481445, 0.0056720318794250485, 0.005624415874481201, 0.005572383880615235, 0.0057144641876220705, 0.005689343929290771, 0.005834208011627197, 0.005932640075683593, 
0.005812543869018555, 0.0057350401878356935, 0.006647871971130371, 0.006135744094848633, 0.005795839786529541, 0.005761023998260498, 0.005677055835723877, 0.00566476821899414, 0.005590623855590821, 0.005689504146575927, 0.005681407928466797, 0.0058037757873535155, 0.005746816158294678, 0.00566812801361084, 0.0057495999336242675, 0.005689695835113525, 0.005798943996429443, 0.005644927978515625, 0.005644095897674561, 0.005658720016479492, 0.005680960178375244, 0.005601312160491943, 0.005327040195465088, 0.005736224174499512, 0.005715487957000732, 0.005655488014221191, 0.005715360164642334, 0.005673344135284424, 0.005701536178588867, 0.005658783912658691, 0.005636127948760986, 0.005619520187377929, 0.005683199882507324, 0.005672959804534912, 0.005687295913696289, 0.005657824039459228, 0.005743391990661621, 0.005651968002319336, 0.005669600009918213, 0.0055981121063232425, 0.005696479797363281, 0.005650335788726806, 0.0055848960876464845, 0.005643936157226562, 0.005566495895385742, 0.005693855762481689, 0.005621664047241211, 0.005650432109832764, 0.005619264125823974, 0.005697984218597412, 0.005773056030273438, 0.005664576053619385, 0.006173120021820068, 0.005656576156616211, 0.005867519855499268, 0.005840320110321045, 0.005671008110046387, 0.0056795840263366695, 0.005642240047454834, 0.005844319820404053, 0.005661344051361084, 0.005785376071929932, 0.005590879917144775, 0.005703455924987793, 0.005726687908172608, 0.0056993279457092285, 0.005705344200134277, 0.005710591793060302, 0.005715968132019043, 0.005656576156616211, 0.005634047985076904, 0.005756896018981933, 0.00572214412689209, 0.005683199882507324, 0.0056945600509643556, 0.005668863773345947, 0.0057266240119934085, 0.005752639770507812, 0.0057842559814453125, 0.005750207901000976, 0.0057242240905761715, 0.005687104225158691, 0.005745344161987305, 0.005732416152954101, 0.005727551937103272, 0.005386240005493164, 0.005699935913085937, 0.005826208114624023, 0.006049791812896729, 0.005693439960479736, 0.005617504119873047, 0.005580959796905517, 0.005599071979522705, 0.005611680030822754, 0.005574143886566162, 0.005528063774108887, 0.005559391975402832, 0.005546175956726075, 0.005538527965545654, 0.005646336078643799, 0.005541215896606446, 0.005556640148162842, 0.005556479930877685, 0.005578720092773438, 0.00562179183959961, 0.0057192320823669434, 0.005643072128295898, 0.0056258559226989744, 0.005589248180389405, 0.0056434240341186525, 0.005657375812530518, 0.005631360054016113, 0.00558739185333252, 0.005580160140991211, 0.005794271945953369, 0.005676703929901123, 0.006140543937683106, 0.005771168231964111, 0.0057580161094665525, 0.0057376642227172855, 0.005631711959838867, 0.005605375766754151, 0.005689248085021973, 0.00580617618560791, 0.005646016120910645, 0.0070373439788818356, 0.007272352218627929, 0.006706655979156494, 0.005852704048156738, 0.006235136032104492, 0.005701632022857666, 0.005664735794067383, 0.005675327777862549, 0.005643904209136963, 0.005632095813751221, 0.005633535861968994, 0.005718336105346679, 0.0056665921211242675, 0.005599040031433106, 0.00561840009689331, 0.00579366397857666, 0.005711552143096924, 0.00571452808380127, 0.005664224147796631, 0.0057428798675537105, 0.005726175785064697, 0.005774687767028808, 0.005790336132049561, 0.005365280151367188, 0.005632991790771484, 0.005536960124969482, 0.0063146882057189945, 0.005553631782531738, 0.0057777280807495114, 0.005783648014068604, 0.005634272098541259, 0.005570559978485107, 0.005527552127838135, 0.005473696231842041, 0.005438047885894776, 0.0054774718284606935, 
0.005494847774505616, 0.005491199970245361, 0.005554207801818848, 0.005581088066101074, 0.005498079776763916, 0.005588128089904785, 0.005617023944854737, 0.005701344013214111, 0.005718719959259033, 0.0058776321411132815, 0.0058788161277771, 0.005759488105773926, 0.005764895915985107, 0.005763775825500488, 0.005826560020446778, 0.005660672187805176, 0.005682752132415771, 0.005767615795135498, 0.0058388481140136715, 0.0059269118309021, 0.005882944107055664, 0.005917759895324707, 0.005799808025360107, 0.0057612800598144534, 0.005712704181671142, 0.005817567825317383, 0.0057441282272338865, 0.005657055854797363, 0.005797632217407226, 0.005826911926269531, 0.006235583782196045, 0.0056975998878479005, 0.0057441282272338865, 0.005702144145965577, 0.0057387838363647465, 0.005627456188201904, 0.005621920108795166, 0.005618944168090821, 0.005703616142272949, 0.005712863922119141, 0.005719776153564453, 0.005717279911041259, 0.00581324815750122, 0.0057359042167663575, 0.005802688121795655, 0.005856800079345703, 0.005894464015960693, 0.005788703918457031, 0.005747680187225342, 0.0057133760452270504, 0.005468031883239746, 0.005699423789978028, 0.005711999893188477, 0.005699456214904785, 0.005683199882507324, 0.0057079682350158695, 0.005589087963104248, 0.005687007904052735, 0.005750495910644532, 0.005659039974212647, 0.005662144184112549, 0.0057758078575134274, 0.0057630081176757815, 0.0062362241744995115, 0.005666495800018311, 0.0056097922325134275, 0.00560649585723877, 0.00559606409072876, 0.005649407863616943, 0.005628992080688477, 0.005664480209350586, 0.005780896186828613, 0.00575929594039917, 0.005753344058990479, 0.005715072154998779, 0.0057413759231567385, 0.005744095802307129, 0.005677279949188233, 0.005861248016357422, 0.0058429760932922365, 0.005736703872680664, 0.005828991889953613, 0.0058468799591064454, 0.005860864162445068, 0.0058204798698425295, 0.0057758078575134274, 0.0063690562248229984, 0.005875040054321289, 0.0059073281288146975, 0.005955711841583252, 0.0060824317932128905, 0.0059716801643371585, 0.005896671772003174, 0.005898047924041748, 0.005904384136199951, 0.005984255790710449, 0.005992095947265625, 0.006154176235198975, 0.006039775848388672, 0.005836991786956787, 0.005838528156280518, 0.005897568225860596, 0.005876128196716309, 0.005897024154663086, 0.005762976169586181, 0.005792895793914795, 0.005806816101074219, 0.005860479831695556, 0.005837696075439453, 0.005834784030914307, 0.00577561616897583, 0.005745952129364014, 0.005929408073425293, 0.005647168159484863, 0.006058239936828613, 0.005932799816131592, 0.005893695831298828, 0.005828512191772461, 0.005917215824127197, 0.005857439994812011, 0.005902175903320313, 0.005844223976135254, 0.005718783855438232, 0.005705728054046631, 0.005752831935882568, 0.006187007904052734, 0.005804031848907471, 0.005809919834136963, 0.00578380823135376, 0.005804031848907471, 0.006045695781707764, 0.006076416015625, 0.005896192073822021, 0.006528672218322754, 0.006639584064483643, 0.006697343826293945, 0.006590464115142822, 0.005734399795532226, 0.005746687889099121, 0.005758304119110108, 0.005878431797027588, 0.005797887802124023, 0.005816192150115967, 0.005812511920928955, 0.00576694393157959, 0.0059515519142150876, 0.005797984123229981, 0.005847008228302002, 0.0058707199096679685, 0.005892928123474121, 0.005844992160797119, 0.005826560020446778, 0.005835008144378662, 0.005853280067443848, 0.005812032222747803, 0.005812064170837402, 0.005840896129608154, 0.005849088191986084, 0.005898240089416504, 0.005818016052246093, 0.005798463821411133, 
0.005738560199737549, 0.005753920078277588, 0.0056715841293334965, 0.005627200126647949, 0.005635903835296631, 0.005647232055664062, 0.005730271816253662, 0.005787680149078369, 0.005770592212677002, 0.005816800117492676, 0.005713856220245362, 0.005711775779724121, 0.005732960224151611, 0.00592460823059082, 0.005793216228485107, 0.005530911922454834, 0.005869311809539795, 0.005835743904113769, 0.005776607990264892, 0.005805183887481689, 0.0057768959999084475, 0.005822112083435058, 0.005884704113006592, 0.005802976131439209, 0.005800320148468018, 0.005818496227264405, 0.005912543773651123, 0.005860671997070312, 0.0057842879295349125, 0.005912831783294678, 0.005978240013122559, 0.0059779839515686035, 0.005986303806304932, 0.00587775993347168, 0.005951680183410644, 0.005842912197113037, 0.0059269118309021, 0.005889887809753418, 0.0058429441452026365, 0.005880000114440918, 0.0060227842330932615, 0.005951295852661133, 0.005934847831726074, 0.005923456192016602, 0.00591871976852417, 0.005895616054534912, 0.005887839794158935, 0.005917407989501953, 0.005864575862884521, 0.005874559879302978, 0.005900544166564941, 0.005842688083648681, 0.0058951997756958, 0.00582371187210083, 0.005840095996856689, 0.0059270401000976565, 0.005790304183959961, 0.005791168212890625, 0.005787775993347168, 0.005829951763153076, 0.005921728134155273, 0.005846784114837646, 0.006027520179748535, 0.005885727882385254, 0.0058841280937194824, 0.005868607997894287, 0.005906784057617188, 0.005793439865112305, 0.006015967845916748, 0.0058121919631958004, 0.005856287956237793, 0.005808832168579102, 0.005764480113983154, 0.005833759784698487, 0.005830527782440186, 0.005818367958068848, 0.005779168128967285, 0.00580841588973999, 0.00547430419921875, 0.005808127880096436, 0.005802271842956543, 0.005906144142150879, 0.005844992160797119, 0.005892096042633056, 0.0058122239112854005, 0.005885759830474854, 0.005952896118164063, 0.00588431978225708, 0.00583516788482666, 0.005838399887084961, 0.005894911766052246, 0.005846720218658447, 0.005881855964660645, 0.005836800098419189, 0.005854656219482422, 0.005816383838653565, 0.005999040126800537, 0.005779520034790039, 0.005959904193878174, 0.00586729621887207, 0.005940447807312012, 0.005910719871520996, 0.005853983879089355, 0.0061970877647399905, 0.005922783851623535, 0.00595465612411499, 0.005929952144622803, 0.0058858561515808105, 0.005862751960754395, 0.005946047782897949, 0.005979551792144776, 0.0059316158294677734, 0.005890048027038574, 0.006172544002532959, 0.0058369278907775875, 0.0058713917732238765, 0.0058206400871276855, 0.005902463912963867, 0.0058735361099243166, 0.00591871976852417, 0.006041600227355957, 0.005912576198577881, 0.005864640235900879, 0.005878592014312744, 0.005818367958068848, 0.005975488185882568, 0.00578604793548584, 0.005828735828399659, 0.005800992012023926, 0.0058355841636657715, 0.006093152046203613, 0.005795135974884034, 0.005986815929412842, 0.0060193600654602055, 0.0059860157966613765, 0.005917856216430664, 0.005825376033782959, 0.00595692777633667, 0.005810527801513672, 0.005832096099853515, 0.005782464027404785, 0.005709568023681641, 0.00583350419998169, 0.005814464092254639, 0.005836607933044434, 0.0058124160766601565, 0.005781504154205322, 0.005750783920288086, 0.00577942419052124, 0.00588105583190918, 0.00581440019607544, 0.005851712226867676, 0.005724639892578125, 0.005827968120574951, 0.0057695040702819824, 0.005732351779937744, 0.005738495826721191, 0.005834943771362304, 0.005754687786102295, 0.005809887886047364, 0.005856704235076904, 
0.005755743980407715, 0.005844992160797119, 0.00577510404586792, 0.005769087791442871, 0.0057450242042541505, 0.005703680038452149, 0.005730303764343261, 0.005752831935882568, 0.005813536167144776, 0.005739232063293457, 0.005765279769897461, 0.0056975998878479005, 0.00571724796295166, 0.0058551039695739744, 0.005781472206115723, 0.0059359679222106936, 0.0058512320518493656, 0.0058222079277038576, 0.005795839786529541, 0.005849088191986084, 0.005867519855499268, 0.005864543914794922, 0.005809279918670654, 0.005750400066375732, 0.005712031841278076, 0.005888000011444092, 0.005912543773651123, 0.005951168060302735, 0.005896543979644776, 0.005859551906585693, 0.0058561601638793944, 0.005911424160003662, 0.005947391986846923, 0.005791744232177734, 0.00578323221206665, 0.005738815784454346, 0.005711167812347412, 0.005720191955566406, 0.0056694397926330565, 0.005657824039459228, 0.005616159915924073, 0.005615647792816162, 0.005583072185516357, 0.0054371519088745116, 0.005892735958099365, 0.00576035213470459, 0.00579856014251709, 0.005666880130767822, 0.005790656089782715, 0.0057844481468200684, 0.005767295837402344, 0.00587494421005249, 0.0059604477882385255, 0.00588595199584961, 0.005900479793548584, 0.005911968231201172, 0.0058495039939880375, 0.005823999881744385, 0.005787648200988769, 0.005740799903869629, 0.0058197760581970214, 0.005857823848724365, 0.005765471935272217, 0.005770495891571045, 0.005720831871032715, 0.005689343929290771, 0.005746687889099121, 0.005712959766387939, 0.005745471954345703, 0.005909759998321534, 0.006101088047027588, 0.00605398416519165, 0.005980800151824952, 0.005876832008361817, 0.00597705602645874, 0.005785600185394287, 0.005846655845642089, 0.005888383865356445, 0.005889279842376709, 0.005812992095947265, 0.005809279918670654, 0.005856448173522949, 0.00588972806930542, 0.0058080959320068355, 0.005765151977539062, 0.005900288105010986, 0.005760191917419433, 0.005786719799041748, 0.0057067198753356935, 0.0057404799461364744, 0.005634880065917969, 0.005578335762023926, 0.005701856136322021, 0.005599487781524659, 0.005739967823028564, 0.0057608962059021, 0.005714560031890869, 0.00562614393234253, 0.005568575859069824, 0.005600927829742431, 0.0055760002136230466, 0.005530111789703369, 0.005511360168457031, 0.005557248115539551, 0.005555232048034668, 0.005578720092773438, 0.005789408206939697, 0.005937151908874512, 0.005883808135986328, 0.0058763518333435055, 0.006107135772705078, 0.005899903774261475, 0.005777791976928711, 0.00595468807220459, 0.005820543766021729, 0.0057762241363525394, 0.005686463832855225, 0.005705503940582275, 0.005788928031921387, 0.005729983806610108, 0.005712063789367676, 0.00573529577255249, 0.005794079780578614, 0.005810848236083984, 0.00590396785736084, 0.0059391999244689945, 0.005937439918518066, 0.005869696140289307, 0.005899487972259521, 0.005902688026428223, 0.005851583957672119, 0.006003903865814209, 0.0058355841636657715, 0.00586137580871582, 0.005791744232177734, 0.005744351863861084, 0.005616960048675537, 0.00572495985031128, 0.005624000072479248, 0.0055920958518981935, 0.005680416107177734, 0.005654496192932129, 0.005630720138549805, 0.005670207977294922, 0.0058733119964599605, 0.005736447811126709, 0.005744480133056641, 0.005732511997222901, 0.0056863360404968265, 0.005733312129974365, 0.005945375919342041, 0.005768159866333008, 0.00573747205734253, 0.005671008110046387, 0.005730175971984863, 0.005638239860534668, 0.0056888961791992185, 0.0056631040573120114, 0.005808127880096436, 0.005731391906738281, 0.005734335899353027, 
0.0057743039131164554, 0.0057400321960449216, 0.0057470078468322755, 0.005703968048095703, 0.005690432071685791, 0.005775936126708984, 0.005801472187042236, 0.005761055946350097, 0.0053788161277770995, 0.005666816234588623, 0.0056908798217773435, 0.0057617278099060054, 0.0057710719108581545, 0.005784607887268066, 0.005706143856048584, 0.005727807998657227, 0.005669023990631104, 0.005636960029602051, 0.005635744094848633, 0.005588831901550293, 0.005599743843078613, 0.005556352138519287, 0.00555350399017334, 0.005592895984649658, 0.005608160018920899, 0.005654528141021729, 0.005675007820129394, 0.005582848072052002, 0.005696767807006836, 0.005753824234008789, 0.0057482562065124514, 0.005708032131195069, 0.005654528141021729, 0.005691391944885254, 0.005666175842285156, 0.00565667200088501, 0.005667551994323731, 0.0056748161315917965, 0.005681375980377198, 0.005639520168304444, 0.0056078720092773435, 0.00557862377166748, 0.0058525438308715825, 0.0056020479202270506, 0.005584288120269776, 0.0057391037940979, 0.00559500789642334, 0.005568480014801026, 0.005488800048828125, 0.005502975940704345, 0.005505023956298828, 0.005511007785797119, 0.005523839950561524, 0.005525599956512451, 0.005559999942779541, 0.005662720203399658, 0.005631999969482422, 0.005709824085235596, 0.0057259840965271, 0.005832736015319824, 0.005771679878234863, 0.005668672084808349, 0.005588960170745849, 0.005532864093780517, 0.00552185583114624, 0.005511744022369385, 0.005603136062622071, 0.005661791801452637, 0.005565343856811523, 0.005530655860900879, 0.005483520030975342, 0.005344543933868408, 0.0057413759231567385, 0.005736159801483154, 0.005656864166259766, 0.005703680038452149, 0.00572111988067627, 0.005712448120117187, 0.005668511867523193, 0.005804800033569336, 0.005708992004394531, 0.005767168045043946, 0.005761856079101562, 0.005818528175354004, 0.006264575958251953, 0.005801152229309082, 0.005860576152801514, 0.005805984020233155, 0.005797664165496826, 0.005797791957855225, 0.0058260798454284664, 0.005675839900970459, 0.005823647975921631, 0.005787456035614013, 0.005814784049987793, 0.00580841588973999, 0.005844992160797119, 0.005815807819366455, 0.005777023792266846, 0.005788544178009033, 0.005844992160797119, 0.0060128321647644044, 0.005824416160583496, 0.00602950382232666, 0.0057768959999084475, 0.0058692159652709965, 0.0058089919090271, 0.005767168045043946, 0.005697535991668701, 0.005664896011352539, 0.005710783958435058, 0.005665664196014404, 0.005619616031646729, 0.00563420820236206, 0.005654208183288574, 0.005645760059356689, 0.005571455955505371, 0.005584832191467285, 0.0056590080261230465, 0.005506847858428955, 0.005609375953674316, 0.005711552143096924, 0.005641727924346924, 0.005528384208679199, 0.005642047882080078, 0.0056733121871948245, 0.005659647941589355, 0.005786464214324951, 0.00562332820892334, 0.00560972785949707, 0.005677279949188233, 0.005699584007263184, 0.005684607982635498, 0.005570367813110351, 0.005627456188201904, 0.005591904163360596, 0.005582848072052002, 0.0060663681030273435, 0.0057231678962707516, 0.005818848133087158, 0.006594719886779785, 0.00690723180770874, 0.005784383773803711, 0.0056459841728210445, 0.0056179518699646, 0.005597407817840576, 0.005599071979522705, 0.005868800163269043, 0.005624288082122802, 0.005583199977874756, 0.005660128116607666, 0.005747168064117431, 0.005541664123535156, 0.00551478385925293, 0.00571827220916748, 0.0057608637809753414, 0.005605599880218506, 0.005692959785461426, 0.005691616058349609, 0.005808640003204346, 0.005804160118103027, 
0.005918528079986572, 0.005835072040557861, 0.005762944221496582, 0.0058014078140258786, 0.005774112224578858, 0.00573967981338501, 0.005659264087677002, 0.005653664112091065, 0.005915487766265869, 0.005830656051635743, 0.0057322559356689454, 0.005701151847839356, 0.005798208236694336, 0.005566336154937744, 0.005498847961425781, 0.005484960079193115, 0.00546611213684082, 0.005607359886169434, 0.005547552108764648, 0.005677120208740234, 0.0058406081199646, 0.005583615779876709, 0.00566374397277832, 0.005642496109008789, 0.005824384212493897, 0.005818975925445557, 0.0056650562286376955, 0.005668831825256348, 0.0057160000801086425, 0.005799935817718506, 0.005804031848907471, 0.005860479831695556, 0.0056756801605224606, 0.005709824085235596, 0.005646624088287353, 0.005619232177734375]",tokens/s,170.72495120476117,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,811.331584,576.585728,0.0,190.840832,172.878848,s,1,9.860083984375,9.860083984375,0.0,9.860083984375,9.860083984375,9.860083984375,9.860083984375,[9.860083984375],,kWh,1.3193555870842981e-05,1.4477387263524017e-06,4.320281234003465e-06,1.896157583119885e-05,,MB,1259.286528,685.637632,0.0,270.532608,241.723904,s,35,0.22474726390838623,0.006421350397382464,0.00020045011425225804,0.006418848037719726,0.0065628928184509275,0.0066384481430053705,0.007098124990463255,"[0.006745632171630859, 0.006402815818786621, 0.00611414384841919, 0.006206783771514892, 0.006093344211578369, 0.00620684814453125, 0.006444191932678223, 0.006261184215545655, 0.006303296089172363, 0.006184319972991944, 0.007279712200164795, 0.006269983768463135, 0.006372223854064942, 0.006396383762359619, 0.0062988481521606444, 0.006390880107879639, 0.0064395198822021485, 0.006418848037719726, 0.0065550398826599125, 0.0064834880828857425, 0.006422016143798828, 0.006568128108978271, 0.0063333120346069334, 0.006592512130737305, 0.006393184185028076, 0.00648960018157959, 0.006506591796875, 0.0065218877792358396, 0.00650710391998291, 0.0064291200637817385, 0.006430399894714355, 0.006315392017364502, 0.006444831848144531, 0.006515872001647949, 0.006409823894500732]",tokens/s,39867.003691988735,kWh,1.8731426420090715e-07,2.0656963804141445e-08,9.819256179460582e-08,3.0616378979965444e-07,tokens/kWh,836153746.8801248,MB,1305.919488,698.220544,0.0,283.11552,241.726464,s,35,10.044201721191405,0.2869771920340402,0.0078953739461364,0.287510986328125,0.29676428222656254,0.29813898620605467,0.2993614898681641,"[0.28218157958984375, 0.2768486633300781, 0.2721372985839844, 0.274162841796875, 0.275233154296875, 0.2759754638671875, 0.28022442626953126, 0.27968316650390623, 0.28046820068359374, 0.2817784423828125, 0.29033914184570314, 0.2832670288085937, 0.2915084228515625, 0.2861893310546875, 0.28468902587890627, 0.2795397644042969, 0.29263555908203126, 0.2941939392089844, 0.29223031616210937, 0.290689453125, 0.2942028503417969, 0.287510986328125, 
0.2969974365234375, 0.29802728271484374, 0.29641455078125, 0.29985699462890625, 0.2963609619140625, 0.2882708740234375, 0.29473635864257813, 0.2983996276855469, 0.29329318237304686, 0.28622640991210935, 0.2922865905761719, 0.2792242431640625, 0.27841815185546875]",tokens/s,219.52964120063996,kWh,8.010959841123188e-06,8.834711623255697e-07,3.2168949668167517e-06,1.2111325970265511e-05,tokens/kWh,5201742.580017345,,s,2205,10.024763295650471,0.00454637791185963,0.00021271863398341598,0.004549312114715576,0.004737657451629638,0.004798745727539062,0.005214969730377198,"[0.004276415824890137, 0.004593472003936768, 0.004609504222869873, 0.00460649585723877, 0.0046694397926330565, 0.004733151912689209, 0.0047199039459228515, 0.004687583923339843, 0.00463270378112793, 0.004526944160461426, 0.004499135971069336, 0.004458271980285645, 0.004362592220306397, 0.004319488048553467, 0.00430617618560791, 0.004334080219268799, 0.004325376033782959, 0.004327328205108643, 0.0043951039314270015, 0.004342976093292236, 0.004545152187347412, 0.004462783813476563, 0.004398335933685302, 0.004318048000335693, 0.004331424236297608, 0.004421631813049317, 0.004367775917053222, 0.004316127777099609, 0.00432476806640625, 0.004311264038085938, 0.004302175998687744, 0.004352992057800293, 0.004308351993560791, 0.004360511779785157, 0.004478975772857666, 0.004411712169647217, 0.004421216011047363, 0.004769824028015137, 0.004454495906829834, 0.004673024177551269, 0.004692543983459472, 0.00542409610748291, 0.004620287895202637, 0.004562880039215088, 0.004525152206420899, 0.004476287841796875, 0.00460649585723877, 0.00440934419631958, 0.004675583839416504, 0.004403200149536133, 0.004347519874572754, 0.004347968101501465, 0.0045223040580749515, 0.004370431900024414, 0.004334688186645508, 0.004354239940643311, 0.004455359935760498, 0.004378528118133545, 0.004321152210235595, 0.004312863826751709, 0.004562911987304688, 0.004426208019256592, 0.004481855869293213, 0.0040915517807006835, 0.004387455940246582, 0.004315264225006103, 0.004281983852386474, 0.004319200038909912, 0.004370848178863526, 0.004361728191375733, 0.004426432132720947, 0.004366432189941406, 0.004352735996246338, 0.005040351867675781, 0.004414239883422852, 0.0043862080574035645, 0.004434783935546875, 0.004368544101715088, 0.004370016098022461, 0.005365248203277588, 0.005673471927642822, 0.005711840152740478, 0.005709311962127686, 0.0057775678634643555, 0.004843167781829834, 0.004281343936920166, 0.004288224220275879, 0.0042065601348876954, 0.004233248233795166, 0.004318655967712403, 0.004201119899749756, 0.004193215847015381, 0.004178751945495605, 0.004196512222290039, 0.004198400020599366, 0.004253376007080078, 0.0042417278289794925, 0.004233312129974365, 0.00421673583984375, 0.0041933441162109375, 0.004229856014251709, 0.004196224212646485, 0.004200799942016602, 0.004185215950012207, 0.004230303764343262, 0.00419811201095581, 0.0041840639114379885, 0.004208064079284668, 0.00418668794631958, 0.0041976318359375, 0.004177855968475342, 0.004253695964813233, 0.004264768123626709, 0.004236800193786621, 0.004168191909790039, 0.004182015895843506, 0.004182015895843506, 0.004182015895843506, 0.0041998400688171384, 0.004186592102050781, 0.004215199947357178, 0.004191840171813965, 0.004268256187438965, 0.00432969617843628, 0.004281599998474121, 0.004232960224151611, 0.003926975965499878, 0.004183872222900391, 0.0041669120788574215, 0.004186912059783936, 0.004182400226593018, 0.004204480171203613, 0.004194143772125244, 0.004167520046234131, 0.004238751888275147, 
0.004252255916595459, 0.0042434239387512205, 0.004241856098175048, 0.004574336051940918, 0.0048050560951232914, 0.004257984161376953, 0.004229119777679443, 0.004259840011596679, 0.004269152164459228, 0.004282815933227539, 0.004231647968292236, 0.004276447772979737, 0.004304384231567383, 0.004317344188690186, 0.0042988801002502445, 0.004238463878631592, 0.004243807792663574, 0.004337600231170654, 0.0045164480209350585, 0.004302720069885254, 0.004257887840270996, 0.004246560096740723, 0.004531551837921142, 0.004255392074584961, 0.004316287994384766, 0.004295551776885986, 0.004284416198730469, 0.004302847862243653, 0.004270304203033448, 0.004255008220672607, 0.00426035213470459, 0.0043249278068542485, 0.004377376079559326, 0.004338496208190918, 0.004368447780609131, 0.004379424095153808, 0.004293791770935059, 0.004303711891174317, 0.004302783966064453, 0.0043274879455566405, 0.004384768009185791, 0.0043089919090271, 0.004413440227508545, 0.004345727920532227, 0.0043844799995422365, 0.00439244794845581, 0.00443225622177124, 0.004616767883300781, 0.004417503833770752, 0.0043376641273498535, 0.0043151359558105465, 0.004347743988037109, 0.004362304210662842, 0.004357312202453613, 0.004160863876342773, 0.004356959819793701, 0.004368319988250733, 0.004523519992828369, 0.00451200008392334, 0.004483200073242187, 0.004446208000183105, 0.004423808097839355, 0.004399040222167969, 0.00443552017211914, 0.004458879947662354, 0.00451584005355835, 0.004495520114898682, 0.0045586881637573245, 0.004532224178314209, 0.0045240321159362796, 0.00451910400390625, 0.004477791786193848, 0.004439616203308105, 0.004468959808349609, 0.004387008190155029, 0.004501855850219726, 0.004322591781616211, 0.004278336048126221, 0.0042576961517333985, 0.004254271984100342, 0.0042555837631225585, 0.0042292160987854, 0.004233119964599609, 0.004266016006469727, 0.004237279891967773, 0.004255263805389404, 0.004263936042785644, 0.004282527923583984, 0.004278560161590576, 0.004273952007293701, 0.004300320148468017, 0.004254720211029053, 0.004292319774627686, 0.004259391784667969, 0.004351647853851318, 0.004272831916809082, 0.004296383857727051, 0.004313504219055176, 0.004270048141479492, 0.004253632068634034, 0.004275743961334229, 0.00431328010559082, 0.004296639919281006, 0.004261568069458008, 0.004271903991699219, 0.0043673281669616695, 0.004395008087158203, 0.004388864040374756, 0.004362400054931641, 0.004317024230957031, 0.004290112018585205, 0.004274432182312012, 0.004379839897155762, 0.004274208068847657, 0.00425439977645874, 0.00421507215499878, 0.004205728054046631, 0.00392192006111145, 0.004245503902435303, 0.004245503902435303, 0.004224480152130127, 0.004301631927490234, 0.00422492790222168, 0.004275936126708985, 0.004292704105377197, 0.004322463989257812, 0.00428272008895874, 0.004279903888702393, 0.004341919898986816, 0.004251455783843994, 0.004249536037445069, 0.0042687997817993165, 0.004372735977172851, 0.004261888027191162, 0.004257279872894287, 0.004253215789794922, 0.004210944175720215, 0.004219615936279297, 0.0042781119346618655, 0.004405407905578613, 0.004481279850006103, 0.004523776054382324, 0.004388351917266845, 0.004352511882781983, 0.004354047775268555, 0.004347904205322266, 0.004516960144042969, 0.004483071804046631, 0.004445055961608887, 0.004456639766693115, 0.004402976036071777, 0.004397215843200683, 0.004379968166351319, 0.004325600147247315, 0.004338175773620606, 0.004335360050201416, 0.004341951847076416, 0.004294847965240479, 0.004327167987823486, 0.004431871891021728, 0.005652480125427246, 
0.0045649919509887695, 0.004380671977996826, 0.004374239921569824, 0.004333856105804443, 0.004274112224578857, 0.0042949762344360355, 0.004341504096984863, 0.004276480197906494, 0.004446239948272705, 0.004398816108703614, 0.0044646401405334475, 0.004331935882568359, 0.004281951904296875, 0.004265984058380127, 0.00430079984664917, 0.004340832233428955, 0.004755680084228516, 0.004369088172912598, 0.004357664108276367, 0.0040096001625061035, 0.004327807903289795, 0.0043985600471496585, 0.004373407840728759, 0.004355519771575928, 0.00437062406539917, 0.004362432003021241, 0.00438486385345459, 0.0043879680633544926, 0.004389440059661865, 0.004413856029510498, 0.004341248035430908, 0.004310848236083985, 0.004355807781219482, 0.004252575874328614, 0.004286208152770996, 0.004251967906951904, 0.004315040111541748, 0.004237472057342529, 0.004398848056793213, 0.004280511856079102, 0.004258815765380859, 0.004298975944519043, 0.004280992031097412, 0.004264063835144043, 0.00440934419631958, 0.004333568096160889, 0.004376863956451416, 0.004287744045257568, 0.004259520053863526, 0.004285024166107178, 0.0043105602264404295, 0.004284319877624512, 0.004297247886657715, 0.004350175857543945, 0.004456704139709473, 0.0043435201644897465, 0.0043970241546630855, 0.004423744201660156, 0.004352000236511231, 0.004437664031982422, 0.0046106882095336916, 0.004463840007781983, 0.004325727939605713, 0.004364416122436523, 0.004407328128814697, 0.004453728199005127, 0.0043711037635803226, 0.0044011521339416505, 0.004347519874572754, 0.0043155198097229, 0.004304128170013428, 0.004314112186431885, 0.004449376106262207, 0.004410336017608643, 0.004378015995025635, 0.004335807800292969, 0.004330880165100097, 0.004327775955200196, 0.004370016098022461, 0.005353536128997803, 0.004911839962005615, 0.004419583797454834, 0.004079936027526855, 0.004630655765533447, 0.004497407913208008, 0.004325056076049804, 0.004295040130615235, 0.004349887847900391, 0.0042475519180297855, 0.0042518720626831055, 0.00424729585647583, 0.0042288641929626464, 0.004274464130401611, 0.004304927825927734, 0.004605599880218506, 0.004384768009185791, 0.004493631839752197, 0.004363743782043457, 0.004348447799682617, 0.004382080078125, 0.004247456073760986, 0.004307936191558838, 0.004365312099456787, 0.004348063945770263, 0.004481696128845215, 0.004416512012481689, 0.004374752044677735, 0.004395264148712158, 0.0043647680282592775, 0.004703775882720947, 0.00456547212600708, 0.004487167835235595, 0.004488831996917725, 0.004509632110595703, 0.0045145277976989745, 0.004435679912567139, 0.004419712066650391, 0.004386688232421875, 0.004335616111755371, 0.004453695774078369, 0.004459199905395508, 0.004403071880340576, 0.004475008010864258, 0.004476928234100342, 0.004476928234100342, 0.004654816150665283, 0.004659488201141358, 0.004544511795043945, 0.004637887954711914, 0.0045344319343566895, 0.004555615901947021, 0.004551712036132812, 0.004433919906616211, 0.004430528163909912, 0.004460480213165283, 0.004448416233062744, 0.004578559875488281, 0.0045669121742248535, 0.004479680061340332, 0.004526368141174316, 0.004487071990966797, 0.004497407913208008, 0.004448351860046387, 0.00447814416885376, 0.004517856121063232, 0.0042256321907043455, 0.004573184013366699, 0.004736224174499512, 0.004530975818634033, 0.004529183864593506, 0.004551008224487305, 0.004513631820678711, 0.00453001594543457, 0.004532767772674561, 0.004429247856140137, 0.004444640159606933, 0.00453436803817749, 0.004803264141082763, 0.005097439765930176, 0.0045008320808410645, 0.004463007926940918, 
0.00442521619796753, 0.004524640083312988, 0.004429728031158447, 0.004337759971618652, 0.004368288040161133, 0.00432912015914917, 0.004396575927734375, 0.004371615886688232, 0.004486815929412842, 0.0044622402191162105, 0.00443017578125, 0.004343200206756592, 0.0043218879699707035, 0.004272128105163574, 0.00425171184539795, 0.0042839360237121584, 0.004368800163269043, 0.004302847862243653, 0.004359519958496093, 0.004354464054107666, 0.004321599960327148, 0.004309120178222656, 0.004388671875, 0.004265376091003418, 0.004292960166931152, 0.004364384174346924, 0.004288095951080323, 0.004360767841339111, 0.004407296180725098, 0.004343808174133301, 0.004447391986846924, 0.004471456050872803, 0.00427839994430542, 0.004292672157287598, 0.004288415908813476, 0.004334688186645508, 0.004417791843414307, 0.004406079769134521, 0.0043621759414672855, 0.004421504020690918, 0.004446335792541504, 0.004440063953399658, 0.004478015899658203, 0.004602719783782959, 0.004560287952423096, 0.004596415996551514, 0.0045015039443969725, 0.00427180814743042, 0.0045717120170593265, 0.004534272193908692, 0.004593664169311523, 0.004495359897613525, 0.004616191864013672, 0.004525824069976807, 0.0044605121612548825, 0.004444447994232178, 0.004444159984588623, 0.004421631813049317, 0.004517888069152832, 0.004455423831939697, 0.004441088199615479, 0.00454860782623291, 0.004666751861572266, 0.004516479969024659, 0.004511072158813477, 0.004833951950073242, 0.004435391902923584, 0.004438591957092285, 0.00439081621170044, 0.004345759868621826, 0.004377088069915771, 0.004369728088378906, 0.00433190393447876, 0.004369952201843262, 0.0043281598091125485, 0.004431615829467774, 0.004397056102752686, 0.004562943935394287, 0.004418879985809326, 0.004435711860656738, 0.004369152069091797, 0.004446559906005859, 0.004472320079803467, 0.004385119915008545, 0.004349472045898438, 0.004393439769744873, 0.004429696083068847, 0.004441215991973877, 0.004492288112640381, 0.00448412799835205, 0.004496352195739746, 0.004560895919799805, 0.004452191829681397, 0.004474815845489502, 0.004493855953216553, 0.00441107177734375, 0.0044440641403198245, 0.0044852161407470705, 0.004446400165557861, 0.00439686393737793, 0.004368639945983886, 0.004282112121582031, 0.004280320167541504, 0.0043745279312133786, 0.004512032032012939, 0.004396768093109131, 0.004314559936523438, 0.004266655921936035, 0.004345759868621826, 0.004384992122650147, 0.0040813121795654295, 0.004317279815673828, 0.004237215995788574, 0.004263936042785644, 0.004263199806213379, 0.0042137598991394045, 0.00429468822479248, 0.004277535915374756, 0.004238944053649902, 0.0043318080902099605, 0.00424729585647583, 0.004268608093261719, 0.004241631984710693, 0.004375936031341553, 0.004321919918060303, 0.004390912055969238, 0.00447276782989502, 0.0045195198059082035, 0.004544991970062256, 0.004482175827026367, 0.004457695960998535, 0.0044553279876708984, 0.004455008029937744, 0.0044152002334594724, 0.004581503868103028, 0.004489535808563232, 0.004531487941741943, 0.004563680171966553, 0.00455244779586792, 0.004474656105041504, 0.00441596794128418, 0.004419583797454834, 0.004517951965332031, 0.0043966398239135744, 0.0049721598625183105, 0.004563680171966553, 0.004655104160308838, 0.00451584005355835, 0.004503168106079101, 0.004567423820495606, 0.004530176162719727, 0.004517183780670166, 0.004534976005554199, 0.00466534423828125, 0.004490528106689453, 0.004479487895965576, 0.004554975986480713, 0.0045240321159362796, 0.004433824062347412, 0.0043910079002380375, 0.0045015039443969725, 
0.0045335679054260255, 0.004372479915618896, 0.004434271812438965, 0.004536896228790283, 0.0045196800231933594, 0.004600063800811767, 0.004630303859710693, 0.004531807899475097, 0.00482038402557373, 0.004561952114105225, 0.004509664058685302, 0.0045875201225280765, 0.004757503986358643, 0.005041183948516846, 0.006973919868469239, 0.007921311855316162, 0.0054651198387145996, 0.004650144100189209, 0.00469209623336792, 0.004618783950805664, 0.004654943943023682, 0.004600255966186523, 0.004597472190856934, 0.004532127857208252, 0.004524127960205078, 0.004519936084747315, 0.004512864112854004, 0.004548799991607666, 0.0046080322265625, 0.00460975980758667, 0.0049649600982666015, 0.004532576084136963, 0.004454080104827881, 0.004468224048614502, 0.004438623905181884, 0.004442431926727295, 0.004474143981933593, 0.00452675199508667, 0.004553760051727295, 0.0045229759216308596, 0.004680960178375244, 0.004555200099945068, 0.004485663890838623, 0.004457503795623779, 0.004471456050872803, 0.004391263961791992, 0.0043517441749572755, 0.004500607967376709, 0.00438156795501709, 0.004399104118347168, 0.00440723180770874, 0.004421855926513672, 0.0044460477828979494, 0.00445030403137207, 0.004481023788452149, 0.004433919906616211, 0.004498688220977784, 0.0044258561134338376, 0.004386752128601074, 0.004383135795593262, 0.004429152011871338, 0.004395040035247802, 0.004506527900695801, 0.00450105619430542, 0.0044479680061340334, 0.00442851209640503, 0.004431871891021728, 0.004364287853240967, 0.004282112121582031, 0.004315455913543701, 0.0042919998168945315, 0.004262752056121826, 0.004286464214324951, 0.0042902398109436034, 0.004378335952758789, 0.004081567764282226, 0.004478015899658203, 0.004450816154479981, 0.004441887855529785, 0.0043712959289550785, 0.004304831981658936, 0.004276095867156982, 0.004282368183135986, 0.004359903812408447, 0.004388959884643555, 0.004389056205749512, 0.004333568096160889, 0.004392960071563721, 0.0043110399246215824, 0.0043151359558105465, 0.004309120178222656, 0.00436025619506836, 0.004349984169006347, 0.004425504207611084, 0.004478911876678467, 0.00444159984588623, 0.004491807937622071, 0.004484767913818359, 0.004521632194519043, 0.004735616207122803, 0.004536608219146729, 0.004500703811645508, 0.004512351989746094, 0.00451584005355835, 0.004539552211761474, 0.004559167861938477, 0.004546527862548828, 0.004479423999786377, 0.004454207897186279, 0.004553023815155029, 0.004476928234100342, 0.004470623970031738, 0.004633823871612549, 0.004501567840576172, 0.004542335987091064, 0.004518847942352295, 0.004505663871765137, 0.004511744022369385, 0.004585472106933594, 0.00452185583114624, 0.00445251178741455, 0.004506783962249756, 0.0045883522033691405, 0.004601471900939942, 0.004592000007629394, 0.004579648017883301, 0.004585343837738037, 0.00452732801437378, 0.00466707181930542, 0.00456713581085205, 0.004619071960449219, 0.004618239879608154, 0.004573279857635498, 0.0045730881690979, 0.004529344081878662, 0.004614975929260254, 0.004625855922698974, 0.004607967853546143, 0.00438486385345459, 0.004564256191253662, 0.00467193603515625, 0.004702400207519531, 0.004585472106933594, 0.004621920108795166, 0.004587935924530029, 0.004644351959228516, 0.004668191909790039, 0.0046845760345458985, 0.004586271762847901, 0.0045387520790100095, 0.0045660161972045895, 0.004569888114929199, 0.004577600002288819, 0.004593247890472412, 0.004599967956542969, 0.0045895037651062015, 0.0045418238639831545, 0.004622975826263428, 0.00447488021850586, 0.0044646401405334475, 0.004476191997528076, 
0.004502431869506836, 0.004611328125, 0.004682047843933106, 0.004759071826934814, 0.004588255882263184, 0.0045559039115905765, 0.00452288007736206, 0.004569087982177734, 0.0045015039443969725, 0.004529344081878662, 0.0045874881744384765, 0.004518527984619141, 0.004538591861724854, 0.004585472106933594, 0.0045784001350402835, 0.0046072001457214354, 0.004588704109191894, 0.004643519878387451, 0.004642015933990479, 0.0046518721580505375, 0.0046499199867248535, 0.004745984077453613, 0.00474121618270874, 0.00481279993057251, 0.004722591876983643, 0.004707488059997558, 0.004885600090026855, 0.004767903804779053, 0.004715871810913086, 0.004681951999664306, 0.004716383934020996, 0.004655488014221191, 0.004636576175689697, 0.004655072212219238, 0.004637824058532715, 0.004596640110015869, 0.004642879962921143, 0.004585663795471192, 0.004676864147186279, 0.0046146559715271, 0.004255743980407714, 0.004569087982177734, 0.00456115198135376, 0.004664927959442139, 0.00488259220123291, 0.004610047817230224, 0.00455679988861084, 0.004518911838531494, 0.0045084161758422855, 0.00454860782623291, 0.004534719944000244, 0.004553791999816895, 0.004512288093566894, 0.004532415866851807, 0.0045316481590271, 0.004532671928405761, 0.004615488052368164, 0.0046434240341186525, 0.00458400011062622, 0.0046936960220336915, 0.004849247932434082, 0.004605375766754151, 0.00451196813583374, 0.004548831939697266, 0.004646527767181397, 0.004516128063201904, 0.0044980478286743165, 0.004588863849639893, 0.004543168067932129, 0.004557087898254395, 0.004652703762054443, 0.00458784008026123, 0.0045658559799194335, 0.004529024124145508, 0.00449129581451416, 0.004478975772857666, 0.004415264129638672, 0.004620512008666992, 0.004584512233734131, 0.004527040004730224, 0.0044934401512145995, 0.0045075201988220214, 0.004486303806304931, 0.0044085121154785155, 0.004388512134552002, 0.00449561595916748, 0.004411136150360108, 0.004460544109344483, 0.004499392032623291, 0.004497471809387207, 0.004491487979888916, 0.004519807815551758, 0.004459807872772217, 0.004409632205963135, 0.004458432197570801, 0.004403615951538086, 0.004547616004943847, 0.004469727993011475, 0.004474783897399902, 0.00447814416885376, 0.004479104042053222, 0.004515711784362793, 0.004553247928619384, 0.004155488014221192, 0.004487071990966797, 0.00454041576385498, 0.0045240321159362796, 0.004466239929199219, 0.004465216159820556, 0.0045240001678466795, 0.004492224216461182, 0.004491903781890869, 0.004514175891876221, 0.0045240001678466795, 0.004474720001220703, 0.004523935794830322, 0.00470246410369873, 0.004544511795043945, 0.004562943935394287, 0.004589568138122559, 0.004686048030853271, 0.004596672058105469, 0.004696063995361328, 0.004604703903198242, 0.004622687816619873, 0.004559775829315185, 0.004520768165588379, 0.004612224102020264, 0.004646783828735352, 0.004493311882019043, 0.00448524808883667, 0.004633567810058594, 0.004528287887573242, 0.004501376152038574, 0.004530816078186035, 0.004468192100524902, 0.0044306240081787105, 0.004474080085754394, 0.004428415775299072, 0.00447708797454834, 0.004476064205169677, 0.004393824100494385, 0.0044009919166564946, 0.004373888015747071, 0.0044633598327636715, 0.004444416046142578, 0.00443779182434082, 0.004429696083068847, 0.004448256015777588, 0.004595839977264404, 0.00455679988861084, 0.004515615940093995, 0.004501855850219726, 0.004580927848815918, 0.004583231925964356, 0.004533984184265137, 0.004504223823547363, 0.0045511360168457035, 0.004551807880401612, 0.00459830379486084, 0.004460544109344483, 
0.0044438719749450685, 0.004448544025421142, 0.004453375816345215, 0.00444220781326294, 0.004397984027862549, 0.004165760040283203, 0.004468671798706055, 0.00449564790725708, 0.004456319808959961, 0.004463903903961182, 0.0048830718994140624, 0.004492959976196289, 0.004432320117950439, 0.004433343887329102, 0.00445088005065918, 0.0044373760223388676, 0.004395455837249756, 0.004425951957702637, 0.004335328102111817, 0.004412000179290772, 0.004447679996490478, 0.004489503860473633, 0.004453728199005127, 0.004438623905181884, 0.004413568019866944, 0.004381696224212647, 0.004409279823303223, 0.004404287815093994, 0.004396704196929932, 0.004417759895324707, 0.0044380159378051755, 0.0045400958061218265, 0.0043760638236999515, 0.004397119998931885, 0.004444416046142578, 0.004469247817993164, 0.004480288028717041, 0.004441887855529785, 0.004470784187316895, 0.004481023788452149, 0.004477888107299805, 0.004440159797668457, 0.004385695934295654, 0.004350815773010254, 0.004352159976959228, 0.0044111042022705075, 0.0043740801811218265, 0.004403872013092041, 0.004387008190155029, 0.0043907837867736815, 0.004388544082641601, 0.004408703804016113, 0.004428991794586182, 0.004396128177642822, 0.004577184200286865, 0.004472799777984619, 0.00450435209274292, 0.004470560073852539, 0.004516064167022705, 0.0044421119689941405, 0.0044068160057067875, 0.004368031978607178, 0.004377408027648926, 0.004334784030914306, 0.004352672100067139, 0.004322688102722168, 0.004319647789001465, 0.00442406415939331, 0.004142176151275634, 0.004379551887512207, 0.004432960033416748, 0.004361152172088623, 0.004357600212097168, 0.004391424179077149, 0.004462880134582519, 0.004449215888977051, 0.004502336025238037, 0.004591328144073486, 0.004567296028137207, 0.00468175983428955, 0.004639999866485596, 0.0046529917716979985, 0.004687903881072998, 0.004733920097351074, 0.004649983882904053, 0.004673888206481933, 0.0047346558570861815, 0.004726719856262207, 0.004798816204071045, 0.00475596809387207, 0.004790239810943604, 0.004882463932037354, 0.004661344051361084, 0.0046286721229553225, 0.004656864166259766, 0.004591936111450195, 0.004636352062225342, 0.0045996479988098145, 0.004632736206054687, 0.0046592001914978025, 0.004836959838867188, 0.004725152015686035, 0.004663296222686767, 0.004663296222686767, 0.004638847827911377, 0.00458355188369751, 0.0047094721794128415, 0.004807328224182129, 0.004745183944702148, 0.004704288005828857, 0.004644832134246826, 0.0045641279220581055, 0.00460646390914917, 0.004567296028137207, 0.0045221118927001955, 0.00455679988861084, 0.004837567806243896, 0.004654431819915771, 0.004620768070220947, 0.004689439773559571, 0.004653247833251953, 0.00462278413772583, 0.004842527866363525, 0.0049016962051391605, 0.004759552001953125, 0.0046377921104431155, 0.004638815879821777, 0.0047257599830627444, 0.004601984024047851, 0.004585216045379639, 0.004630655765533447, 0.004274208068847657, 0.004634624004364014, 0.004611648082733154, 0.004620736122131348, 0.004559999942779541, 0.004723711967468262, 0.0046672639846801756, 0.004732768058776856, 0.004646240234375, 0.004607840061187744, 0.004611264228820801, 0.004564544200897217, 0.004574592113494873, 0.004527200222015381, 0.004665215969085693, 0.004755328178405762, 0.004591872215270996, 0.00460591983795166, 0.0046098241806030275, 0.00461023998260498, 0.004640672206878662, 0.004607264041900635, 0.004612959861755371, 0.00455452823638916, 0.004573184013366699, 0.004622560024261474, 0.004551519870758056, 0.0046867837905883785, 0.004642975807189941, 0.0046221761703491215, 
0.004599103927612305, 0.004641248226165772, 0.004600063800811767, 0.004871712207794189, 0.0046228480339050295, 0.004603871822357178, 0.00464192008972168, 0.004661248207092285, 0.00459769582748413, 0.0048295679092407225, 0.00474780797958374, 0.004759552001953125, 0.004780032157897949, 0.004749311923980713, 0.0047225279808044435, 0.004702367782592773, 0.004651008129119873, 0.0047370557785034175, 0.004763423919677734, 0.004896895885467529, 0.004773952007293701, 0.004626431941986084, 0.004648640155792237, 0.004617983818054199, 0.004760128021240234, 0.004716544151306152, 0.0047738242149353025, 0.004685984134674072, 0.004695968151092529, 0.004696063995361328, 0.004761184215545654, 0.004698527812957764, 0.004659103870391846, 0.004384704113006592, 0.004696159839630127, 0.004698048114776612, 0.00472214412689209, 0.004670144081115723, 0.004631552219390869, 0.004707168102264405, 0.004655104160308838, 0.004643968105316162, 0.004647103786468506, 0.004669151782989502, 0.0046562237739562985, 0.004654751777648926, 0.004597760200500488, 0.004696320056915284, 0.004638688087463379, 0.004655104160308838, 0.0047041277885437015, 0.0046278080940246585, 0.00456496000289917, 0.004608704090118408, 0.004727136135101318, 0.004633823871612549, 0.0046022400856018065, 0.0046528639793396, 0.004605728149414063, 0.004590176105499268, 0.004798463821411133, 0.00465715217590332, 0.0045996160507202144, 0.004631968021392823, 0.004543263912200928, 0.00454636812210083, 0.0045565757751464845, 0.0045780158042907715, 0.004713727951049805, 0.004700767993927002, 0.004697951793670654, 0.004646111965179443, 0.00461033582687378, 0.004573056221008301, 0.004590400218963623, 0.0045279359817504885, 0.004556416034698486, 0.004616576194763184, 0.00458515214920044, 0.004545951843261719, 0.004559775829315185, 0.004607999801635742, 0.0045056319236755375, 0.004550623893737793, 0.004577087879180908, 0.004752639770507812, 0.004694591999053955, 0.004671872138977051, 0.004630720138549805, 0.004573311805725098, 0.004709536075592041, 0.004637407779693604, 0.004671296119689941, 0.004631999969482422, 0.00463759994506836, 0.004589248180389405, 0.004292704105377197, 0.004533504009246826, 0.004559296131134033, 0.004482944011688233, 0.004454112052917481, 0.004560863971710205, 0.004682176113128662, 0.004677279949188233, 0.004667359828948975, 0.004743616104125977, 0.004668928146362304, 0.004747712135314942, 0.004650207996368408, 0.004712224006652832, 0.004649280071258545, 0.004647295951843261, 0.004588096141815185, 0.00455894422531128, 0.004554368019104004, 0.004522367954254151, 0.004510591983795166, 0.004761600017547608, 0.004667871952056884, 0.004622432231903076, 0.004600031852722168, 0.004607264041900635, 0.004707200050354004, 0.004675104141235352, 0.004687776088714599, 0.00474348783493042, 0.004700255870819092, 0.004679200172424317, 0.0046843838691711425, 0.004718463897705078, 0.0046527361869812016, 0.004618559837341309, 0.004627967834472656, 0.004661759853363037, 0.004651008129119873, 0.004693280220031739, 0.004601920127868652, 0.004735616207122803, 0.004589600086212158, 0.004738848209381103, 0.004475296020507813, 0.004460351943969727, 0.004540063858032226, 0.004475168228149414, 0.004489280223846435, 0.00449945592880249, 0.004484416007995606, 0.004467391967773437, 0.004535808086395264, 0.004545023918151855, 0.004566431999206543, 0.004544320106506347, 0.004569888114929199, 0.00453984022140503, 0.00454099178314209, 0.004720479965209961, 0.004569056034088134, 0.004654399871826172, 0.004553599834442139, 0.004341504096984863, 0.0046059517860412595, 
0.004559103965759277, 0.0046059517860412595, 0.004569087982177734, 0.0045996160507202144, 0.004627935886383057, 0.004633312225341797, 0.00463372802734375, 0.0046641921997070315, 0.004656928062438965, 0.004681183815002442, 0.004623104095458985, 0.004703231811523437, 0.004637407779693604, 0.004565279960632324, 0.004681727886199951, 0.004556416034698486, 0.0045246720314025875, 0.004617568016052246, 0.0046423358917236325, 0.004696032047271728, 0.00471337604522705, 0.004642303943634033, 0.004617760181427002, 0.004682720184326172, 0.0046694397926330565, 0.00466703987121582, 0.004690464019775391, 0.004689184188842774, 0.004659743785858155, 0.0048230400085449215, 0.004633855819702148, 0.004567103862762451, 0.004550943851470948, 0.00458784008026123, 0.004605599880218506, 0.004706751823425293, 0.0046880640983581546, 0.00494598388671875, 0.004652095794677734, 0.004561600208282471, 0.004562623977661133, 0.004532192230224609, 0.004820735931396485, 0.004705984115600586, 0.0046394882202148435, 0.0047036480903625486, 0.004690688133239746, 0.004687871932983399, 0.004620287895202637, 0.004730879783630371, 0.0047226881980895995, 0.004719744205474854, 0.0046713600158691405, 0.0047626237869262695, 0.004824543952941894, 0.004721375942230224, 0.004693120002746582, 0.004751808166503906, 0.004739232063293457, 0.004783711910247803, 0.004734943866729737, 0.004726143836975097, 0.004732960224151611, 0.004747583866119385, 0.004655712127685547, 0.004716576099395752, 0.00459980821609497, 0.004612351894378662, 0.004654399871826172, 0.004688672065734863, 0.0046442561149597165, 0.0046733121871948245, 0.004675968170166016, 0.004669760227203369, 0.004635583877563476, 0.00465334415435791, 0.004694496154785156, 0.004691487789154053, 0.00460368013381958, 0.004723487854003906, 0.0046013760566711425, 0.004578080177307129, 0.004499135971069336, 0.004505792140960693, 0.004531455993652344, 0.004506144046783447, 0.004458528041839599, 0.004495296001434326, 0.00444374418258667, 0.004423776149749756, 0.004618144035339356, 0.004411871910095215, 0.004415487766265869, 0.004386847972869873, 0.0044011201858520504, 0.004392960071563721, 0.004611616134643555, 0.0045911998748779295, 0.004502399921417237, 0.004869440078735352, 0.004549312114715576, 0.004487167835235595, 0.004585728168487548, 0.004548351764678955, 0.004537951946258545, 0.004513408184051514, 0.004490015983581543, 0.004434175968170166, 0.0044295997619628906, 0.004419551849365234, 0.004404799938201904, 0.004409855842590332, 0.004421408176422119, 0.004628384113311767, 0.0044239358901977536, 0.004407296180725098, 0.004448448181152344, 0.004708032131195069, 0.004579455852508545, 0.00449945592880249, 0.004485119819641113, 0.0045279359817504885, 0.004496992111206055, 0.004463200092315674, 0.004198592185974121, 0.004539872169494629, 0.00464086389541626, 0.004606719970703125, 0.004583424091339112, 0.004638720035552979, 0.004556896209716797, 0.004640448093414307, 0.004599040031433106, 0.004604735851287842, 0.0045220799446105955, 0.004513504028320313, 0.0044711360931396485, 0.004480031967163086, 0.0045045437812805175, 0.004505280017852783, 0.004546847820281983, 0.004626495838165283, 0.004585247993469238, 0.004612192153930664, 0.004634175777435303, 0.004606656074523926, 0.00463759994506836, 0.004981696128845215, 0.004766719818115234, 0.004695040225982666, 0.004616191864013672, 0.004611167907714844, 0.004563839912414551, 0.004708384037017822, 0.004681727886199951, 0.004630527973175049, 0.004597760200500488, 0.004683104038238526, 0.004732895851135254, 0.004701951980590821, 0.004654016017913818, 
0.004662591934204102, 0.004659904003143311, 0.004720096111297607, 0.004658880233764648, 0.00475222396850586, 0.0048005437850952145, 0.004759520053863525, 0.004698112010955811, 0.004957920074462891, 0.004770080089569092, 0.004734079837799072, 0.004725632190704346, 0.004775135993957519, 0.0047247037887573244, 0.004800352096557617, 0.004714911937713623, 0.004733503818511963, 0.004669023990631104, 0.004933440208435059, 0.005184095859527588, 0.005216256141662597, 0.005130239963531494, 0.00514412784576416, 0.0052211198806762695, 0.005015552043914795, 0.004758815765380859, 0.004379360198974609, 0.004724800109863281, 0.00493990421295166, 0.004903776168823242, 0.004731008052825928, 0.004702432155609131, 0.0046844158172607425, 0.004718175888061523, 0.0047066879272460935, 0.004660543918609619, 0.00466806411743164, 0.0047225279808044435, 0.004739295959472656, 0.004808256149291992, 0.004769216060638428, 0.004767903804779053, 0.004711328029632568, 0.004718527793884277, 0.004907008171081543, 0.0047288317680358885, 0.004759071826934814, 0.004681663990020752, 0.00468390417098999, 0.004660831928253174, 0.0047112321853637696, 0.004674943923950195, 0.004630784034729004, 0.005476191997528076, 0.004909599781036377, 0.004691423892974854, 0.004788991928100586, 0.004742559909820557, 0.005010848045349121, 0.0048089919090270996, 0.00491590404510498, 0.004804416179656983, 0.004761792182922363, 0.004724256038665772, 0.004792352199554443, 0.004721024036407471, 0.004734943866729737, 0.004700255870819092, 0.00470630407333374, 0.004609471797943116, 0.004679488182067871, 0.00462332820892334, 0.00466921615600586, 0.004698048114776612, 0.0046408319473266605, 0.004779871940612793, 0.004790688037872314, 0.004596767902374267, 0.004616928100585937, 0.00468179178237915, 0.004642047882080078, 0.004592607975006104, 0.004689824104309082, 0.0045873279571533206, 0.004571135997772217, 0.004544511795043945, 0.004560959815979004, 0.004507584095001221, 0.004587456226348877, 0.0042278399467468265, 0.004579616069793701, 0.004577055931091308, 0.004525824069976807, 0.004494783878326416, 0.004506432056427002, 0.004481056213378906, 0.0045236158370971676, 0.004546080112457275, 0.0045385599136352536, 0.004647808074951172, 0.004562880039215088, 0.0045131840705871585, 0.004555232048034668, 0.004681727886199951, 0.004590784072875977, 0.004544960021972656, 0.004548992156982422, 0.004546144008636475, 0.004596127986907959, 0.004620287895202637, 0.004714848041534424, 0.004718239784240723, 0.00481279993057251, 0.004753407955169678, 0.004761600017547608, 0.00475657606124878, 0.004690847873687744, 0.00474675178527832, 0.004745408058166504, 0.004710720062255859, 0.004915200233459473, 0.004872511863708496, 0.004840799808502197, 0.004738592147827149, 0.004723199844360352, 0.004808095932006836, 0.004800992012023926, 0.00485200023651123, 0.004858208179473877, 0.00484332799911499, 0.004773888111114502, 0.004700128078460693, 0.004742303848266602, 0.004676479816436768, 0.004726975917816162, 0.004737887859344482, 0.004753536224365235, 0.004741663932800293, 0.004720928192138672, 0.0046993279457092285, 0.00467849588394165, 0.004701471805572509, 0.004674464225769043, 0.004939583778381348, 0.004673696041107177, 0.004732448101043701, 0.005220672130584717, 0.004747263908386231, 0.005010528087615967, 0.004715712070465088, 0.004672832012176513, 0.004689407825469971, 0.004314559936523438, 0.004588096141815185, 0.004612160205841064, 0.004628416061401367, 0.0046648321151733394, 0.004649343967437744, 0.004735104084014893, 0.00471449613571167, 0.0046880321502685545, 
0.004829023838043213, 0.004737311840057373, 0.004632256031036377, 0.004664544105529785, 0.004694623947143555, 0.0047244482040405275, 0.004629024028778076, 0.004616159915924073, 0.00466534423828125, 0.004665631771087647, 0.004668928146362304, 0.004694240093231201, 0.004755648136138916, 0.004673344135284424, 0.0047387838363647464, 0.00470198392868042, 0.004905471801757813, 0.0047944002151489255, 0.004738016128540039, 0.004754432201385498, 0.004710400104522705, 0.004703775882720947, 0.004640384197235108, 0.004700640201568604, 0.004655488014221191, 0.004687871932983399, 0.004630112171173096, 0.004757120132446289, 0.0047132477760314945, 0.004802559852600098, 0.004671296119689941, 0.004716351985931396, 0.004741504192352295, 0.004730879783630371, 0.004753407955169678, 0.004699456214904785, 0.004746943950653076, 0.004715680122375488, 0.00470307207107544, 0.005218688011169433, 0.0059666881561279295, 0.005949056148529053, 0.005464223861694336, 0.0047136001586914065, 0.004690815925598144, 0.0046020479202270505, 0.0046549119949340825, 0.004685120105743408, 0.004663104057312011, 0.004652192115783691, 0.0046583681106567385, 0.004676127910614013, 0.004689919948577881, 0.004636640071868896, 0.004398431777954102, 0.004606431961059571, 0.004696415901184082, 0.004630527973175049, 0.0046573119163513185, 0.004668863773345947, 0.004704671859741211, 0.0046566400527954105, 0.004714623928070068, 0.005005695819854736, 0.0047853121757507324, 0.0048009600639343265, 0.004821407794952393, 0.005023744106292725, 0.0051190400123596195, 0.005114816188812256, 0.0047094721794128415, 0.004723455905914307, 0.004736224174499512, 0.004881343841552734, 0.004799647808074951, 0.004762464046478272, 0.004730879783630371, 0.004866047859191895, 0.004675712108612061, 0.004665247917175293, 0.004607391834259033, 0.004604479789733887, 0.004564000129699707, 0.004524767875671387, 0.004530176162719727, 0.004505824089050293, 0.004580480098724365, 0.0046212158203125, 0.004634624004364014, 0.004686944007873535, 0.00467849588394165, 0.00478380823135376, 0.004768127918243408, 0.0046694397926330565, 0.004644991874694825, 0.004601535797119141, 0.00455622386932373, 0.00459443187713623, 0.004580927848815918, 0.004570720195770264, 0.004520031929016113, 0.004942592144012451, 0.00464896011352539, 0.004687871932983399, 0.004584544181823731, 0.004642911911010742, 0.0045617280006408694, 0.004777984142303467, 0.004694015979766846, 0.0046612157821655275, 0.004570335865020752, 0.004619296073913574, 0.004689439773559571, 0.0047329277992248535, 0.004753664016723633, 0.004722976207733155, 0.0046629438400268555, 0.004268032073974609, 0.004547999858856201, 0.004618847846984863, 0.00457916784286499, 0.004601823806762695, 0.0045445761680603025, 0.004476672172546387, 0.004419968128204345, 0.004390079975128174, 0.004465663909912109, 0.004478816032409668, 0.004551712036132812, 0.004584383964538574, 0.00463372802734375, 0.0046171841621398925, 0.004655007839202881, 0.004710015773773194, 0.004768127918243408, 0.004656864166259766, 0.004548895835876465, 0.004533664226531982, 0.004571743965148926, 0.004665311813354492, 0.004684031963348389, 0.004642591953277588, 0.004655327796936035, 0.004581151962280273, 0.004567039966583252, 0.004685823917388916, 0.004697919845581055, 0.004605823993682861, 0.00458784008026123, 0.004593311786651611, 0.004565536022186279, 0.004499263763427734, 0.004460288047790527, 0.004493567943572998, 0.0044355840682983394, 0.004450975894927978, 0.004427487850189209, 0.004455520153045654, 0.004420767784118653, 0.004394944190979004, 0.004439871788024902, 
0.004490719795227051, 0.004572864055633545, 0.004417568206787109, 0.004428607940673828, 0.004688992023468018, 0.004508096218109131, 0.004639328002929688, 0.004730112075805664, 0.0047972798347473145, 0.004632351875305175, 0.004614143848419189, 0.004610047817230224, 0.004691487789154053, 0.00459552001953125, 0.004616864204406738, 0.004605023860931396, 0.004600863933563232, 0.004611839771270752, 0.004609663963317871, 0.0044577598571777345, 0.004776512145996094, 0.004689216136932373, 0.004757472038269043, 0.004671487808227539, 0.004762335777282715, 0.004795775890350342, 0.004689727783203125, 0.004675871849060058, 0.00460649585723877, 0.004612095832824707, 0.004607071876525879, 0.004567647933959961, 0.004639328002929688, 0.004585279941558838, 0.0048817601203918455, 0.004644991874694825, 0.004745759963989258, 0.004611423969268799, 0.004596288204193115, 0.004544511795043945, 0.004562943935394287, 0.00449126386642456, 0.004616191864013672, 0.004644608020782471, 0.004704768180847168, 0.004795360088348389, 0.004756256103515625, 0.0050769920349121095, 0.004657248020172119, 0.00467519998550415, 0.004659264087677002, 0.004679359912872314, 0.0046406078338623044, 0.0046414718627929685, 0.0046728959083557125, 0.004756383895874024, 0.004707071781158447, 0.004758495807647705, 0.005224448204040527, 0.0046356477737426755, 0.004629727840423584, 0.004559743881225586, 0.004522719860076904, 0.0046079039573669435, 0.004581984043121338, 0.0045708160400390625, 0.004577280044555664, 0.0045895037651062015, 0.004669695854187012, 0.004552608013153076, 0.004759712219238281, 0.0046161279678344725, 0.004668896198272705, 0.004624735832214355, 0.004685152053833008, 0.004639391899108887, 0.0046408319473266605, 0.004696000099182129, 0.004671711921691895, 0.004639904022216797, 0.004725376129150391, 0.00463375997543335, 0.004235936164855957, 0.004572224140167236, 0.004659167766571045, 0.004711391925811767, 0.004646912097930909, 0.004651008129119873, 0.00463808012008667, 0.004596352100372314, 0.004859903812408447, 0.0049994559288024905, 0.0046711997985839845, 0.005255167961120606, 0.005378143787384033, 0.004761631965637207, 0.004699552059173584, 0.004702688217163086, 0.005083136081695557, 0.005040224075317383, 0.004831136226654053, 0.00469158411026001, 0.004632031917572022, 0.004929503917694092, 0.004785088062286377, 0.004744319915771484, 0.004669888019561768, 0.0046078720092773435, 0.004741536140441894, 0.004671648025512696, 0.004668831825256348, 0.004791903972625733, 0.004767744064331054, 0.004719840049743652, 0.004771615982055664, 0.004689824104309082, 0.004784224033355713, 0.00474015998840332, 0.004788991928100586, 0.0046555519104003905, 0.004717376232147217, 0.004653600215911866, 0.0048644161224365235, 0.004627711772918701, 0.004950975894927979, 0.00476255989074707, 0.004638912200927735, 0.004596415996551514, 0.004646111965179443, 0.004623136043548584, 0.0046360321044921875, 0.004678271770477295, 0.004642816066741944, 0.004610047817230224, 0.004623936176300049, 0.004597343921661377, 0.004621151924133301, 0.004573376178741455, 0.004689888000488281, 0.004591487884521485, 0.00477180814743042, 0.004769792079925537, 0.004695231914520264, 0.004722784042358399, 0.004669983863830566, 0.004470176219940185, 0.004661856174468994, 0.00470630407333374, 0.00463647985458374, 0.0048969597816467285, 0.004728447914123535, 0.004656928062438965, 0.004647200107574463, 0.004577184200286865, 0.004524799823760986, 0.004582240104675293, 0.00453715181350708, 0.004575104236602783, 0.004573311805725098, 0.004571040153503418, 0.004642911911010742, 
0.004616191864013672, 0.004652895927429199, 0.004628896236419678, 0.004589312076568604, 0.004526368141174316, 0.004589280128479004, 0.004584671974182129, 0.0046005439758300785, 0.004597760200500488, 0.004593728065490722, 0.004650047779083252, 0.004591807842254638, 0.00455679988861084, 0.004614463806152344, 0.004627071857452393, 0.004546432018280029, 0.004640575885772705, 0.004616096019744873, 0.004833568096160889, 0.004798399925231934, 0.004765696048736572, 0.004683775901794434, 0.004736767768859863, 0.004669695854187012, 0.004648831844329834, 0.004662496089935303, 0.004645792007446289, 0.0046694397926330565, 0.0047504959106445315, 0.004719232082366943, 0.004628704071044922, 0.00466758394241333, 0.004660895824432373, 0.004655263900756836, 0.004634528160095215, 0.0046382398605346676, 0.004624959945678711, 0.004643008232116699, 0.004632544040679931, 0.004619296073913574, 0.004559679985046387, 0.004493216037750244, 0.005666816234588623, 0.004638912200927735, 0.004517695903778076, 0.004618336200714111, 0.00446454381942749, 0.004121119976043701, 0.004521535873413086, 0.00445465612411499, 0.004423679828643799, 0.004470784187316895, 0.004607999801635742, 0.004493311882019043, 0.004519231796264648, 0.004600287914276123, 0.004493535995483399, 0.004478975772857666, 0.004487167835235595, 0.0045015039443969725, 0.004536320209503173, 0.004472832202911377, 0.004489215850830078, 0.004454495906829834, 0.0044902081489562986, 0.004402112007141113, 0.004364223957061768, 0.004372672080993652, 0.004392831802368164, 0.004384768009185791, 0.004409152030944824, 0.004726592063903808, 0.004780384063720703, 0.004642848014831543, 0.004622335910797119, 0.004576799869537354, 0.004573376178741455, 0.0046226558685302736, 0.004575200080871582, 0.004526080131530762, 0.004470784187316895, 0.004663296222686767, 0.004413536071777344, 0.004449759960174561, 0.004501664161682129, 0.004482816219329834, 0.004603456020355225, 0.004531167984008789, 0.004529248237609864, 0.004772704124450683, 0.004593408107757569, 0.004927807807922363, 0.004582880020141601, 0.004831552028656006, 0.0045378880500793455, 0.00456496000289917, 0.004604127883911133, 0.004467264175415039, 0.004509600162506103, 0.00442736005783081, 0.004474431991577149, 0.004496255874633789, 0.004470784187316895, 0.00454041576385498, 0.004622079849243164, 0.0045792322158813474, 0.004595168113708496, 0.004700895786285401, 0.004591968059539795, 0.004552351951599121, 0.004265888214111328, 0.0046150717735290525, 0.004613823890686035, 0.004646975994110108, 0.004674047946929932, 0.004668960094451904, 0.0046962881088256836, 0.004834976196289063, 0.004692319869995117, 0.004708352088928222, 0.004675583839416504, 0.004696063995361328, 0.004763072013854981, 0.004665599822998047, 0.004669792175292968, 0.004779039859771728, 0.004699071884155273, 0.004691520214080811, 0.004633024215698243, 0.004609344005584717, 0.004700416088104248, 0.0047597761154174805, 0.004620512008666992, 0.0047021121978759765, 0.004603424072265625, 0.004698207855224609, 0.004630847930908203, 0.004641215801239013, 0.0046077117919921876, 0.004657343864440918, 0.004556287765502929, 0.004590112209320069, 0.004625440120697022, 0.0046286401748657224, 0.00462230396270752, 0.004645503997802734, 0.004609407901763916, 0.004604735851287842, 0.004584479808807373, 0.004510496139526367, 0.004720736026763916, 0.004589471817016601, 0.004540256023406982, 0.004528639793395996, 0.004506783962249756, 0.004542975902557373, 0.004616191864013672, 0.004651008129119873, 0.004576608180999756, 0.0046824002265930175, 0.0046135039329528804, 
0.004622111797332764, 0.0046713600158691405, 0.0045577921867370605, 0.004550655841827392, 0.0045632638931274415, 0.0046219840049743655, 0.0046483840942382814, 0.004616799831390381, 0.004579391956329346, 0.004632512092590332, 0.004612448215484619, 0.004552608013153076, 0.004284416198730469, 0.004677536010742188, 0.004701695919036865, 0.004596320152282715, 0.004527840137481689, 0.004549312114715576, 0.004534272193908692, 0.004484447956085205, 0.004652927875518799, 0.004492063999176025, 0.004484255790710449, 0.004491903781890869, 0.0045017280578613286, 0.004530176162719727, 0.0046059517860412595, 0.00449945592880249, 0.004526080131530762, 0.004436160087585449, 0.00438047981262207, 0.004382656097412109, 0.004349247932434082, 0.0043220481872558595, 0.0043060479164123535, 0.004275072097778321, 0.004304736137390137, 0.0043393921852111815, 0.004297183990478516, 0.004326911926269531, 0.004388959884643555, 0.004663712024688721, 0.004435359954833984, 0.004338047981262207, 0.004292128086090088, 0.004305568218231201, 0.004544864177703858, 0.0044048957824707035, 0.004323296070098877, 0.004419616222381592, 0.004433663845062255, 0.004490560054779053, 0.004529119968414307, 0.004458432197570801, 0.004378496170043945, 0.004354207992553711, 0.004276351928710937, 0.004265376091003418, 0.0042828798294067385, 0.004284095764160156, 0.004327104091644287, 0.00435097599029541, 0.004328415870666504, 0.004374207973480225, 0.004440735816955566, 0.0043249278068542485, 0.004325632095336914, 0.004331744194030761, 0.004425888061523438, 0.004454527854919434, 0.004558752059936524, 0.004540512084960938, 0.004444096088409424, 0.004417600154876709, 0.004347424030303955, 0.0041519999504089355, 0.004386367797851562, 0.004370880126953125, 0.00437395191192627, 0.00437056016921997, 0.004430496215820313, 0.004402976036071777, 0.004352352142333984, 0.0042780799865722655, 0.004351840019226074, 0.0042782721519470214, 0.00430079984664917, 0.004323328018188476, 0.004304895877838135, 0.004299935817718506, 0.004318272113800049, 0.004263711929321289, 0.0042575359344482425, 0.004314623832702637, 0.004342527866363526, 0.004374271869659424, 0.004451871871948242, 0.00441155195236206, 0.004407839775085449, 0.004421664237976074, 0.004448256015777588, 0.004484767913818359, 0.004524384021759033, 0.004448256015777588, 0.004738207817077637, 0.0045023679733276365, 0.004433919906616211, 0.004376575946807861, 0.00441315221786499, 0.004419871807098389, 0.00445577621459961, 0.004545184135437011, 0.004569087982177734, 0.0045272641181945805, 0.004490143775939941, 0.0044208641052246095, 0.004497663974761963, 0.004421247959136963, 0.0043261761665344236, 0.0043192639350891114, 0.004356095790863037, 0.004275487899780273, 0.004274400234222412, 0.004335391998291016, 0.004366911888122558, 0.004364799976348877, 0.004431519985198975, 0.004419583797454834, 0.004390912055969238, 0.00445030403137207, 0.004669280052185059, 0.00450707197189331, 0.004596320152282715, 0.004452576160430908, 0.004536223888397217, 0.00450764799118042, 0.004632800102233887, 0.004420959949493408]",tokens/s,219.9553181426936,,, 
4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,1895.972864,2715.680768,0.0,2329.935872,2292.903424,s,1,11.1633642578125,11.1633642578125,0.0,11.1633642578125,11.1633642578125,11.1633642578125,11.1633642578125,[11.1633642578125],,kWh,5.310302582083182e-05,5.8503506570090226e-06,1.5650568075994964e-05,7.46039445538358e-05,,MB,2048.28672,2847.801344,0.0,2430.599168,2384.184832,s,10,1.3610313262939453,0.13610313262939452,0.0004781361967596805,0.1360871658325195,0.13664994201660155,0.13679170684814454,0.1369051187133789,"[0.13631954956054687, 0.13530755615234374, 0.1356815643310547, 0.13661843872070312, 0.13594691467285155, 0.13583110046386718, 0.1362274169921875, 0.13565901184082033, 0.1369334716796875, 0.1365063018798828]",tokens/s,1880.9265815878146,kWh,4.055303671238506e-06,4.471386905264353e-07,2.691387183972265e-06,7.193829545737207e-06,tokens/kWh,35586053.07123186,MB,2056.175616,2877.161472,0.0,2459.959296,2390.977024,s,10,31.311693847656247,3.131169384765625,0.02220496981613575,3.1342395019531253,3.153752734375,3.1627068115234374,3.1698700732421874,"[3.13244677734375, 3.10662060546875, 3.151762939453125, 3.171660888671875, 3.14071240234375, 3.1173271484375, 3.100508056640625, 3.1360322265625, 3.149520263671875, 3.1051025390625]",tokens/s,20.120278483342315,kWh,9.105606609126154e-05,1.0043635197919252e-05,4.1703092776027634e-05,0.00014280279406520845,tokens/kWh,441167.83857346745,,s,630,31.309724002838117,0.049697974607679586,0.0007609546981276979,0.049537792205810546,0.05057697525024414,0.051004879379272464,0.05274876079559328,"[0.049755615234375, 0.04923865509033203, 0.048844478607177735, 0.04902540969848633, 0.0491682243347168, 0.04907417678833008, 0.049960960388183595, 0.050339839935302735, 0.04915337753295899, 0.04943734359741211, 0.04950364685058594, 0.048785888671875, 0.04894246292114258, 0.049324417114257814, 0.049797088623046874, 0.04973942565917969, 0.05003286361694336, 0.04969116973876953, 0.04927897644042969, 0.049479679107666014, 0.049089569091796875, 0.049373695373535156, 0.049926624298095704, 0.04908236694335937, 0.049819038391113284, 0.04934915161132813, 0.051191520690917966, 0.04987529754638672, 0.05021900939941406, 0.049796897888183596, 0.049708385467529294, 0.049163040161132814, 0.04938278579711914, 0.04952131271362305, 0.05011257553100586, 0.049798656463623046, 0.05063888168334961, 0.04973932647705078, 0.049463775634765624, 0.04995961761474609, 0.04968832015991211, 0.04933363342285156, 0.04934924697875977, 0.04964966583251953, 0.0497151985168457, 0.04920697784423828, 0.04934604644775391, 0.04899923324584961, 0.050343265533447264, 0.049361568450927734, 0.049620990753173826, 0.05019443130493164, 0.05022719955444336, 0.050374656677246096, 0.05074943923950195, 0.05038079833984375, 0.051019775390625, 0.05126348876953125, 0.050323455810546876, 0.05204582214355469, 0.04926668930053711, 0.04895948791503906, 0.04905894470214844, 
0.049982784271240234, 0.049895519256591796, 0.049557598114013675, 0.04906790542602539, 0.048845439910888674, 0.04894924926757813, 0.04876473617553711, 0.049017024993896485, 0.04881331253051758, 0.048694015502929684, 0.04878540802001953, 0.048928672790527344, 0.04979065704345703, 0.050204959869384766, 0.049065311431884764, 0.04931046295166015, 0.04954291152954102, 0.049547168731689455, 0.04897625732421875, 0.049127422332763675, 0.04886431884765625, 0.04874131011962891, 0.04907779312133789, 0.04908086395263672, 0.050840511322021484, 0.04930854415893555, 0.049334400177001955, 0.050239486694335936, 0.04918675231933594, 0.050018047332763674, 0.049256065368652346, 0.049740478515625, 0.04915999984741211, 0.04915209579467773, 0.049047649383544924, 0.04904140853881836, 0.049073982238769534, 0.0499857292175293, 0.04931584167480469, 0.04949395370483398, 0.04929542541503906, 0.04938665771484375, 0.04922585678100586, 0.04975075149536133, 0.04910899353027344, 0.04971478271484375, 0.0490294075012207, 0.04914803314208984, 0.04877900695800781, 0.0488900146484375, 0.04927423858642578, 0.0494947509765625, 0.04928307342529297, 0.049136928558349606, 0.04900294494628906, 0.049077919006347656, 0.04899084854125976, 0.049223678588867184, 0.04960992050170898, 0.04926358413696289, 0.04980924987792969, 0.04979916763305664, 0.049309696197509766, 0.050525886535644535, 0.04925807952880859, 0.049476318359375, 0.049833984375, 0.04937836837768555, 0.05030393600463867, 0.051095550537109374, 0.04916588973999023, 0.04994911956787109, 0.04967139053344727, 0.049562145233154296, 0.049309951782226566, 0.049052734375, 0.049345184326171875, 0.04972777557373047, 0.04929769515991211, 0.05002620697021484, 0.0495814094543457, 0.05062518310546875, 0.05015961456298828, 0.049886367797851563, 0.049385345458984375, 0.04942470550537109, 0.0490882568359375, 0.04915091323852539, 0.04974172973632812, 0.053163936614990234, 0.04969295883178711, 0.04933001708984375, 0.05052419281005859, 0.053354110717773434, 0.049629566192626956, 0.050484703063964846, 0.04986115264892578, 0.049604606628417966, 0.050275745391845705, 0.04970089721679687, 0.05041785430908203, 0.049482112884521486, 0.04981174468994141, 0.04980057525634766, 0.05021116638183594, 0.04989980697631836, 0.05104982376098633, 0.052850719451904296, 0.05117779159545898, 0.05052316665649414, 0.04943360137939453, 0.05025574493408203, 0.050108543395996095, 0.049678558349609374, 0.04988022232055664, 0.05088524627685547, 0.050474239349365235, 0.050119361877441405, 0.04963273620605469, 0.051062782287597655, 0.05037503814697265, 0.049479393005371096, 0.04981398391723633, 0.04933635330200195, 0.04932575988769531, 0.048834880828857424, 0.05114227294921875, 0.049972095489501954, 0.05065903854370117, 0.050350399017333985, 0.050266078948974606, 0.050451904296875, 0.050874401092529296, 0.05018268966674805, 0.050206718444824217, 0.05084121704101562, 0.05041094589233398, 0.05047187042236328, 0.0501473274230957, 0.05132649612426758, 0.05067209625244141, 0.050278560638427734, 0.04992979049682617, 0.049614112854003904, 0.04965795135498047, 0.049928638458251955, 0.04945967864990235, 0.05016985702514649, 0.04977664184570312, 0.050800640106201174, 0.051045600891113284, 0.05072553634643555, 0.049731422424316406, 0.05041136169433594, 0.04949446487426758, 0.0518205451965332, 0.04978073501586914, 0.05077372741699219, 0.050294113159179685, 0.05174332809448242, 0.05119583892822266, 0.050894302368164064, 0.05071558380126953, 0.05065318298339844, 0.050348033905029295, 0.05034806442260742, 0.04994854354858398, 
0.0501984977722168, 0.05054886245727539, 0.05059151840209961, 0.05012911987304688, 0.050974720001220705, 0.050425918579101565, 0.050005950927734376, 0.05017599868774414, 0.04987289428710937, 0.05018624114990235, 0.050315521240234376, 0.05008358383178711, 0.04995276641845703, 0.049696769714355465, 0.04968991851806641, 0.04941279983520508, 0.050165630340576174, 0.05066969680786133, 0.05049919891357422, 0.05100172805786133, 0.049942527770996094, 0.049410049438476565, 0.05144374465942383, 0.050083358764648436, 0.049724929809570315, 0.050035392761230466, 0.05002060699462891, 0.04991727828979492, 0.0500984001159668, 0.04965625762939453, 0.049993728637695314, 0.04913971328735352, 0.04972339248657227, 0.049137664794921876, 0.049786880493164064, 0.049391231536865234, 0.05026665496826172, 0.05024956893920898, 0.04969625473022461, 0.049283584594726565, 0.04928828811645508, 0.04915043258666992, 0.04947808074951172, 0.04923187255859375, 0.04953606414794922, 0.04933657455444336, 0.049573982238769534, 0.0497608642578125, 0.049133377075195314, 0.04977407836914063, 0.04909660720825195, 0.050238304138183594, 0.0492031364440918, 0.050272254943847655, 0.050943008422851564, 0.05089993667602539, 0.05070438385009766, 0.05094150543212891, 0.04968902587890625, 0.04942851257324219, 0.04933817672729492, 0.04903952026367187, 0.049392894744873045, 0.05043427276611328, 0.05034038543701172, 0.05007513427734375, 0.049818111419677735, 0.0494029426574707, 0.04952569580078125, 0.04933552169799805, 0.04977334213256836, 0.04908665466308594, 0.04962694549560547, 0.04897382354736328, 0.049625057220458985, 0.05055014419555664, 0.05066371154785156, 0.049629566192626956, 0.05035212707519531, 0.050455615997314456, 0.05117433547973633, 0.051257568359375, 0.050452255249023435, 0.04961075210571289, 0.04923187255859375, 0.050520065307617185, 0.05004073715209961, 0.05047315216064453, 0.04954102325439453, 0.04940403366088867, 0.04942220687866211, 0.04981097412109375, 0.049187297821044924, 0.05287526321411133, 0.04996915054321289, 0.049152000427246094, 0.04960659027099609, 0.04909033584594726, 0.049637664794921876, 0.049084415435791014, 0.04935270309448242, 0.0489760971069336, 0.0491824951171875, 0.048962974548339845, 0.04890070343017578, 0.04926464080810547, 0.04895907211303711, 0.04900495910644531, 0.04915951919555664, 0.04880563354492187, 0.049159137725830075, 0.04906182479858399, 0.04926806259155273, 0.05081974411010742, 0.0524925422668457, 0.049388511657714844, 0.04969091033935547, 0.04884643173217774, 0.04944579315185547, 0.04919705581665039, 0.04994224166870117, 0.04982022476196289, 0.04918447875976562, 0.04908367919921875, 0.04916457748413086, 0.049159744262695315, 0.04945500946044922, 0.0494202880859375, 0.04924844741821289, 0.049146656036376954, 0.04992953491210937, 0.05035446548461914, 0.05100896072387695, 0.04918985748291015, 0.049620990753173826, 0.04920630264282227, 0.0492872314453125, 0.0489780158996582, 0.04898284912109375, 0.048507999420166016, 0.048531967163085936, 0.049164703369140625, 0.04906742477416992, 0.048921184539794924, 0.04884307098388672, 0.04956719970703125, 0.049342689514160154, 0.04922528076171875, 0.04990531158447266, 0.04943487930297852, 0.049466014862060544, 0.04888169479370117, 0.04890428924560547, 0.04923587036132813, 0.04902092742919922, 0.04909465789794922, 0.04890828704833984, 0.049010719299316406, 0.048779167175292966, 0.048967742919921876, 0.04897587203979492, 0.04995673751831055, 0.04936281585693359, 0.04899660873413086, 0.048773120880126954, 0.048889854431152346, 0.04859904098510742, 
0.04905804824829101, 0.04877286529541015, 0.048742401123046876, 0.04898556900024414, 0.04881667327880859, 0.04898783874511719, 0.04908230209350586, 0.049142143249511716, 0.04876015853881836, 0.049220417022705076, 0.04952617645263672, 0.04919347381591797, 0.04931782531738281, 0.04885913467407227, 0.04993843078613281, 0.04904959869384766, 0.048754112243652344, 0.04951039886474609, 0.04879417419433594, 0.04924825668334961, 0.0493524169921875, 0.04908265686035156, 0.04977196884155274, 0.04970348739624023, 0.04939763259887695, 0.049483966827392575, 0.04983596801757813, 0.04927603149414062, 0.0493617935180664, 0.04923187255859375, 0.04935065460205078, 0.04935209655761719, 0.04930326461791992, 0.05075568008422852, 0.04907497787475586, 0.04893513488769531, 0.048838432312011716, 0.04885456085205078, 0.048930561065673825, 0.0495684814453125, 0.04878335952758789, 0.05035827255249024, 0.04959817504882812, 0.04919529724121094, 0.05008076858520508, 0.0489681282043457, 0.05009616088867187, 0.053296928405761716, 0.04883327865600586, 0.0489431037902832, 0.04882243347167969, 0.04899414443969727, 0.04912332916259766, 0.049051902770996095, 0.048883071899414064, 0.04922963333129883, 0.049081153869628906, 0.04965299224853516, 0.04907574462890625, 0.04913865661621094, 0.048971328735351566, 0.04953952026367187, 0.049182048797607424, 0.04919363021850586, 0.04943369674682617, 0.04915830230712891, 0.04960438537597656, 0.049611743927001954, 0.04941766357421875, 0.049433151245117185, 0.049734848022460934, 0.05033216094970703, 0.05061769485473633, 0.049736671447753907, 0.049522689819335934, 0.0500469741821289, 0.0500469741821289, 0.049786880493164064, 0.04960665512084961, 0.05009743881225586, 0.05017059326171875, 0.0510074577331543, 0.049907745361328124, 0.049997665405273437, 0.04989459228515625, 0.05075577545166016, 0.05069084930419922, 0.050091678619384766, 0.049547615051269533, 0.049347774505615234, 0.04953171157836914, 0.04972889709472656, 0.05072339248657227, 0.05080275344848633, 0.04978073501586914, 0.04979487991333008, 0.05031919860839844, 0.05008940887451172, 0.049533855438232424, 0.04952867126464844, 0.050221214294433596, 0.05054422378540039, 0.05032387161254883, 0.050216320037841794, 0.049339008331298825, 0.049944576263427735, 0.04966543960571289, 0.05057535934448242, 0.05004492950439453, 0.04957798385620117, 0.05026816177368164, 0.05011251068115234, 0.050151424407958986, 0.049796768188476566, 0.049887584686279296, 0.049397758483886715, 0.04926876831054688, 0.04991382217407227, 0.04964556884765625, 0.04963923263549805, 0.054898174285888675, 0.05249913787841797, 0.04950406265258789, 0.04921772766113281, 0.049152000427246094, 0.04945510482788086, 0.04956953430175781, 0.04934793472290039, 0.04930652618408203, 0.049347679138183595, 0.04916691207885742, 0.049953121185302735, 0.05118102264404297, 0.050108734130859374, 0.04987459182739258, 0.048949825286865235, 0.05007564926147461, 0.049549312591552735, 0.05038265609741211, 0.05123455810546875, 0.05374115371704102, 0.05112851333618164, 0.050427806854248046, 0.0505263671875, 0.05024214553833008, 0.049980449676513675, 0.049232864379882814, 0.04932342529296875, 0.04881983947753906, 0.04982851028442383, 0.05021708679199219, 0.05034143829345703, 0.0501317138671875, 0.0501163215637207, 0.050167774200439455, 0.0494266242980957, 0.04974591827392578, 0.04989132690429687, 0.052133888244628904, 0.04951244735717773, 0.04959436798095703, 0.04961385726928711, 0.050222049713134764, 0.04852684783935547, 0.048721824645996094, 0.049076831817626954, 0.05016723251342774, 
0.04923654556274414, 0.0492410888671875, 0.04893183898925781, 0.050265247344970704, 0.04868320083618164, 0.04905590438842773, 0.04882688140869141, 0.04869260787963867, 0.049232513427734374, 0.04937113571166992, 0.04948905563354492, 0.04916924667358399, 0.0488611831665039, 0.04928905487060547, 0.04922079849243164, 0.04909519958496094, 0.049912254333496095, 0.04951388931274414, 0.04898262405395508, 0.04913545608520508, 0.04959804916381836, 0.049130046844482425, 0.049035263061523435, 0.04928307342529297, 0.04954521560668945, 0.049999423980712894, 0.04956204986572266, 0.048842464447021484, 0.049035552978515626, 0.04883180618286133, 0.04907468795776367, 0.04926396942138672, 0.05007238388061523, 0.04883859252929688, 0.04886649703979492, 0.04892348861694336, 0.04914780807495117, 0.04968054580688477, 0.04922163009643555, 0.04907212829589844, 0.04891593551635742, 0.05028303909301758, 0.04910089492797851, 0.04914575958251953, 0.04889788818359375, 0.049162208557128904, 0.049361087799072265, 0.049702911376953124, 0.049240062713623044, 0.049083488464355465, 0.04887235260009765, 0.04917657470703125, 0.0493191032409668, 0.04961545562744141, 0.04934463882446289, 0.049254497528076174, 0.0495022087097168, 0.05037088012695313, 0.04937900924682617, 0.04922102355957031, 0.04891094589233398, 0.0497151985168457, 0.04887347030639649, 0.04899993515014649, 0.04912368011474609, 0.05051116943359375]",tokens/s,20.121544346506926,,, 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return 
func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 1047, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 890, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 366, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 1047, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 890, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 366, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,1897.312256,2707.29216,0.0,2321.547264,2284.117504,s,1,10.665443359375,10.665443359375,0.0,10.665443359375,10.665443359375,10.665443359375,10.665443359375,[10.665443359375],,kWh,5.070575014165873e-05,5.585546904213815e-06,1.4660011728000188e-05,7.095130877387273e-05,,MB,2043.240448,2839.412736,0.0,2422.21056,2375.398912,s,10,1.3757783660888674,0.13757783660888673,0.0005565784162949701,0.13767249298095702,0.13813314361572265,0.1382130485534668,0.1382769725036621,"[0.13657760620117188, 0.13811538696289063, 0.13793391418457032, 0.13741107177734374, 0.1381038055419922, 0.13829295349121093, 0.13710783386230468, 0.13690232849121095, 0.13737286376953126, 0.13796060180664063]",tokens/s,1860.7648318222202,kWh,4.165497792840376e-06,4.593731601387409e-07,2.754892657746488e-06,7.379763610725606e-06,tokens/kWh,34689458.02382268,MB,2053.12,2868.772864,0.0,2451.570688,2382.191104,s,10,36.673345947265624,3.6673345947265625,0.03659779701401277,3.661540283203125,3.713846801757813,3.7276989135742187,3.738780603027344,"[3.66636669921875, 3.664807373046875, 3.7107685546875, 3.699270751953125, 3.741551025390625, 3.658273193359375, 3.629663818359375, 3.632937255859375, 3.64428125, 3.625426025390625]",tokens/s,17.178688874091485,kWh,0.00010679739466132639,1.177989422144468e-05,4.516345123245343e-05,0.0001637407401152245,tokens/kWh,384754.5818814966,,s,630,36.67133528137209,0.05820846870059058,0.0011938018367534662,0.05796006393432617,0.05958099327087402,0.06002524127960205,0.06168442733764648,"[0.058548225402832034, 0.05837619018554688, 0.06048723220825195, 0.058447681427001956, 0.05753494262695313, 0.0574219856262207, 0.058298366546630856, 0.05847244644165039, 0.05690777587890625, 0.05669472122192383, 0.057065216064453125, 0.056852638244628904, 0.05700979232788086, 0.05757596969604492, 0.05839052963256836, 0.05825913619995117, 0.05895116806030273, 0.057756385803222655, 0.05865059280395508, 0.06370252990722657, 0.060010654449462894, 0.060013023376464844, 0.05793791961669922, 0.058234878540039066, 0.06264627075195313, 0.059115520477294924, 0.058946559906005856, 0.05872332763671875, 0.05874208068847656, 0.0580588493347168, 0.0576038703918457, 0.05729977416992187, 0.05678633499145508, 0.05688911819458008, 0.057938751220703126, 0.058898017883300784, 0.05958083343505859, 0.05785190582275391, 0.057667518615722654, 0.05846636962890625, 0.058435390472412106, 0.05934447860717774, 0.058533470153808595, 0.05795942306518555, 0.057767391204833984, 0.0571479377746582, 0.057226783752441404, 0.057598430633544924, 0.0576341438293457, 0.05882742309570312, 0.05761843109130859, 0.05869158554077149, 0.05741567993164062, 0.05643468856811523, 0.05650022506713867, 0.057116127014160155, 0.057264671325683594, 0.05781708908081055, 0.05788166427612305, 0.0581644172668457, 0.05707545471191406, 0.05808294296264648, 0.05882080078125, 0.05768515014648438, 
0.05700284957885742, 0.057882625579833986, 0.0570241584777832, 0.057613983154296874, 0.05747705459594726, 0.05818239974975586, 0.057805889129638674, 0.056544193267822264, 0.056543231964111325, 0.05662105560302735, 0.05670409774780273, 0.056990623474121094, 0.05795004653930664, 0.05747747039794922, 0.057126720428466796, 0.05714944076538086, 0.05694486236572266, 0.056905632019042966, 0.05685644912719726, 0.058179519653320313, 0.05749151992797852, 0.057796607971191405, 0.0578600959777832, 0.05938995361328125, 0.05828812789916992, 0.057425918579101565, 0.05797468948364258, 0.05781827163696289, 0.05757535934448242, 0.057760768890380856, 0.058639614105224606, 0.057860862731933596, 0.05779046249389649, 0.06339583969116211, 0.05873459243774414, 0.05834735870361328, 0.05790652847290039, 0.05836064147949219, 0.05873881530761719, 0.05799103927612305, 0.059600894927978515, 0.05878720092773437, 0.059085086822509764, 0.058863327026367186, 0.060047454833984375, 0.05813238525390625, 0.05850790405273437, 0.05861171340942383, 0.05815849685668945, 0.05784793472290039, 0.058313217163085934, 0.057826526641845705, 0.058403583526611326, 0.05884223937988281, 0.05959564971923828, 0.05891891098022461, 0.059215873718261716, 0.059262977600097654, 0.059057727813720706, 0.05989961624145508, 0.05869232177734375, 0.059116737365722656, 0.059972286224365234, 0.05925001525878906, 0.05975830459594727, 0.060244705200195314, 0.059412479400634766, 0.05938585662841797, 0.058496894836425783, 0.05780697631835938, 0.05779235076904297, 0.05860572814941406, 0.05798857498168945, 0.05729129409790039, 0.05766144180297852, 0.05823030471801758, 0.05880879974365234, 0.05795849609375, 0.05777113723754883, 0.057788543701171875, 0.058237598419189456, 0.057929729461669924, 0.058636287689208984, 0.059119422912597655, 0.05899078369140625, 0.05928134536743164, 0.0596431999206543, 0.059431808471679684, 0.05934272003173828, 0.05886294555664062, 0.06025801467895508, 0.05977318572998047, 0.058965919494628906, 0.06032028961181641, 0.0588034553527832, 0.05900185775756836, 0.058826751708984375, 0.05909030532836914, 0.059090721130371095, 0.059433822631835935, 0.05864652633666992, 0.05843337631225586, 0.05860131072998047, 0.058663486480712894, 0.059455230712890626, 0.0616278076171875, 0.058611839294433594, 0.05866748809814453, 0.058393665313720707, 0.0579420166015625, 0.05931209564208984, 0.05850531387329101, 0.059079551696777345, 0.05814886474609375, 0.05948825454711914, 0.05876531219482422, 0.058525409698486325, 0.05825360107421875, 0.058597377777099606, 0.05842704010009766, 0.05829667282104492, 0.060020736694335934, 0.061044513702392576, 0.05888227081298828, 0.05891465759277344, 0.05957475280761719, 0.05875539016723633, 0.05878579330444336, 0.05889664077758789, 0.05786732864379883, 0.0575536003112793, 0.058431488037109375, 0.05850323104858399, 0.057966529846191404, 0.06076115036010742, 0.06063814544677734, 0.05942201614379883, 0.05848892974853516, 0.059593311309814455, 0.059404289245605466, 0.05871737670898437, 0.05839072036743164, 0.059238784790039065, 0.05821174240112305, 0.05806576156616211, 0.05779452896118164, 0.05829430389404297, 0.05733171081542969, 0.057132766723632815, 0.05761667251586914, 0.05751375961303711, 0.05727664184570312, 0.05845727920532227, 0.05729942321777344, 0.057798847198486325, 0.05787385559082031, 0.058329727172851564, 0.05860681533813476, 0.05743414306640625, 0.057688831329345706, 0.05864457702636719, 0.059116992950439456, 0.057969120025634764, 0.05781718444824219, 0.057565345764160156, 0.0586399040222168, 
0.05857686233520508, 0.058356063842773434, 0.0591514892578125, 0.0584508171081543, 0.05895167922973633, 0.06052185440063477, 0.05870419311523437, 0.05937388610839844, 0.05932646560668945, 0.059504638671875, 0.059496448516845706, 0.059875328063964846, 0.06167951965332031, 0.0607355842590332, 0.05967462539672851, 0.05988556671142578, 0.05972377777099609, 0.05861763381958008, 0.05883663940429688, 0.05895174407958984, 0.05925308990478516, 0.05795967864990234, 0.0585300178527832, 0.058275550842285154, 0.05792723083496094, 0.058693313598632814, 0.059681854248046874, 0.05890860748291016, 0.05800320053100586, 0.05801171112060547, 0.058759166717529294, 0.06983270263671874, 0.05800960159301758, 0.05847244644165039, 0.058871776580810546, 0.05973372650146484, 0.05976070404052734, 0.05884944152832031, 0.05996694564819336, 0.06010739135742187, 0.05958758544921875, 0.06281017684936524, 0.06014252853393555, 0.05943632125854492, 0.05996771240234375, 0.0590382080078125, 0.058525184631347656, 0.05930649566650391, 0.059635711669921876, 0.06050611114501953, 0.059499614715576174, 0.059400737762451174, 0.0594169921875, 0.05958243179321289, 0.06067731094360351, 0.05928559875488281, 0.05896265411376953, 0.059172863006591796, 0.058589183807373046, 0.058084800720214845, 0.05849302291870117, 0.058451774597167966, 0.05920630264282226, 0.058619937896728515, 0.05866454315185547, 0.057985374450683594, 0.05803830337524414, 0.058259456634521485, 0.05876089477539063, 0.059762046813964846, 0.05949945449829101, 0.05850726318359375, 0.060028926849365234, 0.05980979156494141, 0.0591352653503418, 0.06001123046875, 0.06027376174926758, 0.05937039947509765, 0.06015404891967773, 0.06045471954345703, 0.05987936019897461, 0.059541568756103516, 0.05907865524291992, 0.05873664093017578, 0.05860995101928711, 0.05926396942138672, 0.05895695877075195, 0.057584449768066405, 0.05717814254760742, 0.057669086456298826, 0.056739646911621096, 0.05708464050292969, 0.057847518920898434, 0.05855670547485352, 0.057831039428710936, 0.05764524841308594, 0.05934867095947265, 0.0578337287902832, 0.058433246612548825, 0.05886947250366211, 0.05819203186035156, 0.06168643188476562, 0.05878492736816406, 0.05784889602661133, 0.05715049743652344, 0.0573488655090332, 0.05719244766235351, 0.05778636932373047, 0.05756288146972656, 0.05856447982788086, 0.05746694564819336, 0.05774678421020508, 0.058840030670166014, 0.05735343933105469, 0.05744451141357422, 0.05732534408569336, 0.05741571044921875, 0.058325824737548826, 0.05969919967651367, 0.05969100952148437, 0.057855392456054686, 0.05755491256713867, 0.05820070266723633, 0.05791766357421875, 0.05963958358764648, 0.05718243026733399, 0.05787599945068359, 0.057871742248535155, 0.05778112030029297, 0.057445854187011716, 0.057694751739501955, 0.058043968200683596, 0.057196990966796875, 0.05778335952758789, 0.06697875213623047, 0.057442302703857424, 0.057630718231201174, 0.05758156967163086, 0.0581192626953125, 0.05711318588256836, 0.057151809692382816, 0.05688934326171875, 0.05742563247680664, 0.05720707321166992, 0.05696921539306641, 0.05727622222900391, 0.05832313537597656, 0.057649150848388675, 0.057637664794921876, 0.057480766296386716, 0.057774528503417966, 0.05754265594482422, 0.05700140762329101, 0.05709302520751953, 0.05740508651733398, 0.05708723068237305, 0.05655558395385742, 0.05730783843994141, 0.05727436828613281, 0.05858099365234375, 0.05817465591430664, 0.057334590911865234, 0.057837566375732424, 0.05724979019165039, 0.056834049224853515, 0.057197856903076175, 0.05717382431030273, 
0.05798163223266602, 0.05775791931152344, 0.057786144256591794, 0.05728073501586914, 0.058517505645751956, 0.057382911682128904, 0.05752627182006836, 0.05765859222412109, 0.05741017532348633, 0.05934915161132812, 0.057673728942871094, 0.05773516845703125, 0.056784065246582034, 0.05735427093505859, 0.05682659149169922, 0.056467521667480466, 0.05808886337280274, 0.057667678833007815, 0.05816166305541992, 0.05764710235595703, 0.057812992095947265, 0.057837566375732424, 0.05777148818969727, 0.05778684616088867, 0.05750566482543945, 0.0578573112487793, 0.057423809051513675, 0.05771567916870117, 0.05866291046142578, 0.05671091079711914, 0.05748537445068359, 0.06045920181274414, 0.058038272857666016, 0.05775360107421875, 0.057450496673583984, 0.057935871124267575, 0.056974494934082034, 0.05758857727050781, 0.057055233001708984, 0.057148414611816405, 0.05763379287719726, 0.057434112548828124, 0.05804377746582031, 0.05777612686157227, 0.05851055908203125, 0.05738780975341797, 0.05730508804321289, 0.05759590530395508, 0.057853950500488284, 0.05732476806640625, 0.05846636962890625, 0.0590847053527832, 0.05973894500732422, 0.058158622741699216, 0.05880223846435547, 0.05850124740600586, 0.05776412963867188, 0.05827532958984375, 0.05788518524169922, 0.058119583129882815, 0.05914275360107422, 0.05796044921875, 0.05748057556152344, 0.05643942260742187, 0.05652444839477539, 0.05684649658203125, 0.056879169464111326, 0.057137279510498046, 0.05732556915283203, 0.05746035385131836, 0.057299327850341794, 0.05653708648681641, 0.056519840240478514, 0.05639664077758789, 0.056997886657714845, 0.057052734375, 0.05787039947509766, 0.057696640014648436, 0.057802047729492184, 0.0574587516784668, 0.058479232788085936, 0.05682995223999023, 0.05704908752441406, 0.05694169616699219, 0.057618881225585936, 0.0572031364440918, 0.0572149772644043, 0.05796870422363281, 0.05724153518676758, 0.05807718276977539, 0.05725183868408203, 0.056869152069091794, 0.05751772689819336, 0.05746694564819336, 0.058238655090332034, 0.05805302429199219, 0.05802384185791016, 0.05767987060546875, 0.05813164901733398, 0.05695097732543945, 0.056631935119628905, 0.05787184143066406, 0.058296257019042966, 0.060928607940673826, 0.058517505645751956, 0.05717196655273438, 0.056879390716552736, 0.05823497772216797, 0.05802188873291016, 0.05730428695678711, 0.05694134521484375, 0.056594432830810545, 0.05718220901489258, 0.05693622589111328, 0.05687113571166992, 0.05729894256591797, 0.057716064453125, 0.05803484725952148, 0.061300735473632816, 0.05754403305053711, 0.0601545295715332, 0.05697299194335938, 0.05713747024536133, 0.05705532836914062, 0.05698140716552735, 0.05722451019287109, 0.05765785598754883, 0.05784595108032226, 0.057401344299316405, 0.0578682861328125, 0.05736838531494141, 0.05720035171508789, 0.05835615921020508, 0.05913603210449219, 0.05839462280273437, 0.05787443161010742, 0.058482688903808595, 0.0586907844543457, 0.05884188842773438, 0.05847974395751953, 0.05808012771606445, 0.058007007598876954, 0.057415359497070315, 0.05758038330078125, 0.057451713562011716, 0.05824291229248047, 0.057398239135742185, 0.057876640319824216, 0.05796847915649414, 0.05812815856933594, 0.05719398498535156, 0.05788902282714844, 0.05821417617797851, 0.05808198547363281, 0.05767168045043945, 0.05761395263671875, 0.05811151885986328, 0.05792185592651367, 0.05763731384277344, 0.057542751312255856, 0.057614238739013675, 0.05770454406738281, 0.05794815826416016, 0.058193920135498046, 0.058170879364013675, 0.05769062423706055, 0.05815283203125, 
0.05776707077026367, 0.057703392028808594, 0.058008670806884766, 0.05793001556396484, 0.05751657485961914, 0.05692972946166992, 0.05732755279541016, 0.05754719924926758, 0.05824076843261719, 0.05734854507446289, 0.056766464233398435, 0.056971263885498044, 0.058381950378417966, 0.05728006362915039, 0.056965953826904295, 0.05781232070922852, 0.057194305419921876, 0.05731926345825195, 0.0577171516418457, 0.057219680786132814, 0.05793996810913086, 0.05723955154418945, 0.0581058578491211, 0.058208255767822265, 0.057458686828613284, 0.05807452774047851, 0.057401248931884766, 0.05728691101074219, 0.05677510452270508, 0.05725132751464844, 0.05710879898071289, 0.061249664306640625, 0.05800886535644531, 0.05733251190185547, 0.057212448120117186, 0.05720252990722656, 0.05700672149658203, 0.0571146240234375, 0.05681356811523437, 0.057924736022949216, 0.05726614379882813, 0.059149215698242184, 0.05810761642456055, 0.05760412979125976, 0.057219329833984374, 0.056620990753173825, 0.056628734588623046, 0.05734595108032227, 0.056801952362060544, 0.05718163299560547, 0.0574142074584961, 0.05756313705444336, 0.05768396759033203, 0.05772281646728516, 0.058490943908691403, 0.057583614349365236, 0.05707958221435547, 0.05715785598754883, 0.0570145263671875, 0.05706047821044922, 0.05667903900146484, 0.0570695686340332, 0.05874687957763672, 0.057896961212158204, 0.05859891128540039, 0.058354175567626954]",tokens/s,17.179630770631388,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,1892.835328,2707.29216,0.0,2321.547264,2284.117504,s,1,10.7844990234375,10.7844990234375,0.0,10.7844990234375,10.7844990234375,10.7844990234375,10.7844990234375,[10.7844990234375],,kWh,5.2379806616674313e-05,5.763094458413049e-06,1.5655845857998063e-05,7.379874693308542e-05,,MB,2047.127552,2839.412736,0.0,2422.21056,2375.398912,s,10,1.3761192932128905,0.13761192932128907,0.000738049921409758,0.13755030822753905,0.13858999786376952,0.13879538040161132,0.13895968643188478,"[0.1385443572998047, 0.1376527404785156, 0.13720509338378906, 0.13637962341308593, 0.1377493133544922, 0.1374478759765625, 0.1373709716796875, 0.13900076293945313, 0.13676960754394532, 0.13799894714355468]",tokens/s,1860.303836030848,kWh,4.165820352347361e-06,4.594161555194709e-07,2.7548026733239928e-06,7.380039181190823e-06,tokens/kWh,34688162.72038986,MB,2047.127552,2868.772864,0.0,2451.570688,2382.191104,s,10,34.74687084960937,3.4746870849609373,0.02543227322724961,3.4651737060546877,3.5143853027343748,3.5209443603515624,3.5261916064453125,"[3.442729248046875, 3.4836123046875, 3.477168701171875, 3.46190576171875, 3.45785888671875, 3.4607578125, 3.468441650390625, 3.45396533203125, 3.512927734375, 
3.52750341796875]",tokens/s,18.131129065599946,kWh,0.00010272988641806842,1.1331163071990943e-05,4.3877845743875724e-05,0.00015793889523393512,tokens/kWh,398888.4429429874,,s,630,34.744585933685315,0.05515013640267508,0.0010826007520365409,0.05500196838378906,0.056272843170166016,0.056659156990051265,0.057859247970581056,"[0.05488633728027344, 0.05476675033569336, 0.05471478271484375, 0.05404307174682617, 0.054242622375488284, 0.05457929611206055, 0.055390369415283205, 0.054843841552734376, 0.055564289093017576, 0.054706302642822266, 0.05432419204711914, 0.053811294555664066, 0.05438864135742188, 0.05457193756103516, 0.05462195205688476, 0.05437260818481445, 0.05433958435058594, 0.05535728073120117, 0.055462047576904296, 0.05526323318481445, 0.05482905578613281, 0.05548031997680664, 0.054749183654785157, 0.05491302490234375, 0.054769248962402345, 0.05446390533447266, 0.05466016006469727, 0.05528351974487305, 0.05410601425170898, 0.0537213134765625, 0.05398080062866211, 0.05373478317260742, 0.05425254440307617, 0.053751808166503906, 0.05385830307006836, 0.05403868865966797, 0.05516502380371094, 0.05386624145507812, 0.054814208984375, 0.05390591812133789, 0.054261760711669924, 0.05418598556518555, 0.054675457000732425, 0.054287872314453124, 0.0545715217590332, 0.054781951904296876, 0.05429033660888672, 0.05381071853637695, 0.0541124496459961, 0.053876480102539065, 0.05440371322631836, 0.05477782440185547, 0.056072223663330076, 0.057133056640625, 0.055341056823730465, 0.05506851196289062, 0.05462236785888672, 0.0555233268737793, 0.05468979263305664, 0.05443337631225586, 0.0549128303527832, 0.05493411254882812, 0.055123966217041014, 0.05548598480224609, 0.055009662628173826, 0.05499347305297852, 0.05496012878417969, 0.05428224182128906, 0.054443870544433594, 0.0550032958984375, 0.05491097640991211, 0.05452163314819336, 0.055144672393798826, 0.0546192626953125, 0.054705249786376954, 0.055843902587890626, 0.05431785583496094, 0.05491500854492187, 0.055526912689208986, 0.05593548965454102, 0.05599027252197265, 0.05603942489624023, 0.05488217544555664, 0.055268768310546876, 0.05600035095214844, 0.05535833740234375, 0.05516828918457031, 0.05506246566772461, 0.05697763061523437, 0.05532115173339844, 0.05603868865966797, 0.055385025024414065, 0.05542889785766601, 0.056010753631591796, 0.054675457000732425, 0.05549055862426758, 0.054742401123046874, 0.05485631942749023, 0.055102657318115235, 0.05456787109375, 0.05455779266357422, 0.054235904693603516, 0.055373855590820316, 0.05495318222045899, 0.055479007720947264, 0.05468985748291016, 0.05523564910888672, 0.05474399948120117, 0.055531520843505856, 0.05459276962280273, 0.05513497543334961, 0.05544476699829102, 0.05581078338623047, 0.055382015228271485, 0.056000511169433595, 0.056051231384277346, 0.05494831848144531, 0.05541398239135742, 0.054991168975830076, 0.0549257926940918, 0.056020000457763675, 0.05663843154907226, 0.05585276794433594, 0.05586767959594727, 0.0563460807800293, 0.056202816009521483, 0.05584864044189453, 0.05471859359741211, 0.054452095031738285, 0.055115745544433596, 0.05695734405517578, 0.055697151184082035, 0.056317279815673825, 0.055401153564453125, 0.05562739181518555, 0.05515913772583008, 0.05559014511108398, 0.05531887817382813, 0.05483366394042969, 0.054267807006835936, 0.05506867218017578, 0.05488435363769531, 0.05523401641845703, 0.05502518463134766, 0.054991870880126956, 0.05411948776245117, 0.05398774337768555, 0.05479888153076172, 0.057657344818115235, 0.05495759963989258, 0.05456480026245117, 0.0586776008605957, 
0.05543267059326172, 0.055741439819335936, 0.05610041427612305, 0.05542515182495117, 0.05451993560791016, 0.0549453125, 0.054413375854492185, 0.0548513298034668, 0.05539897537231445, 0.05482905578613281, 0.05561692810058594, 0.05508572769165039, 0.05678688049316406, 0.05690572738647461, 0.055695358276367186, 0.055375873565673826, 0.055315647125244144, 0.05502444839477539, 0.05513123321533203, 0.05543619155883789, 0.05453788757324219, 0.05473446273803711, 0.055077472686767576, 0.05426969528198242, 0.0539815673828125, 0.05439209747314453, 0.05476220703125, 0.05478604888916016, 0.05508915328979492, 0.055357440948486325, 0.05450239944458008, 0.05413731384277344, 0.05417219161987305, 0.05498470306396484, 0.05519769668579102, 0.054675457000732425, 0.055000640869140624, 0.05523043060302734, 0.054324832916259766, 0.05424579238891602, 0.05442067337036133, 0.054270782470703126, 0.05472979354858398, 0.054387649536132815, 0.05515491104125977, 0.05378025436401367, 0.05380300903320313, 0.05382758331298828, 0.05506252670288086, 0.054749183654785157, 0.0549826545715332, 0.05570268630981445, 0.05548118209838867, 0.05561731338500977, 0.05497673416137695, 0.055113086700439455, 0.05450099182128906, 0.05520624160766602, 0.05823161697387695, 0.055129951477050784, 0.05548428726196289, 0.05517119979858399, 0.05630976104736328, 0.05588489532470703, 0.05660268783569336, 0.055937694549560546, 0.05491843032836914, 0.05459856033325195, 0.05519529724121094, 0.05471062469482422, 0.05426704025268555, 0.053824352264404296, 0.05414092636108398, 0.05511372756958008, 0.05419558334350586, 0.054589214324951174, 0.05554230499267578, 0.05479209518432617, 0.05396521759033203, 0.05433331298828125, 0.05432656097412109, 0.055143489837646484, 0.05531830215454102, 0.05514441680908203, 0.055553249359130856, 0.05517599868774414, 0.054796287536621094, 0.05424553680419922, 0.05418972778320313, 0.05406041717529297, 0.05416329574584961, 0.05486892700195312, 0.05568924713134766, 0.05522163009643555, 0.055241344451904296, 0.05580550384521484, 0.05494339370727539, 0.054401023864746094, 0.055677440643310545, 0.05517136001586914, 0.056177696228027346, 0.05507891082763672, 0.054000545501708984, 0.05386131286621094, 0.05429766464233399, 0.05479702377319336, 0.05415958404541016, 0.05497241592407227, 0.054657024383544923, 0.05446041488647461, 0.05431631851196289, 0.06378569412231445, 0.05537897491455078, 0.054582241058349606, 0.054542335510253906, 0.054712223052978515, 0.05523993682861328, 0.05489955139160156, 0.055019519805908204, 0.05543731307983398, 0.05614591979980469, 0.056354816436767576, 0.05583052825927735, 0.05480448150634765, 0.054361984252929686, 0.05467337417602539, 0.05413727951049805, 0.053987041473388675, 0.053991424560546876, 0.053901313781738285, 0.053819232940673825, 0.05371062469482422, 0.05375619125366211, 0.05392313766479492, 0.053972801208496096, 0.05389638519287109, 0.05367916870117188, 0.05402288055419922, 0.054248832702636716, 0.0542520637512207, 0.05458323287963867, 0.054433311462402344, 0.05523238372802734, 0.05468793487548828, 0.05439136123657227, 0.05410815811157227, 0.054403072357177736, 0.054691326141357424, 0.05462681579589844, 0.05561548614501953, 0.05504723358154297, 0.05507932662963867, 0.05517059326171875, 0.05507980728149414, 0.05488172912597656, 0.05715004730224609, 0.05571798324584961, 0.05679635238647461, 0.055397342681884766, 0.0547940788269043, 0.05482227325439453, 0.0544343376159668, 0.05465631866455078, 0.05573212814331055, 0.05521417617797852, 0.05511328125, 0.05470662307739258, 0.05461401748657226, 
0.054720481872558596, 0.054341217041015626, 0.05610249710083008, 0.05948489761352539, 0.05490700912475586, 0.054583297729492185, 0.05510863876342773, 0.054451168060302736, 0.0546297607421875, 0.05650291061401367, 0.05457715225219727, 0.055187553405761716, 0.05510873413085938, 0.05481468963623047, 0.05504492950439453, 0.05519564819335938, 0.05554380798339844, 0.05501094436645508, 0.05532915115356445, 0.059435073852539065, 0.05659203338623047, 0.05770883178710937, 0.05644518280029297, 0.055836639404296874, 0.05570537567138672, 0.054171646118164066, 0.054306175231933596, 0.05448563385009766, 0.05437142562866211, 0.05354172897338867, 0.05533612823486328, 0.05436431884765625, 0.05394921493530273, 0.05415107345581055, 0.05410736083984375, 0.05464739227294922, 0.053946529388427734, 0.054118431091308594, 0.05411779022216797, 0.054604415893554685, 0.05425065612792969, 0.053924545288085934, 0.05383388900756836, 0.05449318313598633, 0.054226497650146484, 0.05399596786499023, 0.05431238555908203, 0.05462483215332031, 0.053980926513671874, 0.05402447891235351, 0.05408150482177734, 0.05472256088256836, 0.054040576934814455, 0.05462540817260742, 0.05477638244628906, 0.055277889251708984, 0.05484326553344727, 0.05443596649169922, 0.055076160430908204, 0.05484780883789062, 0.05433782577514648, 0.05488044738769531, 0.05458534240722656, 0.054926654815673825, 0.05548448181152344, 0.05631782531738281, 0.05555263900756836, 0.05638156890869141, 0.05666611099243164, 0.05625241470336914, 0.05601267242431641, 0.05600678253173828, 0.05527347183227539, 0.055193599700927735, 0.05430249786376953, 0.054452449798583984, 0.053850112915039064, 0.0539607048034668, 0.054046718597412106, 0.05419366455078125, 0.05573273468017578, 0.0547512321472168, 0.05471846389770508, 0.054523902893066405, 0.05485363388061523, 0.05567638397216797, 0.05535184097290039, 0.05574041748046875, 0.05550422286987305, 0.05575542449951172, 0.055169025421142576, 0.05554934310913086, 0.05680188751220703, 0.05443119812011719, 0.05533340835571289, 0.05598611068725586, 0.05631596755981445, 0.055524608612060544, 0.05490867233276367, 0.05591286468505859, 0.05481123352050781, 0.054599681854248044, 0.054666622161865235, 0.05578201675415039, 0.053991424560546876, 0.05467136001586914, 0.0550645751953125, 0.054284286499023435, 0.05439859390258789, 0.054456703186035155, 0.05412384033203125, 0.054074047088623046, 0.05451887893676758, 0.055203807830810546, 0.05465760040283203, 0.05478364944458008, 0.054728862762451175, 0.054945758819580075, 0.054725440979003906, 0.05390095901489258, 0.054678657531738284, 0.0555417594909668, 0.05537177658081055, 0.055531520843505856, 0.05487206268310547, 0.05495321655273438, 0.05456291198730469, 0.05496284866333008, 0.0540113296508789, 0.05392230224609375, 0.05421820831298828, 0.054878814697265625, 0.05396908950805664, 0.05389497756958008, 0.05411344146728515, 0.05445513534545898, 0.0542938232421875, 0.05534790420532226, 0.053982624053955076, 0.05382428741455078, 0.0543570556640625, 0.053687328338623046, 0.0543485107421875, 0.05453107070922852, 0.05447817611694336, 0.054610591888427734, 0.05423923110961914, 0.05387699127197266, 0.05379558563232422, 0.05459628677368164, 0.05464710235595703, 0.05421670532226563, 0.054434913635253906, 0.053984161376953124, 0.054422561645507815, 0.05427503967285156, 0.05453033447265625, 0.054193950653076174, 0.05473664093017578, 0.05480467224121094, 0.05511782455444336, 0.055828479766845705, 0.05720441436767578, 0.05493100738525391, 0.05503667068481445, 0.055504894256591795, 0.05472051239013672, 
0.054727809906005856, 0.05416422271728515, 0.054548606872558594, 0.05558476638793945, 0.05504143905639648, 0.05553171157836914, 0.05554217529296875, 0.05571583938598633, 0.055244800567626956, 0.05483011245727539, 0.05541577529907227, 0.05565151977539062, 0.055616321563720705, 0.057613632202148435, 0.05594777679443359, 0.05526473617553711, 0.05550358581542969, 0.056545280456542966, 0.05529388809204101, 0.05549881744384766, 0.05536495971679688, 0.05506032180786133, 0.05574931335449219, 0.056295551300048825, 0.05632732772827148, 0.056626014709472657, 0.0578682861328125, 0.057697601318359375, 0.057066177368164064, 0.0568197135925293, 0.05610291290283203, 0.05627260971069336, 0.055984031677246096, 0.054952224731445315, 0.055226463317871094, 0.05694259262084961, 0.057831424713134766, 0.057837120056152345, 0.05657440185546875, 0.055742462158203124, 0.05529600143432617, 0.055752704620361325, 0.055809215545654295, 0.05606278228759766, 0.05558051300048828, 0.0563078727722168, 0.055640064239501956, 0.05544345474243164, 0.055335968017578126, 0.055144577026367186, 0.054993759155273436, 0.05462428665161133, 0.05528745651245117, 0.05563388824462891, 0.05530620956420899, 0.05580633544921875, 0.05497446441650391, 0.05528118515014648, 0.05616073608398438, 0.0557786865234375, 0.055304832458496093, 0.05672540664672852, 0.05631804656982422, 0.05582793426513672, 0.05628982543945313, 0.05547977447509766, 0.05632592010498047, 0.056473438262939456, 0.05665065765380859, 0.05569331359863281, 0.054877281188964844, 0.05451020812988281, 0.05413711929321289, 0.05426335906982422, 0.05523231887817383, 0.05450960159301758, 0.05466787338256836, 0.05437577438354492, 0.0541409912109375, 0.05500783920288086, 0.05702492904663086, 0.05563759994506836, 0.05476147079467773, 0.054965984344482424, 0.055104991912841794, 0.05536441421508789, 0.05571788787841797, 0.05579776000976563, 0.0554516487121582, 0.05627494430541992, 0.05567011260986328, 0.05623056030273438, 0.055820289611816405, 0.054869216918945314, 0.05609142303466797, 0.05600460815429688, 0.055391742706298826, 0.055470592498779295, 0.05573555374145508, 0.055018241882324216, 0.055834144592285154, 0.056595008850097654, 0.0567110710144043, 0.05664767837524414, 0.056128990173339846, 0.05535798263549805, 0.055560127258300784, 0.05553081512451172, 0.05576492691040039, 0.05603615951538086, 0.05511167907714844, 0.055799007415771484, 0.05660752105712891, 0.05631734466552735, 0.05603923034667969, 0.05629212951660156, 0.05626787185668945, 0.05508975982666016, 0.054652641296386716, 0.05519004821777344, 0.05562374496459961, 0.05609247970581055, 0.057259937286376954, 0.055871776580810543, 0.05561161422729492, 0.05630748748779297, 0.05650636672973633, 0.06885990142822265, 0.05495004653930664, 0.05487500762939453, 0.05489718246459961, 0.05567881774902344, 0.05549321746826172, 0.05507276916503906, 0.05764444732666016, 0.056048351287841795, 0.055413856506347656, 0.05502851104736328, 0.054994945526123044, 0.055736320495605465, 0.05748735809326172, 0.056215553283691405, 0.05570323181152344]",tokens/s,18.132321427068938,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, 
Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,1892.39296,2975.727616,0.0,2573.205504,2499.110912,s,1,11.4968857421875,11.4968857421875,0.0,11.4968857421875,11.4968857421875,11.4968857421875,11.4968857421875,[11.4968857421875],,kWh,6.55778764375024e-05,7.217959602694612e-06,2.3057518446001346e-05,9.585335448619836e-05,,MB,1993.854976,3032.35072,0.0,2615.148544,2344.859136,s,10,9.599883117675782,0.9599883117675783,0.0006789888656806986,0.9597271728515625,0.9609683288574219,0.961060366821289,0.9611339971923828,"[0.9592802734375, 0.9596979370117188, 0.9591192016601563, 0.960321044921875, 0.9611524047851563, 0.9596486206054687, 0.9597564086914062, 0.9593824462890626, 0.960576904296875, 0.9609478759765625]",tokens/s,266.6699134374251,kWh,2.80635266704542e-05,3.0939695579013955e-06,1.8546050190363196e-05,4.9703546418718786e-05,tokens/kWh,5150537.908168022,MB,2000.809984,3061.710848,0.0,2644.508672,2355.667456,s,10,30.164549072265622,3.0164549072265623,0.01451851269936126,3.013283325195313,3.0379307373046873,3.037942053222656,3.037951105957031,"[3.03792822265625, 3.012458740234375, 3.0058427734375, 2.998713134765625, 2.998117431640625, 3.020995849609375, 3.005732666015625, 3.01410791015625, 3.032698974609375, 3.037953369140625]",tokens/s,20.885443985610404,kWh,8.957075364204452e-05,9.881222123815586e-06,4.000413553863661e-05,0.00013945611130449672,tokens/kWh,451755.0318210299,,s,630,30.162378604888893,0.04787679143633161,0.00076079481385525,0.04773520088195801,0.048693135833740235,0.04903439998626709,0.05097193622589113,"[0.048758785247802736, 0.05180006408691406, 0.0481209602355957, 0.048996353149414064, 0.04868527984619141, 0.04794015884399414, 0.04798064041137695, 0.04833894348144531, 0.049194881439208984, 0.049258625030517575, 0.051879936218261716, 0.04816796875, 0.048722911834716796, 0.049119232177734375, 0.0477757453918457, 0.048271198272705075, 0.04779004669189453, 0.048036033630371094, 0.047961345672607424, 0.04832092666625976, 0.0483059196472168, 0.04866025543212891, 0.04794208145141601, 0.047411582946777345, 0.0479109115600586, 0.048115711212158206, 0.04836924743652344, 0.04776182556152344, 0.04778188705444336, 0.04732876968383789, 0.04802816009521484, 0.04772454452514648, 0.048019264221191404, 0.04764889526367187, 0.047728702545166014, 0.04751932907104492, 0.04779606246948242, 0.04801590347290039, 0.047906848907470705, 0.0488950080871582, 0.04846080017089844, 0.04874576187133789, 0.04802585601806641, 0.048092639923095704, 0.04761395263671875, 0.04728140640258789, 0.047785694122314454, 0.047645984649658205, 0.047997665405273435, 0.04763868713378906, 0.047853408813476564, 0.04797439956665039, 0.0477470703125, 0.04869260787963867, 0.04800166320800781, 0.04756480026245117, 0.048404224395751955, 0.048102752685546875, 0.04790774536132812, 0.048451583862304685, 0.04817715072631836, 0.047939617156982424, 0.04764201736450195, 0.04850483322143555, 0.04795548629760742, 0.04769148635864258, 0.04819635009765625, 0.04789657592773437, 0.048072704315185545, 0.04797836685180664, 0.04776358413696289, 0.047420513153076174, 0.04762716674804687, 0.04820716857910156, 0.047699649810791014, 0.04741632080078125, 0.047413246154785156, 0.04774911880493164, 0.047445697784423826, 0.047661376953125, 0.04827340698242188, 0.05045248031616211, 0.04743926239013672, 0.047809120178222655, 0.04723689651489258, 0.04684003067016602, 0.04703641510009766, 
0.047480831146240236, 0.047116287231445314, 0.047083518981933595, 0.04751769638061523, 0.04768767929077149, 0.04731276702880859, 0.04707855987548828, 0.04764566421508789, 0.04805571365356445, 0.04727852630615234, 0.04710211181640625, 0.04742758560180664, 0.04870515060424805, 0.05107545471191406, 0.04753203201293945, 0.048189441680908204, 0.04755865478515625, 0.04786908721923828, 0.04776348876953125, 0.04766598510742188, 0.047966209411621094, 0.047529983520507815, 0.047892478942871096, 0.04796982574462891, 0.04763817596435547, 0.04811449432373047, 0.04769164657592773, 0.04704678344726562, 0.04739686584472656, 0.04725468826293945, 0.047311710357666015, 0.04713475036621094, 0.04815254211425781, 0.04897177505493164, 0.04845363235473633, 0.04788169479370117, 0.048724800109863284, 0.04812726211547851, 0.04805267333984375, 0.04823046493530273, 0.04706259155273437, 0.04739904022216797, 0.0470645751953125, 0.04719721603393555, 0.047578079223632816, 0.04721343994140625, 0.04737625503540039, 0.049128734588623046, 0.04790755081176758, 0.047570945739746094, 0.04791321563720703, 0.04732688140869141, 0.04744406509399414, 0.047421440124511716, 0.04806585693359375, 0.04727878570556641, 0.047865856170654295, 0.047233024597167966, 0.04791689682006836, 0.047978912353515625, 0.04783283233642578, 0.04744998550415039, 0.04715327835083008, 0.04736614227294922, 0.047364097595214844, 0.04709785461425781, 0.04757708740234375, 0.04744192123413086, 0.047817920684814455, 0.047299392700195314, 0.04735795211791992, 0.04719615936279297, 0.04781465530395508, 0.04766672134399414, 0.04814828872680664, 0.04874716949462891, 0.04771430587768555, 0.04833679962158203, 0.049500255584716796, 0.0485164794921875, 0.04790131378173828, 0.04848393630981445, 0.04753785705566406, 0.04745859146118164, 0.047925697326660154, 0.04785356903076172, 0.04755251312255859, 0.047318401336669924, 0.04729219055175781, 0.04719526290893555, 0.04725936126708984, 0.0474126091003418, 0.050145790100097655, 0.04769804763793945, 0.04717977523803711, 0.0477470703125, 0.04795619201660156, 0.047214366912841796, 0.04747660827636719, 0.047470718383789065, 0.04779212951660156, 0.04818124771118164, 0.049356353759765624, 0.04742598342895508, 0.04716134262084961, 0.04785971069335938, 0.04710604858398437, 0.047160480499267576, 0.04720022583007812, 0.047311744689941405, 0.04723712158203125, 0.04735772705078125, 0.047338783264160154, 0.04742444610595703, 0.047288257598876955, 0.048555137634277344, 0.047164222717285154, 0.04711340713500976, 0.04791961669921875, 0.04762809753417969, 0.047702720642089844, 0.04764838409423828, 0.04875859069824219, 0.04762879943847656, 0.04714905548095703, 0.04719615936279297, 0.047101951599121096, 0.04708464050292969, 0.04747971343994141, 0.047388671875, 0.04701696014404297, 0.04784025573730469, 0.047409088134765624, 0.04748870468139649, 0.048210304260253904, 0.04715929412841797, 0.047470592498779295, 0.04708687973022461, 0.047007808685302736, 0.047268512725830075, 0.04732928085327148, 0.04716134262084961, 0.04778819274902344, 0.04976214218139648, 0.04726169586181641, 0.04741734313964844, 0.04701593780517578, 0.046895233154296875, 0.04725910568237305, 0.04692745590209961, 0.04724409484863281, 0.04743577575683594, 0.047101951599121096, 0.04737212753295898, 0.048036128997802734, 0.04837891387939453, 0.047121246337890624, 0.04715520095825195, 0.047254718780517575, 0.04720870590209961, 0.048024127960205075, 0.04837113571166992, 0.04811756896972656, 0.04829056167602539, 0.05188198471069336, 0.04926723098754883, 0.04816716766357422, 
0.04764031982421875, 0.04784479904174805, 0.04797907257080078, 0.047529983520507815, 0.04712768173217773, 0.04731584167480469, 0.04750681686401367, 0.04732787322998047, 0.04736614227294922, 0.047787487030029295, 0.047316959381103516, 0.04793196868896484, 0.04716953659057617, 0.047414688110351565, 0.047064769744873045, 0.04744252777099609, 0.047732673645019534, 0.04814886474609375, 0.047538177490234375, 0.048205249786376955, 0.047678016662597654, 0.04760124969482422, 0.047709888458251956, 0.04778675079345703, 0.04730672073364258, 0.04722022247314453, 0.047500030517578125, 0.047529727935791015, 0.047529983520507815, 0.04727545547485352, 0.04754489517211914, 0.0477573127746582, 0.04762419128417969, 0.04760371017456055, 0.04724531173706055, 0.04798175811767578, 0.04747766494750977, 0.048218017578125, 0.04782422256469727, 0.047529953002929684, 0.04806921768188477, 0.04731638336181641, 0.04695724868774414, 0.047867904663085936, 0.04720640182495117, 0.04729241561889649, 0.04709580612182617, 0.04741939163208008, 0.047095169067382814, 0.04706777572631836, 0.0488611831665039, 0.04699955368041992, 0.04755862426757813, 0.04717571258544922, 0.047266849517822264, 0.047433696746826175, 0.04704358291625976, 0.04791296005249023, 0.04767692947387695, 0.0479997444152832, 0.04782985687255859, 0.0486987190246582, 0.04832937622070312, 0.04772454452514648, 0.047742401123046875, 0.04755513763427734, 0.04800102233886719, 0.04835737609863281, 0.04806825637817383, 0.05053449630737305, 0.04903756713867188, 0.04880358505249023, 0.04783900833129883, 0.048400863647460934, 0.049024127960205076, 0.05332812881469726, 0.04896422576904297, 0.04860044860839844, 0.04763449478149414, 0.0479667854309082, 0.04815052795410156, 0.04861542510986328, 0.04918476867675781, 0.04890214538574219, 0.04747468948364258, 0.047347103118896484, 0.047398880004882814, 0.04719375991821289, 0.04742790222167969, 0.047755680084228515, 0.04759782409667969, 0.04797235107421875, 0.047440990447998044, 0.04707740783691406, 0.04777798461914062, 0.04720505523681641, 0.04753612899780273, 0.04727974319458008, 0.047463840484619144, 0.04713513565063476, 0.04773843383789062, 0.047626976013183595, 0.04845315170288086, 0.04825983810424805, 0.04872806549072266, 0.04742879867553711, 0.047422271728515625, 0.04692307281494141, 0.04702454376220703, 0.046956832885742185, 0.04778188705444336, 0.04706054306030273, 0.04749356842041016, 0.04712038421630859, 0.04753385543823242, 0.04793775939941406, 0.04765695953369141, 0.04758454513549805, 0.047938270568847655, 0.04741712188720703, 0.04686665725708008, 0.047265182495117186, 0.04831907272338867, 0.047703678131103516, 0.049569377899169924, 0.049115550994873046, 0.048468223571777345, 0.047632129669189456, 0.04744953536987305, 0.04738719940185547, 0.04757503890991211, 0.04731916809082031, 0.04717350387573242, 0.047611167907714844, 0.0474918098449707, 0.047472640991210936, 0.04691366577148438, 0.04810124969482422, 0.04724528121948242, 0.04709584045410156, 0.04747238540649414, 0.04715100860595703, 0.047438175201416015, 0.0481976318359375, 0.047376094818115236, 0.04866915130615234, 0.049169601440429686, 0.047921791076660156, 0.047661056518554686, 0.04761804962158203, 0.047188129425048825, 0.047357791900634764, 0.04701388931274414, 0.047042560577392575, 0.047168895721435546, 0.04725823974609375, 0.048586753845214846, 0.04722848129272461, 0.047868350982666015, 0.047519744873046874, 0.04768902587890625, 0.04763043212890625, 0.04768767929077149, 0.0473606071472168, 0.04912947082519531, 0.04873830413818359, 0.04794777679443359, 
0.04882636642456055, 0.04834304046630859, 0.04849407958984375, 0.047263999938964844, 0.04788864135742187, 0.04790179061889648, 0.04790774536132812, 0.047009792327880856, 0.047201663970947265, 0.04730534362792969, 0.047434974670410156, 0.04693791961669922, 0.04747740936279297, 0.047655231475830076, 0.04749107360839844, 0.04763852691650391, 0.04724531173706055, 0.0474983024597168, 0.048081855773925784, 0.04716543960571289, 0.04795651245117188, 0.04731862258911133, 0.048480670928955076, 0.04700364685058594, 0.04718783950805664, 0.047044223785400394, 0.046904960632324216, 0.04703913497924805, 0.04678473663330078, 0.04685526275634765, 0.04748585510253906, 0.04734991836547851, 0.048955230712890624, 0.04741939163208008, 0.04705807876586914, 0.04703836822509765, 0.04757600021362305, 0.04754227066040039, 0.047247360229492184, 0.04726784133911133, 0.04761734390258789, 0.04715184020996094, 0.04750947189331055, 0.047271934509277344, 0.04716284942626953, 0.04772854232788086, 0.04760435104370117, 0.04768124771118164, 0.04835948944091797, 0.047425086975097654, 0.04794025421142578, 0.04740496063232422, 0.04746435165405274, 0.04788243103027344, 0.048156673431396485, 0.04810559844970703, 0.047505279541015626, 0.04847206497192383, 0.04894902420043945, 0.04767977523803711, 0.04787343978881836, 0.0486978874206543, 0.0478105583190918, 0.05017424011230469, 0.049729248046875, 0.04809318542480469, 0.0476255989074707, 0.04765145492553711, 0.04723046493530274, 0.04844412612915039, 0.04877257537841797, 0.04784067153930664, 0.04833683013916015, 0.04753097534179687, 0.047836414337158205, 0.04767142486572266, 0.04760435104370117, 0.048047904968261716, 0.05252054214477539, 0.04823523330688476, 0.04783494567871094, 0.04756079864501953, 0.048132095336914066, 0.049030529022216794, 0.0475299186706543, 0.047798816680908206, 0.047891872406005856, 0.04772735977172852, 0.047596736907958986, 0.04788102340698242, 0.04839574432373047, 0.04825276947021485, 0.04793824005126953, 0.047636478424072266, 0.04760575866699219, 0.04739686584472656, 0.0481607666015625, 0.047763038635253906, 0.048044448852539064, 0.048527359008789066, 0.048377857208251954, 0.04742758560180664, 0.04753958511352539, 0.04752243041992187, 0.048312286376953124, 0.04774915313720703, 0.04806803131103515, 0.047522369384765624, 0.04768972778320312, 0.04824883270263672, 0.04774841690063476, 0.04775142288208008, 0.047704734802246095, 0.04820070266723633, 0.04825167846679688, 0.047857662200927735, 0.04763404846191406, 0.04814451217651367, 0.04812416076660156, 0.047833087921142575, 0.04828979110717774, 0.04872505569458008, 0.04809209442138672, 0.04779827117919922, 0.047916927337646485, 0.048187519073486326, 0.04946944046020508, 0.050718494415283207, 0.0482955207824707, 0.04911577606201172, 0.04880998229980469, 0.048601089477539064, 0.048132095336914066, 0.04824396896362305, 0.04817107009887695, 0.048560832977294924, 0.048140289306640625, 0.04857651138305664, 0.04806351852416992, 0.04819657516479492, 0.047699169158935545, 0.04884969711303711, 0.04859423828125, 0.048083648681640626, 0.04801052856445313, 0.04826291275024414, 0.0527174072265625, 0.04888995361328125, 0.04857475280761719, 0.04845292663574219, 0.0489152946472168, 0.047892478942871096, 0.04804163360595703, 0.047466464996337894, 0.04720064163208008, 0.04746575927734375, 0.04820608139038086, 0.048204254150390625, 0.04803747177124024, 0.047992576599121095, 0.048554656982421875, 0.04894924926757813, 0.04807011032104492, 0.048038143157958985, 0.04822969436645508, 0.047516639709472654, 0.04756412887573242, 
0.04882271957397461, 0.048316001892089844, 0.04825075149536133, 0.04774124908447266, 0.04801580810546875, 0.04787152099609375, 0.047779647827148435, 0.04734019088745117, 0.04776265716552734, 0.047936256408691404, 0.04861040115356445, 0.04841932678222656, 0.0482492790222168, 0.048511070251464845, 0.04823196792602539, 0.0477938232421875, 0.048251777648925784, 0.047279361724853516, 0.04762908935546875, 0.04767504119873047, 0.04803177642822266, 0.04755590438842774, 0.048245567321777344, 0.04914521789550781, 0.0474826545715332, 0.047414112091064456, 0.04803903961181641, 0.04773772811889648, 0.04830204772949219, 0.048375839233398436, 0.04823785781860351, 0.0479024658203125, 0.04867375946044922, 0.05063065719604492, 0.048405632019042966, 0.047667198181152344, 0.04756569671630859, 0.047761409759521485, 0.04838988876342774, 0.0482163200378418, 0.04843110275268555, 0.05004822540283203]",tokens/s,20.886946890119784,,, 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 1047, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 890, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 366, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,1896.26368,2715.680768,0.0,2329.935872,2292.903424,s,1,11.04098828125,11.04098828125,0.0,11.04098828125,11.04098828125,11.04098828125,11.04098828125,[11.04098828125],,kWh,5.250644011667304e-05,5.784508517034582e-06,1.5649734742002874e-05,7.39406833757105e-05,,MB,1997.889536,2847.801344,0.0,2430.599168,2384.184832,s,10,1.3622839965820313,0.13622839965820313,0.00038062492271410513,0.1363505401611328,0.13652589721679687,0.13672606811523438,0.13688620483398437,"[0.13638514709472657, 0.13573648071289063, 0.13601939392089843, 0.13648141479492187, 0.13552362060546874, 0.1360833282470703, 0.13636405944824218, 0.13642729187011718, 0.13692623901367187, 0.13633702087402344]",tokens/s,1879.1970003487058,kWh,4.05406274259253e-06,4.470868759578211e-07,2.6862714391389205e-06,7.18742105768927e-06,tokens/kWh,35617782.504355334,MB,1999.044608,2877.161472,0.0,2459.959296,2390.977024,s,10,29.333292968749998,2.9333292968749998,0.03348032385115024,2.9431298828125,2.9752155761718746,2.9753485107421875,2.9754548583984377,"[2.947513427734375, 2.90059375, 2.938746337890625, 2.91302392578125, 2.876139892578125, 2.894212158203125, 2.963922607421875, 2.948473388671875, 2.9754814453125, 2.97518603515625]",tokens/s,21.477302281444015,kWh,8.515815635282347e-05,9.392951494166061e-06,3.984087369366045e-05,0.00013439198154064998,tokens/kWh,468777.96783541125,,s,630,29.331110233306877,0.046557317830645845,0.0010496275801741895,0.04646590423583984,0.04767670440673828,0.04807800559997558,0.0502185495376587,"[0.05121007919311524, 0.04676198577880859, 0.04595619201660156, 0.04596963119506836, 0.046577919006347654, 0.04548448181152344, 0.045395969390869144, 0.045219520568847656, 0.045150527954101564, 0.045453311920166016, 0.04568678283691406, 0.045819232940673825, 0.046469791412353516, 0.046010303497314456, 0.04602671813964844, 0.04694230270385742, 0.04676563262939453, 0.04567903900146485, 0.04651212692260742, 0.04582329559326172, 0.04522054290771484, 0.046061569213867185, 0.04590111923217773, 0.046489727020263674, 0.04754489517211914, 0.04738252639770508, 0.04661248016357422, 0.04690739059448242, 0.04629708862304688, 0.04676124954223633, 0.047042560577392575, 0.047921886444091795, 0.048233600616455076, 0.048391040802001954, 0.04790630340576172, 0.04743423843383789, 0.047838336944580076, 0.04603993606567383, 0.046980350494384766, 0.04631833648681641, 0.04716134262084961, 0.04617216110229492, 0.04578470230102539, 0.04546598434448242, 0.04597350311279297, 0.046448673248291016, 0.04592544174194336, 0.04641475296020508, 0.046558494567871096, 0.04665008163452149, 0.04635443115234375, 0.04683161544799805, 0.046936065673828124, 0.04730220794677734, 0.04746284866333008, 0.04715520095825195, 0.048500160217285156, 0.04773535919189453, 0.05026102447509766, 0.04816073608398438, 0.048728256225585936, 0.04712326431274414, 0.04798054504394531, 0.04765718460083008, 
0.046465023040771485, 0.04576665496826172, 0.046163105010986326, 0.04588355255126953, 0.045840511322021486, 0.04663312149047852, 0.0456011848449707, 0.047271934509277344, 0.04693196868896484, 0.046650623321533205, 0.04660915374755859, 0.04618441772460938, 0.04522192001342774, 0.04713216018676758, 0.04566579055786133, 0.04602982330322265, 0.046383102416992186, 0.046723072052001956, 0.046867904663085935, 0.046047809600830075, 0.04786156845092773, 0.04678416061401367, 0.04600608062744141, 0.04607567977905273, 0.04543993759155274, 0.045748222351074216, 0.04561219024658203, 0.04554166412353516, 0.045905696868896485, 0.04542544174194336, 0.045477760314941405, 0.045587711334228516, 0.04517593765258789, 0.04645862579345703, 0.045862560272216794, 0.046821727752685546, 0.04540825653076172, 0.045350910186767575, 0.045832191467285156, 0.04534067153930664, 0.04523401641845703, 0.04574224090576172, 0.04552220916748047, 0.04646575927734375, 0.045517887115478516, 0.04573004913330078, 0.045583038330078124, 0.045158401489257816, 0.04540108871459961, 0.045292415618896485, 0.04538972854614258, 0.04526716613769531, 0.04563347244262695, 0.045643169403076174, 0.046420417785644534, 0.04590518569946289, 0.046209983825683594, 0.04614128112792969, 0.04582796859741211, 0.047804065704345707, 0.04639324951171875, 0.04665827178955078, 0.04869007873535156, 0.045225696563720705, 0.04625766372680664, 0.04671977615356445, 0.04623759841918945, 0.04600227355957031, 0.045903903961181644, 0.045438945770263674, 0.0459532470703125, 0.0459015998840332, 0.04653184127807617, 0.046197502136230466, 0.04713676834106445, 0.04723846435546875, 0.046823135375976564, 0.04727292633056641, 0.046944255828857424, 0.0462110710144043, 0.04631075286865234, 0.04566902542114258, 0.04588259124755859, 0.04581660842895508, 0.04611811065673828, 0.045943519592285154, 0.04585683059692383, 0.045445247650146486, 0.046079872131347656, 0.047685630798339845, 0.0461578254699707, 0.04572115325927734, 0.04656937789916992, 0.04648963165283203, 0.047327743530273435, 0.04772419357299805, 0.047628639221191406, 0.04791017532348633, 0.048285598754882815, 0.04702239990234375, 0.04655974578857422, 0.04765491104125977, 0.04778937530517578, 0.047424064636230466, 0.05188415908813476, 0.04739446258544922, 0.04679305648803711, 0.04609145736694336, 0.04668105697631836, 0.04772444915771484, 0.047354942321777345, 0.04663100814819336, 0.0469343376159668, 0.046389312744140626, 0.045553375244140625, 0.045771358489990234, 0.04607612609863281, 0.04629286575317383, 0.04607590484619141, 0.04603084945678711, 0.045835838317871094, 0.045752769470214845, 0.04625955200195313, 0.04665731048583984, 0.04655193710327148, 0.04640563201904297, 0.046333953857421874, 0.04582313537597656, 0.04551971054077148, 0.04573545455932617, 0.04508051300048828, 0.045826591491699216, 0.04573388671875, 0.0458260498046875, 0.04577907180786133, 0.046245761871337894, 0.045690879821777344, 0.045780990600585936, 0.045213695526123046, 0.04569036865234375, 0.0460313606262207, 0.04663849639892578, 0.04924476623535156, 0.045824001312255856, 0.04557580947875976, 0.04595750427246094, 0.04611686325073242, 0.04620470428466797, 0.047144577026367186, 0.04783135986328125, 0.046588191986083986, 0.04637286376953125, 0.04645788955688476, 0.046373855590820315, 0.04617830276489258, 0.046790271759033206, 0.046711166381835936, 0.04693926239013672, 0.04803647994995117, 0.047742977142333984, 0.04725376129150391, 0.046507999420166014, 0.04648963165283203, 0.045916160583496096, 0.045864959716796876, 0.04592227172851562, 
0.04599776077270508, 0.04685798263549805, 0.04586310577392578, 0.04627433776855469, 0.045736446380615234, 0.045896064758300784, 0.04583545684814453, 0.04632163238525391, 0.04612566375732422, 0.04575436782836914, 0.045735553741455076, 0.04535744094848633, 0.04628652954101563, 0.04767571258544922, 0.046448638916015625, 0.04533603286743164, 0.04595356750488281, 0.046202880859375, 0.046034942626953124, 0.0457751350402832, 0.04583958435058594, 0.04611532974243164, 0.04824371337890625, 0.04588825607299805, 0.045310462951660156, 0.0455711669921875, 0.04545808029174805, 0.045709312438964846, 0.04574003219604492, 0.04525993728637695, 0.04746681594848633, 0.0456440315246582, 0.04520297622680664, 0.04519193649291992, 0.04493926239013672, 0.04551270294189453, 0.04486348724365234, 0.045352001190185544, 0.04618745422363281, 0.04687388610839844, 0.04742627334594727, 0.046712833404541014, 0.04612422561645508, 0.04550537490844726, 0.04569606399536133, 0.04489104080200195, 0.04532633590698242, 0.04540150451660156, 0.04553993606567383, 0.04569702529907226, 0.04529110336303711, 0.044988414764404294, 0.045597087860107424, 0.04589910507202148, 0.04558028793334961, 0.04595369720458985, 0.04622915267944336, 0.04558198547363281, 0.04650595092773437, 0.0454315185546875, 0.04557385635375977, 0.04674956893920899, 0.04573225784301758, 0.045518848419189455, 0.04591001510620117, 0.045400062561035154, 0.045625343322753906, 0.045270751953125, 0.04544960021972656, 0.045813663482666016, 0.045227935791015625, 0.04487491226196289, 0.04478047943115234, 0.044980224609375, 0.04481024169921875, 0.04571529769897461, 0.045152416229248045, 0.044988414764404294, 0.04507212829589844, 0.04499203109741211, 0.04534457778930664, 0.045249439239501955, 0.04592844772338867, 0.04621311950683594, 0.04569702529907226, 0.04632918548583984, 0.045952926635742186, 0.04582003021240234, 0.045886302947998045, 0.045454368591308594, 0.04589376068115234, 0.04615385437011719, 0.04642275238037109, 0.04653263854980469, 0.04673247909545898, 0.0454536018371582, 0.04571718215942383, 0.04592313766479492, 0.04565135955810547, 0.04576662445068359, 0.04597414398193359, 0.04661043167114258, 0.0470687370300293, 0.04564361572265625, 0.04817763137817383, 0.04555174255371094, 0.04598281478881836, 0.04503647994995117, 0.04501619338989258, 0.04505481719970703, 0.04516044616699219, 0.04524643325805664, 0.046617855072021486, 0.04484790420532227, 0.04516352081298828, 0.04646604919433594, 0.046276607513427735, 0.045716800689697266, 0.04572639846801758, 0.046308639526367185, 0.04614140701293945, 0.04637494277954102, 0.0458837776184082, 0.045303489685058596, 0.04643088150024414, 0.045524673461914064, 0.04562982559204101, 0.04650569534301758, 0.046782272338867184, 0.04703478240966797, 0.046446048736572265, 0.046161663055419924, 0.04581660842895508, 0.0457665901184082, 0.04556911849975586, 0.04544803237915039, 0.04539740753173828, 0.04582883071899414, 0.045107200622558595, 0.04518016052246094, 0.04527763366699219, 0.04526726531982422, 0.045592575073242186, 0.045791233062744144, 0.04513587188720703, 0.04573990249633789, 0.04959990310668945, 0.04595785522460937, 0.04612710571289062, 0.04544636917114258, 0.04500086212158203, 0.04504556655883789, 0.04503241729736328, 0.04541628646850586, 0.046202880859375, 0.0457540168762207, 0.04618889617919922, 0.04705811309814453, 0.04629196929931641, 0.04644172668457031, 0.04717728042602539, 0.04636979293823242, 0.04667391967773438, 0.046729217529296874, 0.04793753433227539, 0.04926259231567383, 0.05007747268676758, 0.04748028945922852, 
0.046981887817382814, 0.04678847885131836, 0.04681296157836914, 0.046712257385253905, 0.04734659194946289, 0.04703337478637695, 0.04662112045288086, 0.04732915115356445, 0.04697974395751953, 0.046449951171875, 0.047020736694335936, 0.047435935974121095, 0.04583411026000977, 0.045615039825439456, 0.04680704116821289, 0.0467784309387207, 0.050114559173583983, 0.04756598281860352, 0.047612350463867185, 0.04783145523071289, 0.04867071914672851, 0.047816703796386716, 0.04797862243652344, 0.047938655853271485, 0.04740380859375, 0.048717697143554686, 0.046731201171875, 0.04673555374145508, 0.047421440124511716, 0.046900672912597655, 0.04680966567993164, 0.04714873504638672, 0.04731900787353516, 0.047300224304199216, 0.04690198516845703, 0.04711401748657226, 0.047492927551269534, 0.04693443298339844, 0.047497215270996096, 0.047628288269042966, 0.04720435333251953, 0.047478782653808595, 0.0471690559387207, 0.04706022262573242, 0.04613606262207031, 0.04631552124023437, 0.046515743255615236, 0.04662422561645508, 0.0463403205871582, 0.04639811325073242, 0.047152511596679686, 0.047155967712402345, 0.04722892761230469, 0.046888927459716796, 0.047863838195800784, 0.04686438369750977, 0.04682342529296875, 0.04698031997680664, 0.04734553527832031, 0.046582687377929685, 0.04697449493408203, 0.047402782440185545, 0.04732524871826172, 0.04715760040283203, 0.04696937561035156, 0.046889793395996096, 0.046504894256591794, 0.04635238265991211, 0.047124481201171874, 0.048345088958740234, 0.04732460784912109, 0.04707788848876953, 0.047575103759765626, 0.04748489761352539, 0.04743695831298828, 0.04813094329833984, 0.04681727981567383, 0.04664022445678711, 0.04635740661621094, 0.046556961059570315, 0.04669379043579101, 0.046097217559814455, 0.046034942626953124, 0.045658111572265625, 0.04592639923095703, 0.046570720672607424, 0.0456196174621582, 0.046208576202392576, 0.04614640045166016, 0.045537246704101565, 0.045782398223876954, 0.04604787063598633, 0.04660838317871094, 0.04684182357788086, 0.04620495986938476, 0.047001377105712894, 0.04637308883666992, 0.048131935119628905, 0.04734787368774414, 0.04708761596679688, 0.0474439697265625, 0.047052192687988284, 0.046563934326171875, 0.04682342529296875, 0.047052192687988284, 0.04668476867675781, 0.04726416015625, 0.04706524658203125, 0.046695518493652347, 0.04694851303100586, 0.04776406478881836, 0.048053470611572266, 0.04809807968139648, 0.048710689544677735, 0.04766409683227539, 0.04840179061889648, 0.047665790557861326, 0.0473436164855957, 0.04801536178588867, 0.047032318115234374, 0.0469749755859375, 0.04656118392944336, 0.047112289428710936, 0.04783718490600586, 0.048226016998291016, 0.047634719848632816, 0.046604095458984376, 0.046677566528320315, 0.046043647766113284, 0.04621884918212891, 0.04712307357788086, 0.04681878280639649, 0.046925342559814454, 0.04734454345703125, 0.046497791290283204, 0.04617184066772461, 0.04644076919555664, 0.04685004806518555, 0.0468746223449707, 0.047695873260498046, 0.046852096557617184, 0.04692172622680664, 0.05264323043823242, 0.047079486846923826, 0.046701087951660156, 0.04677977752685547, 0.04700409698486328, 0.046354881286621095, 0.046763774871826175, 0.04680287933349609, 0.04753343963623047, 0.047018688201904295, 0.047295841217041015, 0.04696656036376953, 0.04695539093017578, 0.04661862564086914, 0.04657766342163086, 0.04733865737915039, 0.047586143493652346, 0.04755046463012695, 0.047210689544677734, 0.04832236862182617, 0.04707478332519531, 0.04698780822753906, 0.047124481201171874, 0.0465797119140625, 0.047195873260498046, 
0.04719139099121094, 0.046879680633544925, 0.047443775177001955, 0.04701715087890625, 0.04712550354003906, 0.0474370231628418, 0.04729753494262695, 0.04737615966796875, 0.04748287963867188, 0.047531169891357423, 0.04772687911987305, 0.04781318283081055, 0.05139865493774414, 0.0475392951965332, 0.04760464096069336, 0.047373950958251955, 0.046878753662109376, 0.04711663818359375, 0.047851520538330077, 0.04735795211791992, 0.046831584930419924, 0.047056350708007816, 0.046828033447265625, 0.046886974334716794, 0.04700070571899414, 0.04644339370727539, 0.047728641510009766, 0.047265792846679686, 0.04677427291870117, 0.047230976104736325, 0.04651212692260742, 0.048130046844482424, 0.046827518463134765, 0.05425148773193359, 0.04660163116455078, 0.04679510498046875, 0.0464439697265625, 0.04616278457641602, 0.04680499267578125, 0.04655001449584961, 0.04639846420288086, 0.046442497253417966, 0.047323135375976565, 0.0476611213684082, 0.04728582382202148, 0.047976577758789066, 0.04731110382080078, 0.04765193557739258, 0.04711721420288086, 0.046481182098388675, 0.04664137649536133, 0.04582374572753906, 0.04682086563110351, 0.046755615234375, 0.04631024169921875, 0.05201747131347656, 0.04629183959960938, 0.045714366912841795, 0.045995361328125, 0.046213790893554686, 0.04653814315795898, 0.046147777557373044, 0.046085697174072265, 0.046529022216796875, 0.04694992065429687]",tokens/s,21.478900559468247,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,1892.057088,2975.727616,0.0,2573.205504,2499.110912,s,1,11.675587890625,11.675587890625,0.0,11.675587890625,11.675587890625,11.675587890625,11.675587890625,[11.675587890625],,kWh,6.780007844166296e-05,7.471423946569921e-06,2.4100019280007268e-05,9.937152166824015e-05,,MB,1999.691776,3032.35072,0.0,2615.148544,2344.859136,s,10,9.592548950195313,0.9592548950195313,0.0006210982009440326,0.95938427734375,0.9598419677734376,0.9599385925292969,0.9600158923339844,"[0.957951171875, 0.9597216186523437, 0.9588151245117188, 0.9600352172851563, 0.9598204956054688, 0.959634521484375, 0.9597730712890625, 0.9587039184570313, 0.959134033203125, 0.9589597778320312]",tokens/s,266.87380103990773,kWh,2.8054276040531378e-05,3.0938902053364327e-06,1.8612161354363506e-05,4.976032760023132e-05,tokens/kWh,5144660.663343582,MB,1999.691776,3061.710848,0.0,2644.508672,2355.667456,s,10,28.193939208984375,2.8193939208984373,0.012987559169918141,2.8196749267578127,2.836574145507812,2.838882043457031,2.840728361816406,"[2.836061279296875, 2.825163330078125, 2.8077255859375, 2.811230712890625, 2.793153076171875, 2.82187939453125, 2.84118994140625, 2.81818603515625, 2.81882373046875, 
2.820526123046875]",tokens/s,22.34522800557228,kWh,8.148025194696646e-05,8.987849777098214e-06,3.75581891374358e-05,0.00012802629086150046,tokens/kWh,492086.4267492819,,s,630,28.19180489349364,0.04474889665633913,0.0006734947470283583,0.0446308650970459,0.04547315368652344,0.045891248321533205,0.04734571071624757,"[0.04594287872314453, 0.04462540817260742, 0.04508108901977539, 0.04489625549316406, 0.04473855972290039, 0.04458041763305664, 0.0445997428894043, 0.04475904083251953, 0.04437206268310547, 0.0446317138671875, 0.04562150573730469, 0.044799999237060545, 0.045053855895996094, 0.04521779251098633, 0.04511753463745117, 0.0449249267578125, 0.045090335845947266, 0.04532886505126953, 0.044576766967773435, 0.044679168701171876, 0.04528332901000977, 0.04442716979980469, 0.044402015686035155, 0.0444218864440918, 0.04414233779907226, 0.04423721694946289, 0.04466057586669922, 0.04434700775146484, 0.044413185119628905, 0.04486156845092774, 0.044101024627685545, 0.044388351440429685, 0.045664768218994144, 0.04534076690673828, 0.04478688049316406, 0.04494009780883789, 0.04582195281982422, 0.04613702392578125, 0.04599635314941406, 0.04603286361694336, 0.04626230239868164, 0.045096031188964845, 0.04506880187988281, 0.04554153442382813, 0.044577022552490235, 0.045055999755859374, 0.04527635192871094, 0.04540480041503906, 0.04497395324707031, 0.04525199890136719, 0.04472659301757813, 0.04509321594238281, 0.04500419235229492, 0.04519356918334961, 0.04468172836303711, 0.044990463256835936, 0.04649574279785156, 0.04472857666015625, 0.0451165771484375, 0.04465142440795898, 0.045120864868164065, 0.04495804977416992, 0.045551265716552734, 0.045533439636230466, 0.04440703964233399, 0.04434246444702149, 0.04497894287109375, 0.045801055908203124, 0.04597398376464844, 0.04483212661743164, 0.045316734313964845, 0.04516460800170898, 0.045657920837402347, 0.04472639846801758, 0.04509286499023438, 0.04866252899169922, 0.045123584747314455, 0.045211647033691404, 0.04474380874633789, 0.044415870666503904, 0.0443834228515625, 0.04470867156982422, 0.043976703643798826, 0.043894783020019534, 0.04430233764648438, 0.044216320037841796, 0.044335105895996096, 0.0444026870727539, 0.04428595352172852, 0.04480547332763672, 0.04522076797485351, 0.044429054260253904, 0.04475689697265625, 0.04415830230712891, 0.04453260803222656, 0.04517670440673828, 0.044878974914550784, 0.04484975814819336, 0.04418291091918945, 0.04447939300537109, 0.04473651123046875, 0.04466447830200195, 0.04429564666748047, 0.04435647964477539, 0.04475289535522461, 0.044543998718261715, 0.044211231231689456, 0.04460028839111328, 0.044873119354248044, 0.045436672210693356, 0.04499919891357422, 0.04547577667236328, 0.04425481414794922, 0.04438409423828125, 0.04426601409912109, 0.04462992095947266, 0.04498995208740234, 0.04470991897583008, 0.044717025756835935, 0.045066207885742185, 0.04456006240844727, 0.04435788726806641, 0.04861142349243164, 0.044765281677246097, 0.04430838394165039, 0.04443878555297852, 0.04500038528442383, 0.04528787231445312, 0.04439846420288086, 0.044404735565185545, 0.04519680023193359, 0.04511747360229492, 0.045109695434570315, 0.044396575927734376, 0.04626588821411133, 0.045144126892089846, 0.04445775985717773, 0.04405107116699219, 0.04413622283935547, 0.044068641662597656, 0.04451168060302734, 0.044384223937988285, 0.04411804962158203, 0.04399212646484375, 0.04408006286621094, 0.044044288635253906, 0.04468300628662109, 0.04412646484375, 0.04412211227416992, 0.0441343994140625, 0.04412416076660156, 0.04388044738769531, 
0.044439552307128906, 0.04435324859619141, 0.04394188690185547, 0.044445377349853515, 0.04506623840332031, 0.04692233657836914, 0.04452729415893555, 0.04439686584472656, 0.04418048095703125, 0.04422553634643555, 0.04403785705566406, 0.04477571105957031, 0.04434310531616211, 0.04436150360107422, 0.044619937896728516, 0.0444582405090332, 0.0444901123046875, 0.04436553573608398, 0.04449577713012695, 0.04414579010009766, 0.044723072052001954, 0.043896800994873045, 0.044189727783203125, 0.04426339340209961, 0.044805728912353515, 0.04630963134765625, 0.044376129150390624, 0.045010398864746094, 0.04457113647460938, 0.04526265716552735, 0.04483488082885742, 0.044507423400878904, 0.044734462738037106, 0.04401126480102539, 0.04509926223754883, 0.04439420700073242, 0.04470403289794922, 0.04738671875, 0.04434505462646485, 0.04472284698486328, 0.04427775955200195, 0.044303455352783204, 0.04431145477294922, 0.0452732162475586, 0.0442262077331543, 0.04404659271240234, 0.04424832153320313, 0.044116703033447266, 0.04437609481811523, 0.04456035232543945, 0.04510435104370117, 0.04966870498657226, 0.0457852783203125, 0.0448135986328125, 0.044738880157470705, 0.04440099334716797, 0.044550209045410155, 0.044959583282470704, 0.04443116760253906, 0.04426339340209961, 0.04392339324951172, 0.04384403228759766, 0.043894783020019534, 0.04652790451049805, 0.04479036712646484, 0.044349441528320314, 0.04426876831054687, 0.044739360809326174, 0.0444661750793457, 0.04411923217773438, 0.04410860824584961, 0.04389807891845703, 0.04416707229614258, 0.04490729522705078, 0.04441916656494141, 0.04432304000854492, 0.044197662353515625, 0.04424703979492187, 0.044316673278808595, 0.04475904083251953, 0.04431187057495117, 0.04516902542114258, 0.04432044982910156, 0.044155006408691404, 0.04433299255371094, 0.0443583984375, 0.04396588897705078, 0.044197662353515625, 0.044418750762939455, 0.04428297424316406, 0.044058433532714845, 0.04422796630859375, 0.04449244689941406, 0.04429308700561523, 0.04415049743652344, 0.04473680114746094, 0.044535839080810546, 0.04749871826171875, 0.044488800048828124, 0.04386038589477539, 0.04525875091552734, 0.0449536018371582, 0.04418547058105469, 0.04429654312133789, 0.0444659538269043, 0.04416716766357422, 0.04458905410766602, 0.044315807342529295, 0.0442289924621582, 0.04424079895019531, 0.0437902717590332, 0.044534175872802735, 0.04455984115600586, 0.043982688903808596, 0.044007488250732425, 0.0439796142578125, 0.04374854278564453, 0.044135231018066406, 0.044578369140625, 0.045002559661865234, 0.0446196174621582, 0.044567329406738285, 0.04405452728271484, 0.044168704986572264, 0.04471244812011719, 0.04426956939697266, 0.04409929656982422, 0.044163360595703124, 0.04414230346679687, 0.044068382263183596, 0.044491519927978514, 0.04724531173706055, 0.04467004776000977, 0.04479068756103516, 0.045022750854492186, 0.044507007598876956, 0.044146625518798825, 0.044352161407470704, 0.04408886337280273, 0.043992641448974606, 0.04392556762695313, 0.044715137481689454, 0.04415235137939453, 0.04418374252319336, 0.043974655151367184, 0.043853824615478515, 0.04398220825195313, 0.04430006408691406, 0.04542265701293945, 0.04411004638671875, 0.04393222427368164, 0.044117919921875, 0.04391945648193359, 0.044115486145019533, 0.04412873458862305, 0.04391526412963867, 0.04416697692871094, 0.043827678680419924, 0.04385353469848633, 0.04393097686767578, 0.044257312774658206, 0.04469811248779297, 0.04428521728515625, 0.0450970573425293, 0.04622323226928711, 0.04636751937866211, 0.04444569778442383, 0.04427516937255859, 
0.04487942504882812, 0.04444054412841797, 0.0446300163269043, 0.04470579147338867, 0.045107105255126956, 0.04469504165649414, 0.04587276840209961, 0.04473955154418945, 0.04452467346191406, 0.04448960113525391, 0.044437503814697264, 0.04443052673339844, 0.0445857925415039, 0.04435753631591797, 0.04493852615356445, 0.0450629768371582, 0.045699073791503904, 0.046483455657958986, 0.04522598266601562, 0.045383007049560546, 0.044841438293457034, 0.04500700759887695, 0.04440067291259766, 0.04430825424194336, 0.04523644638061523, 0.04468656158447266, 0.04471798324584961, 0.044345630645751956, 0.04525731277465821, 0.04524617767333984, 0.04530313491821289, 0.044794334411621096, 0.04405500793457031, 0.04543078231811523, 0.04440627288818359, 0.044050079345703125, 0.044329822540283205, 0.04437427139282227, 0.044639713287353514, 0.044937503814697265, 0.04473360061645508, 0.04443222427368164, 0.04493926239013672, 0.04460748672485351, 0.04458649444580078, 0.044591041564941404, 0.044050079345703125, 0.044225440979003904, 0.0447077751159668, 0.043980510711669925, 0.043800193786621096, 0.044124897003173826, 0.04526406478881836, 0.044740734100341795, 0.04551545715332031, 0.044898078918457034, 0.04455241775512695, 0.044467422485351564, 0.046391422271728516, 0.045281150817871096, 0.04557932662963867, 0.046119873046875, 0.04568076705932617, 0.04512652969360351, 0.045941280364990233, 0.046217697143554684, 0.04506828689575195, 0.04561510467529297, 0.04536028671264648, 0.04507529449462891, 0.04490195083618164, 0.04432940673828125, 0.045149600982666016, 0.04539350509643555, 0.04558950424194336, 0.04581999969482422, 0.04542044830322266, 0.04486348724365234, 0.04470105743408203, 0.045459678649902344, 0.04507635116577149, 0.04461212921142578, 0.04437184143066406, 0.04478579330444336, 0.04493494415283203, 0.04497830581665039, 0.04508067321777344, 0.04527417755126953, 0.044657440185546876, 0.04487798309326172, 0.044355583190917966, 0.04503756713867187, 0.044478462219238284, 0.04431872177124024, 0.04425296020507812, 0.044642463684082034, 0.044637729644775394, 0.044913185119628905, 0.044801025390625, 0.04511641693115234, 0.045219390869140626, 0.04664934539794922, 0.04590636825561523, 0.045147327423095705, 0.04486371231079102, 0.045158016204833985, 0.04464089584350586, 0.04542620849609375, 0.04444601440429687, 0.04462643051147461, 0.045021183013916014, 0.045217025756835935, 0.04504652786254883, 0.0452751350402832, 0.04465641784667969, 0.04528144073486328, 0.045479999542236325, 0.04409142303466797, 0.04487164688110352, 0.044439552307128906, 0.04510044860839844, 0.045099105834960934, 0.04762319946289063, 0.04436057662963867, 0.044611583709716796, 0.04512710571289062, 0.04441337585449219, 0.04450112152099609, 0.0444653434753418, 0.04428678512573242, 0.044802047729492187, 0.044168926239013674, 0.04491702270507812, 0.044521472930908204, 0.04447641754150391, 0.044010688781738284, 0.04520332717895508, 0.044501056671142576, 0.044888671875, 0.044865825653076175, 0.04469145584106445, 0.04472422409057617, 0.044480510711669925, 0.04481782531738281, 0.044507328033447265, 0.045674846649169924, 0.04482204818725586, 0.04525247955322265, 0.04478806304931641, 0.04489804840087891, 0.044251712799072265, 0.0441932487487793, 0.04467548751831055, 0.04431475067138672, 0.046260223388671876, 0.04461116790771484, 0.04429660797119141, 0.04492262268066406, 0.044482654571533206, 0.044184768676757816, 0.04395516967773438, 0.04420115280151367, 0.04446291351318359, 0.04401907348632812, 0.044362369537353515, 0.045241470336914065, 0.04488281631469727, 
0.04443859100341797, 0.045321151733398436, 0.04374528121948242, 0.04406380844116211, 0.044388671875, 0.0445909423828125, 0.044583072662353514, 0.044103649139404295, 0.04381763076782227, 0.04430233764648438, 0.04640768051147461, 0.048216064453125, 0.04461772918701172, 0.04449590301513672, 0.0449463996887207, 0.0448614387512207, 0.04424643325805664, 0.045598785400390626, 0.04503494262695312, 0.04504012680053711, 0.04510335922241211, 0.04530969619750977, 0.04500620651245117, 0.04550003051757812, 0.0454923210144043, 0.04561155319213867, 0.04512396621704102, 0.044834815979003906, 0.04537139129638672, 0.044556289672851565, 0.04489011383056641, 0.04549222564697265, 0.04516681671142578, 0.04526467132568359, 0.0449617919921875, 0.0448713264465332, 0.044650177001953124, 0.044757152557373045, 0.04483327865600586, 0.044649822235107425, 0.044415454864501956, 0.044474464416503906, 0.04447856140136719, 0.045303489685058596, 0.04513539123535156, 0.04459945678710937, 0.04482831954956055, 0.04465526580810547, 0.04500243377685547, 0.04525455856323242, 0.04494588851928711, 0.04509280014038086, 0.044724384307861326, 0.04466828918457031, 0.04459750366210938, 0.044660865783691404, 0.044816638946533205, 0.04484444808959961, 0.0448045768737793, 0.04457670211791992, 0.044249343872070315, 0.04393715286254883, 0.0440857925415039, 0.044318912506103515, 0.044230655670166014, 0.044052352905273436, 0.04400966262817383, 0.04406265640258789, 0.045518592834472654, 0.043896224975585936, 0.04361097717285156, 0.04367488098144531, 0.044274272918701174, 0.044535968780517576, 0.044386302947998044, 0.04461568069458008, 0.04451270294189453, 0.044570976257324216, 0.04451148986816406, 0.04456809616088867, 0.04530963134765625, 0.04427206420898438, 0.044003166198730466, 0.045472862243652344, 0.04498236846923828, 0.04455833435058594, 0.04433011245727539, 0.04446723175048828, 0.04443145751953125, 0.04454256057739258, 0.04436991882324219, 0.04418463897705078, 0.0440780143737793, 0.04437811279296875, 0.043972095489501956, 0.04426803207397461, 0.044326656341552736, 0.044940959930419924, 0.04472892761230469, 0.04488771057128906, 0.04477167892456055, 0.04509286499023438, 0.04485302352905273, 0.04451103973388672, 0.0443109130859375, 0.04509251022338867, 0.044693889617919924, 0.044893566131591796, 0.04483484649658203, 0.045031105041503906, 0.04473750305175781, 0.044808128356933596, 0.045246368408203126, 0.04493116760253906, 0.044783039093017576, 0.04525494384765625, 0.045060382843017575, 0.04551679992675781, 0.0456519660949707, 0.04556367874145508, 0.04530940628051758, 0.04527999877929687, 0.046395008087158206, 0.04630771255493164, 0.04585472106933594, 0.04501708984375, 0.04472012710571289, 0.04467097473144531, 0.0446300163269043, 0.04467622375488281, 0.04439334487915039, 0.04421775817871094, 0.04432547378540039, 0.044012672424316404, 0.0449620475769043, 0.044460670471191406, 0.04558348846435547, 0.04523023986816406, 0.04380950546264648, 0.043507713317871094, 0.044039775848388675, 0.044206497192382815, 0.04457062530517578]",tokens/s,22.346919694573966,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) 
Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,1892.769792,3021.86496,0.0,2619.342848,2543.282176,s,1,11.8207939453125,11.8207939453125,0.0,11.8207939453125,11.8207939453125,11.8207939453125,11.8207939453125,[11.8207939453125],,kWh,7.269159432499539e-05,8.011016950254458e-06,2.6610854622001467e-05,0.00010731346589725131,,MB,2006.290432,3078.488064,0.0,2661.285888,2389.0304,s,10,0.8083549728393555,0.08083549728393555,0.0002769330854676988,0.08083545684814453,0.08108773193359375,0.08125356369018555,0.08138622909545898,"[0.080698974609375, 0.0805277099609375, 0.08093577575683594, 0.08141939544677734, 0.08073513793945312, 0.08093974304199218, 0.08099180603027344, 0.08063123321533203, 0.08042431640625, 0.08105088043212891]",tokens/s,3166.925529025909,kWh,2.3960333429303145e-06,2.6413137160433987e-07,1.58599853655733e-06,4.246163251091984e-06,tokens/kWh,60289721.53488554,MB,2006.622208,3107.848192,0.0,2690.646016,2399.83872,s,10,27.935753662109377,2.7935753662109377,0.025513553004527786,2.7938515625,2.8272446044921873,2.8321114379882815,2.8360049047851565,"[2.768962890625, 2.765713134765625, 2.79670849609375, 2.806996337890625, 2.8261630859375, 2.76464501953125, 2.79099462890625, 2.765722900390625, 2.812868896484375, 2.836978271484375]",tokens/s,22.551745251623537,kWh,8.109040883956894e-05,8.944266997944852e-06,3.4955641807843437e-05,0.0001249903176453572,tokens/kWh,504039.0422780892,,s,630,27.9336492805481,0.04433912584213984,0.000817982184830969,0.04422115325927735,0.04527696800231933,0.04566677894592285,0.04741294059753418,"[0.04460003280639648, 0.044518497467041014, 0.04446675109863281, 0.04404268646240234, 0.04413222503662109, 0.04428739166259766, 0.043444862365722654, 0.04453318405151367, 0.044786334991455075, 0.045350910186767575, 0.04436902236938477, 0.043336513519287106, 0.04421795272827148, 0.04444208145141602, 0.04392758560180664, 0.04396569442749024, 0.04362457656860352, 0.04401532745361328, 0.04328268814086914, 0.04335590362548828, 0.04347548675537109, 0.04337289428710937, 0.044330528259277344, 0.04459772872924805, 0.04402499389648438, 0.04353519821166992, 0.04364054489135742, 0.04336624145507813, 0.0434323844909668, 0.04334182357788086, 0.04395596694946289, 0.044434814453125, 0.04419673538208008, 0.04399513626098633, 0.04388454437255859, 0.043558784484863285, 0.043558048248291015, 0.04353737640380859, 0.04306534576416016, 0.043417598724365236, 0.043665409088134766, 0.043859745025634764, 0.04386633682250977, 0.04391526412963867, 0.04357734298706055, 0.044307872772216796, 0.04409814453125, 0.04568678283691406, 0.04430809783935547, 0.044149120330810546, 0.04402499389648438, 0.04388950347900391, 0.043525409698486325, 0.044509918212890624, 0.04431039810180664, 0.04380873489379883, 0.04408902359008789, 0.04346928024291992, 0.04370841598510742, 0.04389683151245117, 0.044144641876220705, 0.043396446228027345, 0.0431479377746582, 0.04410764694213867, 0.043960319519042966, 0.043821056365966796, 0.043184192657470706, 0.043896671295166015, 0.04315760040283203, 0.04336054229736328, 0.04325142288208008, 0.04338483047485352, 0.04338483047485352, 0.042931488037109375, 0.042995838165283205, 0.04335004806518555, 0.04353081512451172, 0.04392755126953125, 0.04389199829101562, 0.04389859390258789, 0.04386076736450195, 0.04412457656860352, 0.04403385543823242, 0.04349715042114258, 0.04374262237548828, 0.043657184600830075, 0.0441530876159668, 
0.04390572738647461, 0.043665409088134766, 0.04338483047485352, 0.04378009414672852, 0.04398604965209961, 0.04327104187011719, 0.04351171112060547, 0.04390457534790039, 0.04357993698120117, 0.04397983932495117, 0.044072097778320315, 0.044759872436523435, 0.043676639556884764, 0.043801822662353516, 0.04484790420532227, 0.04444569778442383, 0.0440893440246582, 0.043812862396240236, 0.04373299026489258, 0.04447647857666016, 0.044277694702148436, 0.04451327896118164, 0.04410480117797851, 0.04392643356323242, 0.04423177719116211, 0.04400739288330078, 0.04427872085571289, 0.04410367965698242, 0.0442347526550293, 0.04380403137207031, 0.04422719955444336, 0.044153854370117186, 0.04378236770629883, 0.04388943862915039, 0.044049503326416016, 0.04410460662841797, 0.0446668815612793, 0.04440678405761719, 0.044947265625, 0.04602675247192383, 0.04509900665283203, 0.044300289154052735, 0.04525782394409179, 0.04506451034545898, 0.044461856842041014, 0.04486812973022461, 0.044552478790283206, 0.04531609725952149, 0.045618560791015624, 0.04430092620849609, 0.04426342391967773, 0.0442916488647461, 0.04384198379516602, 0.04420127868652344, 0.04421027374267578, 0.04658441543579102, 0.044836353302001954, 0.044275646209716794, 0.04430905532836914, 0.04474060821533203, 0.043990718841552735, 0.044063041687011716, 0.04401356887817383, 0.04448873519897461, 0.04414051055908203, 0.044237087249755856, 0.044010208129882815, 0.043842559814453126, 0.04394931030273438, 0.04361203384399414, 0.044301216125488284, 0.04365625762939453, 0.04382992172241211, 0.04370252990722656, 0.04338822555541992, 0.043733535766601564, 0.04383145523071289, 0.043782112121582034, 0.04432191848754883, 0.047231327056884764, 0.044265087127685544, 0.04431967926025391, 0.044042240142822264, 0.04388982391357422, 0.043770721435546875, 0.043794559478759765, 0.043765377044677735, 0.043939998626708984, 0.043833408355712894, 0.04435561752319336, 0.04441088104248047, 0.044291839599609376, 0.044385536193847656, 0.044565502166748046, 0.044729984283447266, 0.04459148788452148, 0.04412211227416992, 0.04407059097290039, 0.043939422607421875, 0.0443276481628418, 0.04468326568603516, 0.045780990600585936, 0.04458905410766602, 0.04389683151245117, 0.04385532760620117, 0.043823646545410155, 0.04397395324707031, 0.04440956878662109, 0.04505513763427734, 0.04422739028930664, 0.04404627227783203, 0.04462419128417969, 0.04422617721557617, 0.044553409576416014, 0.043994049072265624, 0.04439603042602539, 0.043999744415283204, 0.04430847930908203, 0.044093441009521485, 0.04450857543945313, 0.0473620491027832, 0.04483935928344727, 0.044980384826660155, 0.04449683380126953, 0.04418566513061523, 0.044361408233642576, 0.044722496032714845, 0.04530323028564453, 0.04481081771850586, 0.044834815979003906, 0.04463616180419922, 0.04524236679077148, 0.044943359375, 0.04544918441772461, 0.04454608154296875, 0.0445665283203125, 0.04436787033081055, 0.0448798713684082, 0.04511449432373047, 0.04424179077148437, 0.044951553344726565, 0.044659809112548826, 0.04463692855834961, 0.04498444747924805, 0.04465462493896484, 0.044793281555175785, 0.04463264083862305, 0.04469881439208984, 0.04412659072875977, 0.04417580795288086, 0.04456220626831055, 0.0445618896484375, 0.0447020149230957, 0.044032161712646484, 0.04425551986694336, 0.04425833511352539, 0.04443593597412109, 0.044558849334716794, 0.04398105621337891, 0.04420377731323242, 0.04415283203125, 0.0442531852722168, 0.044611583709716796, 0.04389270401000977, 0.0455445442199707, 0.04569887924194336, 0.046450912475585936, 
0.04446556854248047, 0.044448318481445315, 0.043788288116455076, 0.044201118469238285, 0.04405539321899414, 0.04370166397094727, 0.04467363357543945, 0.044243968963623044, 0.043966846466064455, 0.044833118438720704, 0.04468703842163086, 0.04549897766113281, 0.04555535888671875, 0.045082977294921875, 0.04457823944091797, 0.04467097473144531, 0.04479852676391602, 0.045112319946289066, 0.04578406524658203, 0.045653472900390624, 0.04526649475097656, 0.04527407836914062, 0.04542259216308594, 0.045328384399414064, 0.04601241683959961, 0.04561663818359375, 0.04501760101318359, 0.044966144561767576, 0.045049598693847656, 0.04495939254760742, 0.044826976776123045, 0.04531731033325195, 0.044982784271240236, 0.04429011154174805, 0.04573747253417969, 0.04506204986572265, 0.04467305755615234, 0.044660961151123044, 0.04409404754638672, 0.044907745361328126, 0.04514281463623047, 0.04517279815673828, 0.045302974700927735, 0.0457322883605957, 0.04583865737915039, 0.04567766571044922, 0.04491561508178711, 0.044799999237060545, 0.04458700942993164, 0.044666816711425784, 0.044329025268554687, 0.044537761688232425, 0.044379936218261716, 0.044773696899414066, 0.044688545227050784, 0.04436220932006836, 0.043596160888671874, 0.04429619216918945, 0.04404585647583008, 0.04405500793457031, 0.04364879989624024, 0.04397689437866211, 0.04342502212524414, 0.04389564895629883, 0.044455841064453126, 0.04415427017211914, 0.04366009521484375, 0.043417377471923826, 0.043345054626464846, 0.04384643173217773, 0.04351391983032227, 0.04337619018554688, 0.04412255859375, 0.04356880187988281, 0.043106849670410154, 0.0433744010925293, 0.04331744003295898, 0.04341945648193359, 0.043028480529785154, 0.04326563262939453, 0.04386579132080078, 0.044347518920898436, 0.043917919158935545, 0.04354246520996094, 0.04367776107788086, 0.0431739501953125, 0.043792320251464845, 0.04384947204589844, 0.044386558532714844, 0.04365430450439453, 0.04361238479614258, 0.0437289924621582, 0.044835296630859375, 0.04427907180786133, 0.04416592025756836, 0.04335615921020508, 0.043506847381591794, 0.043378913879394534, 0.04345100784301758, 0.04418764877319336, 0.043853824615478515, 0.04416652679443359, 0.04404867172241211, 0.043818431854248045, 0.04428451156616211, 0.043888961791992184, 0.044039424896240235, 0.04335283279418945, 0.04340022277832031, 0.04768252944946289, 0.04371775817871094, 0.044407135009765626, 0.04360246276855469, 0.043069438934326174, 0.043493377685546876, 0.04377964782714844, 0.04465299224853515, 0.04438425445556641, 0.044349441528320314, 0.04452556610107422, 0.044568256378173826, 0.043888607025146485, 0.04477779388427734, 0.043708446502685544, 0.04465852737426758, 0.043745441436767576, 0.043582977294921874, 0.043450401306152346, 0.04399100875854492, 0.04599552154541016, 0.04644124984741211, 0.04491491317749023, 0.04494255828857422, 0.04685606384277344, 0.04416195297241211, 0.04399513626098633, 0.04394742584228516, 0.04365555191040039, 0.043919582366943356, 0.04378992080688476, 0.044351806640625, 0.04342748641967773, 0.04359161758422852, 0.043469310760498044, 0.04368998336791992, 0.04387430572509766, 0.04357324981689453, 0.04347903823852539, 0.04429523086547851, 0.04387884902954101, 0.04355123138427734, 0.043755168914794924, 0.046899265289306644, 0.04440524673461914, 0.04482038497924805, 0.044296001434326174, 0.0440300178527832, 0.04427494430541992, 0.04454444885253906, 0.043847999572753905, 0.04379443359375, 0.0436756477355957, 0.044279041290283205, 0.04469833755493164, 0.04454524612426758, 0.04479180908203125, 0.04473324966430664, 
0.04450028610229492, 0.04383382415771484, 0.044310752868652346, 0.04432032012939453, 0.043538944244384765, 0.04417670440673828, 0.04753705596923828, 0.04386787033081055, 0.04416921615600586, 0.04442550277709961, 0.04459312057495117, 0.044300033569335935, 0.04386816024780273, 0.04359481430053711, 0.04424560165405273, 0.04386851119995117, 0.04471356964111328, 0.04393939208984375, 0.04443376159667969, 0.04393584060668945, 0.044062080383300783, 0.043915454864501956, 0.04455878448486328, 0.04441907119750976, 0.04415462493896485, 0.04418115234375, 0.0438004150390625, 0.044149505615234376, 0.04425296020507812, 0.044172992706298826, 0.04415875244140625, 0.04492393493652344, 0.044523231506347655, 0.04464831924438477, 0.04434956741333008, 0.04478771209716797, 0.04422419357299805, 0.04409366226196289, 0.04381612777709961, 0.04443638229370117, 0.044097343444824216, 0.0438744010925293, 0.044200065612792966, 0.04369612884521484, 0.04376502227783203, 0.0436190071105957, 0.04374323272705078, 0.04380652618408203, 0.04375548934936523, 0.04369635009765625, 0.04336640167236328, 0.044273662567138675, 0.044074081420898435, 0.043295295715332034, 0.043460224151611326, 0.04353734588623047, 0.04343315124511719, 0.04325833511352539, 0.04325315093994141, 0.043432415008544924, 0.04364518356323242, 0.043407360076904294, 0.043407360076904294, 0.04416508865356445, 0.04398227310180664, 0.045683296203613284, 0.043404735565185544, 0.04378630447387695, 0.04341984176635742, 0.04327622222900391, 0.04323980712890625, 0.04376166534423828, 0.04390864181518555, 0.04395990371704102, 0.04423884963989258, 0.04356800079345703, 0.043870208740234375, 0.04381110382080078, 0.04390063858032227, 0.04381459045410156, 0.04365283203125, 0.04327193450927734, 0.04309715270996094, 0.044163169860839846, 0.04328531265258789, 0.04390467071533203, 0.04390544128417969, 0.04456857681274414, 0.044367679595947264, 0.044120254516601565, 0.04376688003540039, 0.043727649688720706, 0.04416320037841797, 0.04327219009399414, 0.04425737762451172, 0.04428380966186524, 0.043277694702148435, 0.043700862884521484, 0.04356454467773437, 0.04414924621582031, 0.04508224105834961, 0.04363257598876953, 0.04378854370117188, 0.043872447967529295, 0.043614208221435545, 0.044335105895996096, 0.04432217788696289, 0.0447529296875, 0.044170974731445316, 0.04489100646972656, 0.04445792007446289, 0.04427372741699219, 0.045008289337158204, 0.04504435348510742, 0.044528926849365234, 0.044593311309814455, 0.046330398559570315, 0.04382310485839844, 0.044232704162597655, 0.0445843505859375, 0.04394454574584961, 0.044197887420654294, 0.04546355056762695, 0.045235870361328125, 0.04440127944946289, 0.045058879852294925, 0.04469164657592774, 0.04521555328369141, 0.044520030975341796, 0.04532579040527344, 0.044954143524169925, 0.04535504150390625, 0.04494979095458984, 0.046106464385986326, 0.04481244659423828, 0.04494131088256836, 0.04520959854125976, 0.04517068862915039, 0.04765491104125977, 0.04628665542602539, 0.04554572677612305, 0.04522367858886719, 0.04617030334472656, 0.045574142456054685, 0.045453407287597655, 0.045365150451660154, 0.04548723220825195, 0.04489603042602539, 0.04525884628295898, 0.04544092941284179, 0.04562944030761719, 0.04537497711181641, 0.04523263931274414, 0.044862590789794925, 0.04451212692260742, 0.044996383666992185, 0.04562966537475586, 0.045179134368896486, 0.045199104309082035, 0.0474337272644043, 0.04607590484619141, 0.04664710235595703, 0.04474079895019531, 0.044218112945556644, 0.04463232040405273, 0.044972030639648435, 0.04475699234008789, 
0.045343902587890624, 0.04521046447753906, 0.04473823928833008, 0.04517123031616211, 0.04465641784667969, 0.0437125129699707, 0.04375247955322266, 0.044229598999023435, 0.04428121566772461, 0.04391180801391602, 0.04325580978393555, 0.04380876922607422, 0.044746753692626956, 0.043431934356689454, 0.04378195190429687, 0.04540812683105469, 0.04987936019897461, 0.04402614212036133, 0.04476899337768555, 0.04506828689575195, 0.045521953582763675, 0.04500326538085937, 0.0447410888671875, 0.045139968872070314, 0.04451942443847656, 0.04459929656982422, 0.04444160079956055, 0.045445121765136716, 0.04460140609741211, 0.04556540679931641, 0.04444153594970703, 0.044028129577636715, 0.04455660629272461, 0.044472320556640625, 0.04441907119750976, 0.044214271545410154, 0.04434841537475586, 0.04478851318359375, 0.048578784942626956, 0.04948992156982422, 0.04470374298095703, 0.044792991638183594]",tokens/s,22.553444187426933,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,1891.753984,3021.86496,0.0,2619.342848,2543.282176,s,1,11.7483486328125,11.7483486328125,0.0,11.7483486328125,11.7483486328125,11.7483486328125,11.7483486328125,[11.7483486328125],,kWh,7.051145756666793e-05,7.770248310142495e-06,2.5052520042000148e-05,0.00010333422591881058,,MB,1971.785728,3078.488064,0.0,2661.285888,2389.0304,s,10,0.8052822341918946,0.08052822341918946,0.0002941214137530679,0.08043400192260743,0.08074251251220703,0.08102052154541016,0.08124292877197266,"[0.08068073272705079, 0.08044019317626953, 0.08033106994628907, 0.08023436737060546, 0.08042781066894532, 0.08031539154052734, 0.08129853057861328, 0.08033388519287109, 0.08054338836669922, 0.08067686462402343]",tokens/s,3179.009658109464,kWh,2.3938671501365605e-06,2.6400167584843587e-07,1.5946324232459135e-06,4.25250124923091e-06,tokens/kWh,60199864.73755866,MB,1983.311872,3107.848192,0.0,2690.646016,2399.83872,s,10,31.08762524414063,3.1087625244140624,0.013978073533579773,3.1104844970703125,3.124408349609375,3.1272379882812498,3.12950169921875,"[3.09463720703125, 3.115612060546875, 3.097947021484375, 3.106021728515625, 3.130067626953125, 3.086629150390625, 3.114947265625, 3.123779541015625, 3.095293701171875, 3.12268994140625]",tokens/s,20.26529833180944,kWh,9.13496575673646e-05,1.0075584370145571e-05,3.8094704883754174e-05,0.00013951994682126435,tokens/kWh,451548.33724748896,,s,630,31.085577987670884,0.04934218728201731,0.0007161379180270538,0.049214897155761717,0.050156044006347655,0.050550782203674316,0.05175925231933594,"[0.05079449462890625, 0.05003414535522461, 0.049062400817871096, 0.04863593673706055, 0.048567745208740236, 0.049064510345458986, 0.04888371276855469, 0.04931942367553711, 0.050612449645996094, 0.04871129608154297, 0.048751262664794924, 0.04931379318237305, 0.04883865737915039, 0.04878131103515625, 0.048889854431152346, 0.04944012832641602, 0.04942665481567383, 0.04936540985107422, 0.049062240600585935, 
0.048885406494140624, 0.0491253776550293, 0.049522689819335934, 0.04873603057861328, 0.04967007827758789, 0.04977897644042969, 0.04864531326293945, 0.04939788818359375, 0.04860588836669922, 0.04861849594116211, 0.04842195129394531, 0.04928710556030273, 0.04890544128417969, 0.04895209503173828, 0.049111038208007815, 0.048996353149414064, 0.048965503692626956, 0.048629249572753906, 0.04881411361694336, 0.04866864013671875, 0.04874076843261719, 0.048627937316894534, 0.04924969482421875, 0.04947407913208008, 0.04950841522216797, 0.048731937408447265, 0.048868831634521485, 0.04870016098022461, 0.04871295928955078, 0.04870585632324219, 0.049344959259033205, 0.049613887786865235, 0.04921408081054687, 0.050552352905273434, 0.049625888824462894, 0.049067489624023436, 0.04991756820678711, 0.04869827270507812, 0.048922367095947265, 0.04840249633789063, 0.04881836700439453, 0.048752574920654296, 0.04937881469726563, 0.04952121734619141, 0.049925792694091795, 0.049032928466796875, 0.04856051254272461, 0.048781791687011716, 0.04899411010742188, 0.0508230094909668, 0.04978124618530273, 0.049626976013183596, 0.050124832153320316, 0.04933014297485352, 0.05010636901855469, 0.05039718246459961, 0.0509194221496582, 0.049970558166503906, 0.05009174346923828, 0.04996185684204101, 0.05010412979125976, 0.05246319961547852, 0.0502336311340332, 0.049616832733154294, 0.04902339172363281, 0.049389694213867186, 0.04929523086547852, 0.04946092987060547, 0.049006336212158205, 0.048691646575927734, 0.048468097686767575, 0.04921548843383789, 0.0487955207824707, 0.04870099258422852, 0.048736190795898436, 0.04848611068725586, 0.04869622421264649, 0.049132606506347654, 0.04868950271606445, 0.04994054412841797, 0.04893500900268555, 0.049179073333740234, 0.0501104621887207, 0.04926579284667969, 0.05338816070556641, 0.04957798385620117, 0.04889395141601562, 0.04979097747802735, 0.04958003234863281, 0.04941209411621094, 0.049890304565429686, 0.04928204727172852, 0.049049537658691404, 0.049156158447265626, 0.04931391906738281, 0.0489409294128418, 0.04846182250976563, 0.04833280181884766, 0.04933232116699219, 0.04853062438964844, 0.048599777221679685, 0.048660190582275394, 0.04901097488403321, 0.04944003295898437, 0.04993097686767578, 0.04926054382324219, 0.04951244735717773, 0.04979916763305664, 0.04955340957641602, 0.04890828704833984, 0.04895129776000977, 0.0487014389038086, 0.049053695678710936, 0.04855398559570313, 0.04886751937866211, 0.048624832153320315, 0.04882495880126953, 0.049974369049072265, 0.049114017486572265, 0.04848003387451172, 0.04931606292724609, 0.04941209411621094, 0.04913356781005859, 0.048775169372558595, 0.04907167816162109, 0.04877356719970703, 0.048301822662353514, 0.04881638336181641, 0.0493559341430664, 0.05006217575073242, 0.04972544097900391, 0.04964147186279297, 0.04913971328735352, 0.04967625427246094, 0.04889193725585937, 0.04864432144165039, 0.04867644882202148, 0.0484552001953125, 0.0489273910522461, 0.04860646438598633, 0.048796478271484374, 0.05063673782348633, 0.04976025772094726, 0.04899391937255859, 0.048381759643554685, 0.04883718490600586, 0.049219104766845705, 0.04887955093383789, 0.04932457733154297, 0.049068031311035154, 0.0493199348449707, 0.04893491363525391, 0.04955340957641602, 0.050032638549804685, 0.04948912048339844, 0.049517345428466794, 0.04923337554931641, 0.0487677116394043, 0.0492064323425293, 0.049371551513671875, 0.04986825561523438, 0.051800865173339844, 0.049387550354003905, 0.04955081558227539, 0.049099262237548826, 0.04857014465332031, 0.04883859252929688, 
0.048762561798095704, 0.0491525764465332, 0.048578590393066404, 0.04976054382324219, 0.04937289428710938, 0.04852121734619141, 0.04926259231567383, 0.048686241149902346, 0.04881699371337891, 0.048864990234375, 0.04942291259765625, 0.05155196762084961, 0.049759456634521484, 0.04919513702392578, 0.04912633514404297, 0.04934012985229492, 0.04920083236694336, 0.049374561309814456, 0.04924691009521484, 0.04876227188110351, 0.049116031646728515, 0.04901068878173828, 0.048801246643066405, 0.04963587188720703, 0.04890342330932617, 0.04900531387329102, 0.04862534332275391, 0.049100479125976565, 0.04867084884643555, 0.04975462341308594, 0.048775169372558595, 0.04910403060913086, 0.04906070327758789, 0.04881388854980469, 0.048565601348876955, 0.049549793243408205, 0.04866006469726562, 0.05003343963623047, 0.049821697235107425, 0.050298881530761716, 0.050624225616455076, 0.050082080841064455, 0.049356033325195316, 0.049805374145507814, 0.0498337287902832, 0.049480415344238284, 0.04918294525146484, 0.050569217681884764, 0.04904140853881836, 0.04913151931762695, 0.049140960693359374, 0.04907292938232422, 0.04872192001342773, 0.04904345703125, 0.05019571304321289, 0.04921430587768555, 0.04871728134155273, 0.048544193267822264, 0.048922592163085935, 0.04938115310668945, 0.048812286376953125, 0.049669696807861326, 0.048857536315917965, 0.048930526733398434, 0.05056108856201172, 0.04935404968261719, 0.04949107360839844, 0.04879353713989258, 0.04910995101928711, 0.049689632415771484, 0.048659423828125, 0.04874444961547852, 0.05022057723999023, 0.04900912094116211, 0.049430526733398435, 0.04933536148071289, 0.049515457153320314, 0.04917356872558594, 0.049328319549560545, 0.04869123077392578, 0.04902550506591797, 0.04964172744750977, 0.04917795181274414, 0.04911964797973633, 0.04916611099243164, 0.04919753646850586, 0.04949603271484375, 0.05047491073608398, 0.049272350311279293, 0.0501063346862793, 0.050403968811035156, 0.050493438720703124, 0.051046302795410156, 0.05102569580078125, 0.05074288177490234, 0.05030985641479492, 0.05015564727783203, 0.04988649749755859, 0.049654369354248044, 0.04974387359619141, 0.049622207641601565, 0.04976723098754883, 0.049721343994140625, 0.04974291229248047, 0.04960966491699219, 0.04954521560668945, 0.049459201812744144, 0.05098291015625, 0.04923766326904297, 0.04913910293579102, 0.04915500640869141, 0.049481727600097655, 0.048987457275390625, 0.050205345153808596, 0.049427902221679684, 0.0555423698425293, 0.049342464447021485, 0.04960217666625977, 0.050132545471191406, 0.049230239868164063, 0.04898652648925781, 0.048598369598388674, 0.04896220779418945, 0.049127422332763675, 0.04931350326538086, 0.05008780670166016, 0.049775009155273435, 0.04862527847290039, 0.051132289886474606, 0.04948652648925781, 0.048709342956542966, 0.049342464447021485, 0.049049217224121096, 0.04894259262084961, 0.04903139114379883, 0.0489192008972168, 0.048383201599121094, 0.04853942489624023, 0.04873523330688476, 0.04834918212890625, 0.04847372817993164, 0.0482606086730957, 0.049058078765869144, 0.04885948944091797, 0.048623870849609375, 0.04878691101074219, 0.04842755126953125, 0.04844319915771484, 0.048506206512451175, 0.04870636749267578, 0.04915631866455078, 0.049305408477783204, 0.048698558807373046, 0.04883744049072265, 0.04871372985839844, 0.0484529914855957, 0.048376449584960936, 0.048607231140136715, 0.048451583862304685, 0.04852073669433594, 0.04837980651855469, 0.04859945678710938, 0.04910860824584961, 0.04879004669189453, 0.04923801422119141, 0.04940390396118164, 0.04844950485229492, 
0.04839161682128906, 0.04854374313354492, 0.04867747116088867, 0.04847820663452149, 0.049796382904052736, 0.05187593460083008, 0.05019916915893555, 0.050374656677246096, 0.04963555145263672, 0.048971553802490235, 0.0494284782409668, 0.04844131088256836, 0.04922166442871094, 0.05039007949829102, 0.04981651306152344, 0.04930355072021484, 0.050132991790771485, 0.04921366500854492, 0.049454559326171876, 0.04903459167480469, 0.04881651306152344, 0.049097278594970706, 0.04894704055786133, 0.04839443206787109, 0.049070079803466796, 0.049210750579833984, 0.04958067321777344, 0.0491412467956543, 0.048896255493164065, 0.048574848175048826, 0.04868083190917969, 0.04897289657592773, 0.04878246307373047, 0.04899817657470703, 0.04909494400024414, 0.049392799377441406, 0.04991033554077148, 0.050170879364013675, 0.049727615356445314, 0.04905459213256836, 0.04890524673461914, 0.05013516616821289, 0.05019465637207031, 0.04954601669311524, 0.050372161865234376, 0.04909075164794922, 0.04896777725219727, 0.04875088119506836, 0.04907539367675781, 0.048775135040283205, 0.04858025741577148, 0.04902595138549805, 0.049430526733398435, 0.04966976165771484, 0.049109375, 0.04901036834716797, 0.04928713607788086, 0.04916073608398437, 0.04873196792602539, 0.04919910430908203, 0.049362049102783204, 0.04922662353515625, 0.04933017730712891, 0.049532928466796876, 0.050018177032470704, 0.049307647705078124, 0.04906921768188476, 0.048613407135009765, 0.04898441696166992, 0.0498337287902832, 0.04965571212768555, 0.049978305816650394, 0.04967129516601562, 0.05128678512573242, 0.04986278533935547, 0.050182144165039064, 0.0514785270690918, 0.0505283203125, 0.04954515075683594, 0.04954742431640625, 0.05004819107055664, 0.049533409118652345, 0.05012704086303711, 0.04932799911499024, 0.049546913146972654, 0.04950883102416992, 0.04862928009033203, 0.04975049591064453, 0.04906393432617188, 0.04865545654296875, 0.04940687942504883, 0.04954521560668945, 0.04921305465698242, 0.05176153564453125, 0.04996323013305664, 0.049161151885986326, 0.049062366485595706, 0.04936742401123047, 0.05012819290161133, 0.05000672149658203, 0.04974729537963867, 0.05021148681640625, 0.04955955123901367, 0.05134246444702149, 0.04983216094970703, 0.04996761703491211, 0.0492852783203125, 0.04868531036376953, 0.04942979049682617, 0.04875516891479492, 0.049347904205322264, 0.04995331192016601, 0.05034000015258789, 0.0506420783996582, 0.05074121475219726, 0.05054886245727539, 0.050376895904541016, 0.049758750915527346, 0.049296993255615235, 0.04921343994140625, 0.04900457763671875, 0.04879987335205078, 0.05007753753662109, 0.04903519821166992, 0.04846623992919922, 0.04928646469116211, 0.04862860870361328, 0.04897177505493164, 0.049450111389160153, 0.04854054260253906, 0.048903873443603516, 0.049541439056396484, 0.048858592987060544, 0.0492224006652832, 0.049084190368652345, 0.04944851303100586, 0.04910847854614258, 0.04890867233276367, 0.04947001647949219, 0.04901068878173828, 0.049198783874511716, 0.05026201629638672, 0.049871166229248046, 0.05039923095703125, 0.05009356689453125, 0.0501479377746582, 0.051433376312255856, 0.04934860610961914, 0.049231201171875, 0.049175201416015626, 0.05022515106201172, 0.05021241760253906, 0.04924665451049805, 0.04937052917480469, 0.04884867095947266, 0.04873865509033203, 0.04922211074829102, 0.04878131103515625, 0.049393665313720705, 0.049618976593017575, 0.04966191864013672, 0.04937043380737305, 0.05033644866943359, 0.04919852828979492, 0.04930822372436523, 0.04911308670043945, 0.04943788909912109, 0.04899308776855469, 
0.04863935852050781, 0.049070465087890626, 0.04851238250732422, 0.04856003189086914, 0.048810302734375, 0.04875740814208984, 0.048535552978515625, 0.0490967025756836, 0.04836966323852539, 0.048216064453125, 0.04844323348999023, 0.048406688690185544, 0.04855129623413086, 0.04842969512939453, 0.04896473693847656, 0.04950339126586914, 0.04875833511352539, 0.04830633544921875, 0.048928768157958984, 0.048519168853759766, 0.04899001693725586, 0.04879702377319336, 0.04911190414428711, 0.048997920989990236, 0.04994857788085937, 0.04898463821411133, 0.04946697616577148, 0.04920892715454102, 0.04953897476196289, 0.04963343811035156, 0.04954803085327148, 0.0490885124206543, 0.04985036849975586, 0.04925574493408203, 0.051753662109375, 0.05017740631103516, 0.04916400146484375, 0.04959027099609375, 0.04882115173339844, 0.04841849517822266, 0.04883283233642578, 0.04901078414916992, 0.049123134613037106, 0.04926835250854492, 0.04930403137207031, 0.04898015975952148, 0.04980831909179687, 0.04906393432617188, 0.049398880004882816, 0.04959324645996094, 0.04883251190185547, 0.049049087524414066, 0.048954910278320315, 0.05126448059082031, 0.04953475189208984, 0.04934064102172851, 0.049746974945068356, 0.04974895858764648, 0.049911457061767577, 0.0498914566040039, 0.049627391815185544, 0.050124767303466794, 0.04976371383666992, 0.049804000854492186, 0.04909455871582031, 0.0506879997253418, 0.04928716659545898, 0.04969881439208984, 0.0495022087097168, 0.05015961456298828, 0.05036822509765625, 0.05294723129272461, 0.05052604675292969, 0.050557086944580075, 0.049591712951660157, 0.04927660751342773, 0.0494002571105957, 0.04894316864013672, 0.049319774627685546, 0.0496830062866211, 0.048846847534179685, 0.04907212829589844, 0.04917248153686524, 0.049084129333496096, 0.049452449798583986, 0.04968947219848633, 0.04958176040649414, 0.04946054458618164, 0.04894003295898437, 0.04927632141113281, 0.04924607849121094, 0.04899913787841797, 0.04984012985229492, 0.04989471817016602, 0.04919113540649414, 0.049616512298583985, 0.049148574829101566, 0.04929753494262695, 0.04895135879516602, 0.04858246231079102, 0.048677055358886716, 0.049001697540283204, 0.049317920684814456, 0.04964755249023438, 0.04959110260009766, 0.04988518524169922, 0.04961075210571289, 0.04978073501586914, 0.0491247673034668]",tokens/s,20.266632978478615,,, 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 1047, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 890, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 366, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = 
flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,2200.715264,2914.910208,0.0,2512.388096,2240.694784,s,1,9.4992939453125,9.4992939453125,0.0,9.4992939453125,9.4992939453125,9.4992939453125,9.4992939453125,[9.4992939453125],,kWh,6.661917307498394e-05,7.341286538706019e-06,2.733807742599792e-05,0.00010129853703968789,,MB,2019.04128,2931.687424,0.0,2514.485248,2226.413568,s,10,12.133697265625,1.2133697265625,0.002490970584605167,1.2138874511718751,1.2158770874023437,1.216111700439453,1.2162993908691406,"[1.208701904296875, 1.210319580078125, 1.2125035400390625, 1.2113226318359376, 1.2129737548828126, 1.2148011474609375, 1.215095947265625, 1.2158074951171876, 1.215824951171875, 1.2163463134765624]",tokens/s,210.98268268588913,kWh,3.5573517708755084e-05,3.923278561513023e-06,2.3611352222396853e-05,6.310814849266495e-05,tokens/kWh,4056528.4533700882,MB,2020.53632,2931.687424,0.0,2514.485248,2337.090048,s,10,14.856078125,1.4856078125,0.003209220801273441,1.4851223754882812,1.4885975219726562,1.4906019226074219,1.4922054431152345,"[1.4841192626953126, 1.4819937744140625, 1.48783544921875, 1.4851070556640624, 1.488152099609375, 1.4817113037109375, 1.4869462890625, 1.4851376953125, 1.4824688720703125, 1.4926063232421876]",tokens/s,42.40688522900454,kWh,4.350714711582894e-05,4.7991829750973685e-06,2.66758824518043e-05,7.498221254273064e-05,tokens/kWh,840199.2667807414,,s,630,14.853321687698365,0.023576701091584707,0.00038874042169081437,0.023452799797058103,0.02396704902648926,0.02432166118621826,0.025203486328125,"[0.024897632598876954, 0.024321920394897462, 0.02348259162902832, 0.023558143615722657, 0.02347007942199707, 0.023492416381835936, 0.02327571105957031, 0.023598495483398436, 0.02341651153564453, 0.02334617614746094, 0.023355295181274414, 0.023343103408813477, 0.023658496856689453, 0.023683263778686525, 0.023823904037475585, 0.023396703720092775, 0.02360313606262207, 0.023496383666992186, 0.023322368621826173, 0.023406496047973634, 0.02329871940612793, 0.02358585548400879, 0.02350713539123535, 0.023570751190185545, 0.023423423767089845, 0.023480319976806642, 0.023393375396728516, 0.02346691131591797, 0.02369945526123047, 0.023516832351684572, 0.023547712326049804, 0.023534112930297852, 0.023362655639648438, 0.023536544799804687, 0.023605056762695312, 0.023557504653930663, 0.023333696365356444, 0.02337763214111328, 0.02336387252807617, 0.02342678451538086, 0.023314720153808595, 0.023443519592285158, 0.023358591079711916, 0.02363680076599121, 0.023397567749023438, 0.02340447998046875, 0.023180160522460937, 0.0233123836517334, 0.023184511184692384, 0.023487232208251954, 0.02345587158203125, 0.0241213436126709, 0.023572479248046875, 0.023587072372436523, 0.02360851287841797, 0.023644607543945314, 0.023898239135742187, 0.024125408172607422, 0.023924768447875975, 0.02392064094543457, 0.02369638442993164, 0.02359756851196289, 
0.02343724822998047, 0.024799903869628905, 0.025190399169921874, 0.02382057571411133, 0.023774656295776367, 0.023586271286010742, 0.02377606391906738, 0.023396352767944335, 0.02334681510925293, 0.02339779281616211, 0.02341308784484863, 0.023407072067260743, 0.02345180892944336, 0.023355424880981444, 0.023533536911010743, 0.02344051170349121, 0.02334195137023926, 0.025614336013793947, 0.023545856475830077, 0.02336463928222656, 0.02357744026184082, 0.023531551361083983, 0.023449695587158204, 0.023318527221679687, 0.023476224899291992, 0.02342911911010742, 0.023518400192260744, 0.023394271850585936, 0.023630687713623047, 0.02343948745727539, 0.023422239303588867, 0.02341539192199707, 0.023557823181152345, 0.023346752166748048, 0.02336790466308594, 0.02341075134277344, 0.023420480728149413, 0.023280384063720704, 0.02340265655517578, 0.023367679595947266, 0.02342310333251953, 0.023261056900024415, 0.023453664779663087, 0.023355615615844726, 0.02345302391052246, 0.02322425651550293, 0.023298591613769532, 0.02332876777648926, 0.023627775192260742, 0.023403743743896484, 0.0235313606262207, 0.0232478084564209, 0.02390998458862305, 0.023268928527832033, 0.023376607894897462, 0.023206016540527345, 0.023371776580810546, 0.023463935852050782, 0.023418880462646483, 0.023218175888061524, 0.02339574432373047, 0.023331424713134766, 0.023410560607910157, 0.02342515182495117, 0.024828351974487305, 0.02413363265991211, 0.023893695831298828, 0.024097152709960937, 0.02409942436218262, 0.02417843246459961, 0.023910400390625, 0.02391859245300293, 0.023782560348510742, 0.023802719116210937, 0.02395955276489258, 0.023910400390625, 0.02351923179626465, 0.023475263595581056, 0.0234136962890625, 0.023341056823730468, 0.023322368621826173, 0.023265247344970704, 0.023585344314575197, 0.023301855087280273, 0.023240480422973633, 0.02329417610168457, 0.023277183532714844, 0.023226591110229493, 0.0232587833404541, 0.023361248016357423, 0.023857952117919922, 0.023506975173950194, 0.023260576248168945, 0.023321151733398438, 0.02326323127746582, 0.02322553634643555, 0.023190336227416994, 0.02331158447265625, 0.023355775833129883, 0.023422847747802733, 0.02339484786987305, 0.023589120864868165, 0.023910144805908203, 0.023791135787963866, 0.023673311233520507, 0.023682239532470704, 0.023538240432739256, 0.023550207138061524, 0.023305440902709963, 0.02333161544799805, 0.02330806350708008, 0.023478496551513673, 0.023742719650268553, 0.023502592086791993, 0.023352767944335936, 0.024005184173583983, 0.023416608810424806, 0.02344905662536621, 0.0235784969329834, 0.023384672164916992, 0.023427391052246095, 0.023469152450561522, 0.02327231979370117, 0.024143871307373048, 0.025579423904418946, 0.023895872116088866, 0.02367830467224121, 0.024846368789672852, 0.023966047286987306, 0.02375052833557129, 0.02365571212768555, 0.023366048812866212, 0.02338400077819824, 0.023282047271728515, 0.02359846305847168, 0.024258367538452147, 0.023636224746704102, 0.023622207641601563, 0.02346342468261719, 0.023218687057495118, 0.0233155517578125, 0.023324703216552733, 0.02334409523010254, 0.023288032531738282, 0.023381696701049805, 0.023396415710449217, 0.023439552307128905, 0.02326092720031738, 0.02385081672668457, 0.023740543365478515, 0.023603103637695313, 0.023469823837280274, 0.02339676856994629, 0.023315872192382812, 0.023454303741455077, 0.02328780746459961, 0.02476851272583008, 0.024823680877685547, 0.024000543594360352, 0.023451168060302733, 0.023572704315185548, 0.023406112670898437, 0.023442047119140625, 0.023417087554931642, 
0.023340896606445314, 0.02331452751159668, 0.02337286376953125, 0.023315391540527343, 0.023542848587036133, 0.02338819122314453, 0.02345257568359375, 0.0247040958404541, 0.024471872329711913, 0.023685247421264648, 0.02343779182434082, 0.02331827163696289, 0.02348480033874512, 0.023354656219482423, 0.024031455993652345, 0.02347772789001465, 0.023370655059814453, 0.02330419158935547, 0.02329599952697754, 0.023242752075195314, 0.023349407196044922, 0.023252960205078124, 0.023490432739257813, 0.023296031951904297, 0.023490528106689453, 0.023262880325317384, 0.025002080917358397, 0.02454528045654297, 0.02422982406616211, 0.023864959716796873, 0.02392233657836914, 0.023946111679077148, 0.023928287506103516, 0.02386284828186035, 0.023597408294677734, 0.023841344833374023, 0.023738367080688477, 0.023476415634155274, 0.023533344268798828, 0.023371776580810546, 0.02432134437561035, 0.02364473533630371, 0.023543264389038084, 0.02347894477844238, 0.023522527694702148, 0.023656991958618163, 0.023677471160888672, 0.023621408462524415, 0.02367395210266113, 0.0237289924621582, 0.023572479248046875, 0.023554304122924804, 0.023584512710571288, 0.023571456909179687, 0.02347654342651367, 0.023619327545166015, 0.023518144607543947, 0.023439456939697265, 0.023331968307495118, 0.02343609619140625, 0.023281631469726564, 0.02339955139160156, 0.023591232299804688, 0.023525791168212892, 0.023679136276245117, 0.023644159317016602, 0.023518848419189452, 0.023392255783081056, 0.02332908821105957, 0.023399520874023437, 0.023532512664794922, 0.02335113525390625, 0.023190784454345702, 0.023131040573120116, 0.02316854476928711, 0.023541536331176758, 0.02324460792541504, 0.023436159133911134, 0.023179264068603517, 0.023352319717407227, 0.023141376495361327, 0.024029184341430664, 0.023390207290649414, 0.023328128814697265, 0.023382656097412108, 0.023296159744262697, 0.02325712013244629, 0.023746368408203124, 0.02555308723449707, 0.024725791931152343, 0.0241363525390625, 0.02383785629272461, 0.02358768081665039, 0.023424095153808593, 0.023499679565429688, 0.023432191848754884, 0.02348953628540039, 0.023468032836914062, 0.02347007942199707, 0.0233123836517334, 0.02351923179626465, 0.0236167049407959, 0.023513919830322267, 0.023627552032470703, 0.023360864639282226, 0.02318854331970215, 0.023221471786499023, 0.023116384506225586, 0.02336470413208008, 0.02316374397277832, 0.02329155158996582, 0.023219776153564454, 0.02326950454711914, 0.023394271850585936, 0.023347232818603517, 0.023408479690551758, 0.02350921630859375, 0.02327132797241211, 0.02332041549682617, 0.02321254348754883, 0.023392671585083007, 0.023283615112304687, 0.023320671081542968, 0.02344700813293457, 0.023235103607177735, 0.023289791107177736, 0.023468160629272462, 0.023390144348144532, 0.023393312454223634, 0.023281728744506836, 0.023364320755004882, 0.023443647384643555, 0.023439359664916993, 0.0232857608795166, 0.023346336364746093, 0.023318687438964845, 0.02340281677246094, 0.023404767990112305, 0.023389568328857423, 0.023397151947021484, 0.023432287216186523, 0.02429635238647461, 0.025307199478149415, 0.023615423202514647, 0.023860799789428712, 0.0238573112487793, 0.024007007598876952, 0.023563520431518555, 0.02375497627258301, 0.023828672409057616, 0.023668895721435546, 0.023633344650268555, 0.024706655502319336, 0.023987136840820312, 0.02359097671508789, 0.023934240341186522, 0.024383712768554687, 0.024188831329345704, 0.02450908851623535, 0.02406387138366699, 0.023891424179077147, 0.023898656845092774, 0.023762399673461915, 0.02396182441711426, 
0.023588768005371095, 0.023703807830810546, 0.02343747138977051, 0.023375904083251953, 0.023295743942260742, 0.023605344772338867, 0.02365657615661621, 0.023879039764404298, 0.024033920288085937, 0.02373027229309082, 0.023795616149902343, 0.023422367095947267, 0.02337443161010742, 0.024180736541748047, 0.024803327560424804, 0.024152063369750978, 0.023715328216552735, 0.023488351821899414, 0.02326799964904785, 0.023343103408813477, 0.023471424102783203, 0.023417184829711914, 0.023289215087890624, 0.02330531120300293, 0.023188768386840822, 0.02344428825378418, 0.02319126319885254, 0.023789535522460936, 0.023353055953979494, 0.023416608810424806, 0.02401763153076172, 0.02352720069885254, 0.023280767440795897, 0.02338915252685547, 0.023236608505249022, 0.023309696197509767, 0.023168800354003906, 0.023284000396728517, 0.0231278076171875, 0.023288000106811525, 0.02334886360168457, 0.02345471954345703, 0.02328780746459961, 0.023310335159301757, 0.023178911209106444, 0.02329840087890625, 0.023232511520385742, 0.02333286476135254, 0.023305248260498047, 0.023349855422973635, 0.02335548782348633, 0.02475040054321289, 0.02393769645690918, 0.023748767852783202, 0.02358406448364258, 0.02354777526855469, 0.023490272521972656, 0.023391040802001953, 0.023566463470458984, 0.023500511169433594, 0.023400768280029297, 0.02413155174255371, 0.02560848045349121, 0.02391731262207031, 0.023669727325439455, 0.023357440948486328, 0.023746559143066406, 0.023830047607421877, 0.023681503295898437, 0.02367804718017578, 0.023748607635498048, 0.02362835121154785, 0.02373017692565918, 0.023826784133911132, 0.023463935852050782, 0.02367487907409668, 0.023715776443481447, 0.023261247634887697, 0.023401472091674806, 0.023206911087036132, 0.023342559814453125, 0.023246559143066406, 0.023325504302978514, 0.023228191375732423, 0.023601600646972656, 0.02330601692199707, 0.023350528717041016, 0.02332339286804199, 0.023504896163940428, 0.023370752334594725, 0.023419904708862304, 0.023405855178833007, 0.0233417911529541, 0.02340415954589844, 0.02344313621520996, 0.023346975326538087, 0.023414880752563476, 0.023386016845703125, 0.023445823669433593, 0.023155296325683594, 0.02326323127746582, 0.023346271514892578, 0.024224672317504883, 0.023455743789672853, 0.023513088226318358, 0.023509183883666993, 0.0234453125, 0.023324127197265624, 0.023400127410888674, 0.023202911376953125, 0.02336355209350586, 0.02325481605529785, 0.024618463516235353, 0.023376415252685547, 0.02459436798095703, 0.02393401527404785, 0.02350713539123535, 0.023568288803100586, 0.023434080123901368, 0.023545856475830077, 0.023350751876831055, 0.023375743865966796, 0.023459903717041014, 0.023362144470214844, 0.02331827163696289, 0.023529727935791014, 0.023224319458007812, 0.02328371238708496, 0.023162815093994142, 0.023694816589355468, 0.023491359710693358, 0.023539039611816408, 0.023556640625, 0.023691200256347657, 0.023414016723632813, 0.023546623229980468, 0.023445375442504884, 0.023577823638916015, 0.023305120468139647, 0.023386207580566407, 0.02339788818359375, 0.023452064514160157, 0.02345779228210449, 0.025786367416381836, 0.024997888565063478, 0.023879295349121095, 0.023691648483276366, 0.023547903060913086, 0.023475711822509765, 0.023482879638671874, 0.023275039672851563, 0.023384544372558595, 0.023267328262329103, 0.023355295181274414, 0.023324735641479494, 0.023359519958496094, 0.023339008331298827, 0.02333852767944336, 0.02321148872375488, 0.023368831634521484, 0.023351167678833006, 0.02337289619445801, 0.023301023483276367, 0.023385503768920898, 
0.023294464111328125, 0.023976064682006835, 0.02324239921569824, 0.023482688903808592, 0.02328335952758789, 0.023422975540161133, 0.023428863525390625, 0.023562847137451173, 0.02355734443664551, 0.023388959884643554, 0.023361536026000978, 0.02351263999938965, 0.02355366325378418, 0.025208831787109375, 0.02445270347595215, 0.02362614440917969, 0.023764991760253908, 0.023459840774536132, 0.023551616668701172, 0.023359392166137697, 0.02348841667175293, 0.023390783309936523, 0.023418367385864256, 0.023239168167114257, 0.023386112213134767, 0.023224319458007812, 0.023345151901245118, 0.023301376342773437, 0.023488832473754884, 0.02367532730102539, 0.02353561592102051, 0.024660160064697265, 0.024977216720581053, 0.023793664932250977, 0.024002559661865236, 0.023816192626953125, 0.02393609619140625, 0.023597055435180665, 0.023820608139038087, 0.0237922248840332, 0.02383363151550293, 0.023765983581542968, 0.02369740867614746, 0.02340425682067871, 0.02356252861022949, 0.023435392379760743, 0.023404512405395508, 0.023299999237060547, 0.02342092704772949, 0.023584768295288085, 0.02370908737182617, 0.02340105628967285, 0.023590911865234376, 0.023662591934204103, 0.02371708869934082, 0.023720159530639648, 0.023984703063964843, 0.024002559661865236, 0.024081695556640626, 0.02392108726501465, 0.023884063720703126, 0.023775232315063476, 0.023735904693603517, 0.023707584381103517, 0.024640159606933595, 0.02370707130432129, 0.023617631912231447, 0.023371135711669922, 0.02349148750305176, 0.023385759353637695, 0.023451999664306642, 0.02343654441833496, 0.023353471755981445, 0.02354240036010742, 0.023402687072753905, 0.023324480056762697]",tokens/s,42.41475497846188,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,11154.624512,12232.556544,0.0,11846.811648,11814.785024,s,1,13.9456513671875,13.9456513671875,0.0,13.9456513671875,13.9456513671875,13.9456513671875,13.9456513671875,[13.9456513671875],,kWh,0.00020056447188750704,2.2116391386084295e-05,6.674088672600775e-05,0.0002894217499995991,,MB,2109.837312,13165.789184,0.0,12750.68416,12632.68864,s,10,13.195233032226563,1.319523303222656,0.0035917818569887604,1.3200253295898436,1.3227274658203123,1.3236703125,1.3244245898437499,"[1.310845947265625, 1.31626318359375, 1.3188310546875, 1.3194871826171874, 1.32050634765625, 1.3195443115234375, 1.3217384033203126, 1.3246131591796875, 1.3225179443359374, 1.320885498046875]",tokens/s,194.00945733567133,kWh,3.8616403169164636e-05,4.258920920285562e-06,2.5662187196401344e-05,6.853751128585155e-05,tokens/kWh,3735180.854937857,MB,2115.56352,13438.418944,0.0,13023.31392,12936.694272,s,10,49.11848388671874,4.911848388671874,0.009988632634575632,4.914186767578125,4.924168359375,4.924963330078125,4.925599306640625,"[4.8953994140625, 4.9005400390625, 4.901716796875, 4.90577490234375, 4.9131796875, 4.91544091796875, 4.91519384765625, 4.92148828125, 4.92399169921875, 
4.92575830078125]",tokens/s,12.826128783880218,kWh,0.00014388313097666544,1.5871450628861423e-05,9.573715992299836e-05,0.00025549174152852527,tokens/kWh,246583.31272506568,,s,630,49.11402199554446,0.07795876507229275,0.0008300637520886697,0.07799558639526367,0.07888939819335938,0.07910635833740234,0.08081627265930176,"[0.07985330963134765, 0.07613299560546875, 0.07660749053955078, 0.07690579223632812, 0.07664915466308594, 0.07699046325683594, 0.0773958740234375, 0.07751590728759766, 0.07700374603271484, 0.07727104187011719, 0.07722748565673829, 0.07664630126953124, 0.07772367858886718, 0.07766281890869141, 0.07661094665527343, 0.07705859375, 0.0770763168334961, 0.07820515441894531, 0.07690959930419922, 0.07714918518066406, 0.07756745910644532, 0.07748662567138671, 0.07693280029296876, 0.07742495727539063, 0.07804927825927735, 0.0777193603515625, 0.07717094421386719, 0.07786627197265625, 0.07748989105224609, 0.07814240264892579, 0.07779532623291016, 0.07692473602294922, 0.077334716796875, 0.07768883514404297, 0.07726044464111329, 0.0776910400390625, 0.07742041778564453, 0.07815142059326172, 0.07767097473144531, 0.07787459564208984, 0.07811952209472656, 0.07752694702148437, 0.07807977294921875, 0.0783683853149414, 0.07756003570556641, 0.0781214370727539, 0.07749017333984375, 0.07820697784423829, 0.07800422668457031, 0.07812300872802734, 0.0781107177734375, 0.07803699493408203, 0.07858790588378907, 0.0783073272705078, 0.07823709106445312, 0.07880284881591797, 0.07832032012939454, 0.07831510162353515, 0.07828316497802734, 0.07827804565429687, 0.07845094299316406, 0.07880108642578125, 0.07858601379394531, 0.08140847778320312, 0.07631385803222657, 0.07640499114990235, 0.07716505432128906, 0.07702118682861328, 0.07689183807373047, 0.07691910552978516, 0.07650099182128907, 0.07687782287597657, 0.07746150207519531, 0.0767979507446289, 0.07716454315185547, 0.07827865600585937, 0.07829901123046876, 0.07780159759521485, 0.07731609344482422, 0.076943359375, 0.07732835388183594, 0.07688966369628907, 0.07657491302490234, 0.07737372589111328, 0.07717826843261719, 0.07745187377929688, 0.07765401458740234, 0.07786492919921875, 0.07801859283447266, 0.07696998596191407, 0.07701667022705078, 0.07726531219482421, 0.07794073486328125, 0.07822096252441406, 0.07810646057128906, 0.07718508911132813, 0.07668780517578125, 0.07762739562988281, 0.07770867156982422, 0.07791884613037109, 0.07828684997558594, 0.07886179351806641, 0.07811241912841797, 0.07762419128417969, 0.07816790771484375, 0.07821660614013672, 0.07826918029785156, 0.07822335815429687, 0.07792192077636718, 0.07773222351074219, 0.07703510284423828, 0.0776114273071289, 0.07870655822753907, 0.07911436462402344, 0.07886431884765625, 0.07842991638183594, 0.0783199691772461, 0.07878240203857421, 0.07863712310791016, 0.07851007843017578, 0.07773574066162109, 0.07833001708984375, 0.07829097747802734, 0.07833805084228515, 0.07825202941894531, 0.07921842956542968, 0.08131343841552735, 0.07707462310791016, 0.0771376953125, 0.07678604888916016, 0.07697760009765625, 0.07626802825927734, 0.07616928100585937, 0.07633074951171875, 0.07731423950195312, 0.07780358123779296, 0.07755145263671875, 0.0770274887084961, 0.07847936248779297, 0.07828275299072265, 0.07770111846923829, 0.07659295654296874, 0.07740748596191406, 0.07709321594238282, 0.07706870269775391, 0.0769230728149414, 0.07684288024902344, 0.0774590072631836, 0.07714463806152344, 0.07693440246582031, 0.0778900146484375, 0.07875526428222657, 0.07879357147216796, 0.07761100769042968, 0.07724556732177734, 
0.07717945861816407, 0.07695523071289062, 0.07681622314453125, 0.0771695327758789, 0.07794886779785157, 0.0771888656616211, 0.0768760986328125, 0.07847634887695312, 0.07843436431884766, 0.07946943664550782, 0.07815782165527344, 0.07850393676757812, 0.07853651428222656, 0.07723955535888671, 0.07721670532226563, 0.07753036499023437, 0.0778186264038086, 0.07811666870117187, 0.07777913665771484, 0.0780800018310547, 0.07867142486572265, 0.07861293029785156, 0.0785264663696289, 0.07889071655273437, 0.07888925170898438, 0.07873945617675782, 0.07755763244628906, 0.07781356811523438, 0.07847148895263673, 0.07878246307373046, 0.07837686157226563, 0.07880242919921875, 0.07901449584960937, 0.07863705444335937, 0.08130521392822265, 0.07710128021240234, 0.07633526611328124, 0.07627366638183594, 0.07647142028808594, 0.07717158508300781, 0.07702323150634766, 0.07733417510986328, 0.0768485107421875, 0.07696876525878907, 0.07755942535400391, 0.07687222290039063, 0.07746969604492188, 0.07829708862304688, 0.07788134765625, 0.07688956451416015, 0.07688246154785157, 0.07754710388183594, 0.07669580841064454, 0.07738998413085937, 0.07730995178222656, 0.07765401458740234, 0.07693714904785157, 0.07715436553955078, 0.07741830444335937, 0.07858809661865235, 0.078129150390625, 0.07752681732177734, 0.07720982360839844, 0.07792617797851563, 0.0766362533569336, 0.07753330993652344, 0.07776461029052735, 0.07726080322265624, 0.07808576202392578, 0.0783404769897461, 0.0782581787109375, 0.07864044952392578, 0.07875353240966797, 0.07816854095458985, 0.07758857727050782, 0.07822169494628907, 0.07838278198242188, 0.07920467376708984, 0.07701913452148437, 0.07804303741455078, 0.07851344299316407, 0.07815251159667969, 0.07840153503417968, 0.07898111724853515, 0.07867391967773438, 0.07847321319580078, 0.07808614349365234, 0.07796047973632812, 0.07822614288330078, 0.07803212738037109, 0.07814425659179687, 0.07878656005859375, 0.07854489898681641, 0.07931897735595703, 0.07881324768066406, 0.0788556137084961, 0.07926022338867188, 0.08077283477783204, 0.07703577423095703, 0.07657472229003906, 0.07658412933349609, 0.07650300598144531, 0.07771836853027343, 0.07762230682373047, 0.0764876480102539, 0.07764173126220703, 0.07734803009033203, 0.07774285125732422, 0.07673760223388672, 0.07797599792480468, 0.07775698852539062, 0.07736729431152344, 0.0769226531982422, 0.07764803314208985, 0.07792031860351563, 0.07776461029052735, 0.07814454650878906, 0.07766835021972657, 0.07763993835449219, 0.0780738525390625, 0.0781688003540039, 0.07797337341308594, 0.07748550415039063, 0.07665939331054687, 0.07746969604492188, 0.07772569274902344, 0.07757414245605469, 0.07784652709960938, 0.07824384307861328, 0.07753033447265625, 0.07820777893066407, 0.07825308990478516, 0.07798876953125, 0.07839865875244141, 0.07847974395751953, 0.07817881774902344, 0.0777523193359375, 0.07806947326660156, 0.07830556488037109, 0.07822541046142578, 0.07830025482177734, 0.07832774353027344, 0.07759356689453124, 0.07805542755126953, 0.0786534423828125, 0.07841382598876953, 0.07839129638671875, 0.07853446197509766, 0.07879698944091797, 0.07883980560302735, 0.07983865356445312, 0.07821778869628906, 0.07863910675048828, 0.07886643218994141, 0.07818592071533204, 0.07807382202148437, 0.0784450912475586, 0.07887059020996094, 0.07888076782226562, 0.07863091278076172, 0.08087535858154297, 0.0768636474609375, 0.0764067840576172, 0.07653376007080079, 0.07733042907714843, 0.07755347442626953, 0.07731145477294922, 0.077000732421875, 0.07709728240966797, 0.07727142333984376, 
0.07694732666015625, 0.07703257751464844, 0.07855535888671875, 0.0782548828125, 0.07657062530517578, 0.0769966049194336, 0.07781990051269531, 0.07777033233642579, 0.0769224624633789, 0.07684588623046874, 0.07753724670410156, 0.07789366149902344, 0.07812831878662109, 0.07791651153564454, 0.07794048309326172, 0.07894089508056641, 0.07841149139404296, 0.0778262710571289, 0.07748409271240235, 0.07764787292480468, 0.07809024047851562, 0.07701503753662109, 0.07753727722167969, 0.07816191864013672, 0.07802601623535156, 0.07826300811767578, 0.0788329315185547, 0.07849238586425782, 0.07856537628173828, 0.07839743804931641, 0.0779051513671875, 0.07820722961425781, 0.07821977233886719, 0.07814323425292968, 0.07794303894042968, 0.07826022338867188, 0.07843840026855468, 0.07814073944091797, 0.07800902557373048, 0.07866748809814453, 0.08022249603271485, 0.07947673797607421, 0.07880863952636719, 0.07880339050292968, 0.07860633850097656, 0.07927974700927734, 0.07795545959472656, 0.07829901123046876, 0.07852044677734375, 0.07841910552978516, 0.07812592315673828, 0.0785244140625, 0.07895622253417969, 0.08132422637939453, 0.07679750061035157, 0.07684486389160156, 0.07634832000732422, 0.07648185729980468, 0.07718534088134765, 0.07767072296142578, 0.0772870101928711, 0.077476318359375, 0.07659315490722657, 0.07751884460449218, 0.07802674865722656, 0.07856275177001953, 0.07811949157714844, 0.07785225677490235, 0.07753769683837891, 0.07694745635986328, 0.077106689453125, 0.07694796752929688, 0.07730790710449219, 0.07736720275878907, 0.07769302368164062, 0.07671603393554688, 0.07780966186523437, 0.07796736145019531, 0.07918592071533204, 0.07836262512207032, 0.07747583770751953, 0.07808614349365234, 0.07736726379394532, 0.07711312103271484, 0.0774306869506836, 0.07778678131103516, 0.07798649597167968, 0.07810253143310547, 0.07821046447753906, 0.07736790466308593, 0.07815337371826171, 0.07859609222412109, 0.07846659088134765, 0.07845152282714844, 0.07803494262695312, 0.07768883514404297, 0.07800800323486329, 0.07775801849365234, 0.0785006103515625, 0.07791577911376953, 0.07838963317871094, 0.07854892730712891, 0.07902819061279297, 0.07887677001953125, 0.079052001953125, 0.07897097778320313, 0.07899353790283203, 0.07841849517822265, 0.07828070068359375, 0.07831942749023438, 0.08004422760009766, 0.07862419128417969, 0.07893651580810547, 0.07842182159423829, 0.07933353424072266, 0.07899561309814453, 0.08110717010498047, 0.07688396453857421, 0.07634518432617188, 0.0766629409790039, 0.0765761947631836, 0.07744963073730468, 0.07706591796875, 0.07681686401367188, 0.07745331573486328, 0.07688105773925781, 0.07727938842773438, 0.07806992340087891, 0.07881986999511718, 0.0778608627319336, 0.07771094512939453, 0.07726326751708984, 0.07759053039550781, 0.07778099060058594, 0.07760076904296875, 0.07725843048095703, 0.07798201751708984, 0.07819667053222656, 0.07791007995605469, 0.0773939208984375, 0.07819878387451172, 0.07824758148193359, 0.0781983642578125, 0.07764556884765625, 0.07798226928710937, 0.07791251373291015, 0.07796038055419922, 0.07788114929199219, 0.07774515533447265, 0.07815987396240234, 0.07837872314453125, 0.07805366516113281, 0.07830118560791016, 0.07868211364746094, 0.0786145248413086, 0.07861248016357422, 0.07818649291992187, 0.07826022338867188, 0.07851007843017578, 0.0777991714477539, 0.07804691314697265, 0.07768326568603516, 0.07896880340576172, 0.07898707580566407, 0.07880499267578125, 0.07862089538574218, 0.07916880035400391, 0.07901990509033203, 0.07885699462890625, 0.07782189178466797, 
0.0784610595703125, 0.07868943786621094, 0.0783059539794922, 0.07968991851806641, 0.0786145248413086, 0.07903641510009765, 0.07881318664550781, 0.07892562866210938, 0.07919635009765626, 0.08017385864257813, 0.07707443237304687, 0.0770840301513672, 0.0774856948852539, 0.07720448303222656, 0.0766750717163086, 0.07729357147216796, 0.07688355255126954, 0.07743638610839844, 0.07751776123046875, 0.07763871765136719, 0.07889170837402344, 0.07873356628417968, 0.07826636505126953, 0.07733657836914062, 0.0767891845703125, 0.07754605102539062, 0.07738982391357421, 0.07721369934082031, 0.07691468811035156, 0.07776051330566407, 0.07775190734863281, 0.07808041381835938, 0.07843379211425781, 0.07833004760742188, 0.07815916442871093, 0.07829196929931641, 0.07814553833007812, 0.07851129913330078, 0.07805187225341798, 0.07826051330566407, 0.07788748931884766, 0.07803903961181641, 0.0779653091430664, 0.07793622589111328, 0.07850828552246093, 0.07822351837158203, 0.07813465881347656, 0.07892623901367188, 0.07859942626953124, 0.07839842987060547, 0.07845200347900391, 0.07858592224121094, 0.07774253082275391, 0.07800240325927735, 0.07790592193603516, 0.07879475402832031, 0.07857520294189453, 0.0786006088256836, 0.07856537628173828, 0.07858175659179688, 0.07919999694824219, 0.07884825897216798, 0.07871897888183593, 0.07898931121826172, 0.07846707153320312, 0.07863001251220703, 0.07904902648925781, 0.07874617767333984, 0.0786673583984375, 0.07864726257324219, 0.07885810852050781, 0.07897555541992188, 0.08083401489257812, 0.07633602905273437, 0.07631871795654296, 0.07727922821044922, 0.07738368225097657, 0.07694076538085938, 0.07717469024658204, 0.07669414520263672, 0.07735501098632812, 0.077053955078125, 0.0776903076171875, 0.07763410949707031, 0.079570556640625, 0.07867021179199218, 0.07737916564941406, 0.07703584289550781, 0.07738518524169923, 0.07690831756591797, 0.07736201477050782, 0.07679590606689453, 0.07756390380859375, 0.07782585906982421, 0.0786864013671875, 0.07809587097167969, 0.07891792297363281, 0.07946057891845704, 0.07826022338867188, 0.07769087982177734, 0.07761510467529296, 0.0777194595336914, 0.07797325134277344, 0.07817151641845703, 0.07822025299072266, 0.07787725067138672, 0.0781534423828125, 0.07847465515136719, 0.07893436431884765, 0.0789378890991211, 0.07909657287597656, 0.07880703735351563, 0.07808204650878907, 0.07697612762451173, 0.0779277114868164, 0.0784227523803711, 0.07833347320556641, 0.07815216064453125, 0.07860774230957031, 0.07857215881347657, 0.07842610931396485, 0.07885004425048828, 0.07947468566894532, 0.07890329742431641, 0.07897702026367187, 0.07874883270263672, 0.07802761840820313, 0.07863091278076172, 0.07907737731933594, 0.08017033386230468, 0.07893468475341797, 0.07861567687988281, 0.07892176055908204, 0.07916012573242187, 0.07899958038330078]",tokens/s,12.82729400693661,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4176.900096,4733.140992,0.0,4347.396096,4328.833024,s,1,9.96478125,9.96478125,0.0,9.96478125,9.96478125,9.96478125,9.96478125,[9.96478125],,kWh,8.260262876257608e-05,9.104433635876655e-06,2.6616132404067017e-05,0.00011832319480251975,,MB,1387.245568,5276.30336,0.0,4859.101184,4756.928512,s,10,4.043050720214844,0.4043050720214844,0.0029363709005211657,0.40495066833496096,0.4067591827392578,0.4073766006469726,0.4078705349731445,"[0.39653961181640623, 0.40662197875976563, 0.4025369873046875, 0.40501275634765627, 0.40514923095703126, 0.40579074096679685, 0.4079940185546875, 0.40442025756835936, 0.4040965576171875, 0.4048885803222656]",tokens/s,633.1852299552561,kWh,1.1836643684834902e-05,1.3053748223162876e-06,7.837295158721337e-06,2.0979313665872525e-05,tokens/kWh,12202496.424677629,MB,1412.44416,5318.2464,0.0,4901.044224,4876.091904,s,10,23.99097705078125,2.3990977050781255,0.003572961778087123,2.399523803710937,2.4026566894531247,2.4040495361328125,2.4051638134765625,"[2.40018798828125, 2.399446533203125, 2.4054423828125, 2.401989990234375, 2.39487939453125, 2.394831298828125, 2.398699951171875, 2.40234716796875, 2.39960107421875, 2.39355126953125]",tokens/s,26.259872562359202,kWh,7.035771212808201e-05,7.760536900768138e-06,4.673610961107633e-05,0.00012485435863992647,tokens/kWh,504587.9109570276,,s,630,23.988052108764645,0.038076273188515315,0.0005532842657525565,0.03796171188354493,0.03839788131713867,0.03890213222503662,0.04052451679229736,"[0.039855457305908205, 0.038548095703125, 0.03837952041625976, 0.038316032409667966, 0.03827846527099609, 0.03796652984619141, 0.03788521575927734, 0.038464225769042966, 0.04050668716430664, 0.038121150970458983, 0.03773952102661133, 0.03751731109619141, 0.03751878356933594, 0.03761532974243164, 0.037582752227783206, 0.0376431999206543, 0.03768096160888672, 0.03820899200439453, 0.037838848114013675, 0.03779043197631836, 0.03794054412841797, 0.03792348861694336, 0.03781571197509766, 0.0385145263671875, 0.03864246368408203, 0.03838771057128906, 0.03828060913085937, 0.03826646423339844, 0.0377968635559082, 0.038012928009033206, 0.037862720489501955, 0.037774017333984375, 0.0377446403503418, 0.037830142974853515, 0.03778406524658203, 0.03762176132202148, 0.03857727813720703, 0.04123030471801758, 0.038112159729003905, 0.037969921112060545, 0.037992000579833984, 0.037914592742919924, 0.03785504150390625, 0.03797856140136719, 0.038133663177490236, 0.03789814376831055, 0.03797836685180664, 0.03804959869384766, 0.03792521667480469, 0.03797196960449219, 0.03797116851806641, 0.0379315185546875, 0.038045055389404295, 0.03788822555541992, 0.037970623016357424, 0.037754688262939456, 0.037869472503662106, 0.03782640075683594, 0.037884353637695316, 0.03774054336547852, 0.03805510330200195, 0.037773441314697266, 0.03796038436889648, 0.039892959594726565, 0.038029312133789066, 0.03813580703735352, 0.03914956665039063, 0.03780934524536133, 0.03804652786254883, 0.0379576301574707, 0.03804159927368164, 0.03794083023071289, 0.03776348876953125, 0.03805558395385742, 0.037867870330810544, 0.03802854537963867, 0.03806079864501953, 0.03778108978271484, 0.038219966888427735, 0.03777526473999023, 0.03773212814331055, 0.03768092727661133, 0.0374607048034668, 0.037490592956542966, 0.0375968017578125, 0.03769500732421875, 0.037854175567626956, 0.03972710418701172, 0.03992911911010742, 0.039600864410400394, 0.03880550384521484, 0.03808028793334961, 0.03808278274536133, 0.038117374420166016, 0.03817631912231445, 
0.037855678558349606, 0.03794438552856445, 0.03840095901489258, 0.038225215911865236, 0.03772691345214844, 0.038035457611083984, 0.03784195327758789, 0.03768793487548828, 0.0375975341796875, 0.03768524932861328, 0.03783055877685547, 0.03790447998046875, 0.0376627197265625, 0.03766400146484375, 0.037773406982421875, 0.03785337448120117, 0.037564895629882813, 0.037961727142333986, 0.037964927673339845, 0.03797081756591797, 0.03799814224243164, 0.03911520004272461, 0.03806790542602539, 0.03915603256225586, 0.03797942352294922, 0.03800252914428711, 0.0378961296081543, 0.03779420852661133, 0.03774915313720703, 0.037787776947021484, 0.037873279571533205, 0.040263294219970706, 0.038024063110351565, 0.03801414489746094, 0.03817350387573242, 0.03782656097412109, 0.038151935577392576, 0.038230270385742185, 0.03813372802734375, 0.038317569732666014, 0.038283744812011716, 0.038195262908935546, 0.038182910919189454, 0.037979999542236326, 0.03799372863769531, 0.03796060943603516, 0.03804569625854492, 0.037988353729248046, 0.03783679962158203, 0.03790643310546875, 0.03785507202148437, 0.03834076690673828, 0.03845939254760742, 0.03821932983398438, 0.03818889617919922, 0.038739681243896484, 0.03819731140136719, 0.03817555236816406, 0.03803667068481445, 0.03794412612915039, 0.038239551544189454, 0.0379747200012207, 0.03791171264648437, 0.038203968048095706, 0.03811948776245117, 0.039460384368896484, 0.03840070343017578, 0.03816243362426758, 0.03828863906860352, 0.038124160766601564, 0.0382158088684082, 0.03794739151000977, 0.03802425765991211, 0.03806057739257813, 0.037927295684814455, 0.037804065704345705, 0.03786716842651367, 0.03823651123046875, 0.03824639892578125, 0.037787647247314454, 0.037789695739746096, 0.037923934936523435, 0.03791759872436523, 0.03793030548095703, 0.03779439926147461, 0.037752864837646484, 0.037830623626708984, 0.03793929672241211, 0.03809395217895508, 0.0393875503540039, 0.03836156845092773, 0.038457313537597654, 0.038907936096191406, 0.03836723327636719, 0.04048726272583008, 0.0383054084777832, 0.038120384216308596, 0.03820748901367187, 0.03819878387451172, 0.03805644989013672, 0.03810655975341797, 0.03803603363037109, 0.038089729309082034, 0.03827609634399414, 0.03816377639770508, 0.037921470642089845, 0.03808051300048828, 0.03871686553955078, 0.03805036926269531, 0.03811260986328125, 0.03801155090332031, 0.03778326416015625, 0.037908767700195314, 0.03812966537475586, 0.038250495910644534, 0.03811328125, 0.03803750228881836, 0.03886489486694336, 0.03954278564453125, 0.03841228866577148, 0.03793868637084961, 0.03799619293212891, 0.037753215789794924, 0.03791872024536133, 0.03787404632568359, 0.03793110275268555, 0.03762377548217773, 0.03781951904296875, 0.038028190612792966, 0.03784908676147461, 0.037967777252197264, 0.038203102111816406, 0.03814761734008789, 0.03822214508056641, 0.03871587371826172, 0.03816361618041992, 0.03819561767578125, 0.0380687370300293, 0.03765248107910156, 0.03789823913574219, 0.03780198287963867, 0.03806556701660156, 0.03801948928833008, 0.03793529510498047, 0.040521312713623046, 0.037971454620361327, 0.03779471969604492, 0.03776006317138672, 0.03773040008544922, 0.03774755096435547, 0.03769744110107422, 0.03784304046630859, 0.03783033752441406, 0.03769760131835938, 0.037643966674804685, 0.037910430908203126, 0.037767841339111326, 0.040525825500488284, 0.03797401428222656, 0.038012928009033206, 0.03785932922363281, 0.03872499084472656, 0.039360321044921875, 0.03803564834594726, 0.03773708724975586, 0.03775827026367187, 0.037517921447753906, 
0.037800033569335936, 0.03783065414428711, 0.03810460662841797, 0.03801337432861328, 0.03802320098876953, 0.03776102447509765, 0.03800175857543946, 0.03782704162597656, 0.037803585052490235, 0.03776192092895508, 0.03793100738525391, 0.0380948486328125, 0.037852481842041014, 0.038119518280029296, 0.03781897735595703, 0.0378790397644043, 0.0379420166015625, 0.03812137603759766, 0.03788604736328125, 0.03785728073120117, 0.03786751937866211, 0.03789004898071289, 0.03782860946655273, 0.037691265106201174, 0.037654655456542965, 0.037689342498779296, 0.037664768218994144, 0.03808665466308594, 0.03773795318603516, 0.037746559143066405, 0.03773916625976562, 0.037643871307373046, 0.03785897445678711, 0.03808454513549805, 0.03859743881225586, 0.03834470367431641, 0.03812351989746094, 0.03827916717529297, 0.038035457611083984, 0.038072032928466795, 0.038082847595214846, 0.03806585693359375, 0.03802115249633789, 0.037985599517822266, 0.03793609619140625, 0.03800035095214844, 0.03779817581176758, 0.038065376281738283, 0.03808089447021484, 0.03811366271972656, 0.037979774475097657, 0.03778192138671875, 0.038109184265136715, 0.04040118408203125, 0.038477630615234376, 0.03839174270629883, 0.03828556823730469, 0.03814060974121094, 0.03797923278808594, 0.038316959381103514, 0.03803891372680664, 0.03798614501953125, 0.03805235290527344, 0.037808414459228515, 0.03779695892333984, 0.03776575851440429, 0.037916126251220705, 0.037747520446777344, 0.037765121459960936, 0.03769139099121094, 0.038102912902832034, 0.037926464080810546, 0.037956161499023436, 0.03802294540405273, 0.03793123245239258, 0.03789619064331055, 0.038199295043945314, 0.038027263641357424, 0.03785932922363281, 0.037937152862548826, 0.0378081283569336, 0.03788595199584961, 0.03803750228881836, 0.03859558486938477, 0.03789926528930664, 0.03789596939086914, 0.037781726837158205, 0.03800883102416992, 0.037891551971435546, 0.03799020767211914, 0.037870304107666015, 0.03796582412719727, 0.03772991943359375, 0.03779324722290039, 0.037677696228027344, 0.03768348693847656, 0.037804031372070314, 0.037727840423583986, 0.037685089111328125, 0.03784304046630859, 0.03789052963256836, 0.03806003189086914, 0.038053150177001956, 0.03828345489501953, 0.038217857360839845, 0.03811369705200195, 0.03803340911865234, 0.03800822448730469, 0.038004352569580076, 0.03804214477539063, 0.037953983306884764, 0.03798220825195313, 0.03789136123657227, 0.03773923110961914, 0.03820457458496094, 0.03808451080322266, 0.04058726501464844, 0.03823996734619141, 0.03808489608764649, 0.0380682258605957, 0.03786342239379883, 0.03828326416015625, 0.03778499221801758, 0.03776752090454102, 0.037766433715820315, 0.03791151809692383, 0.03805971145629883, 0.03770566558837891, 0.03798422241210937, 0.03763036727905274, 0.03779174423217774, 0.0379774398803711, 0.03774649429321289, 0.03771683120727539, 0.03791667175292969, 0.03811635208129883, 0.03864473724365235, 0.03828326416015625, 0.03831398391723633, 0.03824435043334961, 0.03820243072509766, 0.038342975616455076, 0.04006563186645508, 0.03838288116455078, 0.037958400726318356, 0.038074176788330076, 0.03790457534790039, 0.037763072967529294, 0.03784499359130859, 0.037804031372070314, 0.03766886520385742, 0.037680225372314455, 0.037921695709228515, 0.03826015853881836, 0.038002655029296874, 0.03823267364501953, 0.03842035293579102, 0.0381973762512207, 0.03811328125, 0.03804569625854492, 0.037787647247314454, 0.037779457092285154, 0.03775875091552734, 0.0379189453125, 0.038263904571533204, 0.03778591918945313, 0.03773500823974609, 
0.03770528030395508, 0.03794102478027344, 0.038136161804199216, 0.03889503860473633, 0.038271839141845704, 0.038115360260009765, 0.03812351989746094, 0.03779379272460937, 0.037789695739746096, 0.03786137771606445, 0.037703678131103514, 0.03767295837402344, 0.040558944702148436, 0.03816425704956055, 0.037796062469482423, 0.03798220825195313, 0.037822463989257815, 0.0379923210144043, 0.03790611267089844, 0.038023616790771486, 0.03813785552978516, 0.03780723190307617, 0.037948287963867185, 0.03775030517578125, 0.03780441665649414, 0.037786945343017575, 0.0376409912109375, 0.03759283065795899, 0.038019329071044924, 0.037953056335449216, 0.03801081466674805, 0.04133942413330078, 0.03845939254760742, 0.03808051300048828, 0.03803126525878906, 0.037983776092529294, 0.037964351654052736, 0.03803446578979492, 0.03756307220458984, 0.03774249649047851, 0.03787200164794922, 0.03776102447509765, 0.03777711868286133, 0.03790467071533203, 0.03818700790405274, 0.0384716796875, 0.038553600311279294, 0.03844838333129883, 0.03825872039794922, 0.03807920074462891, 0.038044960021972656, 0.03786368179321289, 0.0377097282409668, 0.037742305755615234, 0.037671775817871095, 0.03782860946655273, 0.03812351989746094, 0.03817267227172851, 0.040427520751953126, 0.038932289123535156, 0.03829983901977539, 0.037969921112060545, 0.037943294525146484, 0.03796169662475586, 0.03783478546142578, 0.03828736114501953, 0.037745983123779296, 0.03772022247314453, 0.03796985626220703, 0.037833023071289065, 0.03795177459716797, 0.03819724655151367, 0.038182910919189454, 0.03826409530639648, 0.03812835311889649, 0.04037311935424805, 0.038061214447021485, 0.0384356803894043, 0.03797196960449219, 0.03796377563476563, 0.03788534545898437, 0.038142559051513675, 0.037695392608642575, 0.03791622543334961, 0.03761939239501953, 0.037765697479248045, 0.037848926544189455, 0.038142398834228514, 0.0379694709777832, 0.03774201583862305, 0.03773251342773438, 0.037716766357421876, 0.03775081634521484, 0.03774201583862305, 0.03763056182861328, 0.03785318374633789, 0.03786240005493164, 0.03808563232421875, 0.043052318572998044, 0.038290145874023435, 0.03858432006835937, 0.038065376281738283, 0.03788880157470703, 0.03804159927368164, 0.037722110748291016, 0.03779379272460937, 0.037711742401123044, 0.038094463348388674, 0.03792707061767578, 0.04146163177490234, 0.03839638519287109, 0.03804713439941406, 0.03822243118286133, 0.037993759155273435, 0.037976799011230467, 0.037965023040771484, 0.03796867370605469, 0.037838848114013675, 0.037797664642333986, 0.037798110961914065, 0.03773971176147461, 0.03755091094970703, 0.037838111877441405, 0.038116062164306644, 0.03789619064331055, 0.03910041427612305, 0.03781577682495117, 0.03786396789550781, 0.037787071228027345, 0.03763801574707031, 0.037468864440917966, 0.03774054336547852, 0.03760947036743164, 0.03765980911254883, 0.03761648178100586, 0.03782860946655273, 0.037797889709472655, 0.037707649230957034, 0.04039884948730469, 0.03804953765869141, 0.03761507034301758, 0.03792771148681641, 0.037949249267578124, 0.0377509765625, 0.038012928009033206, 0.037829696655273436, 0.03779884719848633, 0.03772931289672852, 0.037778400421142576, 0.037819679260253904, 0.03758972930908203, 0.037804031372070314, 0.037814273834228515, 0.037746688842773435, 0.03775619125366211, 0.038107872009277344, 0.03832831954956055, 0.038182334899902345, 0.03825312042236328, 0.03787776184082031, 0.03782243347167969, 0.0378752326965332, 0.03776563262939453, 0.037516704559326174, 0.037788257598876954, 0.03789619064331055, 
0.03767030334472656, 0.038180545806884764, 0.037833633422851565, 0.03822387313842773, 0.0377262077331543, 0.03783065414428711, 0.03794124984741211, 0.0382033920288086, 0.037996543884277346, 0.03772208023071289, 0.037639968872070315, 0.03807257461547851, 0.03787980651855469, 0.0377446403503418, 0.037725311279296875, 0.03802816009521484, 0.03911884689331055, 0.0387459831237793, 0.03791475296020508, 0.037731521606445315, 0.03794412612915039, 0.03774435043334961, 0.03771420669555664, 0.03769916915893555, 0.0380392951965332, 0.03797849655151367, 0.03780409622192383, 0.03781654357910156, 0.038397567749023434, 0.03832870483398437, 0.037956607818603515, 0.038063102722167966, 0.03811270523071289, 0.03848454284667969, 0.0384634895324707]",tokens/s,26.26307451490875,,, 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,2201.710592,2206.072832,0.0,1820.327936,1730.89792,s,1,8.824205078125,8.824205078125,0.0,8.824205078125,8.824205078125,8.824205078125,8.824205078125,[8.824205078125],,kWh,4.9560551074955586e-05,5.459665444843831e-06,1.649695764199821e-05,7.151717416179763e-05,,MB,2284.023808,2350.77632,0.0,1935.671296,1893.8368,s,10,1.6837865905761717,0.1683786590576172,0.00047206260861695234,0.16845658874511718,0.16881590576171876,0.1689777618408203,0.16910724670410157,"[0.16791337585449218, 0.16845890808105468, 0.16845426940917968, 0.16850901794433593, 0.16839389038085936, 0.168408447265625, 0.16846163940429687, 0.16913961791992188, 0.16877993774414063, 0.16726748657226562]",tokens/s,1520.3826983347087,kWh,4.989181887923367e-06,5.50217370396802e-07,3.3060243020674218e-06,8.845423560387592e-06,tokens/kWh,28941519.674246386,MB,2290.536448,2499.674112,0.0,2082.471936,1953.434112,s,10,16.23874475097656,1.6238744750976561,0.0036601242243363926,1.6236317749023437,1.629042138671875,1.6295904663085936,1.6300291284179687,"[1.6289202880859375, 1.6224979248046876, 1.623896484375, 1.6233670654296875, 1.6200091552734375, 1.6301387939453125, 1.6256204833984376, 
1.6250142822265625, 1.6171944580078126, 1.6220858154296875]",tokens/s,38.79610214096833,kWh,4.7533203409162066e-05,5.242655204766752e-06,2.8737306888133883e-05,8.151316550206271e-05,tokens/kWh,772881.2838020107,,s,630,16.235946065902727,0.02577134296175033,0.00040555064998865986,0.025671184539794924,0.026128869819641114,0.026308357238769533,0.027710485343933115,"[0.02779136085510254, 0.02625926399230957, 0.02905721664428711, 0.026171392440795898, 0.026058496475219725, 0.02569203186035156, 0.02606070327758789, 0.025952735900878907, 0.026001216888427735, 0.02599728012084961, 0.025835424423217773, 0.02564031982421875, 0.025556224822998047, 0.025798240661621095, 0.02580691146850586, 0.026410783767700195, 0.025850015640258787, 0.02572287940979004, 0.025481184005737303, 0.025614208221435546, 0.025614336013793947, 0.025583871841430662, 0.025675775527954102, 0.02573311996459961, 0.02586777687072754, 0.025688575744628905, 0.025552896499633788, 0.025915008544921875, 0.025887071609497072, 0.025579551696777343, 0.025583583831787108, 0.025570623397827147, 0.025836256027221678, 0.025589759826660157, 0.02552137565612793, 0.02547929573059082, 0.025614528656005858, 0.02564143943786621, 0.02552169609069824, 0.025428447723388672, 0.02550169563293457, 0.025505504608154296, 0.025635103225708007, 0.025991167068481445, 0.025939647674560546, 0.026102144241333006, 0.02581839942932129, 0.02570070457458496, 0.02561827278137207, 0.025823328018188478, 0.025444736480712892, 0.025451967239379883, 0.02572073554992676, 0.025516063690185546, 0.025669855117797853, 0.0257007999420166, 0.025722848892211915, 0.025647104263305662, 0.025595903396606445, 0.026070528030395508, 0.026237247467041015, 0.02673196792602539, 0.025854240417480467, 0.02607043266296387, 0.025920095443725585, 0.0259420166015625, 0.025794559478759766, 0.026033887863159178, 0.026085472106933592, 0.02617136001586914, 0.02599065589904785, 0.025966911315917968, 0.025996831893920897, 0.026010528564453125, 0.0260402889251709, 0.025810943603515626, 0.025903263092041016, 0.026320383071899413, 0.02566771125793457, 0.026218175888061523, 0.02692313575744629, 0.025782623291015626, 0.025870399475097658, 0.025671680450439452, 0.02589695930480957, 0.025613983154296874, 0.02569865608215332, 0.0255467529296875, 0.02549897575378418, 0.02549331283569336, 0.02543497657775879, 0.02555001640319824, 0.02547385597229004, 0.025567136764526367, 0.025483327865600584, 0.025507871627807616, 0.02552739143371582, 0.025453472137451173, 0.025432319641113282, 0.025444128036499022, 0.025456607818603514, 0.02558060836791992, 0.02545555114746094, 0.025452320098876952, 0.025503040313720703, 0.02542403221130371, 0.02535910415649414, 0.025266176223754884, 0.025413087844848633, 0.025405248641967772, 0.02545257568359375, 0.025350847244262696, 0.025564544677734374, 0.025760320663452147, 0.026100799560546874, 0.026018815994262694, 0.026168832778930663, 0.026013599395751954, 0.02595027160644531, 0.025954240798950194, 0.02589299201965332, 0.025772064208984376, 0.02572537612915039, 0.026012992858886717, 0.02573299217224121, 0.025632991790771484, 0.026226591110229493, 0.028895103454589843, 0.027957311630249025, 0.026091552734375, 0.02599734306335449, 0.02572902488708496, 0.025625919342041014, 0.025655008316040038, 0.025789407730102538, 0.02573846435546875, 0.025559839248657228, 0.025825088500976562, 0.025882816314697264, 0.025656320571899413, 0.02595337677001953, 0.025966495513916017, 0.02607513618469238, 0.025918975830078125, 0.025982591629028322, 0.02615920066833496, 0.026281951904296875, 
0.026178207397460938, 0.02607529640197754, 0.02600559997558594, 0.026019744873046875, 0.025798784255981446, 0.025916479110717774, 0.025438751220703125, 0.025417728424072264, 0.025669919967651368, 0.02540105628967285, 0.025465024948120116, 0.025788511276245117, 0.025738399505615236, 0.025541183471679687, 0.02558425521850586, 0.025458335876464844, 0.025516063690185546, 0.0254703369140625, 0.02551254463195801, 0.025403615951538085, 0.02557439994812012, 0.02533046340942383, 0.025341951370239257, 0.025208351135253906, 0.025498048782348633, 0.025384992599487306, 0.025440256118774415, 0.025693471908569337, 0.025578432083129883, 0.025721727371215822, 0.025350175857543945, 0.025459583282470704, 0.025335744857788087, 0.02544374465942383, 0.025369247436523436, 0.025675264358520508, 0.025476800918579103, 0.025586496353149413, 0.025711999893188477, 0.025651840209960936, 0.02586185646057129, 0.02555276870727539, 0.026119295120239257, 0.02577292823791504, 0.025829376220703124, 0.025665056228637694, 0.0257326717376709, 0.02558777618408203, 0.02549545669555664, 0.025410879135131837, 0.028844736099243165, 0.02584671974182129, 0.025681568145751954, 0.02549385643005371, 0.025595584869384767, 0.025598400115966796, 0.025630367279052733, 0.0255817928314209, 0.025573183059692382, 0.02576406478881836, 0.025575391769409178, 0.02602774429321289, 0.02584307289123535, 0.025781152725219726, 0.026261503219604493, 0.02865567970275879, 0.026084863662719726, 0.025909343719482423, 0.025669727325439453, 0.025954912185668946, 0.025679519653320312, 0.02558176040649414, 0.025562944412231444, 0.025597759246826172, 0.02555411148071289, 0.02571500778198242, 0.0257205753326416, 0.025581663131713867, 0.02564796829223633, 0.02557542419433594, 0.0255928955078125, 0.02550912094116211, 0.025620159149169923, 0.02553862380981445, 0.025784255981445313, 0.026005504608154296, 0.02570240020751953, 0.025556991577148438, 0.025488800048828125, 0.025565792083740234, 0.025484352111816405, 0.025506656646728517, 0.025448543548583984, 0.025460416793823243, 0.025421791076660157, 0.02550931167602539, 0.02537104034423828, 0.02544492721557617, 0.025573312759399416, 0.02551148796081543, 0.02557382392883301, 0.025974079132080077, 0.02591814422607422, 0.025753536224365235, 0.025575872421264648, 0.02636911964416504, 0.02618953514099121, 0.026432575225830077, 0.026281248092651366, 0.026344928741455078, 0.026083520889282227, 0.02586956787109375, 0.025907968521118162, 0.02568796730041504, 0.025573471069335937, 0.025554143905639648, 0.02559651184082031, 0.025688127517700197, 0.02573846435546875, 0.025571807861328125, 0.025690816879272462, 0.025646303176879885, 0.02554934310913086, 0.025444351196289062, 0.025444351196289062, 0.02538502311706543, 0.02541804885864258, 0.025638208389282227, 0.025468896865844727, 0.025516384124755858, 0.025670688629150392, 0.025512928009033202, 0.025464832305908205, 0.025859487533569335, 0.025747903823852537, 0.026134687423706053, 0.02569196891784668, 0.025639263153076172, 0.025569311141967775, 0.02567740821838379, 0.025782495498657226, 0.025988544464111328, 0.025991487503051757, 0.026033824920654296, 0.026051168441772462, 0.025979167938232423, 0.026047840118408203, 0.025926015853881837, 0.025711999893188477, 0.025580127716064452, 0.025616287231445312, 0.02554876708984375, 0.025601760864257812, 0.025471168518066405, 0.025534719467163087, 0.025513216018676756, 0.025631488800048827, 0.02562006378173828, 0.025346399307250977, 0.025491519927978514, 0.025514047622680665, 0.02557535934448242, 0.02537673568725586, 0.02550399971008301, 
0.025403167724609373, 0.025427967071533202, 0.025421951293945314, 0.025960319519042967, 0.026431360244750977, 0.026271392822265625, 0.026031103134155274, 0.025796255111694335, 0.026787839889526367, 0.026070400238037108, 0.026534528732299806, 0.026183328628540038, 0.02631228828430176, 0.026710784912109375, 0.026149951934814453, 0.026272703170776367, 0.026189823150634766, 0.026011423110961916, 0.025894336700439453, 0.02575596809387207, 0.02573686408996582, 0.025598783493041993, 0.025605535507202147, 0.025503679275512694, 0.025522239685058595, 0.02586262321472168, 0.025788543701171875, 0.02552342414855957, 0.025585792541503907, 0.025756351470947264, 0.025905120849609376, 0.02598681640625, 0.025585920333862304, 0.025467168807983397, 0.025550559997558595, 0.025704160690307617, 0.025682016372680663, 0.025516576766967773, 0.02557859230041504, 0.0254552001953125, 0.025468128204345703, 0.026005567550659178, 0.025830080032348633, 0.02569011116027832, 0.02572697639465332, 0.02570649528503418, 0.025476831436157227, 0.02700931167602539, 0.027303936004638672, 0.02597068786621094, 0.026222591400146485, 0.025976800918579103, 0.025767488479614256, 0.025610719680786133, 0.025800960540771484, 0.025706239700317383, 0.025707551956176758, 0.02584419250488281, 0.025768447875976562, 0.02548121643066406, 0.025511936187744142, 0.025444351196289062, 0.025533824920654296, 0.025901952743530274, 0.025837312698364256, 0.02566975975036621, 0.02557119941711426, 0.026095359802246094, 0.025743616104125976, 0.025642208099365234, 0.025580480575561525, 0.025573087692260743, 0.025528736114501953, 0.025724639892578127, 0.02573209571838379, 0.025989599227905273, 0.025551103591918947, 0.02562281608581543, 0.025651264190673827, 0.025535999298095705, 0.025752351760864257, 0.026087072372436522, 0.025692384719848634, 0.025634592056274413, 0.025659488677978515, 0.025552799224853515, 0.02588876724243164, 0.025908544540405275, 0.025805248260498046, 0.025660736083984375, 0.02567475128173828, 0.02566547203063965, 0.025595840454101564, 0.02555833625793457, 0.026011552810668945, 0.02576265525817871, 0.027328863143920898, 0.02751247978210449, 0.02689638328552246, 0.02614271926879883, 0.02569625663757324, 0.02572287940979004, 0.02559382438659668, 0.025552928924560545, 0.025542144775390626, 0.02562713623046875, 0.025523584365844728, 0.025544767379760743, 0.0255053768157959, 0.02555763244628906, 0.025430368423461913, 0.025430015563964844, 0.02562060737609863, 0.025599008560180665, 0.025713056564331056, 0.02557145690917969, 0.025623136520385743, 0.025693920135498045, 0.026125375747680663, 0.0261907844543457, 0.0259421443939209, 0.025894655227661132, 0.025784032821655273, 0.025789920806884765, 0.02584441566467285, 0.02615648078918457, 0.02579948806762695, 0.025833471298217774, 0.025707712173461916, 0.025965375900268553, 0.02614271926879883, 0.026318239212036132, 0.025768543243408205, 0.025786239624023436, 0.025526304244995118, 0.025535615921020507, 0.025352415084838868, 0.02558153533935547, 0.025524831771850585, 0.02556332778930664, 0.025556543350219726, 0.025770015716552734, 0.02548294448852539, 0.025573280334472655, 0.025527103424072266, 0.0255831356048584, 0.025561567306518554, 0.02548684883117676, 0.02548384094238281, 0.02550966453552246, 0.02559712028503418, 0.026049503326416014, 0.02651136016845703, 0.026348831176757813, 0.0262007999420166, 0.02612451171875, 0.026113088607788087, 0.026303552627563478, 0.025860864639282225, 0.025654176712036132, 0.02575881576538086, 0.025756576538085937, 0.025739551544189453, 0.02562633514404297, 
0.025548799514770508, 0.025776159286499022, 0.025587039947509764, 0.02578700828552246, 0.025564416885375977, 0.025756416320800782, 0.025810079574584963, 0.026370912551879882, 0.027862560272216796, 0.02611827278137207, 0.025817440032958983, 0.02562607955932617, 0.025631263732910158, 0.025591808319091795, 0.025427167892456054, 0.025561567306518554, 0.02558188819885254, 0.02549728012084961, 0.026103904724121094, 0.025641183853149414, 0.025843711853027345, 0.025860095977783205, 0.02579987144470215, 0.025657535552978516, 0.025692800521850585, 0.025570528030395508, 0.025682624816894532, 0.025718751907348632, 0.025948287963867188, 0.026390464782714843, 0.02616092872619629, 0.025827552795410155, 0.025700128555297852, 0.025649375915527343, 0.025513055801391602, 0.025615264892578125, 0.025602048873901367, 0.025792512893676758, 0.026048351287841796, 0.02591049575805664, 0.025761823654174804, 0.025729440689086915, 0.025605791091918944, 0.025644191741943358, 0.025609920501708985, 0.025630975723266603, 0.025601215362548828, 0.025608352661132813, 0.025647520065307617, 0.025707647323608397, 0.02616595268249512, 0.025483776092529296, 0.025694944381713866, 0.025429056167602538, 0.025626399993896484, 0.025434240341186524, 0.02546073532104492, 0.025483327865600584, 0.02566752052307129, 0.025821184158325194, 0.0258940486907959, 0.02577699279785156, 0.025798656463623046, 0.02554265594482422, 0.025652320861816406, 0.02587945556640625, 0.025601888656616213, 0.02549510383605957, 0.025494112014770507, 0.0255546875, 0.02540729522705078, 0.02554252815246582, 0.025516639709472655, 0.025542623519897462, 0.025573375701904297, 0.025592960357666016, 0.02553945541381836, 0.025659135818481445, 0.0254769287109375, 0.025682367324829102, 0.02550783920288086, 0.02556755256652832, 0.025822912216186523, 0.025830560684204102, 0.025665504455566406, 0.025512575149536133, 0.025530847549438476, 0.02559129524230957, 0.025618911743164063, 0.025599071502685547, 0.025674463272094727, 0.02573686408996582, 0.026360511779785156, 0.02602137565612793, 0.026128223419189453, 0.026075807571411133, 0.0260598087310791, 0.025897951126098634, 0.025776128768920898, 0.025654848098754884, 0.025565088272094725, 0.025784767150878907, 0.025570943832397462, 0.025727487564086913, 0.025636064529418946, 0.026323711395263672, 0.025907520294189454, 0.026361343383789062, 0.02576963233947754, 0.02582172775268555, 0.02575974464416504, 0.025863391876220703, 0.02569705581665039, 0.02562879943847656, 0.025517120361328124, 0.025506048202514647, 0.025583295822143554, 0.025371519088745118, 0.025446399688720703, 0.025534271240234375, 0.025565376281738283, 0.026061887741088866, 0.025612735748291017, 0.025754112243652344, 0.02575507164001465, 0.025737407684326172, 0.026275520324707032, 0.025592512130737304, 0.025697568893432617, 0.025746143341064454, 0.02592767906188965, 0.025602048873901367, 0.025617536544799806, 0.02556403160095215, 0.025720703125, 0.02606716728210449, 0.026171424865722655, 0.02582659149169922, 0.025522783279418947, 0.02558393669128418, 0.02568979263305664, 0.026222591400146485, 0.025554943084716796, 0.025565183639526368, 0.025593759536743164, 0.025692256927490234, 0.025602048873901367, 0.025509632110595704, 0.025701887130737306, 0.02546735954284668, 0.025664831161499025, 0.025510719299316406, 0.02546089553833008, 0.02537676811218262, 0.02545254325866699]",tokens/s,38.80278965221928,,, 
4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,916.598784,698.220544,0.0,295.698432,277.263872,s,1,7.80066943359375,7.80066943359375,0.0,7.80066943359375,7.80066943359375,7.80066943359375,7.80066943359375,[7.80066943359375],,kWh,1.6885162729219397e-05,1.8553838785548659e-06,4.83055942002153e-06,2.357110602779579e-05,,MB,1352.384512,752.746496,0.0,335.54432,313.833472,s,13,0.5626000328063965,0.043276925600492046,0.001651299767595226,0.043009246826171875,0.04358422317504883,0.0456360122680664,0.04809347442626953,"[0.048707839965820315, 0.04302323150634765, 0.042444576263427736, 0.04238399887084961, 0.043568607330322265, 0.043009246826171875, 0.04157855987548828, 0.04358812713623047, 0.04313024139404297, 0.042713760375976566, 0.043149024963378906, 0.042899776458740234, 0.04240304183959961]",tokens/s,5915.392474115337,kWh,1.4954549491070187e-06,1.6489293107593426e-07,9.94817972724328e-07,2.6551658529072805e-06,tokens/kWh,96415822.65744045,MB,1391.32928,777.91232,0.0,360.710144,313.836032,s,13,9.700478149414062,0.7461906268780047,0.003894170993894546,0.7451903076171875,0.7496636352539062,0.752587890625,0.755840283203125,"[0.7416971435546875, 0.7566533813476563, 0.7435324096679687, 0.74607958984375, 0.7488079223632812, 0.7450740356445312, 0.7498775634765625, 0.7451903076171875, 0.7431558227539062, 0.7427476196289062, 0.7427779541015626, 0.7464763793945313, 0.7484080200195312]",tokens/s,84.4288278768475,kWh,2.148441266627824e-05,2.369402492959856e-06,8.569399080352405e-06,3.2423214239590496e-05,tokens/kWh,1943052.2691076568,,s,819,9.694087070465084,0.01183649214953002,0.0002387998402609491,0.011779935836791992,0.011970732498168945,0.012171747589111328,0.012735061416625972,"[0.01189686393737793, 0.012034048080444336, 0.011896832466125488, 0.012518688201904297, 0.01181100845336914, 0.011921119689941407, 0.011723584175109863, 0.011800576210021972, 0.011704319953918458, 0.011808287620544434, 0.011713151931762695, 0.011673439979553222, 0.011698176383972168, 0.011734047889709472, 0.01171724796295166, 0.011801024436950684, 0.011720255851745605, 0.011741824150085449, 0.01168899154663086, 0.011729120254516601, 0.011749600410461426, 0.011722111701965332, 0.011821951866149902, 0.01175932788848877, 0.011790495872497558, 0.011733119964599609, 0.01174630355834961, 0.011725279808044433, 0.011698719978332519, 0.011744640350341796, 0.011745920181274414, 0.011716704368591308, 0.011872223854064941, 0.011794367790222167, 0.011697792053222656, 0.011764096260070802, 0.01169369602203369, 0.011714943885803223, 0.011704319953918458, 0.011754912376403808, 0.011733599662780762, 0.01173299217224121, 0.01175971221923828, 0.01175055980682373, 0.01173910427093506, 0.01167356777191162, 0.011718976020812987, 0.011768320083618163, 0.011720704078674317, 0.011665247917175292, 0.011773183822631835, 0.011701151847839356, 0.011806336402893067, 0.011708000183105468, 
0.011696288108825684, 0.011704959869384766, 0.011732799530029297, 0.011739328384399414, 0.011743231773376465, 0.011773823738098144, 0.011746784210205079, 0.0117739839553833, 0.01176137638092041, 0.011778304100036621, 0.012014880180358886, 0.012069600105285645, 0.012064000129699708, 0.011942208290100098, 0.01198038387298584, 0.011869024276733399, 0.01195740795135498, 0.01192636775970459, 0.01185587215423584, 0.011828607559204101, 0.011761823654174804, 0.011876959800720215, 0.011908927917480468, 0.011902560234069824, 0.01212668800354004, 0.01183948802947998, 0.011741375923156739, 0.012127103805541993, 0.011807104110717774, 0.011968768119812012, 0.012046655654907227, 0.011788288116455077, 0.01183340835571289, 0.01176915168762207, 0.011797120094299316, 0.011767807960510255, 0.011812864303588867, 0.011816287994384765, 0.011934368133544922, 0.01182089614868164, 0.011753503799438476, 0.011780384063720704, 0.011799679756164552, 0.011716544151306152, 0.011707167625427246, 0.01172435188293457, 0.011796031951904297, 0.011854720115661622, 0.011744640350341796, 0.011758208274841309, 0.011737088203430175, 0.011756863594055176, 0.011717599868774414, 0.011691200256347655, 0.011760160446166993, 0.01169859218597412, 0.011681344032287598, 0.011819040298461914, 0.011970303535461425, 0.013006976127624512, 0.012298368453979492, 0.012191743850708007, 0.0124518404006958, 0.012683551788330078, 0.014199775695800781, 0.012471039772033692, 0.012025664329528809, 0.011860159873962403, 0.0118538236618042, 0.013381983757019043, 0.013018783569335937, 0.012518848419189453, 0.011643168449401855, 0.011763903617858886, 0.01182857608795166, 0.011751775741577148, 0.011860287666320801, 0.011718303680419921, 0.011778464317321777, 0.011826751708984375, 0.011737792015075684, 0.011801759719848632, 0.011797023773193359, 0.011737088203430175, 0.011847552299499511, 0.011705984115600586, 0.011775487899780274, 0.011754783630371093, 0.011853535652160645, 0.011735039710998535, 0.011849568367004395, 0.013135583877563476, 0.011744000434875489, 0.011732671737670898, 0.011745439529418946, 0.01174307155609131, 0.011708576202392578, 0.011714048385620117, 0.011763263702392578, 0.011742079734802246, 0.011761568069458007, 0.011734304428100585, 0.011725248336791992, 0.012095775604248047, 0.01177513599395752, 0.01175766372680664, 0.011763839721679688, 0.011786879539489746, 0.011751328468322754, 0.011729344367980957, 0.011748255729675293, 0.011846400260925294, 0.011703359603881835, 0.011756735801696777, 0.01174294376373291, 0.011720735549926758, 0.01188268756866455, 0.011750207901000977, 0.011748096466064454, 0.011829504013061523, 0.01176371192932129, 0.011780096054077148, 0.011738752365112305, 0.011746720314025879, 0.01171555233001709, 0.011724831581115723, 0.011768992424011231, 0.011729408264160156, 0.011710559844970703, 0.011904671669006347, 0.011804448127746583, 0.011747167587280274, 0.011792767524719238, 0.011835968017578125, 0.011877792358398438, 0.011819071769714355, 0.011860383987426757, 0.011854368209838867, 0.011722496032714844, 0.011859519958496094, 0.011765503883361816, 0.011731295585632325, 0.011928447723388672, 0.011819071769714355, 0.011875071525573731, 0.01176416015625, 0.011777536392211914, 0.012108863830566407, 0.011710047721862793, 0.012429632186889649, 0.011822560310363769, 0.011884480476379395, 0.011751232147216796, 0.011756319999694825, 0.011737088203430175, 0.01173299217224121, 0.01174732780456543, 0.01174556827545166, 0.011753408432006836, 0.011708191871643066, 0.011755167961120605, 0.011802528381347656, 0.01176416015625, 
0.011782143592834473, 0.011796256065368653, 0.011804896354675293, 0.011792384147644042, 0.011874303817749024, 0.011865920066833496, 0.011758111953735351, 0.011749024391174317, 0.011804672241210937, 0.011780096054077148, 0.011787839889526367, 0.011758015632629395, 0.011753536224365235, 0.011812447547912597, 0.011782496452331544, 0.011771648406982423, 0.011783455848693848, 0.01177888011932373, 0.011741279602050781, 0.011870207786560059, 0.011775327682495116, 0.011799648284912109, 0.011808256149291992, 0.01185427188873291, 0.011937343597412109, 0.012177536010742188, 0.012002816200256347, 0.012076576232910157, 0.01199407958984375, 0.012171104431152343, 0.011850015640258789, 0.011857888221740723, 0.011843135833740234, 0.0118372163772583, 0.011754048347473145, 0.011624447822570801, 0.011767840385437012, 0.011859999656677247, 0.011925375938415528, 0.011857600212097167, 0.011882431983947754, 0.011784640312194825, 0.011843584060668945, 0.011849408149719238, 0.011776320457458496, 0.011817055702209473, 0.011782272338867188, 0.01181868839263916, 0.011741536140441895, 0.011757535934448242, 0.011869983673095703, 0.011808768272399902, 0.011752863883972169, 0.011942496299743652, 0.012023936271667481, 0.011871487617492675, 0.01173516845703125, 0.011700927734375, 0.011765664100646972, 0.011839072227478027, 0.011815232276916504, 0.011774975776672364, 0.011803647994995118, 0.012463968276977539, 0.012660415649414062, 0.013633855819702148, 0.011927040100097656, 0.012487327575683594, 0.01187775993347168, 0.011903136253356934, 0.011954336166381837, 0.011919679641723633, 0.011773951530456543, 0.011907072067260742, 0.0117903995513916, 0.011770943641662597, 0.011776896476745605, 0.0118023681640625, 0.011766016006469726, 0.011768992424011231, 0.011811488151550293, 0.011829536437988281, 0.011796383857727051, 0.01170537567138672, 0.011796480178833007, 0.011751744270324707, 0.011793055534362794, 0.011728896141052245, 0.011777312278747558, 0.011808639526367187, 0.011896960258483887, 0.011828000068664551, 0.011935168266296387, 0.01174118423461914, 0.01183516788482666, 0.011744223594665527, 0.011779840469360352, 0.011780096054077148, 0.011542752265930175, 0.011843584060668945, 0.012294431686401368, 0.01186185646057129, 0.011829119682312011, 0.011855423927307129, 0.011885024070739747, 0.011718144416809082, 0.011846112251281739, 0.011761664390563965, 0.011769472122192383, 0.01176313591003418, 0.011754752159118653, 0.011693344116210938, 0.011796832084655762, 0.011820608139038086, 0.011780608177185058, 0.011726943969726563, 0.011765727996826172, 0.011718591690063477, 0.011771327972412109, 0.011767359733581543, 0.01181827163696289, 0.011712224006652831, 0.011738495826721191, 0.01174729633331299, 0.01175004768371582, 0.011778047561645508, 0.011744895935058593, 0.011809151649475097, 0.012746368408203125, 0.011936127662658691, 0.011774239540100097, 0.011712224006652831, 0.011801823616027832, 0.011972800254821777, 0.011809568405151367, 0.01200534439086914, 0.011820351600646973, 0.011831839561462403, 0.011739263534545899, 0.011736639976501465, 0.011741503715515137, 0.011669119834899903, 0.011708800315856933, 0.011791904449462891, 0.01176419162750244, 0.011712767601013184, 0.011861760139465332, 0.011771327972412109, 0.012417119979858398, 0.011782560348510742, 0.01177190399169922, 0.01170803165435791, 0.011800095558166504, 0.011776063919067383, 0.0122193603515625, 0.011763584136962891, 0.011827168464660644, 0.011656895637512207, 0.011798239707946778, 0.011768768310546875, 0.01173020839691162, 0.011515872001647949, 
0.011809887886047364, 0.011819552421569825, 0.01174675178527832, 0.011785183906555176, 0.01170803165435791, 0.011792960166931153, 0.011832223892211915, 0.011756223678588867, 0.011774175643920899, 0.011780096054077148, 0.011745216369628907, 0.01179859161376953, 0.011786239624023438, 0.011877504348754883, 0.011801471710205078, 0.011791808128356934, 0.0118154878616333, 0.011782143592834473, 0.011726335525512695, 0.011724927902221679, 0.011810976028442383, 0.012069087982177734, 0.011845536231994629, 0.012156288146972656, 0.011815103530883789, 0.011768159866333007, 0.011760064125061035, 0.012062015533447266, 0.014284383773803712, 0.014474080085754395, 0.011890912055969238, 0.011830047607421875, 0.011787615776062011, 0.011781791687011718, 0.011917311668395996, 0.011876319885253906, 0.01180419158935547, 0.011799039840698243, 0.011801823616027832, 0.011780896186828614, 0.011816608428955077, 0.011762016296386719, 0.011806719779968262, 0.011763968467712403, 0.011817855834960938, 0.011864959716796875, 0.011884544372558594, 0.01184563159942627, 0.011821056365966797, 0.01181606388092041, 0.011792927742004395, 0.011778400421142579, 0.012017472267150879, 0.011778559684753418, 0.011953632354736328, 0.011702464103698731, 0.011870240211486817, 0.01174118423461914, 0.01171776008605957, 0.011803392410278321, 0.01179251194000244, 0.011752479553222657, 0.011699199676513672, 0.012502304077148437, 0.01198755168914795, 0.011993023872375488, 0.011959967613220214, 0.011872544288635253, 0.011720895767211914, 0.012263615608215333, 0.011837247848510743, 0.011736960411071777, 0.011739263534545899, 0.011737088203430175, 0.011780096054077148, 0.011735199928283691, 0.011744319915771484, 0.011825119972229004, 0.011773823738098144, 0.01171894359588623, 0.011838239669799805, 0.011722816467285156, 0.011711359977722168, 0.01169651222229004, 0.011704575538635255, 0.011819328308105469, 0.011675392150878906, 0.012048704147338867, 0.011758560180664063, 0.012645343780517578, 0.012312704086303711, 0.011773504257202149, 0.011798848152160644, 0.011693984031677247, 0.011759552001953125, 0.011773823738098144, 0.011772543907165527, 0.011787679672241211, 0.011805184364318847, 0.011830400466918946, 0.011850655555725098, 0.011806431770324708, 0.011870207786560059, 0.011812735557556152, 0.0118023681640625, 0.011821279525756836, 0.01183350372314453, 0.011808128356933593, 0.011741600036621093, 0.011735039710998535, 0.011751487731933593, 0.011786751747131348, 0.011730591773986816, 0.01172480010986328, 0.011759615898132325, 0.011735039710998535, 0.011773344039916991, 0.011720447540283204, 0.011688799858093261, 0.011689984321594238, 0.011766112327575684, 0.01180022430419922, 0.011699935913085938, 0.011713983535766602, 0.011713215827941895, 0.01185689640045166, 0.011847583770751954, 0.011772000312805175, 0.01222822380065918, 0.011972448348999024, 0.011792256355285644, 0.011976896286010742, 0.01177238368988037, 0.011881759643554687, 0.011708895683288574, 0.011708800315856933, 0.011716480255126954, 0.011703935623168946, 0.011692319869995117, 0.011718751907348633, 0.011833344459533691, 0.011737407684326172, 0.011693311691284179, 0.011694656372070313, 0.0123919038772583, 0.01179689598083496, 0.011786080360412598, 0.011678912162780762, 0.011848671913146973, 0.011824511528015136, 0.012209888458251952, 0.011690752029418945, 0.011737248420715333, 0.011776255607604981, 0.011698975563049316, 0.011704447746276856, 0.01174614429473877, 0.011738207817077637, 0.011792799949645997, 0.011674079895019531, 0.01173904037475586, 0.011675488471984864, 
0.011689567565917968, 0.011696831703186035, 0.011695743560791016, 0.011686271667480468, 0.011706111907958984, 0.01171497631072998, 0.011728256225585938, 0.011732768058776856, 0.01173369598388672, 0.011876383781433105, 0.011737248420715333, 0.011762816429138184, 0.011737088203430175, 0.011750080108642579, 0.011708415985107423, 0.01174937629699707, 0.011888992309570313, 0.011781248092651367, 0.011743776321411133, 0.011681792259216308, 0.012163071632385255, 0.011707712173461914, 0.011758272171020507, 0.011905311584472656, 0.011755200386047363, 0.011759488105773926, 0.011481087684631347, 0.011738592147827149, 0.011670304298400878, 0.011755264282226563, 0.01194598388671875, 0.011714400291442871, 0.01178873634338379, 0.011683487892150879, 0.011759552001953125, 0.011701663970947266, 0.011746335983276367, 0.01173084831237793, 0.01171168041229248, 0.011737695693969727, 0.011735199928283691, 0.01178166389465332, 0.011753791809082032, 0.011806719779968262, 0.012015263557434082, 0.011938143730163574, 0.01176576042175293, 0.011743295669555663, 0.011829183578491212, 0.01174732780456543, 0.011902560234069824, 0.011798944473266602, 0.011685919761657716, 0.011720671653747558, 0.011765952110290528, 0.011742912292480468, 0.011720319747924805, 0.011825663566589355, 0.011783455848693848, 0.011751839637756348, 0.011742527961730958, 0.011756544113159179, 0.01193513584136963, 0.011862591743469238, 0.012335136413574218, 0.011716608047485352, 0.011712512016296387, 0.01175759983062744, 0.011735136032104493, 0.011956095695495605, 0.011766783714294434, 0.012035072326660156, 0.011794783592224122, 0.011828991889953613, 0.011727999687194825, 0.011741984367370605, 0.011783455848693848, 0.011735008239746094, 0.012165887832641602, 0.01172480010986328, 0.01174118423461914, 0.01174732780456543, 0.011747648239135742, 0.011701536178588868, 0.011682208061218263, 0.01163702392578125, 0.011783807754516602, 0.011688032150268556, 0.011720864295959473, 0.011479040145874024, 0.011779935836791992, 0.01176915168762207, 0.011860832214355469, 0.011876352310180664, 0.01178831958770752, 0.011931615829467774, 0.011892288208007813, 0.01189833641052246, 0.011815903663635254, 0.011831647872924804, 0.01175107192993164, 0.011763584136962891, 0.01172435188293457, 0.01176643180847168, 0.011865471839904785, 0.011763680458068847, 0.011672127723693847, 0.011747455596923828, 0.011679936408996582, 0.011799551963806153, 0.0117193603515625, 0.011696127891540528, 0.011747072219848633, 0.011743328094482423, 0.011790335655212402, 0.01172054386138916, 0.011684288024902343, 0.011743103981018067, 0.011714271545410156, 0.011728351593017577, 0.011686400413513183, 0.01170259189605713, 0.011713760375976563, 0.011675968170166015, 0.011717087745666504, 0.011773823738098144, 0.011765888214111328, 0.01176576042175293, 0.01178831958770752, 0.01176908779144287, 0.011966560363769531, 0.011791168212890625, 0.01188806438446045, 0.011852191925048829, 0.011853792190551759, 0.011718111991882324, 0.011753312110900879, 0.011757887840270997, 0.01179081630706787, 0.011718496322631836, 0.01192147159576416, 0.011728896141052245, 0.011699968338012696, 0.01175107192993164, 0.011731488227844238, 0.012335359573364258, 0.01197424030303955, 0.011746944427490235, 0.011801183700561524, 0.011784192085266113, 0.011884544372558594, 0.01174454402923584, 0.011593728065490723, 0.011819007873535157, 0.011816512107849121, 0.01235427188873291, 0.011898048400878906, 0.011741727828979491, 0.011907072067260742, 0.011794431686401367, 0.0119072322845459, 0.011779232025146484, 0.011833184242248536, 
0.011751392364501952, 0.011744128227233887, 0.011751423835754395, 0.011929599761962891, 0.011880448341369629, 0.011843584060668945, 0.011774111747741699, 0.011834464073181152, 0.011739904403686524, 0.011796575546264648, 0.011747424125671386, 0.011746879577636718, 0.01179081630706787, 0.0119334716796875, 0.011866111755371094, 0.011823103904724122, 0.012031488418579102, 0.011915776252746582, 0.01203974437713623, 0.01211404800415039, 0.012006976127624512, 0.011911935806274414, 0.011851776123046874, 0.011801823616027832, 0.011769824028015137, 0.011758432388305665, 0.011767807960510255, 0.011810784339904784, 0.011881600379943848, 0.011791232109069825, 0.011750592231750488, 0.011772128105163574, 0.011753376007080079, 0.011829855918884278, 0.011761759757995606, 0.01176371192932129, 0.011818495750427246, 0.011837344169616699, 0.011730624198913573, 0.01177280044555664, 0.011689727783203124, 0.011739711761474609, 0.011792096138000488, 0.011773088455200195, 0.011862943649291992, 0.011927488327026368, 0.011977984428405761, 0.011830016136169434, 0.011839232444763183, 0.011854432106018066, 0.012213791847229004, 0.011828448295593262, 0.011601408004760743, 0.011742719650268555, 0.011856351852416992, 0.011825663566589355, 0.011890720367431641, 0.011816864013671874, 0.011863360404968262, 0.012388895988464356, 0.011954400062561035, 0.011876352310180664, 0.011808799743652345, 0.011755871772766113, 0.01176540756225586, 0.01223465633392334, 0.012281184196472167, 0.01201638412475586, 0.011869919776916505, 0.01182249641418457, 0.01180678367614746, 0.011845536231994629, 0.011856800079345703, 0.01234124755859375, 0.011800479888916016, 0.011869407653808594, 0.011885120391845702, 0.011900768280029297, 0.011817440032958985, 0.012000351905822755, 0.011885727882385254, 0.011810111999511718, 0.011742783546447753, 0.01181379222869873, 0.011868127822875977, 0.011921664237976075, 0.01173840045928955, 0.011712991714477539, 0.011749407768249513, 0.011739392280578614, 0.01176863956451416, 0.011758496284484863, 0.011802047729492187, 0.011747903823852539, 0.011917311668395996, 0.011928735733032226, 0.011821439743041992, 0.011882975578308105, 0.011872447967529298, 0.012283712387084962, 0.012178751945495605, 0.011889408111572266, 0.011765695571899415, 0.011802176475524902, 0.011724672317504882, 0.011747008323669433, 0.011768704414367675, 0.01198636817932129, 0.011812959671020508, 0.012065247535705566, 0.011790335655212402, 0.011813023567199706, 0.011767744064331054, 0.011793343544006348, 0.011757760047912598]",tokens/s,84.48448977678797,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1581.248512,1553.85856,0.0,1168.113664,1154.613248,s,1,8.28330859375,8.28330859375,0.0,8.28330859375,8.28330859375,8.28330859375,8.28330859375,[8.28330859375],,kWh,3.450629597912969e-05,3.799122238522658e-06,1.143945359602383e-05,4.974487181367618e-05,,MB,1716.199424,1805.5168,0.0,1388.314624,1334.065152,s,10,0.846244026184082,0.0846244026184082,0.0008828550123258785,0.08465350341796876,0.08576259841918946,0.08578077049255371,0.08579530815124511,"[0.08575856018066406, 0.08494595336914063, 0.08443280029296875, 0.08579894256591797, 0.08527359771728515, 0.08470320129394532, 0.08265939331054688, 0.08460380554199219, 0.08400563049316406, 0.08406214141845703]",tokens/s,3025.132137763685,kWh,2.676629257007197e-06,2.9518463146079036e-07,1.7804862728729417e-06,4.752300161340929e-06,tokens/kWh,53868651.24440413,MB,1720.397824,1805.5168,0.0,1388.314624,1372.847616,s,10,17.728699096679687,1.7728699096679688,0.0044378917021037105,1.7733442993164061,1.7784985961914064,1.7795244445800782,1.7803451232910157,"[1.78055029296875, 1.7782706298828126, 1.7746197509765624, 1.7721705322265624, 1.771226806640625, 1.76574462890625, 1.77451806640625, 1.774655029296875, 1.770366455078125, 1.766576904296875]",tokens/s,35.535602277664545,kWh,5.165233297715241e-05,5.696926931272492e-06,2.3569645118323966e-05,8.091890502674887e-05,tokens/kWh,778557.2478912619,,s,630,17.726187850952144,0.02813680611262246,0.0004606315348822752,0.02801966381072998,0.028473770141601563,0.028626952648162843,0.03045628454208375,"[0.028313312530517578, 0.02841004753112793, 0.028319456100463866, 0.02804595184326172, 0.028016639709472657, 0.028345983505249025, 0.028479040145874022, 0.028359519958496095, 0.028414047241210938, 0.02827782440185547, 0.02830419158935547, 0.028470687866210938, 0.02891632080078125, 0.030183359146118163, 0.03211475372314453, 0.028286975860595705, 0.028096511840820314, 0.02813542366027832, 0.027885568618774413, 0.027964992523193358, 0.027953216552734375, 0.02807027244567871, 0.027957439422607422, 0.02783148765563965, 0.027910783767700197, 0.027795135498046877, 0.027902080535888673, 0.027936960220336916, 0.027940319061279296, 0.027937599182128906, 0.027724735260009764, 0.0280993595123291, 0.02789344024658203, 0.028303552627563476, 0.028030111312866212, 0.02777801513671875, 0.02811903953552246, 0.027854848861694335, 0.027971584320068358, 0.027807584762573244, 0.027969696044921874, 0.028001407623291015, 0.027999103546142576, 0.02796134376525879, 0.028082176208496092, 0.028004159927368166, 0.02808835220336914, 0.02890768051147461, 0.03016294479370117, 0.029116416931152345, 0.02858121681213379, 0.02839743995666504, 0.02853353691101074, 0.028097631454467774, 0.02789423942565918, 0.027986368179321288, 0.027983871459960938, 0.02811903953552246, 0.028053407669067384, 0.02819081687927246, 0.027970848083496095, 0.028048095703125, 0.028003679275512696, 0.028458560943603516, 0.028324287414550783, 0.02842153549194336, 0.028504671096801756, 0.028631040573120117, 0.028612607955932616, 0.028246015548706056, 0.028325887680053712, 0.028276575088500976, 0.02820515251159668, 0.027983936309814453, 0.02811871910095215, 0.027963712692260743, 0.02796134376525879, 0.02797337532043457, 0.028211456298828125, 0.028247072219848634, 0.028281152725219725, 0.028151968002319335, 0.027968000411987305, 0.028100608825683594, 0.028184576034545897, 0.02815510368347168, 0.027958047866821288, 0.027906047821044923, 0.027950111389160155, 0.02788582420349121, 0.02787401580810547, 
0.028067840576171874, 0.028014272689819337, 0.02813814353942871, 0.028001951217651366, 0.027948959350585938, 0.028074079513549805, 0.028059648513793944, 0.028088319778442384, 0.028045312881469726, 0.028126655578613283, 0.028371519088745117, 0.028624576568603517, 0.028178047180175782, 0.02837071990966797, 0.028363679885864256, 0.027969024658203126, 0.027955711364746092, 0.028024480819702147, 0.028854623794555664, 0.027970592498779298, 0.028052448272705078, 0.02798591995239258, 0.028089792251586913, 0.028600831985473633, 0.02811296081542969, 0.02836070442199707, 0.028033023834228517, 0.028628992080688476, 0.030740480422973632, 0.028633087158203126, 0.028313087463378905, 0.028153472900390625, 0.0282346248626709, 0.02793471908569336, 0.028012544631958007, 0.028023136138916015, 0.028051071166992188, 0.027937152862548827, 0.028035072326660155, 0.02790809631347656, 0.028130943298339844, 0.028010623931884766, 0.02804902458190918, 0.027884159088134765, 0.027877376556396483, 0.02772377586364746, 0.027983871459960938, 0.027889663696289063, 0.027873279571533204, 0.027931840896606445, 0.027870016098022463, 0.02819891166687012, 0.027848640441894532, 0.0280515193939209, 0.02803638458251953, 0.028113439559936525, 0.02812710380554199, 0.028236095428466796, 0.028053247451782226, 0.027945215225219727, 0.02839094352722168, 0.032999809265136716, 0.028099775314331055, 0.028287839889526368, 0.028146783828735353, 0.02823676872253418, 0.027923744201660158, 0.027880159378051758, 0.02813747215270996, 0.028309440612792967, 0.027981536865234375, 0.027901376724243164, 0.027962272644042968, 0.027987775802612306, 0.02792233657836914, 0.027873567581176758, 0.028069536209106447, 0.028628896713256836, 0.028387456893920898, 0.02876038360595703, 0.028483583450317384, 0.028041215896606447, 0.02818252754211426, 0.028057600021362306, 0.028071264266967773, 0.028035743713378906, 0.02830745506286621, 0.028000255584716797, 0.028010496139526365, 0.028100608825683594, 0.02788118362426758, 0.028391712188720702, 0.028106752395629882, 0.02855936050415039, 0.028026687622070313, 0.028084320068359377, 0.028133472442626952, 0.028227584838867188, 0.028503456115722657, 0.02849190330505371, 0.0282587833404541, 0.028495872497558594, 0.028200223922729493, 0.028205024719238282, 0.028608480453491212, 0.029675935745239256, 0.028438911437988282, 0.028289024353027343, 0.028220672607421875, 0.02799078369140625, 0.028241920471191406, 0.028169504165649416, 0.028023296356201172, 0.02799817657470703, 0.028137727737426756, 0.02794643211364746, 0.027803903579711915, 0.027871551513671874, 0.027888959884643554, 0.027971712112426758, 0.027890239715576172, 0.027957151412963867, 0.027840608596801757, 0.02792448043823242, 0.02817024040222168, 0.028276607513427733, 0.028239999771118164, 0.027950496673583985, 0.02789436721801758, 0.027983871459960938, 0.028100608825683594, 0.027862720489501953, 0.02784492874145508, 0.028523584365844727, 0.028123872756958008, 0.028229631423950196, 0.02800662422180176, 0.02795724868774414, 0.02788547134399414, 0.027956640243530274, 0.02788422393798828, 0.027887584686279297, 0.027936800003051758, 0.027971584320068358, 0.028128992080688475, 0.028131616592407226, 0.028051456451416015, 0.028245695114135744, 0.02786131286621094, 0.027834367752075196, 0.027850143432617186, 0.028004959106445314, 0.027821855545043947, 0.028473184585571288, 0.028010879516601563, 0.028108800888061523, 0.028689823150634765, 0.028530271530151367, 0.028136447906494142, 0.028043264389038085, 0.02826192092895508, 0.02818118476867676, 0.028448768615722656, 
0.02800230407714844, 0.027938816070556642, 0.02777209663391113, 0.02818662452697754, 0.028338560104370115, 0.02812294387817383, 0.027957887649536134, 0.02852659225463867, 0.02802070426940918, 0.028159936904907226, 0.02795939254760742, 0.02795315170288086, 0.027930624008178712, 0.028088319778442384, 0.028156959533691406, 0.028103263854980468, 0.028309600830078125, 0.02862009620666504, 0.028402143478393555, 0.02823382377624512, 0.028174463272094726, 0.028102943420410156, 0.027944448471069337, 0.027795967102050782, 0.02795315170288086, 0.027799104690551756, 0.027826623916625978, 0.02785638427734375, 0.02800265693664551, 0.027846624374389648, 0.02817452812194824, 0.028366847991943358, 0.027975231170654296, 0.027888063430786134, 0.027906047821044923, 0.027985504150390625, 0.02781772804260254, 0.027836927413940428, 0.02787958335876465, 0.027752384185791016, 0.027959360122680663, 0.027982847213745117, 0.027972320556640624, 0.028635040283203125, 0.02802649688720703, 0.027878015518188477, 0.028260255813598634, 0.02779097557067871, 0.02787798309326172, 0.028071359634399416, 0.027898431777954102, 0.028442623138427735, 0.02856755256652832, 0.02813705635070801, 0.027861408233642578, 0.02828463935852051, 0.028141855239868164, 0.027938816070556642, 0.030896127700805662, 0.02810041618347168, 0.027961055755615236, 0.028154048919677734, 0.028076032638549804, 0.028667903900146483, 0.02854243278503418, 0.028096511840820314, 0.02801862335205078, 0.02831952095031738, 0.028086240768432618, 0.027908832550048827, 0.028011648178100586, 0.02811939239501953, 0.027896480560302736, 0.027917760848999024, 0.028010944366455078, 0.028211231231689452, 0.02861631965637207, 0.02791811180114746, 0.02793120002746582, 0.0278078727722168, 0.02776851272583008, 0.027656511306762697, 0.02785456085205078, 0.027871519088745116, 0.02816924858093262, 0.027843839645385744, 0.02789756774902344, 0.02786832046508789, 0.027745119094848635, 0.027797088623046876, 0.02774982452392578, 0.028537759780883788, 0.027992128372192383, 0.027878623962402344, 0.027689760208129882, 0.027703296661376952, 0.02778108787536621, 0.027883552551269532, 0.027927616119384765, 0.028518943786621093, 0.028188831329345704, 0.02792838478088379, 0.02796384048461914, 0.028235776901245117, 0.028030975341796875, 0.027836416244506838, 0.027983871459960938, 0.028053407669067384, 0.02780364799499512, 0.02776278305053711, 0.027852767944335936, 0.027997535705566408, 0.028256959915161133, 0.027966623306274415, 0.028037439346313475, 0.02797369575500488, 0.02826288032531738, 0.02810451126098633, 0.028010463714599608, 0.02841212844848633, 0.028274688720703125, 0.02816979217529297, 0.027959648132324218, 0.02797113609313965, 0.028642303466796876, 0.028059776306152345, 0.02798271942138672, 0.027829439163208007, 0.027963775634765625, 0.02799251174926758, 0.027983808517456056, 0.029564319610595705, 0.029889184951782225, 0.028674047470092775, 0.02812518310546875, 0.028184576034545897, 0.028130624771118166, 0.02795939254760742, 0.02789232063293457, 0.027862144470214845, 0.028027263641357422, 0.028006912231445313, 0.027906047821044923, 0.028827648162841796, 0.028081888198852538, 0.028078048706054688, 0.02816441535949707, 0.02793471908569336, 0.02773606491088867, 0.027825855255126954, 0.02840403175354004, 0.02792572784423828, 0.027944864273071288, 0.028081056594848632, 0.028231647491455077, 0.02839756774902344, 0.028554784774780274, 0.028391904830932617, 0.028368576049804688, 0.028559680938720702, 0.028819456100463867, 0.028358367919921874, 0.028211488723754882, 0.028102655410766602, 
0.028049407958984376, 0.02794495964050293, 0.02789936065673828, 0.028056095123291016, 0.028438079833984376, 0.028297311782836915, 0.028028575897216797, 0.02786115264892578, 0.027860639572143554, 0.028106687545776367, 0.027841472625732423, 0.027850143432617186, 0.027912799835205077, 0.027873279571533204, 0.027975263595581053, 0.027877792358398438, 0.027893760681152343, 0.027901952743530273, 0.028069183349609374, 0.02821171188354492, 0.02822777557373047, 0.028188671112060547, 0.028205055236816406, 0.028433727264404296, 0.02834022331237793, 0.028162080764770506, 0.027996768951416017, 0.02798918342590332, 0.028015487670898436, 0.027916160583496094, 0.02786924743652344, 0.027969600677490235, 0.027858943939208985, 0.028022783279418945, 0.02799001693725586, 0.028051040649414063, 0.028108928680419924, 0.028113183975219728, 0.03028540802001953, 0.028450368881225586, 0.02824073600769043, 0.02811494445800781, 0.028018592834472656, 0.02791360092163086, 0.028135616302490233, 0.027944671630859376, 0.027912736892700196, 0.028010784149169923, 0.028270591735839845, 0.028397151947021484, 0.027976095199584963, 0.02795315170288086, 0.028172063827514648, 0.028261856079101564, 0.02825040054321289, 0.028455392837524414, 0.028383232116699218, 0.028585119247436522, 0.028375743865966797, 0.02824003219604492, 0.028022783279418945, 0.02778726387023926, 0.02775449562072754, 0.027856895446777344, 0.027821247100830077, 0.02781011199951172, 0.027877824783325195, 0.02790559959411621, 0.02785536003112793, 0.027975679397583008, 0.027880992889404297, 0.027943391799926758, 0.02805718421936035, 0.02787721633911133, 0.028121023178100585, 0.030526079177856446, 0.029849599838256836, 0.027895391464233397, 0.028506528854370116, 0.027893760681152343, 0.028065792083740236, 0.028092416763305664, 0.027919679641723632, 0.02794361686706543, 0.028067520141601562, 0.02791046333312988, 0.02837779235839844, 0.028508447647094728, 0.028630176544189454, 0.028293983459472656, 0.027963392257690428, 0.027926528930664062, 0.027922399520874025, 0.02797113609313965, 0.02793724822998047, 0.027940736770629884, 0.02774377632141113, 0.027974239349365236, 0.02786083221435547, 0.027994272232055663, 0.027989248275756835, 0.028187135696411132, 0.027985504150390625, 0.027918943405151365, 0.028056671142578125, 0.027895999908447267, 0.03130220794677734, 0.028501888275146485, 0.02829961585998535, 0.02809017562866211, 0.02800217628479004, 0.027939136505126954, 0.02795315170288086, 0.027860992431640624, 0.02777907180786133, 0.02794495964050293, 0.027854848861694335, 0.027962623596191408, 0.03062041664123535, 0.028535839080810546, 0.028297855377197267, 0.028048736572265625, 0.028040191650390626, 0.02842624092102051, 0.028057600021362306, 0.027938079833984376, 0.027936832427978515, 0.027919008255004884, 0.027905664443969726, 0.027936800003051758, 0.027760000228881837, 0.027959583282470703, 0.02769987106323242, 0.02787331199645996, 0.027826175689697266, 0.02779545593261719, 0.027875328063964845, 0.028006399154663086, 0.02783132743835449, 0.02774928092956543, 0.02806380844116211, 0.027891424179077147, 0.027831680297851564, 0.027824832916259767, 0.02771990394592285, 0.027991104125976562, 0.028285503387451172, 0.028004735946655274, 0.02788761520385742, 0.02817638397216797, 0.028067840576171874, 0.027971584320068358, 0.027919456481933592, 0.02795737648010254, 0.02811350440979004, 0.0282890567779541, 0.028604576110839844, 0.028698015213012695, 0.02849033546447754, 0.028075359344482423, 0.028282943725585936, 0.027929183959960937, 0.02796544075012207, 0.027930624008178712, 
0.028098560333251952, 0.0278603515625, 0.02787187194824219, 0.027841856002807617, 0.027855552673339844, 0.027849983215332032, 0.027893983840942382, 0.027865087509155274, 0.02820479965209961, 0.02788166427612305, 0.027957216262817382, 0.02800499153137207, 0.02803023910522461, 0.02807881546020508, 0.027963392257690428, 0.027858720779418946, 0.027956512451171874, 0.027976640701293944, 0.028059648513793944, 0.027883007049560548, 0.027820383071899414, 0.02791235160827637, 0.02780134391784668, 0.028010751724243162, 0.027967487335205078, 0.02810176086425781, 0.02796224021911621, 0.02814899253845215, 0.028229408264160157, 0.028248159408569336, 0.028451711654663085, 0.028471296310424804, 0.028098560333251952, 0.027996160507202147, 0.027967487335205078, 0.02798521614074707, 0.0279354248046875, 0.027964704513549803, 0.02787196731567383, 0.02789580726623535, 0.02813481521606445, 0.027963232040405274, 0.0279149112701416, 0.028047456741333007, 0.028237823486328126, 0.02787721633911133, 0.027914335250854492, 0.027938880920410157]",tokens/s,35.54063655971918,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,842.510336,545.128448,0.0,159.383552,141.514752,s,1,7.57496044921875,7.57496044921875,0.0,7.57496044921875,7.57496044921875,7.57496044921875,7.57496044921875,[7.57496044921875],,kWh,1.2108504829166122e-05,1.328401506566294e-06,3.7916697000484056e-06,1.722857603578082e-05,,MB,1292.464128,639.500288,0.0,222.298112,184.525824,s,20,0.1824712963104248,0.009123564815521238,0.000667296504583729,0.008931951999664307,0.009126998519897463,0.010006673192977907,0.011498243207931516,"[0.009040160179138184, 0.00892751979827881, 0.00886303997039795, 0.008888895988464356, 0.008936384201049804, 0.008852704048156738, 0.00889459228515625, 0.00892191982269287, 0.009908543586730958, 0.00896560001373291, 0.008953120231628418, 0.008945568084716797, 0.008908576011657714, 0.008847455978393554, 0.008885536193847656, 0.008912223815917969, 0.008961888313293457, 0.008971487998962403, 0.009014944076538085, 0.011871135711669922]",tokens/s,28059.20768650499,kWh,2.615339991898063e-07,2.8842573377113347e-08,1.477253770412616e-07,4.381019496081813e-07,tokens/kWh,584338874.1569283,MB,1331.13856,639.500288,0.0,222.298112,184.528384,s,20,9.997774597167968,0.4998887298583984,0.0033748104490906755,0.49986500549316404,0.5048492492675781,0.5050133590698243,0.5063004745483398,"[0.4987699279785156, 0.4992510986328125, 0.4955430603027344, 0.5019569396972656, 0.4964801940917969, 0.5018320617675781, 0.499589111328125, 0.5066222534179687, 0.5027232360839844, 0.5048404235839844, 0.4985836486816406, 0.5001408996582031, 0.49613778686523435, 0.5015815734863281, 0.49656143188476565, 0.5007901306152344, 0.5010433044433594, 0.4972351989746094, 0.5049286804199219, 
0.4931636352539063]",tokens/s,126.02804631711896,kWh,1.423421721887062e-05,1.5697975882382622e-06,5.23796676219416e-06,2.1041981569303042e-05,tokens/kWh,2994014.598506594,,s,1260,9.988504226684558,0.007927384306892516,0.000295142498228238,0.00787120008468628,0.00798737907409668,0.008081066846847534,0.009875584077835089,"[0.007825151920318603, 0.00818611240386963, 0.007853184223175049, 0.007829599857330323, 0.007992095947265625, 0.007859712123870849, 0.007854591846466065, 0.00798531198501587, 0.007853407859802247, 0.00787286376953125, 0.0078537278175354, 0.007854015827178954, 0.007851808071136475, 0.007852447986602782, 0.007885183811187744, 0.007819263935089112, 0.008054816246032714, 0.008167360305786133, 0.007895071983337403, 0.007858463764190674, 0.00808521556854248, 0.0079967999458313, 0.007961567878723144, 0.00786191987991333, 0.007834815979003906, 0.007821983814239502, 0.007934175968170165, 0.00793183994293213, 0.007817024230957032, 0.007841599941253661, 0.008001919746398925, 0.007995007991790771, 0.007960383892059325, 0.008036479949951172, 0.00785587215423584, 0.007867008209228516, 0.007853119850158692, 0.00784496021270752, 0.007830687999725342, 0.007851840019226074, 0.00799225616455078, 0.00786620807647705, 0.008018048286437988, 0.007825439929962158, 0.008214271545410157, 0.008056672096252441, 0.007870687961578368, 0.007932096004486085, 0.007823296070098877, 0.00786236810684204, 0.007830976009368896, 0.007868319988250732, 0.007847968101501466, 0.007879263877868652, 0.007895040035247802, 0.007946239948272706, 0.007829504013061523, 0.007852352142333985, 0.008114015579223633, 0.007851808071136475, 0.007859456062316894, 0.007837823867797852, 0.007860928058624268, 0.007897664070129394, 0.007854080200195313, 0.007866464138031005, 0.007905504226684571, 0.00785097599029541, 0.007860415935516358, 0.007860991954803467, 0.007879871845245362, 0.007870880126953125, 0.007848127841949463, 0.007942143917083741, 0.007869984149932861, 0.007845344066619873, 0.007848063945770263, 0.0078406081199646, 0.007858208179473877, 0.007839712142944336, 0.007970464229583741, 0.00783132791519165, 0.007846432209014893, 0.007841119766235352, 0.007911968231201171, 0.00782758378982544, 0.007874335765838623, 0.007805215835571289, 0.007906432151794434, 0.007797376155853271, 0.007829855918884278, 0.007819168090820313, 0.007823359966278077, 0.007792128086090088, 0.007801343917846679, 0.007817215919494629, 0.007794591903686523, 0.007800415992736816, 0.007831776142120361, 0.007822656154632568, 0.007843999862670899, 0.007826240062713622, 0.007814911842346192, 0.008003680229187012, 0.00783785581588745, 0.00783190393447876, 0.010155712127685547, 0.0096278076171875, 0.00787017583847046, 0.007887231826782226, 0.00784335994720459, 0.007921855926513672, 0.007815167903900147, 0.007847936153411865, 0.007818624019622803, 0.007808864116668701, 0.007852831840515136, 0.00788908815383911, 0.007853888034820556, 0.0078438401222229, 0.007924736022949219, 0.007822656154632568, 0.007823040008544922, 0.007880000114440918, 0.007846399784088135, 0.00782316780090332, 0.007747583866119385, 0.00782307195663452, 0.007863615989685059, 0.007830016136169434, 0.007823840141296386, 0.007829599857330323, 0.007850048065185546, 0.00785097599029541, 0.007852191925048829, 0.00784281587600708, 0.007834368228912354, 0.00782588815689087, 0.00785811185836792, 0.007846496105194091, 0.007849631786346435, 0.007852352142333985, 0.007866559982299804, 0.007898687839508057, 0.007957856178283691, 0.007894207954406738, 0.007980480194091796, 0.007870368003845215, 
0.00788479995727539, 0.00788483190536499, 0.007938399791717529, 0.008019968032836914, 0.007865375995635986, 0.007860608100891113, 0.007893663883209228, 0.007861343860626221, 0.007853280067443848, 0.007952000141143799, 0.007946335792541503, 0.007874207973480225, 0.007831007957458495, 0.007928224086761474, 0.007840127944946288, 0.00783785581588745, 0.00783519983291626, 0.007825503826141358, 0.00784607982635498, 0.007841792106628418, 0.007874527931213378, 0.007810751914978028, 0.007847296237945557, 0.007990240097045898, 0.007822432041168214, 0.007826496124267578, 0.007819104194641113, 0.007829599857330323, 0.007810976028442383, 0.007820672035217285, 0.007837823867797852, 0.007852128028869629, 0.007866655826568604, 0.00778275203704834, 0.007892767906188964, 0.007815264225006103, 0.007810976028442383, 0.00782153606414795, 0.007817344188690185, 0.007833568096160889, 0.007838880062103272, 0.007727168083190918, 0.007818719863891602, 0.0078111357688903805, 0.00781769609451294, 0.007802144050598144, 0.007799488067626953, 0.007821343898773193, 0.007806975841522217, 0.0078022398948669434, 0.007998079776763915, 0.007832799911499023, 0.007821856021881103, 0.00783132791519165, 0.007810751914978028, 0.007902112007141113, 0.007857439994812011, 0.007807328224182129, 0.007876512050628661, 0.007849567890167237, 0.00781388807296753, 0.007913792133331298, 0.007841472148895263, 0.007851168155670166, 0.007844704151153565, 0.00791315221786499, 0.00796063995361328, 0.0079585599899292, 0.008615839958190917, 0.008218527793884278, 0.007907328128814697, 0.008019583702087402, 0.007930848121643066, 0.007927616119384766, 0.007835231781005859, 0.00785481595993042, 0.007876287937164307, 0.007892032146453858, 0.007822239875793457, 0.007817247867584228, 0.007811071872711181, 0.007828512191772462, 0.007904223918914795, 0.007858176231384278, 0.008033344268798828, 0.007940927982330322, 0.010236031532287597, 0.00979593563079834, 0.00792745590209961, 0.00790118408203125, 0.007858176231384278, 0.007899136066436767, 0.007894432067871094, 0.007880703926086426, 0.00799190378189087, 0.007827455997467042, 0.007964128017425538, 0.00803276824951172, 0.00785206413269043, 0.007906752109527588, 0.007848000049591065, 0.007846367835998536, 0.00786464023590088, 0.007984831809997558, 0.007757855892181397, 0.00782092809677124, 0.007877280235290527, 0.007887040138244629, 0.00786521577835083, 0.007869696140289307, 0.007861951828002929, 0.007841824054718017, 0.007854047775268555, 0.007869791984558106, 0.00788313579559326, 0.007840032100677491, 0.007853759765625, 0.007847424030303956, 0.007844672203063965, 0.007816224098205567, 0.007819647789001466, 0.007872960090637208, 0.00802182388305664, 0.007901535987854005, 0.007910719871520996, 0.007885503768920898, 0.007972095966339111, 0.007887360095977783, 0.007880928039550782, 0.007890719890594483, 0.007915775775909423, 0.00800153636932373, 0.007930975914001465, 0.007911583900451661, 0.007879424095153808, 0.007903232097625732, 0.007859392166137695, 0.007879487991333008, 0.007870431900024413, 0.007924767971038818, 0.007860447883605957, 0.007857183933258056, 0.007823008060455322, 0.007849567890167237, 0.007872928142547607, 0.007839839935302734, 0.007822720050811768, 0.007858816146850587, 0.007858176231384278, 0.007815392017364501, 0.007801631927490235, 0.007828832149505615, 0.007825056076049804, 0.007894783973693848, 0.007890336036682129, 0.00795244789123535, 0.007934016227722168, 0.0078787841796875, 0.0079651198387146, 0.007896671772003174, 0.007819104194641113, 0.007876800060272218, 0.007831456184387207, 
0.007844511985778808, 0.007873824119567871, 0.007873216152191161, 0.007870463848114014, 0.007811232089996338, 0.007865824222564698, 0.007848512172698975, 0.007845695972442626, 0.007879936218261718, 0.007867136001586915, 0.007849599838256836, 0.00787507200241089, 0.007905151844024658, 0.00787660789489746, 0.007831424236297608, 0.007839231967926026, 0.007845632076263428, 0.007941279888153076, 0.007868127822875976, 0.007864543914794922, 0.007851808071136475, 0.00786636781692505, 0.00799129581451416, 0.007839360237121583, 0.007877344131469727, 0.007851679801940918, 0.007870495796203613, 0.007827455997467042, 0.007882751941680909, 0.008021984100341797, 0.007851712226867676, 0.007854623794555664, 0.007908927917480468, 0.00786464023590088, 0.007880415916442871, 0.0078788480758667, 0.00787660789489746, 0.007838719844818116, 0.00797388792037964, 0.007932928085327149, 0.007853055953979492, 0.007845888137817383, 0.007855648040771485, 0.007859712123870849, 0.007971807956695557, 0.007888256072998047, 0.007928256034851075, 0.007864511966705323, 0.007890944004058837, 0.008033760070800781, 0.007879199981689454, 0.01023977565765381, 0.009910719871520996, 0.008012736320495605, 0.007969632148742675, 0.007940095901489258, 0.007888607978820801, 0.007987167835235596, 0.007858496189117431, 0.007960927963256837, 0.007902880191802979, 0.007878367900848388, 0.007814559936523437, 0.007883647918701171, 0.007903232097625732, 0.007925759792327881, 0.00787001609802246, 0.007718495845794678, 0.0078504319190979, 0.007904640197753906, 0.007889952182769775, 0.007955455780029297, 0.007852767944335938, 0.007886975765228272, 0.00783564805984497, 0.00787779188156128, 0.007828383922576905, 0.007864352226257323, 0.00787011194229126, 0.007878943920135498, 0.007839104175567628, 0.007850016117095947, 0.007888864040374756, 0.008030912399291993, 0.007939455986022949, 0.007954368114471436, 0.007943039894104003, 0.00802995204925537, 0.008081279754638671, 0.00788320016860962, 0.007831232070922851, 0.007892032146453858, 0.008297472000122071, 0.007940095901489258, 0.007966047763824462, 0.007964704036712646, 0.007958655834197997, 0.007919456005096435, 0.007877344131469727, 0.007897024154663085, 0.007984992027282714, 0.007864223957061768, 0.008299103736877441, 0.007943840026855468, 0.007913663864135742, 0.007884607791900635, 0.008081055641174317, 0.007926112174987792, 0.008067423820495606, 0.00792739200592041, 0.007893055915832519, 0.00787660789489746, 0.007871967792510986, 0.007856991767883301, 0.007886528015136719, 0.007931519985198974, 0.007886847972869874, 0.007919167995452881, 0.007899231910705566, 0.007961120128631592, 0.00785584020614624, 0.008038592338562012, 0.007845888137817383, 0.007884160041809082, 0.007895967960357665, 0.007927552223205566, 0.00789743995666504, 0.007874815940856934, 0.00788649606704712, 0.007950335979461669, 0.007884640216827393, 0.007866528034210205, 0.007919072151184082, 0.007872384071350097, 0.007868927955627441, 0.007880864143371582, 0.007868063926696778, 0.007870495796203613, 0.007858496189117431, 0.007896543979644776, 0.007879487991333008, 0.007890143871307372, 0.008079360008239746, 0.007889344215393066, 0.007884160041809082, 0.008039072036743164, 0.007897151947021484, 0.007860191822052002, 0.00786246395111084, 0.007883808135986329, 0.007875167846679687, 0.007858367919921875, 0.007933504104614257, 0.008185664176940917, 0.0078886399269104, 0.007842688083648682, 0.00836524772644043, 0.009064543724060058, 0.008942303657531738, 0.00800972843170166, 0.008728416442871093, 0.007954592227935792, 
0.007868415832519531, 0.007864480018615722, 0.007828351974487304, 0.007858560085296631, 0.007818975925445556, 0.007877503871917725, 0.007860223770141601, 0.007849440097808838, 0.007938591957092286, 0.007847936153411865, 0.007868415832519531, 0.007835040092468262, 0.007866975784301757, 0.007868415832519531, 0.008038399696350097, 0.007912767887115478, 0.008248000144958497, 0.010194527626037597, 0.009531935691833497, 0.00802188777923584, 0.007942143917083741, 0.008069087982177734, 0.007892032146453858, 0.007877600193023682, 0.007873760223388673, 0.007879456043243409, 0.007859807968139648, 0.007864448070526123, 0.007861631870269776, 0.008199071884155274, 0.008239423751831055, 0.009756544113159179, 0.007941376209259034, 0.007963776111602783, 0.007914912223815919, 0.00789743995666504, 0.007901472091674805, 0.007917280197143555, 0.007949920177459716, 0.007917984008789063, 0.007924767971038818, 0.007868703842163085, 0.008049599647521973, 0.008654591560363769, 0.008204607963562012, 0.007941696166992187, 0.007950463771820068, 0.00793017578125, 0.008072159767150878, 0.007877344131469727, 0.007899392127990722, 0.007874176025390625, 0.008403264045715331, 0.008076160430908204, 0.007942272186279296, 0.007881216049194336, 0.007888256072998047, 0.007895552158355712, 0.007926208019256592, 0.007886847972869874, 0.007942143917083741, 0.007890944004058837, 0.007952383995056152, 0.007855552196502686, 0.00790335988998413, 0.007877056121826171, 0.007921664237976075, 0.007909183979034423, 0.00791980791091919, 0.007915520191192627, 0.007976960182189942, 0.00790118408203125, 0.0078788161277771, 0.007908448219299317, 0.007930047988891601, 0.007893407821655273, 0.007894432067871094, 0.00792793607711792, 0.007938240051269531, 0.007976607799530029, 0.007889503955841065, 0.007874752044677734, 0.007883967876434327, 0.00788972806930542, 0.007885119915008544, 0.007883615970611572, 0.007873151779174805, 0.007935743808746338, 0.007993824005126953, 0.007938047885894776, 0.00787052822113037, 0.007901152133941651, 0.007909120082855225, 0.00791923189163208, 0.007811071872711181, 0.007912831783294678, 0.0078887038230896, 0.00786959981918335, 0.00787011194229126, 0.007882751941680909, 0.007917759895324707, 0.007908192157745362, 0.007890111923217773, 0.007979839801788331, 0.007881824016571046, 0.007947519779205322, 0.007872511863708496, 0.007881343841552734, 0.007880576133728027, 0.007855360031127929, 0.007900095939636231, 0.007841728210449218, 0.007831552028656007, 0.007870240211486816, 0.007895040035247802, 0.007905663967132569, 0.007903071880340576, 0.008275967597961426, 0.008984576225280762, 0.00884335994720459, 0.007929215908050537, 0.007914015769958496, 0.00792406415939331, 0.00787017583847046, 0.007888832092285157, 0.00787663984298706, 0.007837664127349854, 0.00789299201965332, 0.007859839916229247, 0.007880191802978515, 0.00789798402786255, 0.007886847972869874, 0.007893216133117676, 0.00787225580215454, 0.007856160163879395, 0.007868415832519531, 0.007890175819396973, 0.007913631916046143, 0.007920224189758301, 0.00789299201965332, 0.007856160163879395, 0.008115967750549317, 0.007920928001403808, 0.007844192028045654, 0.01036729621887207, 0.009851167678833008, 0.007939839839935302, 0.007901440143585205, 0.008069024085998536, 0.00790447998046875, 0.007887743949890136, 0.007920735836029053, 0.007877120018005371, 0.007893407821655273, 0.00787769603729248, 0.00798960018157959, 0.007903840065002441, 0.007834432125091552, 0.007875967979431152, 0.007905407905578614, 0.007845759868621826, 0.007869056224822998, 0.007864319801330566, 
0.007911808013916015, 0.007905951976776123, 0.007957471847534179, 0.007977312088012695, 0.007974559783935548, 0.00799948787689209, 0.007983295917510987, 0.007912831783294678, 0.008075712203979492, 0.00824841594696045, 0.007958528041839599, 0.007928736209869384, 0.008321056365966797, 0.007930111885070801, 0.007922592163085938, 0.007906239986419678, 0.007933216094970703, 0.007884928226470947, 0.00793228816986084, 0.0078787841796875, 0.007860000133514405, 0.007898687839508057, 0.008012096405029296, 0.007897408008575439, 0.007880288124084473, 0.007839327812194824, 0.007917920112609863, 0.007884960174560546, 0.007894815921783448, 0.007891488075256348, 0.007901472091674805, 0.007884511947631836, 0.007869631767272948, 0.007866623878479004, 0.007884960174560546, 0.007855999946594238, 0.007879263877868652, 0.00788700819015503, 0.007860127925872804, 0.007878528118133546, 0.007880703926086426, 0.007849215984344482, 0.00785916805267334, 0.007872288227081298, 0.007895040035247802, 0.007890175819396973, 0.007883679866790772, 0.007863999843597412, 0.007876768112182617, 0.007863808155059814, 0.007852543830871582, 0.007858176231384278, 0.007829504013061523, 0.007833216190338135, 0.007850368022918702, 0.007833600044250488, 0.00783292818069458, 0.007736671924591065, 0.00781388807296753, 0.007839583873748779, 0.007805088043212891, 0.0078438401222229, 0.007882751941680909, 0.007837215900421142, 0.007815199851989746, 0.00781766414642334, 0.007833856105804443, 0.007826591968536377, 0.00785264015197754, 0.007847936153411865, 0.007870463848114014, 0.007856128215789794, 0.007808000087738037, 0.007817728042602539, 0.007831424236297608, 0.007840384006500244, 0.007833600044250488, 0.007849984169006348, 0.007833600044250488, 0.007813119888305664, 0.007811423778533936, 0.007790239810943603, 0.007880703926086426, 0.007844064235687256, 0.007820960044860839, 0.00782867193222046, 0.007841792106628418, 0.007848864078521728, 0.007826720237731934, 0.0078050241470336915, 0.00784819221496582, 0.007882207870483398, 0.00785913610458374, 0.007882751941680909, 0.007847040176391602, 0.007852928161621094, 0.00787660789489746, 0.007882751941680909, 0.007905280113220215, 0.007898560047149658, 0.007862847805023193, 0.007983104228973388, 0.00789299201965332, 0.007853919982910157, 0.007892511844635009, 0.007893695831298828, 0.008023903846740723, 0.007859327793121339, 0.007889887809753419, 0.01022976016998291, 0.01007539176940918, 0.00794432020187378, 0.007938303947448731, 0.007869887828826904, 0.007895199775695801, 0.00788585615158081, 0.007931712150573731, 0.007869919776916505, 0.007864543914794922, 0.007864575862884521, 0.0077844481468200685, 0.007884607791900635, 0.007878975868225098, 0.00790451192855835, 0.00790006399154663, 0.00789247989654541, 0.00788262414932251, 0.007928160190582276, 0.00800153636932373, 0.007924831867218018, 0.007873439788818359, 0.007902239799499512, 0.007986144065856933, 0.007896671772003174, 0.007942560195922852, 0.007931519985198974, 0.007940608024597168, 0.007952032089233398, 0.007923679828643799, 0.007868095874786377, 0.007858911991119384, 0.007867807865142823, 0.00788320016860962, 0.007857312202453613, 0.007852287769317626, 0.007826015949249268, 0.007888895988464355, 0.007902495861053467, 0.007865056037902832, 0.007822751998901367, 0.007854752063751221, 0.007857279777526855, 0.007860640048980712, 0.007854112148284912, 0.007868800163269043, 0.007841792106628418, 0.00783945608139038, 0.007813407897949218, 0.007886528015136719, 0.007856031894683839, 0.007889311790466308, 0.007890944004058837, 
0.007833600044250488, 0.007815167903900147, 0.007837728023529052, 0.0078438720703125, 0.007863296031951903, 0.007825407981872558, 0.00780998420715332, 0.007808000087738037, 0.007832575798034667, 0.007823359966278077, 0.007833343982696533, 0.007838111877441406, 0.0078048319816589355, 0.007838848114013672, 0.007813504219055176, 0.007830080032348632, 0.007865888118743896, 0.007832096099853516, 0.007905087947845458, 0.007845888137817383, 0.007845344066619873, 0.007774400234222412, 0.007837503910064698, 0.007829247951507568, 0.00786252784729004, 0.007833216190338135, 0.00781379222869873, 0.007853536128997803, 0.007891456127166747, 0.007837440013885498, 0.007845952033996581, 0.007810175895690918, 0.007846720218658447, 0.007839744091033935, 0.007857215881347657, 0.007830463886260986, 0.007857247829437256, 0.00786729621887207, 0.008276224136352539, 0.007874303817749024, 0.007900864124298095, 0.007907296180725098, 0.007855807781219483, 0.007903840065002441, 0.0078788161277771, 0.007831711769104004, 0.007879519939422607, 0.00781001615524292, 0.00785811185836792, 0.00782966423034668, 0.007840703964233399, 0.00785260820388794, 0.007845344066619873, 0.007944640159606933, 0.007858016014099122, 0.007835999965667724, 0.00784819221496582, 0.00783075189590454, 0.008823583602905274, 0.007854080200195313, 0.00789299201965332, 0.007860544204711915, 0.007876224040985107, 0.00782751989364624, 0.007811264038085937, 0.007884672164916992, 0.007880288124084473, 0.007883103847503662, 0.00793555212020874, 0.007819712162017822, 0.00784006404876709, 0.007861311912536622, 0.008128543853759766, 0.008026559829711915, 0.007874720096588135, 0.007898655891418457, 0.01019337558746338, 0.009803168296813965, 0.007895648002624512, 0.007886015892028808, 0.007863103866577148, 0.00785203218460083, 0.007842144012451173, 0.007860159873962403, 0.0077478399276733395, 0.007801983833312988, 0.007822207927703858, 0.007800159931182861, 0.0078056640625, 0.007849984169006348, 0.00788095998764038, 0.007816895961761475, 0.007871647834777833, 0.007825920104980469, 0.007789152145385742, 0.008210176467895508, 0.007854080200195313, 0.007854080200195313, 0.0078471999168396, 0.008103808403015137, 0.007856991767883301, 0.007830624103546142, 0.007855008125305176, 0.00839299201965332, 0.007881855964660645, 0.007887455940246582, 0.007997439861297608, 0.007862271785736084, 0.007858176231384278, 0.007845856189727784, 0.00786252784729004, 0.007830880165100097, 0.00782918405532837, 0.008097567558288575, 0.007832799911499023, 0.007851647853851318, 0.007861663818359375, 0.007932640075683594, 0.007859263896942139, 0.007845056056976319, 0.007841536045074463, 0.007874048233032227, 0.007907839775085449, 0.007880191802978515, 0.007816927909851074, 0.007819551944732666, 0.007848000049591065, 0.007811423778533936, 0.007852159976959228, 0.007856095790863037, 0.007859839916229247, 0.007933695793151856, 0.007869247913360595, 0.007819071769714355, 0.007841792106628418, 0.007848063945770263, 0.007839327812194824, 0.007837984085083008, 0.007857791900634766, 0.007856863975524903, 0.007826784133911132, 0.007854400157928466, 0.007892608165740967, 0.00788643217086792, 0.007856607913970947, 0.007886240005493164, 0.007875040054321289, 0.007767744064331054, 0.007908736228942871, 0.007838335990905762, 0.007847392082214355, 0.007821280002593994, 0.007842368125915527, 0.007837024211883545, 0.007853824138641358, 0.007840223789215088, 0.007831295967102051, 0.007831456184387207, 0.00782857608795166, 0.007798463821411133, 0.0078438081741333, 0.007837503910064698, 0.007889120101928711, 
0.007856128215789794, 0.007854176044464112, 0.007806528091430664, 0.007866464138031005, 0.00783180809020996, 0.007888895988464355, 0.007870399951934814, 0.007882495880126954, 0.007824927806854247, 0.007829599857330323, 0.00784659194946289, 0.007837696075439453, 0.007972864151000977, 0.007858176231384278, 0.007841792106628418, 0.007919616222381591, 0.00784768009185791, 0.007819295883178711, 0.007825632095336914, 0.007853536128997803, 0.007833695888519288, 0.007868671894073486, 0.0078788480758667, 0.007880703926086426, 0.008048640251159669, 0.007830880165100097, 0.007895711898803712, 0.007838751792907715, 0.007931007862091064, 0.007892831802368164, 0.00785155200958252, 0.00782377576828003, 0.007851679801940918, 0.007975327968597411, 0.007868415832519531, 0.007849984169006348, 0.007822656154632568, 0.007877312183380126, 0.007813119888305664, 0.007974783897399902, 0.007857312202453613, 0.010275199890136718, 0.009954208374023438, 0.008227680206298828, 0.007952832221984863, 0.007930272102355957, 0.007944223880767822, 0.007850304126739501, 0.007868735790252686, 0.007863359928131104, 0.007912511825561524, 0.007863167762756348, 0.007913631916046143, 0.007870368003845215, 0.007928736209869384, 0.007899136066436767, 0.007918816089630127, 0.008241120338439942, 0.008096735954284668, 0.007986464023590089, 0.008376864433288575, 0.007989280223846436, 0.007992703914642334, 0.007959167957305908, 0.007925759792327881, 0.007908895969390869, 0.007909855842590331, 0.009314240455627442, 0.007935296058654786, 0.007940159797668458, 0.007918272018432617, 0.007901440143585205, 0.00790127992630005, 0.007998239994049073, 0.007909535884857178, 0.007917664051055907, 0.007917247772216796, 0.007973152160644531, 0.007863999843597412, 0.007897664070129394, 0.007877120018005371, 0.007947807788848876, 0.007856512069702148, 0.007860223770141601, 0.00788044786453247, 0.00786895990371704, 0.00783465576171875, 0.007858719825744629, 0.007882688045501709, 0.007983327865600586, 0.00785203218460083, 0.00790556812286377, 0.007857888221740723, 0.007923711776733398, 0.007872767925262451, 0.007874176025390625, 0.007932032108306885, 0.007890944004058837, 0.007868671894073486, 0.007849855899810791, 0.007878528118133546, 0.008339455604553223, 0.007929120063781738, 0.007881440162658692, 0.007944543838500977, 0.007871520042419434, 0.007850240230560303, 0.00784764814376831, 0.007850143909454346, 0.007855648040771485, 0.00785587215423584, 0.008077312469482421, 0.00782144021987915, 0.007924736022949219, 0.007917503833770751, 0.007826528072357179, 0.007890783786773681, 0.007833439826965332, 0.007862016201019287, 0.007868832111358642, 0.007882751941680909, 0.007860032081604004, 0.00784607982635498, 0.007903232097625732, 0.007926112174987792, 0.007919360160827636, 0.007892896175384521, 0.007825695991516113, 0.007902751922607422, 0.007896800041198731, 0.00790780782699585, 0.007894368171691894, 0.007883264064788818, 0.007832064151763915, 0.00786191987991333, 0.007894559860229492, 0.007864799976348877, 0.007929632186889649, 0.007829919815063476, 0.007937856197357178, 0.007860544204711915, 0.00782483196258545, 0.007842048168182373, 0.008032416343688965, 0.007874623775482177, 0.007854207992553711, 0.00788649606704712, 0.00787436819076538, 0.007844031810760498, 0.007854047775268555, 0.007882336139678954, 0.008067520141601562, 0.00786198377609253, 0.007868639945983887, 0.007871935844421387, 0.007842432022094727, 0.007823520183563233, 0.008042336463928223, 0.007878623962402343, 0.007854015827178954, 0.00790127992630005, 0.007849215984344482, 
0.007864480018615722, 0.007870368003845215, 0.007827680110931396, 0.007856607913970947, 0.007864223957061768, 0.007876768112182617, 0.007906784057617187, 0.007900703907012939, 0.007918432235717773, 0.007951871871948242, 0.007866847991943359, 0.007853568077087402, 0.00786252784729004, 0.00789308786392212, 0.007882912158966065, 0.007829504013061523, 0.007821311950683594, 0.007896704196929932, 0.008020352363586426, 0.007864319801330566, 0.00801587200164795, 0.008354944229125976, 0.008144831657409668, 0.008018879890441895, 0.00791929578781128, 0.007876927852630616, 0.00806227207183838, 0.008178112030029296, 0.007923744201660156, 0.008026335716247559, 0.007882751941680909, 0.007888351917266846, 0.007864192008972168, 0.007901631832122802, 0.007878079891204834, 0.007921631813049317, 0.007884928226470947, 0.00785593605041504, 0.007830527782440186, 0.007921535968780518, 0.007866079807281495, 0.007899456024169922, 0.007904223918914795, 0.007889920234680176, 0.00785203218460083, 0.007870463848114014, 0.007863391876220703, 0.007893919944763184, 0.007940095901489258, 0.007870463848114014, 0.007878655910491944, 0.00790499210357666, 0.007960480213165283, 0.007876575946807862, 0.007908991813659668, 0.00787331199645996, 0.0078787841796875, 0.007870336055755616, 0.00790118408203125, 0.007902912139892578, 0.007866687774658203, 0.007888895988464355, 0.007882751941680909, 0.007884096145629883, 0.00787936019897461, 0.007918719768524169, 0.007900032043457031, 0.007903232097625732, 0.007879712104797364, 0.007866591930389404, 0.00809660816192627, 0.007933728218078613, 0.011203743934631348, 0.010277983665466308, 0.007805344104766846, 0.008003904342651367, 0.007831039905548096, 0.0078046398162841795, 0.007998144149780274, 0.007841023921966552, 0.007817408084869385, 0.00781715202331543, 0.007811967849731445, 0.0077985281944274905, 0.0078438401222229, 0.007827455997467042, 0.007891071796417235, 0.007847807884216308, 0.007891136169433594, 0.007767871856689453, 0.007839744091033935, 0.007796544075012207, 0.007814591884613037, 0.00778223991394043, 0.007797696113586426, 0.007828991889953613, 0.007791071891784668, 0.007755775928497314, 0.007792640209197998, 0.007776383876800537, 0.007804448127746582, 0.007805280208587647, 0.007786719799041748, 0.007833248138427734, 0.007828959941864014, 0.007793312072753906, 0.007837696075439453, 0.007823359966278077, 0.0078089599609375, 0.007821375846862794, 0.007802879810333252, 0.007753344058990479, 0.007770495891571045, 0.007831200122833252, 0.007862688064575196, 0.007798719882965088, 0.007818848133087157, 0.0077983360290527345, 0.007797599792480469, 0.007962143898010254, 0.00780950403213501, 0.007815135955810546, 0.00782467222213745, 0.007772799968719483, 0.007751808166503906, 0.007792640209197998, 0.007766016006469726, 0.007814720153808593, 0.007825151920318603, 0.007840447902679444, 0.00779699182510376, 0.007753824234008789, 0.007782048225402832, 0.007766016006469726, 0.0077547521591186525, 0.007807168006896972, 0.008055071830749511]",tokens/s,126.14501344794695,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL 
CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7423.266816,7815.954432,0.0,7430.209536,7414.23104,s,1,11.6047802734375,11.6047802734375,0.0,11.6047802734375,11.6047802734375,11.6047802734375,11.6047802734375,[11.6047802734375],,kWh,0.00013077375914167532,1.4418056964855545e-05,4.412586863400403e-05,0.00018931768474053488,,MB,1760.354304,8593.997824,0.0,8176.795648,8052.041728,s,10,7.3686964721679695,0.7368696472167968,0.0019756480240788895,0.7373280639648437,0.7388389099121094,0.7392782806396484,0.7396297772216797,"[0.7323112182617187, 0.7352952270507812, 0.73593408203125, 0.7361524047851562, 0.7371892700195313, 0.7374668579101562, 0.7379158325195313, 0.73797265625, 0.7387412719726563, 0.7397176513671875]",tokens/s,347.4155856017793,kWh,2.152595891874777e-05,2.3739338720859877e-06,1.4267134429570996e-05,3.816702722040476e-05,tokens/kWh,6707360.21754238,MB,1765.261312,8740.798464,0.0,8323.596288,8263.496192,s,10,33.961278808593754,3.3961278808593747,0.007534583679404035,3.3959044189453125,3.4054197265624997,3.406699853515625,3.407723955078125,"[3.3848232421875, 3.385638427734375, 3.389796875, 3.39316650390625, 3.396112060546875, 3.39569677734375, 3.402529541015625, 3.400400146484375, 3.40797998046875, 3.40513525390625]",tokens/s,18.550538204132096,kWh,9.953443886999896e-05,1.0979204256288664e-05,6.622345774062772e-05,0.00017673710086691535,tokens/kWh,356461.65797095187,,s,630,33.957517089843705,0.05390082077752977,0.0007816434642938544,0.053862159729003906,0.05464696807861328,0.05494703903198242,0.056723680801391604,"[0.05721033477783203, 0.05300265502929687, 0.053209182739257815, 0.053507232666015626, 0.053567649841308594, 0.052662975311279295, 0.053172222137451174, 0.05317776107788086, 0.05308681488037109, 0.05300835037231445, 0.05319270324707031, 0.05337459182739258, 0.05378499221801758, 0.05351164627075195, 0.052986175537109374, 0.05329328155517578, 0.05340768051147461, 0.053752960205078124, 0.0540843505859375, 0.05385030364990234, 0.05337702560424805, 0.05315379333496094, 0.054231040954589846, 0.05345894241333008, 0.053550655364990235, 0.05318041610717773, 0.05306617736816406, 0.053460990905761716, 0.05360969543457031, 0.05347817611694336, 0.053751808166503906, 0.053628929138183595, 0.053610496520996094, 0.05303097534179688, 0.053508033752441404, 0.05382144165039063, 0.05451891326904297, 0.0542663345336914, 0.054276512145996096, 0.054042625427246097, 0.05385622406005859, 0.053849281311035155, 0.05413763046264648, 0.053682239532470706, 0.053473281860351565, 0.05329510498046875, 0.053788257598876954, 0.053827999114990234, 0.05368755340576172, 0.05425961685180664, 0.053819839477539065, 0.05384339141845703, 0.05366636657714844, 0.05415568161010742, 0.05442969512939453, 0.05416457748413086, 0.054438655853271484, 0.054257823944091794, 0.05386604690551758, 0.05404512023925781, 0.054458366394042966, 0.05384563064575195, 0.053782913208007814, 0.05671420669555664, 0.05364691162109375, 0.053047550201416015, 0.05245561599731445, 0.05267232131958008, 0.05296047973632813, 0.05302780914306641, 0.0523548469543457, 0.05330556869506836, 0.0529879035949707, 0.05290911865234375, 0.053015487670898434, 0.053114879608154295, 0.05411423873901367, 0.05364115142822266, 0.05327417755126953, 0.05376467132568359, 0.05415897750854492, 0.05430720138549805, 0.054007808685302736, 0.05418393707275391, 0.05348483276367187, 0.053691104888916014, 0.05305734252929688, 
0.05312326431274414, 0.05267251205444336, 0.05342755126953125, 0.0532322883605957, 0.0531572151184082, 0.05333059310913086, 0.05320908737182617, 0.05358182525634766, 0.05362483215332031, 0.05480195236206055, 0.05386288070678711, 0.05395455932617187, 0.05412044906616211, 0.05402828979492187, 0.05421875381469726, 0.05455366516113281, 0.054064064025878905, 0.05393203353881836, 0.0535470085144043, 0.05379993438720703, 0.05342924880981445, 0.05332099151611328, 0.053180225372314455, 0.05366057586669922, 0.053558273315429686, 0.05372979354858398, 0.054663681030273435, 0.054152416229248046, 0.053765953063964846, 0.05426275253295899, 0.05453004837036133, 0.05452790451049805, 0.05409308624267578, 0.054164287567138675, 0.054854881286621096, 0.054199073791503904, 0.05438003158569336, 0.054452705383300784, 0.054118431091308594, 0.058243072509765625, 0.05363916778564453, 0.05291212844848633, 0.05265568161010742, 0.05270163345336914, 0.053309280395507815, 0.05319241714477539, 0.052765121459960936, 0.05291622543334961, 0.05296294403076172, 0.05266828918457031, 0.052644351959228515, 0.05368393707275391, 0.05412857437133789, 0.05369686508178711, 0.053348350524902347, 0.053821342468261715, 0.054454078674316404, 0.054339870452880856, 0.054196224212646485, 0.053626880645751954, 0.05360844802856445, 0.053415679931640626, 0.05348284912109375, 0.05317055892944336, 0.05297001647949219, 0.053411838531494144, 0.05335039901733398, 0.05315379333496094, 0.05382963180541992, 0.05375795364379883, 0.05346303939819336, 0.054090911865234376, 0.054152030944824216, 0.053975040435791016, 0.05406438446044922, 0.05435263824462891, 0.05408563232421875, 0.05414297485351562, 0.0538614387512207, 0.05435203170776367, 0.053918720245361325, 0.053231391906738285, 0.053612545013427736, 0.05336201477050781, 0.05394499206542969, 0.0535203857421875, 0.05389926528930664, 0.05338521575927734, 0.05423513412475586, 0.054523902893066405, 0.05428790283203125, 0.054368736267089844, 0.05430886459350586, 0.0544826545715332, 0.05440284729003906, 0.054389183044433596, 0.054437118530273436, 0.05434246444702148, 0.054046718597412106, 0.05396480178833008, 0.054073345184326174, 0.05397708892822266, 0.056656639099121095, 0.05345817565917969, 0.05276911926269531, 0.05264355087280274, 0.052512542724609375, 0.05302364730834961, 0.052760574340820314, 0.052891647338867184, 0.05327667236328125, 0.052908031463623044, 0.05274214553833008, 0.05298761749267578, 0.05661724853515625, 0.053921791076660154, 0.053790367126464844, 0.05366723251342773, 0.05412345504760742, 0.05475241470336914, 0.05494160079956055, 0.05356844711303711, 0.053613918304443356, 0.05334902572631836, 0.053288959503173826, 0.05294079971313476, 0.05322518539428711, 0.05302096176147461, 0.053155136108398435, 0.052738750457763675, 0.05406105422973633, 0.05403443145751953, 0.05392339324951172, 0.05376454544067383, 0.05382511901855469, 0.05417184066772461, 0.05439049530029297, 0.054886913299560545, 0.05489868927001953, 0.05451161575317383, 0.053728511810302734, 0.0534818229675293, 0.053123489379882816, 0.054130687713623046, 0.0542023696899414, 0.05350809478759765, 0.05377024078369141, 0.05393612670898437, 0.05378387069702149, 0.05398137664794922, 0.054228641510009765, 0.053972991943359375, 0.05395337677001953, 0.053644577026367185, 0.05441305541992188, 0.05516796875, 0.05453414535522461, 0.05418188858032227, 0.05470579147338867, 0.05375980758666992, 0.05434553527832031, 0.054424320220947266, 0.05397638320922851, 0.054022846221923826, 0.054005760192871094, 0.05623580932617187, 0.05345654296875, 
0.05274057769775391, 0.052705375671386716, 0.052760223388671874, 0.05268310546875, 0.05273395156860351, 0.056325889587402346, 0.05205632019042969, 0.05299609756469727, 0.05359558486938477, 0.053701183319091794, 0.05378617477416992, 0.05394681549072266, 0.05385830307006836, 0.054106113433837894, 0.0540524787902832, 0.054980705261230466, 0.054087841033935546, 0.053397632598876955, 0.05394636917114258, 0.053700607299804685, 0.05346713638305664, 0.052961280822753906, 0.05375590515136719, 0.05338019180297852, 0.05285161590576172, 0.053172222137451174, 0.053552543640136716, 0.05371535873413086, 0.054076896667480466, 0.05467004776000976, 0.05450723266601563, 0.05421043014526367, 0.05460591888427734, 0.054951488494873045, 0.05460044860839844, 0.054220287322998044, 0.05355571365356445, 0.054271488189697265, 0.053914112091064455, 0.05349494552612305, 0.05431177520751953, 0.05400912094116211, 0.05363955307006836, 0.05344700622558594, 0.053741439819335934, 0.05420441436767578, 0.053864574432373045, 0.053857471466064455, 0.05369120025634765, 0.05439184188842774, 0.054354015350341796, 0.05450617599487305, 0.05502790451049805, 0.0543109130859375, 0.054317054748535154, 0.05384396743774414, 0.0541921272277832, 0.05464678573608398, 0.054002910614013674, 0.05390739059448242, 0.05370723342895508, 0.05805881500244141, 0.053542911529541014, 0.052647937774658204, 0.0522825927734375, 0.05272246551513672, 0.05308156967163086, 0.05288195037841797, 0.05310873413085938, 0.053065727233886716, 0.053174270629882815, 0.05311078262329102, 0.05342822265625, 0.05387059020996094, 0.05353676986694336, 0.05408153533935547, 0.05394636917114258, 0.05356339263916016, 0.054541633605957034, 0.054467262268066405, 0.05404876708984375, 0.05329305648803711, 0.0532213134765625, 0.053262401580810546, 0.05346476745605469, 0.053252384185791014, 0.053106719970703126, 0.05336016082763672, 0.0533078727722168, 0.05323680114746094, 0.05454735946655274, 0.053585952758789065, 0.053835777282714846, 0.053983200073242185, 0.05441107177734375, 0.0544150390625, 0.055078784942626954, 0.054895423889160154, 0.054437728881835935, 0.054525951385498046, 0.05346252822875976, 0.05354751968383789, 0.05347532653808594, 0.05434368133544922, 0.053751808166503906, 0.05419772720336914, 0.05383983993530273, 0.05419247817993164, 0.053488929748535155, 0.054180801391601564, 0.054177791595458984, 0.054195777893066406, 0.05459344100952149, 0.05487875366210938, 0.05652070236206055, 0.054163551330566405, 0.05458319854736328, 0.05413040161132812, 0.05423116683959961, 0.053870494842529294, 0.05374521636962891, 0.05358457565307617, 0.05394364929199219, 0.05385692977905274, 0.05623270416259766, 0.05389683151245117, 0.053074302673339846, 0.053098495483398435, 0.05262540817260742, 0.0525513916015625, 0.05351603317260742, 0.05334889602661133, 0.05282611083984375, 0.05293190383911133, 0.05361734390258789, 0.053771839141845704, 0.05361094284057617, 0.05475843048095703, 0.05571180725097656, 0.05327337646484375, 0.05380220794677734, 0.054322078704833986, 0.05391769790649414, 0.053717151641845706, 0.05373116683959961, 0.05332892990112305, 0.053375614166259765, 0.05304716873168945, 0.053174144744873045, 0.0533202896118164, 0.05422214508056641, 0.05394668960571289, 0.053444992065429686, 0.053534015655517575, 0.05386924743652344, 0.054155136108398436, 0.05398233413696289, 0.05543219375610352, 0.05491302490234375, 0.054286334991455076, 0.05440419387817383, 0.0540681266784668, 0.05335859298706055, 0.05392998504638672, 0.05485932922363281, 0.0543256950378418, 0.05381488037109375, 
0.053776256561279295, 0.05378499221801758, 0.053700736999511715, 0.05399552154541016, 0.054214656829833986, 0.05411183929443359, 0.0549031982421875, 0.054648609161376954, 0.0543848648071289, 0.05427814483642578, 0.0567275505065918, 0.05514582443237305, 0.054154911041259766, 0.05384499359130859, 0.05418188858032227, 0.05485081481933594, 0.05426457595825195, 0.054401023864746094, 0.053751327514648437, 0.053900798797607424, 0.05741254425048828, 0.053495807647705076, 0.052795391082763675, 0.05257830429077148, 0.052622814178466794, 0.05306399917602539, 0.05269667053222656, 0.05307455825805664, 0.05318355178833008, 0.05328377532958985, 0.05318355178833008, 0.05383571243286133, 0.05349683380126953, 0.05404876708984375, 0.056164352416992185, 0.05311020660400391, 0.05373142242431641, 0.05449980926513672, 0.054306079864501956, 0.054104705810546876, 0.053719135284423826, 0.053106689453125, 0.05289494323730469, 0.05351830291748047, 0.05326704025268555, 0.05348486328125, 0.05352732849121094, 0.05396806335449219, 0.05391046524047852, 0.054406177520751955, 0.05379734420776367, 0.05433804702758789, 0.054267326354980466, 0.05589241409301758, 0.05422911834716797, 0.054319103240966796, 0.054726367950439454, 0.054851871490478515, 0.054196224212646485, 0.053897216796875, 0.05357785415649414, 0.05331526565551758, 0.054524097442626956, 0.053343616485595706, 0.05352886581420899, 0.05402387237548828, 0.0539365119934082, 0.05394432067871094, 0.05427996826171875, 0.0542110710144043, 0.054419456481933595, 0.05395865631103516, 0.05529529571533203, 0.055019390106201174, 0.054434623718261715, 0.05487376022338867, 0.05424585723876953, 0.05392544174194336, 0.05400153732299805, 0.05447724914550781, 0.05430038452148438, 0.05391296005249024, 0.05349407958984375, 0.057062110900878905, 0.05383139038085937, 0.05312492752075195, 0.05309859085083008, 0.053053184509277346, 0.053203582763671875, 0.0531701774597168, 0.05308137512207031, 0.0534958381652832, 0.05328761672973633, 0.05330124664306641, 0.05400700759887695, 0.05359478378295898, 0.05371068954467773, 0.05566239929199219, 0.05357206344604492, 0.053959678649902344, 0.054201343536376956, 0.05407065582275391, 0.05445491027832031, 0.053991424560546876, 0.05409791946411133, 0.05334758377075195, 0.053154560089111326, 0.05331763076782227, 0.053190654754638675, 0.053684223175048826, 0.0534466552734375, 0.0534466552734375, 0.053902721405029295, 0.05396112060546875, 0.05424294281005859, 0.0541352653503418, 0.05592691040039063, 0.05429862213134766, 0.05449932861328125, 0.054492351531982425, 0.054295360565185545, 0.05416531372070312, 0.05456003189086914, 0.05430326461791992, 0.05425395202636719, 0.05437628936767578, 0.05363455963134765, 0.054301345825195316, 0.05395369720458985, 0.054180416107177734, 0.05436444854736328, 0.05442969512939453, 0.0546693115234375, 0.05466038513183594, 0.05452624130249024, 0.05476502227783203, 0.054702144622802734, 0.05461289596557617, 0.05430444717407226, 0.05445868682861328, 0.05454380798339844, 0.05547884750366211, 0.054300670623779294, 0.053644832611083985, 0.05388745498657226, 0.054177791595458984, 0.05861580657958984, 0.05444124984741211, 0.052869342803955076, 0.05299017715454102, 0.05272809600830078, 0.05284569549560547, 0.053187454223632816, 0.05292236709594727, 0.05290393447875977, 0.05472256088256836, 0.053087745666503906, 0.05351001739501953, 0.05382179260253906, 0.0541473617553711, 0.054593536376953126, 0.05369651031494141, 0.05387868881225586, 0.054611328125, 0.054508255004882815, 0.05405286407470703, 0.05357318496704101, 0.05359638214111328, 
0.053571807861328126, 0.05303091049194336, 0.05333606338500976, 0.05332787322998047, 0.05468364715576172, 0.05347532653808594, 0.0535470085144043, 0.0539747200012207, 0.05382758331298828, 0.05370297622680664, 0.054281566619873045, 0.05433747100830078, 0.05465078353881836, 0.05534774398803711, 0.054239295959472654, 0.054632095336914065, 0.05436883163452148, 0.05482291030883789, 0.05412198257446289, 0.05331609725952149, 0.05399087905883789, 0.053698753356933596, 0.053354846954345704, 0.053975040435791016, 0.05427609634399414, 0.05391360092163086, 0.053510143280029294, 0.05427977752685547, 0.05394268798828125, 0.054457569122314455, 0.054851646423339846, 0.054112384796142575, 0.05445475387573242, 0.05547430419921875, 0.05532057571411133, 0.054687744140625, 0.054779552459716795, 0.05409552001953125, 0.053938209533691404, 0.053966686248779296, 0.05380339050292969]",tokens/s,18.552593180860818,,, 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,883.01568,566.099968,0.0,163.577856,152.009216,s,1,7.5829443359375,7.5829443359375,0.0,7.5829443359375,7.5829443359375,7.5829443359375,7.5829443359375,[7.5829443359375],,kWh,1.3643450479124416e-05,1.4977545126297604e-06,3.8000030400109353e-06,1.894120803176511e-05,,MB,1306.075136,618.528768,0.0,201.326592,184.525824,s,24,0.29870195007324224,0.01244591458638509,0.0002598817261230951,0.012394591808319092,0.01244597110748291,0.012474417924880982,0.013405029973983764,"[0.013682111740112305, 0.01245747184753418, 0.012393280029296875, 0.01235750389099121, 0.012378687858581544, 0.012387616157531738, 0.012347423553466796, 0.012362048149108887, 0.012351552009582519, 0.012408608436584473, 0.012410623550415039, 0.012327679634094238, 0.012392160415649413, 0.012363519668579102, 0.012375679969787598, 0.012401727676391602, 0.012416064262390137, 0.012368000030517579, 0.012410143852233886, 0.012415007591247558, 0.012477408409118653, 0.012402591705322265, 0.01241913604736328, 
0.012395903587341309]",tokens/s,20568.998623857267,kWh,4.2151395084180903e-07,4.64854824938354e-08,2.7927631769705314e-07,7.472757510326976e-07,tokens/kWh,342577689.2214431,MB,1345.29024,618.528768,0.0,201.326592,184.528384,s,24,9.824048706054686,0.40933536275227866,0.002341896151791663,0.40882608032226564,0.4114652099609375,0.41381431884765624,0.4163699331665039,"[0.41422308349609377, 0.41149798583984376, 0.40861111450195314, 0.4090404357910156, 0.41701119995117186, 0.40913986206054687, 0.40840960693359374, 0.40775872802734375, 0.407572998046875, 0.40898876953125, 0.4087735595703125, 0.4097675476074219, 0.40657199096679686, 0.40865603637695314, 0.40887860107421875, 0.4110265808105469, 0.4079571533203125, 0.40654592895507813, 0.4094146423339844, 0.4062956237792969, 0.41138873291015626, 0.4097418212890625, 0.40811196899414065, 0.40866473388671876]",tokens/s,153.90803173320333,kWh,1.1708453195657936e-05,1.291251518732506e-06,4.551349769021084e-06,1.7551054483411525e-05,tokens/kWh,3589527.914664318,,s,1512,9.81288854789734,0.006490005653371257,0.0001575204779432784,0.006465407848358154,0.006551609754562379,0.006620875263214111,0.006983601450920105,"[0.008212479591369629, 0.008652671813964844, 0.007383200168609619, 0.006709216117858886, 0.006612703800201416, 0.006555935859680176, 0.006750207901000976, 0.006494207859039307, 0.006618912220001221, 0.006439136028289795, 0.006454559803009033, 0.0064683198928833004, 0.006461440086364746, 0.006461184024810791, 0.006447296142578125, 0.006471744060516357, 0.006449151992797852, 0.0064839677810668945, 0.0064430079460144046, 0.006466976165771484, 0.006509183883666992, 0.006501471996307373, 0.006495071887969971, 0.0064966402053833, 0.006557343959808349, 0.006432767868041992, 0.006450175762176514, 0.00645849609375, 0.006589888095855713, 0.006470047950744629, 0.006441343784332276, 0.006465184211730957, 0.006485151767730713, 0.0064327998161315915, 0.006445888042449951, 0.006534463882446289, 0.006459551811218262, 0.006509088039398194, 0.0064880638122558594, 0.006539487838745117, 0.006453343868255615, 0.006416255950927734, 0.006454976081848145, 0.006436992168426514, 0.006439231872558594, 0.006490880012512207, 0.006469727993011475, 0.006532000064849853, 0.0064757118225097655, 0.006487552165985107, 0.006464000225067139, 0.006430975914001465, 0.006458367824554443, 0.006486783981323243, 0.006467167854309082, 0.006545216083526611, 0.0064065918922424315, 0.006442624092102051, 0.006568480014801026, 0.006456480026245117, 0.006562655925750733, 0.006505887985229492, 0.006503071784973145, 0.006160448074340821, 0.006478367805480957, 0.006459519863128662, 0.006498303890228272, 0.006498303890228272, 0.006540480136871338, 0.006467616081237793, 0.0066936640739440915, 0.006469664096832276, 0.006631008148193359, 0.00645692777633667, 0.006470047950744629, 0.006488351821899414, 0.006486112117767334, 0.006453248023986816, 0.006668352127075196, 0.006461311817169189, 0.006512703895568847, 0.006441215991973877, 0.006491903781890869, 0.006512639999389648, 0.006513984203338623, 0.006535871982574463, 0.006465536117553711, 0.006436863899230957, 0.006457344055175781, 0.006404096126556396, 0.006440959930419922, 0.006395040035247802, 0.006429535865783691, 0.006512224197387695, 0.006443424224853516, 0.006524511814117432, 0.00646998405456543, 0.006493535995483399, 0.0065133762359619145, 0.006543360233306885, 0.0064430079460144046, 0.006457344055175781, 0.006415616035461426, 0.006514431953430176, 0.006447296142578125, 0.006409023761749268, 0.006506336212158203, 0.0063983678817749025, 
0.006414207935333252, 0.006441184043884277, 0.006423999786376953, 0.006483935832977295, 0.0064308161735534665, 0.006465695858001709, 0.006459136009216309, 0.006723968029022217, 0.00653875207901001, 0.006515327930450439, 0.008492799758911132, 0.006957248210906982, 0.0065532798767089845, 0.0065289921760559085, 0.006461440086364746, 0.006464799880981445, 0.006709504127502442, 0.00646230411529541, 0.0063836159706115725, 0.006496255874633789, 0.006873087882995605, 0.0064412798881530765, 0.006481535911560058, 0.006428736209869385, 0.0064345598220825195, 0.0065270719528198245, 0.0065270400047302245, 0.006473343849182129, 0.006443488121032715, 0.006436863899230957, 0.006570335865020752, 0.0064440641403198246, 0.006485727787017823, 0.006469823837280273, 0.006515423774719239, 0.006556992053985596, 0.0064644160270690915, 0.006786752223968506, 0.006525023937225342, 0.006444960117340088, 0.006458975791931152, 0.006494719982147217, 0.00644323205947876, 0.006430496215820312, 0.006467264175415039, 0.006433087825775147, 0.006467391967773437, 0.006418015956878662, 0.006460319995880127, 0.00649286413192749, 0.006498559951782227, 0.006558720111846924, 0.006408095836639404, 0.006452991962432862, 0.006420576095581055, 0.0064555201530456545, 0.006502175807952881, 0.006437151908874512, 0.006491871833801269, 0.0064832959175109865, 0.006431647777557373, 0.006426368236541748, 0.006467584133148193, 0.006461440086364746, 0.006467807769775391, 0.00643782377243042, 0.006560160160064697, 0.006427296161651611, 0.006403520107269287, 0.006435167789459228, 0.006485472202301026, 0.006402592182159424, 0.006450463771820068, 0.006443615913391113, 0.00643836784362793, 0.006408959865570068, 0.0064057598114013676, 0.006549791812896728, 0.006491360187530518, 0.006464288234710693, 0.006581727981567383, 0.006190432071685791, 0.006470719814300537, 0.0064160962104797365, 0.006464896202087403, 0.006441952228546143, 0.006689472198486328, 0.00647654390335083, 0.006469855785369873, 0.006506527900695801, 0.006453216075897217, 0.006450623989105225, 0.006477983951568603, 0.00656828784942627, 0.006484032154083252, 0.006508287906646729, 0.006547391891479492, 0.006437183856964112, 0.006463647842407227, 0.0064338879585266115, 0.0065253438949584965, 0.0064720001220703125, 0.006440671920776367, 0.006488319873809815, 0.006481728076934814, 0.006441215991973877, 0.006487423896789551, 0.006480512142181397, 0.006492447853088379, 0.006542751789093018, 0.006467775821685791, 0.0065047359466552735, 0.006506336212158203, 0.006463840007781983, 0.006734975814819336, 0.006611167907714844, 0.006467999935150147, 0.00648796796798706, 0.006451551914215088, 0.0064644479751586915, 0.006716127872467041, 0.006457344055175781, 0.006523007869720459, 0.006453120231628418, 0.006465888023376465, 0.006501567840576172, 0.006446656227111817, 0.006506400108337402, 0.006464511871337891, 0.006412288188934326, 0.006432735919952392, 0.006401088237762451, 0.006406496047973633, 0.006412928104400635, 0.006406144142150879, 0.006422080039978027, 0.006430784225463867, 0.006459775924682617, 0.006476928234100341, 0.006457695960998535, 0.006490335941314697, 0.006983935832977295, 0.006430079936981201, 0.006458047866821289, 0.006152224063873291, 0.0064254717826843265, 0.006484064102172851, 0.006422080039978027, 0.006424863815307617, 0.006454880237579346, 0.006421247959136963, 0.006446623802185058, 0.006439104080200196, 0.006464672088623047, 0.006574944019317627, 0.006434144020080567, 0.0064802241325378415, 0.006469247817993164, 0.00644371223449707, 0.006455296039581298, 0.00698089599609375, 
0.006456255912780762, 0.006536992073059082, 0.006472896099090577, 0.006447616100311279, 0.006449408054351806, 0.0064609599113464354, 0.006443552017211914, 0.00642252779006958, 0.006558752059936524, 0.006452191829681397, 0.006444767951965332, 0.006475584030151367, 0.00656006383895874, 0.006481344223022461, 0.006552159786224365, 0.0064429759979248045, 0.00644326400756836, 0.0064347200393676755, 0.008510848045349122, 0.008253408432006835, 0.007582367897033692, 0.007795807838439942, 0.007108799934387207, 0.0071257281303405764, 0.00685478401184082, 0.006784895896911621, 0.0064778561592102055, 0.006463456153869629, 0.006498176097869873, 0.006433248043060303, 0.00651632022857666, 0.006450592041015625, 0.006531487941741943, 0.006533664226531982, 0.0064263038635253905, 0.006585792064666748, 0.006761248111724853, 0.006454080104827881, 0.0065812478065490725, 0.00658841609954834, 0.006481919765472412, 0.00646281623840332, 0.006420991897583008, 0.0064512319564819336, 0.006461503982543945, 0.006453311920166015, 0.006361855983734131, 0.006512479782104492, 0.006428607940673828, 0.006612959861755371, 0.006510623931884766, 0.006434815883636475, 0.006628608226776123, 0.006883615970611572, 0.006543839931488037, 0.0065413122177124024, 0.006643328189849853, 0.006455615997314453, 0.006555712223052979, 0.006438303947448731, 0.0064661440849304195, 0.006446688175201416, 0.006410975933074951, 0.006477215766906738, 0.006419104099273681, 0.006442016124725342, 0.006442912101745605, 0.0064513921737670895, 0.006643968105316162, 0.006449408054351806, 0.006465536117553711, 0.00646668815612793, 0.006439807891845703, 0.006456992149353027, 0.006473343849182129, 0.006459968090057373, 0.006471839904785157, 0.006445407867431641, 0.006465184211730957, 0.006471680164337158, 0.006449215888977051, 0.006473440170288086, 0.006508800029754638, 0.006465824127197266, 0.006454976081848145, 0.006416384220123291, 0.006432127952575683, 0.006465375900268555, 0.0064085121154785155, 0.006444863796234131, 0.006435359954833984, 0.006443136215209961, 0.006456895828247071, 0.0064354238510131835, 0.006429632186889648, 0.0064275522232055664, 0.006459231853485107, 0.0064802560806274416, 0.006434432029724121, 0.0064382081031799316, 0.006461984157562256, 0.006479519844055176, 0.0064849920272827145, 0.006565536022186279, 0.006643712043762207, 0.0067686400413513184, 0.006460832118988037, 0.006490719795227051, 0.006551551818847656, 0.006137792110443115, 0.006648575782775879, 0.006512415885925293, 0.006448991775512695, 0.0065784001350402835, 0.006472288131713867, 0.006444640159606933, 0.006600255966186523, 0.006427296161651611, 0.006446720123291016, 0.006589920043945313, 0.006431072235107422, 0.006475552082061768, 0.006458208084106445, 0.00646281623840332, 0.006478432178497314, 0.006460800170898438, 0.006466239929199219, 0.0064941120147705075, 0.006453279972076416, 0.006502240180969238, 0.006442431926727295, 0.006672128200531006, 0.0064704642295837405, 0.006456768035888672, 0.006539519786834717, 0.0064560317993164066, 0.006416128158569336, 0.006479936122894287, 0.006418367862701416, 0.00644220781326294, 0.006470719814300537, 0.0064737281799316405, 0.006484000205993652, 0.006433951854705811, 0.006459936141967774, 0.006459392070770263, 0.006453504085540771, 0.006449920177459717, 0.006468512058258056, 0.006512224197387695, 0.006465407848358154, 0.006455615997314453, 0.006474336147308349, 0.006495967864990234, 0.006452864170074463, 0.006479680061340332, 0.006414912223815918, 0.006463424205780029, 0.006487167835235595, 0.0064419198036193845, 
0.0064696002006530764, 0.006474912166595459, 0.006447872161865234, 0.006483712196350098, 0.0064960322380065915, 0.006496831893920898, 0.0065718722343444825, 0.00647321605682373, 0.006472383975982666, 0.006506175994873047, 0.0065008320808410645, 0.006473887920379639, 0.006193215847015381, 0.006645760059356689, 0.006459328174591064, 0.006464896202087403, 0.006631904125213623, 0.006582719802856445, 0.00658515214920044, 0.006618080139160157, 0.006483808040618897, 0.00644268798828125, 0.006465727806091308, 0.006512928009033203, 0.006430016040802002, 0.006473504066467285, 0.0064254717826843265, 0.006481503963470459, 0.006471776008605957, 0.0064332160949707035, 0.0064306240081787105, 0.0066212801933288575, 0.006465407848358154, 0.006451295852661133, 0.00643884801864624, 0.006454751968383789, 0.006485824108123779, 0.006431392192840576, 0.00648198413848877, 0.006465727806091308, 0.006485951900482178, 0.00661900806427002, 0.0064447040557861325, 0.006464863777160644, 0.00646889591217041, 0.006425536155700683, 0.006466335773468017, 0.0064471039772033695, 0.006463488101959228, 0.006493567943572998, 0.006459743976593018, 0.006455584049224854, 0.006445024013519287, 0.006441152095794678, 0.006479712009429932, 0.006445055961608887, 0.0064471039772033695, 0.006432767868041992, 0.006428671836853027, 0.006445055961608887, 0.0064102401733398436, 0.006428959846496582, 0.006448863983154297, 0.006402048110961914, 0.006435872077941895, 0.006437856197357178, 0.006425920009613037, 0.0064702401161193844, 0.00638918399810791, 0.006441664218902588, 0.0064163517951965334, 0.006422175884246826, 0.006543360233306885, 0.006420256137847901, 0.00642310380935669, 0.006169568061828614, 0.006449151992797852, 0.0064440641403198246, 0.006493408203125, 0.006448895931243897, 0.0064795842170715335, 0.006455904006958008, 0.0064440321922302245, 0.00646995210647583, 0.006441343784332276, 0.006494207859039307, 0.0064430079460144046, 0.006453279972076416, 0.006494175910949707, 0.006469024181365967, 0.006468480110168457, 0.006473696231842041, 0.006452991962432862, 0.006470880031585693, 0.006750368118286133, 0.006650496006011963, 0.0065391998291015625, 0.00646560001373291, 0.006495520114898682, 0.006566624164581299, 0.006430335998535156, 0.0064815678596496585, 0.006439551830291748, 0.00645743989944458, 0.006475488185882569, 0.006499680042266845, 0.006470592021942139, 0.006451200008392334, 0.006467584133148193, 0.0064839677810668945, 0.006455296039581298, 0.006432064056396484, 0.006467552185058594, 0.006398496150970459, 0.00644326400756836, 0.006534111976623535, 0.006437856197357178, 0.006454976081848145, 0.006420256137847901, 0.006443391799926758, 0.006396063804626465, 0.006451200008392334, 0.006436031818389892, 0.006409311771392822, 0.006538527965545654, 0.006441023826599121, 0.006447648048400879, 0.006460319995880127, 0.006424767971038818, 0.006439680099487304, 0.006468639850616455, 0.006404831886291504, 0.006467840194702148, 0.006426368236541748, 0.0064596481323242185, 0.006455264091491699, 0.006420127868652344, 0.006440800189971924, 0.006160384178161621, 0.006418432235717773, 0.006450719833374023, 0.006432767868041992, 0.0064448962211608885, 0.006464128017425537, 0.006452576160430908, 0.0064412798881530765, 0.00643123197555542, 0.0064448962211608885, 0.0064421439170837405, 0.006425695896148682, 0.006459136009216309, 0.006492159843444824, 0.00642252779006958, 0.006490464210510254, 0.006454783916473389, 0.0064654722213745115, 0.006461535930633545, 0.0064448962211608885, 0.006665919780731201, 0.0064999361038208005, 0.006461728096008301, 
0.0064867520332336425, 0.006524928092956543, 0.006598656177520752, 0.0065491518974304195, 0.00651475191116333, 0.006469312191009521, 0.006457952022552491, 0.006424543857574463, 0.006482304096221924, 0.0064626879692077635, 0.006484416007995606, 0.006446271896362305, 0.006461440086364746, 0.006486591815948487, 0.006500288009643555, 0.006459360122680664, 0.00651913595199585, 0.006784863948822021, 0.006515071868896484, 0.006483744144439698, 0.0064750399589538574, 0.006521567821502685, 0.006506591796875, 0.006579872131347656, 0.006538559913635254, 0.00651961612701416, 0.006492288112640381, 0.006514304161071778, 0.006455455780029297, 0.006553088188171387, 0.0065133762359619145, 0.006455488204956054, 0.0065038719177246095, 0.006453216075897217, 0.006502816200256348, 0.006567935943603515, 0.006463488101959228, 0.006492288112640381, 0.006528736114501953, 0.006449344158172607, 0.0061990079879760745, 0.0064924159049987796, 0.006493887901306152, 0.006518943786621094, 0.006461599826812744, 0.006477888107299805, 0.006485951900482178, 0.006463136196136475, 0.00646127986907959, 0.00651526403427124, 0.006447328090667725, 0.006489568233489991, 0.006462944030761719, 0.0064848318099975585, 0.006471551895141601, 0.006448671817779541, 0.006480576038360596, 0.006467199802398681, 0.006446464061737061, 0.0065075201988220215, 0.006450208187103271, 0.006434976100921631, 0.0064867520332336425, 0.006453248023986816, 0.006471680164337158, 0.006402304172515869, 0.0064856958389282226, 0.006469696044921875, 0.006426047801971435, 0.006447072029113769, 0.006427072048187256, 0.006463071823120117, 0.006451583862304687, 0.006420159816741943, 0.006465087890625, 0.006482656002044678, 0.0064555201530456545, 0.006491360187530518, 0.006451648235321045, 0.006437215805053711, 0.006479872226715088, 0.006465536117553711, 0.0064403839111328125, 0.006451200008392334, 0.006453023910522461, 0.006750048160552979, 0.006443871974945069, 0.00660694408416748, 0.006586368083953857, 0.006512639999389648, 0.00653872013092041, 0.006587935924530029, 0.006501376152038574, 0.006541567802429199, 0.006493951797485352, 0.006522655963897705, 0.006502624034881592, 0.006495264053344727, 0.006606847763061524, 0.00658515214920044, 0.006441408157348632, 0.006487071990966797, 0.006441408157348632, 0.00617193603515625, 0.0064624958038330075, 0.006480576038360596, 0.006491136074066162, 0.006508543968200684, 0.006446112155914307, 0.006496479988098144, 0.006521599769592285, 0.006461440086364746, 0.006582335948944092, 0.00647382402420044, 0.006467423915863037, 0.006485055923461914, 0.006473887920379639, 0.006484288215637207, 0.006508351802825928, 0.006451839923858642, 0.006504479885101318, 0.006470911979675293, 0.006474495887756348, 0.00653107213973999, 0.0064737281799316405, 0.006482175827026367, 0.006506239891052246, 0.0064609599113464354, 0.006510752201080322, 0.006474048137664795, 0.0064778242111206055, 0.006526527881622314, 0.006451583862304687, 0.0065025601387023926, 0.0064898238182067875, 0.006441343784332276, 0.006466752052307129, 0.00645798397064209, 0.006494207859039307, 0.0064999680519104005, 0.0064802560806274416, 0.006479872226715088, 0.006479872226715088, 0.006467391967773437, 0.006481503963470459, 0.0064661440849304195, 0.006442272186279297, 0.006617824077606201, 0.006475359916687012, 0.006441376209259033, 0.006432703971862793, 0.00646073579788208, 0.006470367908477783, 0.006631552219390869, 0.006739871978759766, 0.0064960322380065915, 0.00643503999710083, 0.0064737281799316405, 0.006458816051483155, 0.006480447769165039, 0.00645030403137207, 
0.006427519798278809, 0.006469632148742676, 0.007045119762420654, 0.00689356803894043, 0.006449151992797852, 0.006181375980377197, 0.006455840110778809, 0.0064633917808532716, 0.006477183818817138, 0.0064354238510131835, 0.006446815967559814, 0.00643833589553833, 0.006443871974945069, 0.006506432056427002, 0.006430079936981201, 0.006443935871124268, 0.006475552082061768, 0.0064834880828857425, 0.006488128185272217, 0.0064325442314147946, 0.006468287944793701, 0.0064858560562133785, 0.006461535930633545, 0.006442751884460449, 0.0064572482109069825, 0.006451648235321045, 0.006526432037353516, 0.006481791973114014, 0.006474271774291992, 0.006484000205993652, 0.006453248023986816, 0.0064674878120422365, 0.006436800003051758, 0.006536672115325928, 0.006431424140930176, 0.006440544128417968, 0.006425280094146729, 0.006428383827209473, 0.006432767868041992, 0.006493216037750244, 0.006451680183410645, 0.0064496641159057615, 0.006465375900268555, 0.0064124479293823245, 0.006449151992797852, 0.0064495038986206055, 0.006426239967346191, 0.006461696147918701, 0.006418015956878662, 0.0064327998161315915, 0.006416543960571289, 0.0064245758056640625, 0.006433919906616211, 0.006463520050048828, 0.006458208084106445, 0.006399775981903076, 0.006401408195495606, 0.00646230411529541, 0.006404096126556396, 0.006403456211090088, 0.006432896137237549, 0.006429183959960938, 0.006469503879547119, 0.006436800003051758, 0.006445248126983643, 0.006479872226715088, 0.006393856048583985, 0.0064650559425354, 0.0061561598777771, 0.006425183773040771, 0.006469632148742676, 0.006456480026245117, 0.0064395198822021485, 0.006471936225891113, 0.006684671878814697, 0.006510464191436768, 0.0067626237869262695, 0.006450463771820068, 0.006505184173583985, 0.0065120959281921385, 0.006477759838104248, 0.006521440029144287, 0.0064839677810668945, 0.006673791885375977, 0.006994559764862061, 0.006486015796661377, 0.006465536117553711, 0.006529024124145508, 0.006455296039581298, 0.00648799991607666, 0.00649232006072998, 0.0064572482109069825, 0.006512639999389648, 0.0064471039772033695, 0.006463488101959228, 0.006497280120849609, 0.006417759895324707, 0.006456992149353027, 0.006471456050872803, 0.006441184043884277, 0.006510784149169922, 0.006532927989959717, 0.006444223880767822, 0.006453248023986816, 0.006441792011260986, 0.00647760009765625, 0.006453152179718018, 0.0064494719505310055, 0.0064737281799316405, 0.006444479942321777, 0.0064620161056518554, 0.006424799919128418, 0.006439839839935303, 0.006464384078979492, 0.006423679828643799, 0.006459455966949463, 0.006444863796234131, 0.006455679893493652, 0.006457759857177735, 0.006428832054138183, 0.006475840091705322, 0.006457344055175781, 0.006442463874816894, 0.00647980785369873, 0.006447328090667725, 0.006451263904571534, 0.006458975791931152, 0.006420767784118652, 0.00644755220413208, 0.00643891191482544, 0.006459392070770263, 0.006207488059997559, 0.006485343933105469, 0.006470304012298584, 0.0069324798583984375, 0.006512639999389648, 0.006539328098297119, 0.006526527881622314, 0.006509183883666992, 0.006662176132202148, 0.006547167778015137, 0.006485311985015869, 0.006603295803070068, 0.00654966402053833, 0.006481919765472412, 0.0065064959526062015, 0.006440735816955566, 0.006450463771820068, 0.006509471893310547, 0.0064306240081787105, 0.006451327800750733, 0.0064471039772033695, 0.006465536117553711, 0.006467167854309082, 0.006438784122467041, 0.00646608018875122, 0.006452928066253662, 0.00659497594833374, 0.006483007907867431, 0.006450016021728516, 0.006475776195526123, 
0.006468992233276367, 0.0064293122291564945, 0.006438591957092285, 0.006430496215820312, 0.006454880237579346, 0.0065253438949584965, 0.006514368057250977, 0.006423232078552246, 0.006457503795623779, 0.0063937602043151855, 0.0064410557746887205, 0.006524928092956543, 0.0064345598220825195, 0.006477503776550293, 0.006472415924072265, 0.006575839996337891, 0.006439040184020996, 0.006409632205963135, 0.006531712055206299, 0.006492127895355224, 0.006452672004699707, 0.006463039875030518, 0.006410848140716552, 0.00646608018875122, 0.006424672126770019, 0.0064325442314147946, 0.006514175891876221, 0.006535391807556152, 0.006449440002441406, 0.006446144104003906, 0.006478911876678467, 0.006497695922851563, 0.006471712112426758, 0.006127711772918701, 0.006459296226501465, 0.006711296081542969, 0.006491903781890869, 0.006610176086425781, 0.006468607902526856, 0.0064898238182067875, 0.007018752098083496, 0.006891647815704346, 0.006543263912200927, 0.006481919765472412, 0.006449151992797852, 0.006488383769989014, 0.006487391948699951, 0.006474080085754394, 0.006524640083312988, 0.006481247901916504, 0.006465727806091308, 0.006485951900482178, 0.006473696231842041, 0.006501215934753418, 0.006469247817993164, 0.006518464088439941, 0.006638271808624268, 0.006442048072814941, 0.006525887966156006, 0.00648422384262085, 0.006459136009216309, 0.006487648010253907, 0.006451615810394287, 0.006493824005126953, 0.0065491838455200195, 0.00679423999786377, 0.006624639987945557, 0.006507008075714111, 0.006475520133972168, 0.0064791679382324215, 0.006506400108337402, 0.006527840137481689, 0.006491487979888916, 0.0064921917915344235, 0.006569888114929199, 0.006534175872802734, 0.006465216159820556, 0.006494207859039307, 0.006508639812469482, 0.006520224094390869, 0.006522560119628906, 0.006491104125976563, 0.0065452480316162106, 0.006525184154510498, 0.006434368133544922, 0.00643609619140625, 0.006529983997344971, 0.0064880638122558594, 0.006440896034240723, 0.006545728206634522, 0.006594304084777832, 0.006500383853912353, 0.006445119857788086, 0.006489727973937988, 0.006494495868682861, 0.006440959930419922, 0.0062156801223754886, 0.006435904026031494, 0.006464352130889892, 0.006493663787841797, 0.006443808078765869, 0.006483808040618897, 0.00648422384262085, 0.006572991847991943, 0.006494143962860108, 0.006531136035919189, 0.006488895893096923, 0.006481919765472412, 0.006447231769561768, 0.006473855972290039, 0.0064624958038330075, 0.006494944095611572, 0.006894752025604248, 0.006503263950347901, 0.006421919822692871, 0.006478432178497314, 0.006732128143310547, 0.0064139838218688966, 0.006467584133148193, 0.006436639785766601, 0.006451136112213135, 0.006453536033630371, 0.006464672088623047, 0.006435679912567139, 0.006402048110961914, 0.006526976108551025, 0.006516799926757812, 0.006454239845275879, 0.0064143681526184085, 0.00645356798171997, 0.006441535949707031, 0.006456607818603515, 0.006437664031982422, 0.006470880031585693, 0.006480192184448242, 0.006416863918304443, 0.006455296039581298, 0.006440959930419922, 0.0064245758056640625, 0.006498079776763916, 0.006436768054962158, 0.0064139838218688966, 0.006414847850799561, 0.006448575973510742, 0.006503136157989502, 0.0064280319213867185, 0.0064447040557861325, 0.006480800151824951, 0.006426112174987793, 0.006468160152435303, 0.006442848205566406, 0.006454783916473389, 0.006435135841369629, 0.00646998405456543, 0.006455296039581298, 0.006402048110961914, 0.00646998405456543, 0.006510047912597656, 0.00643123197555542, 0.006129759788513184, 0.006445792198181153, 
0.0064225602149963375, 0.006459392070770263, 0.006402048110961914, 0.006453248023986816, 0.006438687801361084, 0.006473951816558838, 0.00643071985244751, 0.006466720104217529, 0.006392000198364257, 0.006466400146484375, 0.006479680061340332, 0.0064633917808532716, 0.006426720142364502, 0.006426623821258545, 0.00643891191482544, 0.006418432235717773, 0.006416255950927734, 0.006446688175201416, 0.0065133762359619145, 0.00656387186050415, 0.006473504066467285, 0.006420639991760254, 0.006455008029937744, 0.006426752090454102, 0.006428671836853027, 0.006461440086364746, 0.00640940809249878, 0.006503168106079101, 0.0064266881942749024, 0.0064430079460144046, 0.00644268798828125, 0.006434144020080567, 0.006444128036499024, 0.0063981437683105465, 0.006411968231201172, 0.006456960201263428, 0.006380256175994873, 0.006454944133758545, 0.006426368236541748, 0.006421984195709229, 0.006558495998382569, 0.006411295890808105, 0.006447679996490478, 0.0064349122047424314, 0.00640447998046875, 0.006426559925079346, 0.006409664154052735, 0.006422688007354736, 0.006457344055175781, 0.006408063888549804, 0.0064488320350646975, 0.006421120166778565, 0.00643503999710083, 0.00642252779006958, 0.0065146880149841305, 0.0064429759979248045, 0.006658080101013183, 0.00653926420211792, 0.006651904106140137, 0.006432576179504395, 0.006447391986846924, 0.006188543796539306, 0.006447167873382569, 0.0064552001953125, 0.006484032154083252, 0.006406911849975586, 0.0065064959526062015, 0.006461440086364746, 0.006446688175201416, 0.006518303871154785, 0.006499519824981689, 0.0066126718521118165, 0.006524672031402588, 0.006477215766906738, 0.0065560641288757325, 0.006488351821899414, 0.006464064121246338, 0.006518112182617187, 0.006576223850250244, 0.006453695774078369, 0.006471392154693604, 0.006442368030548096, 0.00644159984588623, 0.00645849609375, 0.006476672172546387, 0.0064737281799316405, 0.006461440086364746, 0.006463200092315674, 0.006480160236358642, 0.006453248023986816, 0.006466879844665528, 0.006474239826202393, 0.006459328174591064, 0.0064596481323242185, 0.006452703952789306, 0.006446815967559814, 0.006452032089233398, 0.00646668815612793, 0.0065502080917358395, 0.0064432001113891605, 0.006467584133148193, 0.006504640102386474, 0.0064982080459594724, 0.006473599910736084, 0.006469664096832276, 0.006502399921417237, 0.0064839677810668945, 0.006557695865631104, 0.006469823837280273, 0.006491968154907227, 0.0064728641510009765, 0.006458208084106445, 0.006465536117553711, 0.006882463932037354, 0.006522047996520996, 0.00649510383605957, 0.006517536163330078, 0.00652288007736206, 0.006663871765136719, 0.006530367851257325, 0.006532159805297852, 0.006614719867706299, 0.006502528190612793, 0.006452832221984863, 0.00615228796005249, 0.006424736022949218, 0.006433728218078613, 0.006436895847320556, 0.0064349441528320315, 0.006406911849975586, 0.006430751800537109, 0.006578144073486328, 0.006393119812011718, 0.0064560317993164066, 0.0064325442314147946, 0.006457568168640137, 0.006479263782501221, 0.006462111949920655, 0.006440896034240723, 0.006446976184844971, 0.006596096038818359, 0.006482240200042725, 0.006424448013305664, 0.006445695877075195, 0.0065168957710266115, 0.00640780782699585, 0.006475071907043457, 0.006424543857574463, 0.006457183837890625, 0.0064275522232055664, 0.006414752006530761, 0.0064651198387146, 0.0064134721755981445, 0.0064412479400634765, 0.0064644160270690915, 0.006405856132507324, 0.006423808097839355, 0.006421184062957764, 0.0064102401733398436, 0.006432320117950439, 0.006449600219726562, 
0.006529024124145508, 0.006452640056610107, 0.006371871948242187, 0.006479104042053222, 0.006437407970428467, 0.006414656162261963, 0.006470816135406494, 0.00643071985244751, 0.006626016139984131, 0.006433055877685547, 0.006438464164733887, 0.00645743989944458, 0.006414495944976807, 0.006453248023986816, 0.006457119941711426, 0.006416607856750489, 0.006453472137451172, 0.006467552185058594, 0.0064254398345947265, 0.006429152011871338, 0.006393631935119629, 0.0064498882293701175, 0.006401919841766358, 0.006415743827819825, 0.006441567897796631, 0.006397535800933838, 0.006620543956756592, 0.008292799949645996, 0.006878880023956299, 0.007286816120147705, 0.006468448162078857, 0.006478879928588867, 0.006460031986236572, 0.006408192157745361, 0.0064851841926574704, 0.0064141759872436525, 0.0064666237831115725, 0.00647708797454834, 0.006437024116516113, 0.00643884801864624, 0.006737887859344482, 0.0064354238510131835, 0.0065433278083801266, 0.0064301438331604, 0.006432320117950439, 0.00673689603805542, 0.006524543762207031, 0.006607295989990235, 0.006517727851867675, 0.00645366382598877, 0.006478400230407715, 0.006453536033630371, 0.006455008029937744, 0.006490111827850342, 0.006457151889801026, 0.006476223945617676, 0.006420224189758301, 0.006461440086364746, 0.006490240097045899, 0.006438271999359131, 0.006484608173370362, 0.006474720001220703, 0.006419104099273681, 0.006424928188323974, 0.0064832959175109865, 0.006517312049865723, 0.006485407829284668, 0.006554207801818848, 0.006481919765472412, 0.0064737281799316405, 0.006416416168212891, 0.006478879928588867, 0.006437600135803223, 0.006440927982330322, 0.006475615978240967, 0.006420447826385498, 0.006446752071380615, 0.006414400100708008, 0.00644374418258667, 0.0064471039772033695, 0.006413919925689697, 0.006508351802825928, 0.00641209602355957, 0.00640451192855835, 0.006563839912414551, 0.006422080039978027, 0.006445888042449951, 0.006451200008392334, 0.006420127868652344, 0.00617574405670166, 0.0065342397689819335, 0.006450047969818115, 0.006513887882232666, 0.006469696044921875, 0.00648799991607666, 0.006535647869110107, 0.006489280223846435, 0.006521632194519043, 0.006527167797088623, 0.006483744144439698, 0.006510752201080322, 0.006520031929016113, 0.006505184173583985, 0.006535232067108154, 0.006520607948303223, 0.006508480072021485, 0.006532544136047363, 0.006492928028106689, 0.006740352153778076, 0.0065268478393554685, 0.006510176181793213, 0.006551616191864014, 0.00656982421875, 0.006510784149169922, 0.006541279792785645, 0.006463679790496826, 0.006493311882019043, 0.006537312030792236, 0.006462175846099854, 0.00652019214630127, 0.006476863861083985, 0.006487679958343506, 0.006496255874633789, 0.006444416046142578, 0.006474368095397949, 0.006528927803039551, 0.006448480129241943, 0.006488831996917724, 0.006544447898864746, 0.0064663681983947756, 0.006477983951568603, 0.006563807964324951, 0.006500351905822754, 0.006502399921417237, 0.006436384201049804, 0.006512928009033203, 0.006479199886322022, 0.0064848318099975585, 0.0065179519653320315, 0.0064460158348083494, 0.006456448078155518, 0.006464255809783936, 0.0064637761116027835, 0.006600351810455323, 0.006487872123718262, 0.006496575832366944, 0.006499872207641601, 0.0064527359008789064, 0.006460319995880127, 0.006458879947662354, 0.0064577279090881345, 0.006463615894317627, 0.006162432193756104, 0.006499584197998047, 0.0065277438163757326, 0.006498303890228272, 0.006516096115112305, 0.006566239833831787, 0.0064453759193420414, 0.006434783935546875, 0.0064204797744750975, 
0.006424255847930908, 0.006437183856964112, 0.006445055961608887, 0.0064551358222961425, 0.006449312210083008, 0.006430367946624756, 0.006459743976593018, 0.006449183940887451, 0.006438464164733887, 0.006490560054779053, 0.006428927898406983, 0.006540448188781738, 0.006433568000793457, 0.006463168144226074, 0.006453375816345214, 0.00642460823059082, 0.006475135803222656, 0.006504767894744873, 0.006629983901977539, 0.006454944133758545, 0.006426623821258545, 0.006462975978851319, 0.006437568187713623, 0.006418240070343017, 0.006470911979675293, 0.006398848056793213, 0.006442848205566406, 0.006584352016448975, 0.006432767868041992, 0.006476992130279541, 0.006456128120422363, 0.006578335762023926, 0.006520671844482422, 0.00644924783706665, 0.0064694080352783205, 0.006484096050262451, 0.006821407794952393, 0.006447135925292969, 0.00661359977722168, 0.006410079956054688, 0.006467103958129883, 0.0064490242004394535, 0.006478432178497314, 0.006481919765472412, 0.006445055961608887, 0.006473663806915283, 0.006449408054351806, 0.006461088180541992, 0.006473887920379639, 0.00642252779006958, 0.006483583927154541, 0.006481728076934814, 0.006464032173156739, 0.006465439796447754, 0.006156608104705811, 0.006568863868713379, 0.006450623989105225, 0.00656115198135376, 0.006491007804870605, 0.00652288007736206, 0.006489920139312744, 0.006457536220550537, 0.0065064640045166015, 0.006485375881195068, 0.006468255996704101, 0.0064650559425354, 0.006482687950134278, 0.00648528003692627, 0.00643936014175415, 0.0064748477935791015, 0.006498496055603028, 0.006421311855316162, 0.006487040042877197, 0.006469855785369873, 0.006490816116333007, 0.006457344055175781, 0.006463615894317627, 0.006501984119415283, 0.006431007862091064, 0.006459392070770263, 0.006488351821899414, 0.006458816051483155, 0.006469855785369873, 0.006512703895568847, 0.006461440086364746, 0.006431968212127686, 0.006541279792785645, 0.006449600219726562, 0.006490399837493896, 0.006453343868255615, 0.0067276802062988285, 0.006498303890228272, 0.006483136177062988, 0.006468416213989258, 0.006467584133148193, 0.006543360233306885, 0.006490111827850342, 0.006469759941101074, 0.006617119789123535, 0.0064774718284606935, 0.0064802560806274416, 0.006493887901306152, 0.006445184230804444, 0.006456352233886718, 0.006484960079193115, 0.0064624958038330075, 0.006468575954437256, 0.006504447937011719, 0.006459392070770263, 0.006463103771209717, 0.0064498882293701175, 0.006494944095611572, 0.0064867520332336425, 0.006458655834197998, 0.006482399940490722, 0.006465248107910156, 0.006451136112213135]",tokens/s,154.08307071050797,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,6757.2736,7532.838912,0.0,7147.094016,7138.9184,s,1,11.426201171875,11.426201171875,0.0,11.426201171875,11.426201171875,11.426201171875,11.426201171875,[11.426201171875],,kWh,0.00012349634467921837,1.3612730477107807e-05,4.130697749002432e-05,0.0001784160526463505,,MB,1654.882304,8243.77344,0.0,7828.668416,7715.649536,s,10,7.301289245605469,0.7301289245605469,0.002393841286251082,0.7300091552734376,0.7328955505371094,0.7339474090576171,0.7347888958740234,"[0.727542236328125, 0.7326618041992188, 0.73098681640625, 0.7299292602539063, 0.7266624145507813, 0.7292348022460937, 0.7300890502929688, 0.734999267578125, 0.73136767578125, 0.72781591796875]",tokens/s,350.6230083325111,kWh,2.1409066343752176e-05,2.361040402919109e-06,1.4251856639571625e-05,3.802196338624291e-05,tokens/kWh,6732950.568581784,MB,1665.941504,8390.57408,0.0,7975.469056,7906.518528,s,10,30.617468261718756,3.0617468261718748,0.0032802348065067884,3.0623096923828124,3.065609033203125,3.065966748046875,3.066252919921875,"[3.056620361328125, 3.057706787109375, 3.057890625, 3.0620283203125, 3.06050927734375, 3.0633779296875, 3.062591064453125, 3.065529541015625, 3.064889892578125, 3.066324462890625]",tokens/s,20.576489036087082,kWh,8.943296857583693e-05,9.864919170125268e-06,5.936259114082965e-05,0.0001586604788867918,tokens/kWh,397074.30887657957,,s,630,30.611468334197973,0.04858963227650476,0.000584649909795008,0.0485461597442627,0.04912568893432617,0.04929344215393066,0.051517730445861824,"[0.05189267349243164, 0.048623615264892575, 0.04799078369140625, 0.048116897583007814, 0.047922016143798825, 0.047833087921142575, 0.04776275253295898, 0.047643264770507815, 0.047672927856445314, 0.047736831665039066, 0.048056800842285155, 0.0481420783996582, 0.048015617370605466, 0.04794927978515625, 0.04800175857543945, 0.04817081451416016, 0.04817919921875, 0.04791910552978516, 0.048508384704589846, 0.048622112274169925, 0.0490250244140625, 0.04891852951049805, 0.048396289825439455, 0.04877897644042969, 0.048089183807373044, 0.048250720977783206, 0.048190879821777347, 0.047965118408203125, 0.04799407958984375, 0.04809807968139648, 0.04816825485229492, 0.04836393737792969, 0.048126239776611325, 0.0481932487487793, 0.04832284927368164, 0.04856217575073242, 0.04841676712036133, 0.04876902389526367, 0.04885094451904297, 0.0488704948425293, 0.04920207977294922, 0.049209342956542966, 0.04905779266357422, 0.04885094451904297, 0.04864547348022461, 0.04854832077026367, 0.04867910385131836, 0.04869734573364258, 0.04854140853881836, 0.04836412811279297, 0.048686782836914064, 0.04868710327148437, 0.048533504486083984, 0.04869734573364258, 0.04885299301147461, 0.04866457748413086, 0.0485928955078125, 0.049078174591064457, 0.0489595832824707, 0.04917375946044922, 0.04913024139404297, 0.049104862213134766, 0.04920323181152344, 0.05133401489257813, 0.048608928680419924, 0.04773923110961914, 0.04763225555419922, 0.047802497863769534, 0.047761280059814455, 0.047947902679443356, 0.04807619094848633, 0.048427616119384766, 0.04771635055541992, 0.04798873519897461, 0.04815871810913086, 0.04810553741455078, 0.04804191970825195, 0.04814233779907227, 0.048088958740234375, 0.04798271942138672, 0.048056320190429686, 0.0483897590637207, 0.0487979850769043, 0.04883657455444336, 0.048822399139404296, 0.048601089477539064, 0.04828691101074219, 0.04814931106567383, 0.04810873413085937, 0.047937694549560546, 0.04842563247680664, 0.04842086410522461, 0.048353153228759764, 0.04819705581665039, 
0.04812870407104492, 0.04821097564697266, 0.048344032287597656, 0.048228351593017575, 0.048532543182373045, 0.048565185546875, 0.04858060836791992, 0.048782974243164065, 0.048740352630615234, 0.049043838500976565, 0.04897280120849609, 0.04879872131347656, 0.04873606491088867, 0.04886956787109375, 0.04883433532714844, 0.04850710296630859, 0.048683006286621096, 0.048666622161865236, 0.04897977447509766, 0.04888380813598633, 0.04878550338745117, 0.048669857025146486, 0.04856099319458008, 0.04871372985839844, 0.048723968505859375, 0.048936958312988284, 0.04909875106811523, 0.048825599670410155, 0.048986881256103516, 0.0493721923828125, 0.049232192993164066, 0.04888966369628906, 0.05140131378173828, 0.04835436630249024, 0.04796220779418945, 0.04788719940185547, 0.04781430435180664, 0.04787747192382812, 0.04782953643798828, 0.04803631973266602, 0.04815462493896484, 0.04776345443725586, 0.04774448013305664, 0.04779446411132812, 0.04839148712158203, 0.04818425750732422, 0.04800921630859375, 0.04838729476928711, 0.04809328079223633, 0.048356033325195315, 0.04843648147583008, 0.048955169677734375, 0.04894204711914062, 0.04865228652954102, 0.04853673553466797, 0.048261985778808594, 0.047951873779296876, 0.04822630310058594, 0.04806246566772461, 0.048320510864257815, 0.04837363052368164, 0.048352447509765625, 0.04810847854614258, 0.04811955261230469, 0.04848028945922851, 0.04848236846923828, 0.04837187194824219, 0.04849641418457031, 0.0487281608581543, 0.048748672485351564, 0.048879615783691405, 0.049188865661621096, 0.04930332946777344, 0.04885321426391601, 0.04882006454467774, 0.048731712341308596, 0.04855795288085937, 0.048933406829833985, 0.04873225784301758, 0.048609375, 0.04856422424316406, 0.04844112014770508, 0.04875286483764649, 0.04869692611694336, 0.04864022445678711, 0.048815296173095706, 0.04866950225830078, 0.04855212783813476, 0.04881737518310547, 0.04903945541381836, 0.048705696105957035, 0.04926108932495117, 0.04937932968139649, 0.04907622528076172, 0.04890534210205078, 0.05166841506958008, 0.04840300750732422, 0.048088897705078126, 0.04795340728759766, 0.047852222442626956, 0.04777711868286133, 0.047587070465087894, 0.04790569686889649, 0.04805827331542969, 0.04836140823364258, 0.048259231567382814, 0.0481525764465332, 0.048263168334960936, 0.048164833068847654, 0.048177089691162106, 0.04829305648803711, 0.048092063903808595, 0.048438560485839846, 0.04824553680419922, 0.04860921478271484, 0.04917606353759765, 0.048665088653564455, 0.048517120361328124, 0.048449600219726566, 0.04825423812866211, 0.04811228942871094, 0.048115711212158206, 0.04816799926757812, 0.048556415557861325, 0.04858528137207031, 0.048377857208251954, 0.048285598754882815, 0.04852131271362305, 0.048377857208251954, 0.04835532760620117, 0.04832460784912109, 0.04855807876586914, 0.04875263977050781, 0.048766590118408205, 0.04900902557373047, 0.04926054382324219, 0.049007743835449216, 0.04896243286132813, 0.04868214416503906, 0.04868796920776367, 0.04860710525512695, 0.04840985488891601, 0.048656288146972655, 0.04862972640991211, 0.04869836807250977, 0.04865564727783203, 0.048748767852783204, 0.04876544189453125, 0.04875183868408203, 0.04903964614868164, 0.04903894424438476, 0.04899679946899414, 0.0490173110961914, 0.04921532821655274, 0.04930166244506836, 0.04945711898803711, 0.049214687347412106, 0.04955372619628906, 0.05137216186523438, 0.04834313583374023, 0.04786988830566406, 0.047726112365722655, 0.0477537612915039, 0.04790476989746094, 0.04765081787109375, 0.04805564880371094, 0.047946399688720706, 
0.04813616180419922, 0.048212001800537106, 0.04831612777709961, 0.04836175918579102, 0.04809695816040039, 0.04822175979614258, 0.04833113479614258, 0.048443775177001956, 0.04833484649658203, 0.04839404678344727, 0.04896992111206055, 0.04894876861572266, 0.048493022918701174, 0.048438751220703125, 0.04831081771850586, 0.04831817626953125, 0.04840476989746094, 0.04848812866210937, 0.048234817504882815, 0.04799897766113281, 0.04820991897583008, 0.04860432052612305, 0.048188255310058596, 0.04838598251342773, 0.04833695983886719, 0.04828979110717774, 0.048330753326416016, 0.04861270523071289, 0.04861404800415039, 0.04878540802001953, 0.04903427124023438, 0.04892361450195312, 0.049102848052978515, 0.04909260940551758, 0.048510974884033206, 0.04859689712524414, 0.04883670425415039, 0.04900044631958008, 0.04876844787597656, 0.0484554557800293, 0.048857887268066405, 0.04853251266479492, 0.04882684707641602, 0.048613887786865234, 0.04864604949951172, 0.04878755187988281, 0.04895539093017578, 0.04891587066650391, 0.04890070343017578, 0.04909465789794922, 0.04894028854370117, 0.04952345657348633, 0.04952201461791992, 0.04928374481201172, 0.0517900161743164, 0.04881203079223633, 0.04791273498535156, 0.04817942428588867, 0.04794777679443359, 0.04780441665649414, 0.04789369583129883, 0.04793193435668945, 0.0478164176940918, 0.04820367813110352, 0.047930015563964846, 0.04813372802734375, 0.04805878448486328, 0.04796416091918945, 0.048297950744628906, 0.04805635070800781, 0.04839014434814453, 0.04841033554077148, 0.04832489776611328, 0.04877628707885742, 0.04912015914916992, 0.04914995193481445, 0.04865228652954102, 0.04833280181884766, 0.048333919525146485, 0.04867164611816406, 0.04844723129272461, 0.04797798538208008, 0.048005760192871096, 0.04852134323120117, 0.04849225616455078, 0.04838614273071289, 0.048338878631591795, 0.04871305465698242, 0.048532062530517575, 0.04839456176757812, 0.0485, 0.04858707046508789, 0.048613086700439456, 0.04900236892700195, 0.04895827102661133, 0.049032577514648436, 0.04933695983886719, 0.048881664276123046, 0.04895129776000977, 0.04873011016845703, 0.04877107238769531, 0.048601089477539064, 0.04850688171386719, 0.04891839981079102, 0.04861945724487305, 0.04849683380126953, 0.04845363235473633, 0.04875263977050781, 0.04876902389526367, 0.04905324935913086, 0.04898988723754883, 0.04921619033813476, 0.049123390197753906, 0.048901439666748044, 0.04909331130981445, 0.0493559341430664, 0.04944316864013672, 0.05177932739257812, 0.04844342422485352, 0.047791904449462894, 0.0477720947265625, 0.04771635055541992, 0.04812799835205078, 0.04792115020751953, 0.047958015441894535, 0.04816828918457031, 0.048093856811523436, 0.04813724899291992, 0.04822524642944336, 0.048088382720947266, 0.047823230743408204, 0.048048446655273434, 0.04826521682739258, 0.0481868782043457, 0.048302593231201174, 0.04822188949584961, 0.04885903930664062, 0.04901504135131836, 0.04876508712768555, 0.04850483322143555, 0.04851507186889648, 0.04810956954956055, 0.04816793441772461, 0.04820479965209961, 0.048396289825439455, 0.048075904846191404, 0.048427806854248044, 0.04837795257568359, 0.04844268798828125, 0.04835609436035156, 0.048424896240234376, 0.048570369720458986, 0.04836966323852539, 0.0485560302734375, 0.04884793472290039, 0.04864886474609375, 0.04918255996704102, 0.0490049934387207, 0.04895334243774414, 0.049051647186279294, 0.04900864028930664, 0.04879516983032227, 0.048486015319824216, 0.04845660781860352, 0.048489856719970706, 0.04852380752563477, 0.04888169479370117, 0.049075904846191405, 
0.048867649078369144, 0.048692958831787106, 0.049039329528808594, 0.04906172943115234, 0.04901526260375977, 0.0491739501953125, 0.04927072143554687, 0.04912518310546875, 0.04915283203125, 0.049119232177734375, 0.04900454330444336, 0.04940496063232422, 0.05254547119140625, 0.04875686264038086, 0.04806860733032227, 0.04780441665649414, 0.04769750213623047, 0.04774294281005859, 0.04771254348754883, 0.04800118255615234, 0.04808703994750976, 0.04795177459716797, 0.0483837776184082, 0.04831468963623047, 0.048320064544677736, 0.04860153579711914, 0.04801052856445313, 0.04816892623901367, 0.0484769287109375, 0.048489952087402345, 0.048379520416259765, 0.04863888168334961, 0.04897148895263672, 0.0489771842956543, 0.048710655212402344, 0.04869529724121094, 0.04851433563232422, 0.04842502212524414, 0.04807686233520508, 0.048132705688476565, 0.04814438247680664, 0.04852102279663086, 0.04847945785522461, 0.04847027206420899, 0.04863372802734375, 0.04850342559814453, 0.04826544189453125, 0.04853334426879883, 0.048646305084228514, 0.04869734573364258, 0.04871782302856445, 0.04911308670043945, 0.04908031845092774, 0.04904048156738281, 0.048952030181884765, 0.04885523223876953, 0.04916611099243164, 0.04890236663818359, 0.04856016159057617, 0.04851859283447266, 0.0486712646484375, 0.04868422317504883, 0.04892272186279297, 0.04905033493041992, 0.049135616302490234, 0.04924825668334961, 0.04873830413818359, 0.04891033554077148, 0.04880710220336914, 0.04872684860229492, 0.04921753692626953, 0.04940095901489258, 0.04916105651855469, 0.04906396865844727, 0.049006591796875, 0.0520362548828125, 0.04883456039428711, 0.04804137420654297, 0.04787177658081055, 0.047889022827148436, 0.0478271369934082, 0.047884288787841796, 0.047874046325683595, 0.04798463821411133, 0.04833219146728516, 0.04832828903198242, 0.048099742889404294, 0.04854579162597656, 0.048336929321289065, 0.048091712951660155, 0.04823859024047852, 0.048115711212158206, 0.04809849548339844, 0.048495391845703124, 0.048885791778564454, 0.04918067169189453, 0.04916998291015625, 0.048538047790527346, 0.04809523010253906, 0.04834220886230469, 0.04815135955810547, 0.048121150970458985, 0.04861203384399414, 0.04805766296386719, 0.048329151153564454, 0.04842252731323242, 0.04860166549682617, 0.048685024261474606, 0.04841471862792969, 0.0485316162109375, 0.048406463623046875, 0.04862771224975586, 0.04856175994873047, 0.04863343811035156, 0.04874303817749023, 0.048750110626220707, 0.04939228820800781, 0.04930559921264648, 0.04901206588745117, 0.04882908630371094, 0.04870553588867187, 0.048610591888427736, 0.04854652786254883, 0.048678752899169925, 0.04875075149536133, 0.04851507186889648, 0.04926259231567383, 0.04881817626953125, 0.04877721786499024, 0.04868211364746094, 0.04896150588989258, 0.048909217834472656, 0.0488358383178711, 0.049122047424316403, 0.04937113571166992, 0.04933209609985351, 0.04911030578613281, 0.04958499145507812, 0.05156528091430664, 0.04867891311645508, 0.047830078125, 0.04788320159912109, 0.047753345489501955, 0.04763177490234375, 0.048345569610595704, 0.04852463912963867, 0.048108192443847654, 0.04807452774047852, 0.04799910354614258, 0.04813833618164062, 0.048325889587402346, 0.0482509765625, 0.04804985427856445, 0.04813923263549805, 0.048345088958740234, 0.0483262710571289, 0.048258495330810544, 0.04887033462524414, 0.0490781135559082, 0.04906185531616211, 0.048519359588623044, 0.04818739318847656, 0.04803145599365234, 0.048259361267089844, 0.048233856201171876, 0.04842355346679687, 0.048408256530761716, 0.04843075180053711, 
0.04822492980957031, 0.048963584899902345, 0.04884259033203125, 0.04838623809814453, 0.04843430328369141, 0.04857228851318359, 0.048583648681640626, 0.04871782302856445, 0.04897792053222656, 0.04927078247070313, 0.049111038208007815, 0.048930816650390625, 0.04890214538574219, 0.048756031036376955, 0.048699615478515625, 0.048871902465820315, 0.04878131103515625, 0.04855398559570313, 0.04904755020141602, 0.04903728103637695, 0.04901235198974609, 0.048636318206787106, 0.048826305389404294, 0.0492606086730957, 0.049219585418701174, 0.048738113403320314, 0.04901087951660156, 0.049027233123779296, 0.04932182312011719, 0.049338081359863284, 0.049301376342773436, 0.04967465591430664, 0.049219585418701174]",tokens/s,20.580522081529406,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,6756.691968,7532.838912,0.0,7147.094016,7138.9184,s,1,11.3569716796875,11.3569716796875,0.0,11.3569716796875,11.3569716796875,11.3569716796875,11.3569716796875,[11.3569716796875],,kWh,0.000123738664291659,1.3641902479068927e-05,4.147169984400345e-05,0.0001788522666147314,,MB,1655.525376,8243.77344,0.0,7828.668416,7715.649536,s,10,6.428054260253907,0.6428054260253907,0.001020427930872231,0.6428531494140626,0.6439337646484375,0.6444055725097656,0.6447830187988282,"[0.6410170288085938, 0.6427531127929688, 0.6421327514648437, 0.6421298217773438, 0.6433034057617187, 0.642009765625, 0.6438289184570313, 0.6429531860351563, 0.6448773803710938, 0.6430488891601562]",tokens/s,398.2542611422948,kWh,1.882011453619725e-05,2.0755304523189378e-06,1.2462006497375097e-05,3.335765148589129e-05,tokens/kWh,7674401.182237782,MB,1662.263296,8390.57408,0.0,7975.469056,7906.446848,s,10,29.45817846679688,2.9458178466796876,0.005404849493340141,2.9452669677734375,2.951831396484375,2.952954638671875,2.953853232421875,"[2.94063671875, 2.936659423828125, 2.94206494140625, 2.941861083984375, 2.943523193359375, 2.949413818359375, 2.9470107421875, 2.951581787109375, 2.951348876953125, 2.954077880859375]",tokens/s,21.386251044343776,kWh,8.611570404463842e-05,9.496959229280525e-06,5.7300632646025204e-05,0.00015291329591994417,tokens/kWh,411998.18250587484,,s,630,29.454192474365247,0.0467526864672464,0.000638401956337469,0.04677324867248535,0.04743208541870117,0.047623320388793945,0.04889916187286378,"[0.04937500762939453, 0.046167617797851564, 0.046105247497558594, 0.046516223907470705, 0.04594883346557617, 0.0456126708984375, 0.046528926849365236, 0.046684223175048827, 0.046159870147705076, 0.04532777786254883, 0.046680225372314456, 0.04664886474609375, 0.045658977508544925, 0.0457786865234375, 0.04616633605957031, 0.046921695709228516, 0.04653388977050781, 0.04616460800170898, 0.04629452896118164, 0.04682579040527344, 0.04679919815063477, 0.04619673538208008, 0.047142913818359375, 0.046927871704101565, 0.04648755264282227, 0.04580339050292969, 0.04678054428100586, 0.04660224151611328, 0.04590339279174805, 
0.04555769729614258, 0.046338592529296875, 0.04673734283447266, 0.04649110412597656, 0.04648406219482422, 0.04712992095947265, 0.046475967407226565, 0.04708966445922851, 0.04677427291870117, 0.046525600433349606, 0.04709872055053711, 0.046796321868896484, 0.04617644882202149, 0.046551326751708984, 0.04743574523925781, 0.04712966537475586, 0.047494110107421876, 0.04804793548583984, 0.04683590316772461, 0.046663681030273435, 0.04604927825927734, 0.0461578254699707, 0.04740252685546875, 0.04718230438232422, 0.04691107177734375, 0.046586273193359375, 0.04741036987304687, 0.0478482551574707, 0.04720435333251953, 0.04691763305664062, 0.04647081756591797, 0.047454078674316405, 0.047161758422851564, 0.0469502067565918, 0.04878931045532227, 0.045847358703613283, 0.0453647689819336, 0.04581833648681641, 0.04626825714111328, 0.04575862503051758, 0.045143520355224606, 0.04573775863647461, 0.046052097320556644, 0.046648609161376954, 0.0459310417175293, 0.046085887908935544, 0.04704915237426758, 0.046301185607910154, 0.046607742309570314, 0.04612979125976562, 0.04613043212890625, 0.046104705810546875, 0.046787200927734376, 0.04637081527709961, 0.046663681030273435, 0.046854145050048826, 0.046481441497802735, 0.046231521606445315, 0.04584175872802734, 0.047080097198486326, 0.04652032089233398, 0.04637641525268555, 0.045949470520019534, 0.04709580612182617, 0.0462479362487793, 0.047035903930664064, 0.04662732696533203, 0.04617830276489258, 0.047067134857177735, 0.04670873641967774, 0.04625145721435547, 0.04708000183105469, 0.0467946891784668, 0.04641593551635742, 0.04721433639526367, 0.04732320022583008, 0.04707916641235352, 0.047337024688720704, 0.04694822311401367, 0.04623257446289063, 0.04661043167114258, 0.04672041702270508, 0.04688137435913086, 0.0464486083984375, 0.046702625274658204, 0.04657923126220703, 0.04664982223510742, 0.047108097076416014, 0.046812576293945314, 0.047080097198486326, 0.04743161773681641, 0.0469381103515625, 0.046968608856201174, 0.04698953628540039, 0.0472344970703125, 0.04713324737548828, 0.047408992767333985, 0.04861945724487305, 0.046063838958740236, 0.04587152099609375, 0.04615167999267578, 0.046378017425537106, 0.04575126266479492, 0.04567379379272461, 0.046180065155029294, 0.04663600158691406, 0.04662172698974609, 0.04640003204345703, 0.04694499206542969, 0.04671049499511719, 0.04618588638305664, 0.04529622268676758, 0.046036991119384765, 0.04675788879394531, 0.04650342559814453, 0.046819839477539066, 0.04681865692138672, 0.04617078399658203, 0.04719753646850586, 0.04655785751342773, 0.04690739059448242, 0.04656060791015625, 0.04701251220703125, 0.04635033416748047, 0.045723648071289064, 0.04555299377441406, 0.04697974395751953, 0.046663681030273435, 0.04649574279785156, 0.04667801666259765, 0.04711423873901367, 0.046837760925292966, 0.046835712432861325, 0.04653158569335938, 0.046312446594238284, 0.04704201507568359, 0.046792545318603516, 0.04634284973144531, 0.04691558456420898, 0.04722892761230469, 0.046814849853515625, 0.0463691520690918, 0.04672716903686523, 0.04727603149414063, 0.047065086364746093, 0.04651212692260742, 0.047067039489746096, 0.0474788818359375, 0.04718364715576172, 0.04668643188476562, 0.04696873474121094, 0.046798782348632814, 0.046678176879882814, 0.046917182922363285, 0.04680668640136719, 0.046707134246826175, 0.04748527908325195, 0.04743167877197266, 0.04741484832763672, 0.04758284759521485, 0.04894403076171875, 0.04581718444824219, 0.04515523147583008, 0.04614937591552734, 0.046497791290283204, 0.046088191986083986, 0.04596329498291016, 
0.04654691314697266, 0.046177696228027344, 0.045599231719970705, 0.04595308685302734, 0.045255710601806644, 0.045570240020751954, 0.04608902359008789, 0.04608147048950195, 0.04569145584106445, 0.04714086532592773, 0.04691558456420898, 0.046425376892089844, 0.04680777740478516, 0.04807884979248047, 0.046951648712158206, 0.046402336120605465, 0.04625571060180664, 0.04660015869140625, 0.04642377471923828, 0.045809375762939454, 0.047155265808105466, 0.046547294616699215, 0.0463298225402832, 0.04636934280395508, 0.04643206405639649, 0.04665744018554688, 0.046819679260253905, 0.046763038635253905, 0.04623664093017578, 0.046499839782714845, 0.047298465728759766, 0.0469095344543457, 0.04689100646972656, 0.047298561096191405, 0.04745625686645508, 0.04781465530395508, 0.04733542251586914, 0.04727974319458008, 0.047013343811035155, 0.046685089111328126, 0.04653807830810547, 0.04633257675170899, 0.04668147277832031, 0.04657376098632812, 0.04688431930541992, 0.047508480072021485, 0.04694012832641602, 0.046519294738769534, 0.04680777740478516, 0.046899486541748046, 0.04698726272583008, 0.04692582321166992, 0.047554561614990234, 0.04753004837036133, 0.04762963104248047, 0.04799932861328125, 0.04800457763671875, 0.04562220764160156, 0.04553104019165039, 0.04553334426879883, 0.046225406646728515, 0.04627251052856445, 0.04595430374145508, 0.04575878524780273, 0.04598988723754883, 0.04572409439086914, 0.046671871185302735, 0.04670873641967774, 0.04607513427734375, 0.04715776062011719, 0.04681264114379883, 0.04605196762084961, 0.0457729606628418, 0.04627619171142578, 0.046707103729248044, 0.04691721725463867, 0.04688528060913086, 0.04655513763427734, 0.04688608169555664, 0.04642486572265625, 0.04695657730102539, 0.04675993728637695, 0.04643766403198242, 0.04606195068359375, 0.04657347106933594, 0.046502334594726566, 0.047099903106689454, 0.04671238327026367, 0.04616032028198242, 0.046437728881835935, 0.04705244827270508, 0.046785537719726565, 0.04676607894897461, 0.04666163253784179, 0.04639129638671875, 0.04717580795288086, 0.04718169784545898, 0.04655503845214844, 0.047515743255615236, 0.04696211242675781, 0.04672774505615234, 0.04731683349609375, 0.04703247833251953, 0.04692566299438477, 0.046700702667236325, 0.04726988983154297, 0.04707049560546875, 0.04691619110107422, 0.046844032287597655, 0.04663056182861328, 0.04741708755493164, 0.047431552886962894, 0.046910175323486326, 0.04835123062133789, 0.047500606536865234, 0.047104705810546876, 0.04689078521728516, 0.04724758529663086, 0.047628288269042966, 0.049442817687988284, 0.04631283187866211, 0.045443679809570314, 0.045530719757080076, 0.04605587387084961, 0.04706918334960938, 0.04626006317138672, 0.04620918273925781, 0.046851520538330076, 0.047120960235595706, 0.045733470916748044, 0.04540988922119141, 0.04623238372802734, 0.04630908966064453, 0.046981407165527345, 0.046863391876220704, 0.04648649597167969, 0.046137344360351565, 0.04562739181518555, 0.04710611343383789, 0.047681472778320313, 0.047331329345703124, 0.04689059066772461, 0.045953441619873046, 0.04570710372924805, 0.04640694427490234, 0.04741542434692383, 0.046485694885253906, 0.04629135894775391, 0.04614364624023438, 0.04732928085327148, 0.046505855560302733, 0.046446495056152344, 0.046806751251220705, 0.046827808380126956, 0.04711446380615234, 0.046835712432861325, 0.046690303802490236, 0.0465715217590332, 0.04647116851806641, 0.04720640182495117, 0.04731916809082031, 0.0475524787902832, 0.047304607391357424, 0.04674560165405273, 0.048032798767089845, 0.047369182586669924, 
0.04700112152099609, 0.046811614990234375, 0.04666572952270508, 0.04747673416137695, 0.04699875259399414, 0.046869216918945314, 0.047672542572021484, 0.047220638275146484, 0.04722988891601562, 0.04759347152709961, 0.04716115188598633, 0.04669196701049805, 0.04758380889892578, 0.04754841613769531, 0.04698662567138672, 0.04696128082275391, 0.049575294494628906, 0.046610881805419925, 0.045596511840820315, 0.046055774688720706, 0.046041057586669924, 0.045944862365722657, 0.04563907241821289, 0.04515081787109375, 0.045623294830322264, 0.046854145050048826, 0.046298912048339844, 0.04622687911987305, 0.04591900634765625, 0.04619689559936523, 0.046144542694091795, 0.04610319900512695, 0.04684012985229492, 0.04681916809082031, 0.04601142501831055, 0.04660323333740234, 0.0477470703125, 0.04725363159179687, 0.04712451171875, 0.04690723037719727, 0.04626227188110352, 0.04630323028564453, 0.0465715217590332, 0.04623331069946289, 0.046978527069091794, 0.04674345779418945, 0.046599071502685545, 0.04615913772583008, 0.04645142364501953, 0.046219264984130856, 0.04693791961669922, 0.047128768920898435, 0.04683478546142578, 0.0466789436340332, 0.047034366607666016, 0.04711334228515625, 0.04678860855102539, 0.047303550720214846, 0.047339519500732424, 0.047376224517822266, 0.047285633087158205, 0.0469318733215332, 0.04658377456665039, 0.04735478210449219, 0.047280128479003904, 0.046739456176757815, 0.04681667327880859, 0.04637516784667969, 0.047728992462158205, 0.04713987350463867, 0.04697753524780274, 0.04701788711547852, 0.047454910278320314, 0.04750300979614258, 0.046973121643066405, 0.04717571258544922, 0.04761724853515625, 0.04728656005859375, 0.04804201507568359, 0.0501385269165039, 0.04669033432006836, 0.045435455322265624, 0.04549004745483398, 0.046094463348388674, 0.04607353591918945, 0.0460618896484375, 0.0470362548828125, 0.04622147369384766, 0.04667596817016602, 0.046704639434814454, 0.04639744186401367, 0.04643587112426758, 0.046206527709960935, 0.04673020935058594, 0.046286720275878906, 0.04649276733398437, 0.046390239715576174, 0.04668547058105469, 0.04719887924194336, 0.04721836853027344, 0.047274112701416016, 0.046924030303955075, 0.046453857421875, 0.04628572845458984, 0.04689100646972656, 0.047021408081054684, 0.046477985382080075, 0.04663296127319336, 0.04700521469116211, 0.046717025756835937, 0.047067520141601565, 0.04681318283081055, 0.04631135940551758, 0.04649286270141602, 0.04719091033935547, 0.046704097747802736, 0.04630755233764648, 0.04742176055908203, 0.04690943908691406, 0.046911487579345705, 0.047265888214111325, 0.047429534912109376, 0.04688211059570312, 0.04672991943359375, 0.04735561752319336, 0.04730908966064453, 0.046591487884521485, 0.04824457550048828, 0.04683407974243164, 0.04689535903930664, 0.047480831146240236, 0.04693196868896484, 0.046772224426269535, 0.046342144012451174, 0.04675942230224609, 0.04754073715209961, 0.04704262542724609, 0.0471162223815918, 0.04713075256347656, 0.04711203384399414, 0.04757712173461914, 0.047425537109375, 0.049282943725585934, 0.04658448028564453, 0.04557136154174805, 0.045335262298583985, 0.045594112396240234, 0.04569139099121094, 0.04597516632080078, 0.04570355224609375, 0.04649574279785156, 0.04660815811157227, 0.0463935661315918, 0.04604927825927734, 0.04688076782226563, 0.04674764633178711, 0.046749183654785156, 0.047258113861083986, 0.0468699836730957, 0.046341663360595704, 0.04645580673217774, 0.046964767456054685, 0.04721564865112305, 0.04736470413208008, 0.046784862518310544, 0.046837760925292966, 0.04611270523071289, 
0.045992000579833985, 0.04590387344360351, 0.04629923248291016, 0.0466082878112793, 0.046952449798583984, 0.046637054443359374, 0.046978591918945316, 0.047002079010009766, 0.04672524642944336, 0.04724684906005859, 0.0469093132019043, 0.04723353576660156, 0.04720547103881836, 0.04680505752563477, 0.047390625, 0.047360191345214846, 0.04692044830322266, 0.04730879974365235, 0.047255550384521484, 0.04695449447631836, 0.04741046524047852, 0.047075103759765625, 0.04670969772338867, 0.04645478439331055, 0.0474521598815918, 0.04704230499267578, 0.046835487365722656, 0.04673788833618164, 0.04678860855102539, 0.04780764770507812, 0.0475656623840332, 0.04698316955566406, 0.04707123184204102, 0.047486976623535154, 0.04721459197998047, 0.04753203201293945, 0.04756278228759766, 0.04773065567016602, 0.049237823486328124, 0.0463287353515625, 0.045752288818359375, 0.04534675216674805, 0.04589564895629883, 0.0459818229675293, 0.04580729675292969, 0.04657183837890625, 0.046499839782714845, 0.046308448791503906, 0.04709059143066406, 0.04677568054199219, 0.046690975189208984, 0.04631500625610351, 0.04783283233642578, 0.046836448669433595, 0.04573180770874023, 0.046735134124755856, 0.04675404739379883, 0.04648735809326172, 0.04686783981323242, 0.04729324722290039, 0.04692377471923828, 0.04629708862304688, 0.04693196868896484, 0.04667910385131836, 0.04680966567993164, 0.04689465713500977, 0.04633683013916016, 0.046671871185302735, 0.047029281616210936, 0.046582241058349606, 0.04640204620361328, 0.046772224426269535, 0.046617889404296876, 0.04728406524658203, 0.046832511901855466, 0.04673484802246094, 0.047172096252441405, 0.047159038543701175, 0.04724556732177734, 0.04760985565185547, 0.04754012680053711, 0.04738057708740234, 0.04736812973022461, 0.04700723266601563, 0.04669292831420899, 0.047341567993164066, 0.047050750732421875, 0.04691072082519531, 0.046742015838623044, 0.04679910278320312, 0.04818889617919922, 0.04715574264526367, 0.04694630432128906, 0.04669375991821289, 0.04789516830444336, 0.04769075012207031, 0.046584831237792966, 0.04730879974365235, 0.04711740875244141, 0.047481758117675785, 0.04775321578979492]",tokens/s,21.389145214159434,,, 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward 
attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7422.926848,7815.954432,0.0,7430.209536,7414.23104,s,1,11.5931337890625,11.5931337890625,0.0,11.5931337890625,11.5931337890625,11.5931337890625,11.5931337890625,[11.5931337890625],,kWh,0.0001317410172457888,1.4524865825930464e-05,4.455086897400817e-05,0.00019081675204572746,,MB,1761.787904,8581.414912,0.0,8166.309888,8044.111872,s,10,8.190297790527342,0.8190297790527344,0.003589906739423247,0.8196229858398438,0.8220453063964843,0.8221088348388672,0.8221596575927734,"[0.809703125, 0.8159363403320312, 0.819736572265625, 0.8220311889648437, 0.8195093994140625, 0.82217236328125, 0.8214393310546875, 0.821601318359375, 0.8194581909179688, 0.8187099609375]",tokens/s,312.5649476336282,kWh,2.383622019647541e-05,2.628726519947484e-06,1.5790119469691118e-05,4.225506618611401e-05,tokens/kWh,6058445.131110161,MB,1761.787904,8749.187072,0.0,8334.082048,8265.758208,s,10,33.48306030273437,3.348306030273437,0.0011582199854532499,3.348733642578125,3.349405810546875,3.3495796142578125,3.3497186572265627,"[3.3458291015625, 3.3472412109375, 3.3472548828125, 3.348966552734375, 3.34923779296875, 3.3493671875, 3.348639404296875, 3.34975341796875, 3.34794287109375, 3.348827880859375]",tokens/s,18.81548443612699,kWh,9.767525763310868e-05,1.077416827946904e-05,6.505911187630907e-05,0.0001735085377888868,tokens/kWh,363094.52435507264,,s,630,33.479189311981244,0.053141570336478094,0.0006600403183592957,0.05310464096069336,0.05380537414550782,0.05396999835968017,0.056340752792358416,"[0.056604030609130856, 0.05339708709716797, 0.05247488021850586, 0.052307968139648435, 0.05207859039306641, 0.052028736114501956, 0.052245185852050784, 0.05211545562744141, 0.05239807891845703, 0.05260416030883789, 0.052590526580810544, 0.05266105651855469, 0.052612640380859374, 0.05257468795776367, 0.05217283248901367, 0.052464832305908204, 0.052925056457519534, 0.05325839996337891, 0.05401724624633789, 0.05393078231811523, 0.053255840301513674, 0.0527088623046875, 0.05262627029418945, 0.05247993469238281, 0.052674625396728514, 0.05267446517944336, 0.05254563140869141, 0.052978721618652344, 0.05279024124145508, 0.05278105545043945, 0.05295308685302735, 0.05331548690795898, 0.05297366333007812, 0.05293875122070312, 0.053333152770996095, 0.05347209548950195, 0.05334531021118164, 0.054013950347900394, 0.053849056243896486, 0.053477375030517575, 0.05316723251342773, 0.0533955192565918, 0.05311366271972656, 0.05304054260253906, 0.05308886337280273, 0.05303206253051758, 0.05338937759399414, 0.053248832702636716, 0.05311686325073242, 0.05317631912231445, 0.05343852615356445, 0.053114879608154295, 0.05316147232055664, 0.05348588943481445, 0.0536343994140625, 0.05374857711791992, 0.053620704650878905, 0.05359107208251953, 0.05354598236083984, 0.05336678314208984, 0.053356414794921876, 0.05354099273681641, 0.05345260620117188, 0.055782913208007816, 
0.05282662582397461, 0.05237519836425781, 0.05221311950683594, 0.05227411270141601, 0.05224448013305664, 0.05240220642089844, 0.05249001693725586, 0.052217247009277344, 0.05242758560180664, 0.05250998306274414, 0.05239596939086914, 0.05284259033203125, 0.05284934234619141, 0.05330476760864258, 0.05285030364990234, 0.0526703987121582, 0.053582847595214846, 0.054022144317626954, 0.053372928619384766, 0.053221279144287106, 0.05277500915527344, 0.05269504165649414, 0.0524403190612793, 0.052611839294433596, 0.05284787368774414, 0.052643646240234376, 0.05265299224853515, 0.05279334259033203, 0.05268889617919922, 0.05290403366088867, 0.052909984588623046, 0.05321263885498047, 0.0530027847290039, 0.052965057373046874, 0.0535266227722168, 0.053965023040771484, 0.05405491256713867, 0.053710849761962894, 0.053303295135498044, 0.05316188812255859, 0.05302796936035156, 0.05310153579711914, 0.052929920196533205, 0.052996734619140624, 0.05300428771972656, 0.05312921524047851, 0.05344255828857422, 0.05297356796264648, 0.05321113586425781, 0.05337497711181641, 0.053469345092773436, 0.053477375030517575, 0.05388067245483399, 0.053991424560546876, 0.05402188873291015, 0.05412275314331055, 0.05358796691894531, 0.053526527404785154, 0.05354873657226562, 0.053462337493896485, 0.05346406555175781, 0.053379070281982424, 0.056044097900390624, 0.05278515243530273, 0.05215027236938476, 0.05203148651123047, 0.052094974517822266, 0.05246566390991211, 0.05223833465576172, 0.05224038314819336, 0.05222579193115234, 0.05243519973754883, 0.052393985748291017, 0.05248819351196289, 0.052615169525146485, 0.05302463912963867, 0.05328643035888672, 0.05317612838745117, 0.053168926239013675, 0.052985855102539066, 0.0533524169921875, 0.05327465438842773, 0.05315564727783203, 0.052988094329833986, 0.05294015884399414, 0.05273040008544922, 0.052520801544189456, 0.052488449096679685, 0.05256787109375, 0.05276076889038086, 0.0526187858581543, 0.05276515197753906, 0.052919361114501955, 0.052843456268310544, 0.05314559936523437, 0.05331919860839844, 0.05327856063842774, 0.05362956619262695, 0.05359820938110352, 0.05356943893432617, 0.05378876876831055, 0.05356316757202149, 0.05334982299804687, 0.05317868804931641, 0.053128894805908204, 0.05308671951293945, 0.05310220718383789, 0.053449375152587894, 0.05303839874267578, 0.05293945693969727, 0.05304515075683594, 0.05327872085571289, 0.05331337738037109, 0.05324006271362305, 0.05382495880126953, 0.053776958465576175, 0.05355449676513672, 0.05353542327880859, 0.053975040435791016, 0.05399363327026367, 0.05391667175292969, 0.053774208068847654, 0.05374051284790039, 0.05350400161743164, 0.053413536071777346, 0.05684646224975586, 0.05329292678833008, 0.05223993682861328, 0.052247329711914064, 0.0521861457824707, 0.052339679718017576, 0.052457473754882813, 0.05229558563232422, 0.052410465240478515, 0.0523317756652832, 0.052372001647949216, 0.052728031158447264, 0.05255987167358398, 0.052676254272460935, 0.05281212615966797, 0.052969470977783206, 0.053110401153564454, 0.053739902496337894, 0.0538603515625, 0.05372927856445313, 0.05309404754638672, 0.05278550338745117, 0.052811775207519535, 0.05284249496459961, 0.05280153656005859, 0.052498432159423826, 0.052669536590576174, 0.05275126266479492, 0.052719390869140625, 0.05274851226806641, 0.05267459106445312, 0.052754398345947265, 0.05287936019897461, 0.05289984130859375, 0.053308448791503905, 0.05365200042724609, 0.053438560485839844, 0.0538568000793457, 0.05391737747192383, 0.0533526725769043, 0.05329497528076172, 0.053016609191894534, 
0.0530142707824707, 0.0531596794128418, 0.05291900634765625, 0.053111839294433597, 0.052996734619140624, 0.05314982223510742, 0.0536102066040039, 0.05339929580688477, 0.05318038558959961, 0.05321721649169922, 0.05361318588256836, 0.05373747253417969, 0.05388083267211914, 0.053970848083496094, 0.05373756790161133, 0.05374537658691406, 0.05374758529663086, 0.05383327865600586, 0.053650272369384765, 0.05351424026489258, 0.05342371368408203, 0.05690163040161133, 0.05316198348999023, 0.05226496124267578, 0.05219123077392578, 0.05201225662231445, 0.052242431640625, 0.05238438415527344, 0.052324256896972655, 0.05227964782714844, 0.05214345550537109, 0.05270569610595703, 0.05273107147216797, 0.05257449722290039, 0.05275872039794922, 0.053096958160400394, 0.05283225631713867, 0.05299814224243164, 0.05332377624511719, 0.05368012619018555, 0.05380822372436524, 0.05297654342651367, 0.053040321350097654, 0.05258457565307617, 0.052693504333496094, 0.05265385437011719, 0.052705375671386716, 0.0525863037109375, 0.05253580856323242, 0.052512767791748044, 0.052856449127197266, 0.05277238464355469, 0.05309014511108399, 0.05340671920776367, 0.05310044860839844, 0.053407840728759766, 0.05360182571411133, 0.0535577278137207, 0.053759998321533206, 0.05384576034545899, 0.05339766311645508, 0.05326243209838867, 0.05314908981323242, 0.05299846267700195, 0.0531376953125, 0.05298502349853516, 0.05294559860229492, 0.053106815338134765, 0.05295536041259766, 0.05318611145019531, 0.05346889495849609, 0.05345724868774414, 0.05355692672729492, 0.05405129623413086, 0.053817344665527345, 0.05376588821411133, 0.05378294372558594, 0.054472286224365236, 0.0537081298828125, 0.05374249649047851, 0.053542911529541014, 0.053389312744140625, 0.05331302261352539, 0.053536895751953126, 0.05648963165283203, 0.053190303802490235, 0.05252985763549805, 0.052417919158935546, 0.05221955108642578, 0.05219622421264648, 0.05227500915527344, 0.05241680145263672, 0.052324256896972655, 0.052283134460449215, 0.052259166717529296, 0.05218876647949219, 0.05273574447631836, 0.0529189453125, 0.05297151947021484, 0.052729854583740236, 0.05306163024902344, 0.05353219223022461, 0.05366969680786133, 0.05356719970703125, 0.05348992156982422, 0.05310124969482422, 0.05267043304443359, 0.0527314567565918, 0.05259916687011719, 0.05304655838012695, 0.052783935546875, 0.052633087158203126, 0.05276435089111328, 0.052746337890625, 0.05278384017944336, 0.05285683059692383, 0.05309600067138672, 0.05303046417236328, 0.05320947265625, 0.05336092758178711, 0.054085311889648435, 0.053787166595458985, 0.05380505752563477, 0.05339750289916992, 0.05345894241333008, 0.05329100799560547, 0.05307814407348633, 0.05313504028320312, 0.05334854507446289, 0.05310464096069336, 0.05311619186401367, 0.05298588943481446, 0.053037471771240234, 0.05330710220336914, 0.053405887603759764, 0.05336307144165039, 0.053341697692871094, 0.05380352020263672, 0.05369440078735352, 0.05395872116088867, 0.053817344665527345, 0.05362633514404297, 0.05364569473266602, 0.053524639129638674, 0.053489376068115234, 0.05387702560424805, 0.053594112396240234, 0.05684355163574219, 0.05316067123413086, 0.05230387115478516, 0.052203231811523435, 0.05230201721191406, 0.05234220886230469, 0.05248271942138672, 0.05231411361694336, 0.05230182266235352, 0.05225676727294922, 0.05229888153076172, 0.052411262512207034, 0.05292031860351563, 0.052787200927734375, 0.05268617630004883, 0.05278112030029297, 0.05307043075561523, 0.05326816177368164, 0.05348588943481445, 0.05370265579223633, 0.05312073516845703, 
0.052812000274658204, 0.05269510269165039, 0.052597824096679686, 0.05266460800170898, 0.05273980712890625, 0.052662528991699216, 0.052747039794921874, 0.052606815338134764, 0.05286918258666992, 0.05301862335205078, 0.052977664947509766, 0.05289791870117187, 0.052998046875, 0.05322134399414063, 0.05342940902709961, 0.05400227355957031, 0.0537889289855957, 0.053868545532226565, 0.053413406372070316, 0.053480926513671874, 0.053394432067871096, 0.05295513534545898, 0.05310255813598633, 0.053190689086914066, 0.05295040130615234, 0.052963134765625, 0.05332819366455078, 0.053211647033691405, 0.05315379333496094, 0.05318406295776367, 0.053416385650634765, 0.05354451370239258, 0.05374816131591797, 0.053776382446289066, 0.05410406494140625, 0.05396611022949219, 0.05383020782470703, 0.053832992553710934, 0.053856288909912106, 0.053392257690429684, 0.05347043228149414, 0.053371009826660154, 0.05646192169189453, 0.05316812896728516, 0.052333854675292966, 0.05224230575561523, 0.05220156860351562, 0.05233859252929687, 0.05220556640625, 0.0524194221496582, 0.05224604797363281, 0.0522243537902832, 0.05255747222900391, 0.05276924896240234, 0.052657886505126955, 0.05266783905029297, 0.052816192626953126, 0.05296796798706055, 0.0531701774597168, 0.05402009582519531, 0.05408358383178711, 0.05340364837646484, 0.05315584182739258, 0.0526292495727539, 0.05270963287353515, 0.05245721435546875, 0.05268681716918945, 0.052627742767333986, 0.05279926300048828, 0.052753952026367186, 0.05283020782470703, 0.052730560302734375, 0.052604351043701175, 0.05282428741455078, 0.0530777587890625, 0.053191265106201174, 0.05319241714477539, 0.053739551544189454, 0.053792510986328125, 0.05353932952880859, 0.053835777282714846, 0.05357923126220703, 0.053348320007324215, 0.05325471878051758, 0.0531701774597168, 0.05297097778320312, 0.0531297607421875, 0.05310464096069336, 0.05316793441772461, 0.05320908737182617, 0.053078208923339844, 0.0531701774597168, 0.05330688095092773, 0.053692928314208986, 0.053889022827148435, 0.05379065704345703, 0.05373548889160156, 0.053766143798828124, 0.05418598556518555, 0.05393388748168945, 0.053768096923828126, 0.05341750335693359, 0.05350403213500977, 0.05377711868286133, 0.05329510498046875, 0.05674095916748047, 0.05276150512695312, 0.052384929656982424, 0.05223100662231445, 0.052185089111328124, 0.052391937255859375, 0.052442752838134765, 0.05223052978515625, 0.05209907150268555, 0.05237891387939453, 0.052275936126708986, 0.052539390563964845, 0.05323980712890625, 0.053110271453857424, 0.05274166488647461, 0.052827102661132816, 0.0530780143737793, 0.05333401489257812, 0.05341756820678711, 0.053604225158691406, 0.05298175811767578, 0.0527501106262207, 0.05276895904541016, 0.05276073455810547, 0.05304771041870117, 0.0526929931640625, 0.05266022491455078, 0.052598785400390625, 0.05253267288208008, 0.052902366638183596, 0.05312521743774414, 0.053200031280517576, 0.053273311614990236, 0.053047199249267575, 0.05351241683959961, 0.05324143981933594, 0.05367644882202149, 0.053591873168945314, 0.0535816650390625, 0.05342764663696289, 0.05320383834838867, 0.05309782409667969, 0.053047840118408206, 0.053090465545654296, 0.05295449447631836, 0.05298211288452148, 0.053214977264404294, 0.05312771224975586, 0.05303500747680664, 0.05292761611938476, 0.05319990539550781, 0.053639007568359376, 0.05361449432373047, 0.053596256256103515, 0.0539607048034668, 0.05355875015258789, 0.05395897674560547, 0.05403376007080078, 0.05360115051269531, 0.053569534301757815, 0.053501953125, 0.053680065155029294, 
0.053538303375244144, 0.05589481735229492, 0.052975425720214846, 0.052404510498046876, 0.052203521728515626, 0.05245062255859375, 0.0522308464050293, 0.05211545562744141, 0.052512767791748044, 0.05227648162841797, 0.05230377578735351, 0.052284255981445316, 0.05254348754882812, 0.05287321472167969, 0.05281158447265625, 0.05276895904541016, 0.053164031982421874, 0.05315584182739258, 0.05353676986694336, 0.054168991088867184, 0.05334022521972656, 0.053047840118408206, 0.052760574340820314, 0.05281587219238281, 0.05252918243408203, 0.05297510528564453, 0.05288768005371094, 0.05269334411621094, 0.052779006958007815, 0.052672256469726564, 0.05264944076538086, 0.052674495697021484, 0.05285564804077148, 0.05342822265625, 0.05338304138183594, 0.05366182327270508, 0.05346227264404297, 0.053881534576416014, 0.053968959808349606, 0.05390335845947265, 0.05349772644042969, 0.053224609375, 0.05307241439819336, 0.05295967864990234, 0.05279072189331055, 0.052986431121826175, 0.05314355087280274, 0.05317631912231445, 0.053016128540039065, 0.053217151641845706, 0.052980289459228516, 0.053026817321777345, 0.0534977912902832, 0.053703807830810545, 0.053660606384277346, 0.05364940643310547, 0.053835582733154294, 0.054110015869140625, 0.05420006561279297, 0.05348966217041016, 0.053607040405273435, 0.053585918426513675, 0.05347942352294922, 0.05349683380126953]",tokens/s,18.8176599537475,,, 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3861.438464,5376.966656,0.0,4974.444544,4685.071872,s,1,11.2868056640625,11.2868056640625,0.0,11.2868056640625,11.2868056640625,11.2868056640625,11.2868056640625,[11.2868056640625],,kWh,0.00012178265385412032,1.3426059798512069e-05,5.182393034799948e-05,0.00018703264400063186,,MB,2164.14208,5410.521088,0.0,4993.318912,4233.626624,s,10,1.9413089294433592,0.19413089294433591,0.00027301655373829735,0.1941623001098633,0.19443526000976563,0.19445930633544922,0.1944785433959961,"[0.19367304992675782, 0.1944833526611328, 0.19418611145019532, 0.193955078125, 0.1938379211425781, 0.19442991638183593, 0.19385311889648438, 0.19432591247558595, 0.19413848876953124, 0.1944259796142578]",tokens/s,1318.6978956172838,kWh,5.7364405600494705e-06,6.325129958494147e-07,3.79123941643139e-06,1.0160192972330275e-05,tokens/kWh,25196371.830454074,MB,2175.328256,5410.521088,0.0,4993.318912,4337.11872,s,10,22.240558837890628,2.2240558837890627,0.009091044261655972,2.2245233154296873,2.2351972656249997,2.2358004150390625,2.2362829345703124,"[2.236403564453125, 2.22001123046875, 2.2236669921875, 2.22770703125, 2.211339111328125, 2.21260693359375, 
2.213503662109375, 2.225379638671875, 2.23487744140625, 2.235063232421875]",tokens/s,28.326626349275287,kWh,6.49894395074494e-05,7.168345979978839e-06,3.816212747956844e-05,0.00011031991296699668,tokens/kWh,571066.4403700816,,s,630,22.237560539245596,0.03529771514165969,0.0005314413029271096,0.035199377059936525,0.035715160751342775,0.035988059997558594,0.036827805366516114,"[0.035760128021240234, 0.035883007049560545, 0.035573760986328126, 0.03540787124633789, 0.035416065216064455, 0.035168254852294925, 0.03531564712524414, 0.03529734420776367, 0.03527811050415039, 0.035408576965332034, 0.03581340789794922, 0.035688449859619144, 0.035628353118896484, 0.03528899383544922, 0.03508915328979492, 0.03510889434814453, 0.03539295959472656, 0.035551647186279296, 0.03529715347290039, 0.035248416900634766, 0.03529663848876953, 0.035459041595458984, 0.03578537750244141, 0.03684899139404297, 0.035269279479980466, 0.035364734649658204, 0.03525030517578125, 0.03500636672973633, 0.03514992141723633, 0.03520006561279297, 0.03517331314086914, 0.03525529479980469, 0.03564646530151367, 0.03549795150756836, 0.035186527252197265, 0.03533148956298828, 0.03533052825927734, 0.03540204620361328, 0.03568556976318359, 0.0355412483215332, 0.035619071960449215, 0.03570719909667969, 0.03632332611083984, 0.035655071258544925, 0.036295265197753904, 0.035528289794921876, 0.03571865463256836, 0.03556854248046875, 0.03521651077270508, 0.035601280212402345, 0.03586006546020508, 0.036390750885009766, 0.03557024002075195, 0.03546223831176758, 0.03543948745727539, 0.035106849670410153, 0.03551180648803711, 0.03550041580200195, 0.035289215087890624, 0.03537254333496094, 0.0350909423828125, 0.03540582275390625, 0.03554655838012695, 0.03539779281616211, 0.03529318237304688, 0.03535257720947266, 0.03506175994873047, 0.03508355331420898, 0.035111648559570316, 0.03504127883911133, 0.035272254943847656, 0.03498339080810547, 0.0352655029296875, 0.03526041412353516, 0.03506143951416016, 0.03503747177124023, 0.0349672966003418, 0.035248416900634766, 0.03545702362060547, 0.035160064697265625, 0.035135486602783206, 0.035244033813476565, 0.03596489715576172, 0.03514575958251953, 0.03512115097045899, 0.03502278518676758, 0.03513481521606445, 0.03517718505859375, 0.035729408264160156, 0.035856063842773435, 0.03592182540893555, 0.03536323165893555, 0.035160064697265625, 0.035110912322998046, 0.03515801620483398, 0.03513731384277344, 0.035006622314453124, 0.035020511627197264, 0.03506745529174805, 0.035099040985107424, 0.03564787292480469, 0.03612057495117187, 0.036775936126708986, 0.0355491828918457, 0.035282848358154296, 0.03497087860107422, 0.03525020980834961, 0.03521209716796875, 0.035350528717041016, 0.03514777755737305, 0.03513958358764648, 0.035168254852294925, 0.035125247955322264, 0.0351723518371582, 0.03496672058105469, 0.03479635238647461, 0.03502652740478516, 0.035424385070800785, 0.03527299118041992, 0.034968673706054686, 0.03488451385498047, 0.034904064178466795, 0.03493180847167969, 0.03494390487670898, 0.03500236892700195, 0.03506691360473633, 0.03539081573486328, 0.035197376251220706, 0.0349409294128418, 0.03497369766235352, 0.03514303970336914, 0.035560062408447266, 0.03539270401000977, 0.035130176544189456, 0.03491020965576172, 0.034797569274902344, 0.035074047088623043, 0.034936832427978515, 0.035004417419433595, 0.03522092819213867, 0.03596140670776367, 0.03587891387939453, 0.03538025665283203, 0.03555632019042969, 0.035329566955566404, 0.03511094284057617, 0.03526700973510742, 0.03520512008666992, 0.035166206359863283, 
0.03510268783569336, 0.03507612609863281, 0.035209217071533204, 0.03522723388671875, 0.03545539093017578, 0.03539763259887695, 0.03700735855102539, 0.03540787124633789, 0.03537257766723633, 0.03512070465087891, 0.035111614227294925, 0.035440864562988283, 0.035227649688720705, 0.03531980895996094, 0.03524940872192383, 0.03519081497192383, 0.03487612915039062, 0.03501670455932617, 0.03510464096069336, 0.03531379318237305, 0.03592572784423828, 0.03548294448852539, 0.03545087814331055, 0.035283935546875, 0.035237857818603516, 0.03506383895874023, 0.035102718353271486, 0.03523379135131836, 0.0352308464050293, 0.035187583923339844, 0.035370944976806644, 0.035252193450927734, 0.0353608627319336, 0.03537715148925781, 0.035097984313964846, 0.03537481689453125, 0.035899585723876956, 0.03519356918334961, 0.03531366348266601, 0.035156993865966796, 0.03548160171508789, 0.03527897644042969, 0.03520703887939453, 0.03616873550415039, 0.035550079345703124, 0.03510454559326172, 0.035082561492919925, 0.03510883331298828, 0.034968673706054686, 0.03472684860229492, 0.03476275253295898, 0.03460095977783203, 0.03465830230712891, 0.0347955207824707, 0.03835084915161133, 0.035423904418945315, 0.03514547348022461, 0.035076702117919925, 0.037904384613037106, 0.03516108703613281, 0.035383647918701175, 0.03509267044067383, 0.035111392974853516, 0.03501670455932617, 0.03547955322265625, 0.035364864349365234, 0.03540377426147461, 0.035983360290527344, 0.035020320892333985, 0.03500080108642578, 0.03515596771240234, 0.035227649688720705, 0.03553478240966797, 0.03528844833374024, 0.0350371208190918, 0.03562508773803711, 0.03511881637573242, 0.03493775939941406, 0.03504127883911133, 0.03497983932495117, 0.03487539291381836, 0.035108768463134765, 0.03496764755249023, 0.03546851348876953, 0.03571996688842773, 0.03547107315063477, 0.035031326293945314, 0.03501260757446289, 0.035020065307617185, 0.0348875846862793, 0.03515475082397461, 0.035225471496582034, 0.03491648101806641, 0.035005825042724606, 0.03538140869140625, 0.03510275268554688, 0.035049598693847654, 0.038637535095214844, 0.03557001495361328, 0.035638465881347656, 0.035711807250976564, 0.03560559844970703, 0.03548160171508789, 0.035344001770019534, 0.0350885124206543, 0.03502025604248047, 0.035088481903076174, 0.03507590484619141, 0.03509747314453125, 0.03499212646484375, 0.03493404769897461, 0.035093215942382815, 0.03506099319458008, 0.03497856140136719, 0.03491587066650391, 0.0349268798828125, 0.03486041641235352, 0.0350494384765625, 0.034996673583984374, 0.03486556625366211, 0.034840576171875, 0.03599529647827148, 0.035471710205078125, 0.03554508972167969, 0.0350904312133789, 0.036058624267578124, 0.03537561416625976, 0.03506790542602539, 0.03563724899291992, 0.035071582794189454, 0.03509084701538086, 0.035026943206787106, 0.034920448303222655, 0.035380287170410155, 0.03489888000488281, 0.03480575942993164, 0.03599190521240234, 0.03483148956298828, 0.03471807861328125, 0.03493699264526367, 0.03461324691772461, 0.03460710525512695, 0.034797569274902344, 0.03478707122802734, 0.034910110473632815, 0.03505084609985352, 0.03527964782714844, 0.035278526306152344, 0.03510240173339844, 0.035153888702392576, 0.035009246826171875, 0.03518889617919922, 0.03512319946289062, 0.03625164794921875, 0.035286529541015625, 0.03498649597167969, 0.03486044692993164, 0.03497635269165039, 0.03504947280883789, 0.03504537582397461, 0.03494911956787109, 0.034861278533935544, 0.03497872161865234, 0.03504217529296875, 0.034858848571777346, 0.034853023529052736, 0.035152225494384765, 
0.035014495849609376, 0.03492265701293945, 0.034928638458251955, 0.03497296142578125, 0.03506659317016601, 0.03495731353759766, 0.035149822235107424, 0.03525836944580078, 0.03537510299682617, 0.03512934494018555, 0.03493478393554687, 0.0352105598449707, 0.03495596694946289, 0.03477436828613281, 0.03471427154541016, 0.034781185150146485, 0.034848129272460934, 0.03513151931762695, 0.035133438110351564, 0.03486115264892578, 0.03488118362426758, 0.03501443099975586, 0.03497625732421875, 0.034983966827392576, 0.03519327926635742, 0.03521945571899414, 0.035332096099853515, 0.03523372650146484, 0.03516012954711914, 0.035135326385498045, 0.0351376953125, 0.03494911956787109, 0.03480166244506836, 0.03513865661621094, 0.03505244827270508, 0.0352174072265625, 0.03521244812011719, 0.0349332160949707, 0.03502937698364258, 0.035194591522216795, 0.035014846801757815, 0.03507580947875977, 0.03499577713012696, 0.03489260864257813, 0.0348037109375, 0.03480473709106445, 0.03627065658569336, 0.03662483215332031, 0.036240543365478516, 0.03544150543212891, 0.0351844482421875, 0.03510496139526367, 0.03497983932495117, 0.03501670455932617, 0.03499728012084961, 0.03544684982299805, 0.035080734252929686, 0.03503055953979492, 0.03520800018310547, 0.03518467330932617, 0.03502236938476563, 0.034826366424560544, 0.03553289413452149, 0.035230335235595704, 0.03502918243408203, 0.03500646209716797, 0.03505145645141602, 0.03511711883544922, 0.03504742431640625, 0.03518668746948242, 0.035059711456298825, 0.034959297180175784, 0.03496076965332031, 0.03483718490600586, 0.03485244750976563, 0.034875808715820314, 0.03510268783569336, 0.03500624084472656, 0.035117313385009764, 0.035133216857910154, 0.03553699111938476, 0.034945152282714845, 0.03523788833618164, 0.03487948989868164, 0.03491020965576172, 0.03504742431640625, 0.03547750473022461, 0.03569664001464844, 0.035604480743408204, 0.03540131378173828, 0.03524441528320312, 0.035270366668701175, 0.03556998443603516, 0.03567523193359375, 0.035062686920166015, 0.03548675155639648, 0.0349411506652832, 0.03515059280395508, 0.035254207611083985, 0.03494713592529297, 0.03496860885620117, 0.03642057418823242, 0.03525836944580078, 0.03511452865600586, 0.034996448516845705, 0.034809089660644534, 0.03513161468505859, 0.03516291046142578, 0.035141632080078124, 0.03521535873413086, 0.03511500930786133, 0.03500790405273437, 0.0348678092956543, 0.034887199401855466, 0.03499817657470703, 0.03501110458374023, 0.035011646270751956, 0.034810848236083984, 0.03511500930786133, 0.03506172943115234, 0.034934814453125, 0.0348590087890625, 0.03490611267089844, 0.035026943206787106, 0.0349323844909668, 0.035240222930908206, 0.035082046508789065, 0.03492489624023438, 0.034805023193359375, 0.03495600128173828, 0.03480985641479492, 0.03497369766235352, 0.034854911804199216, 0.03484262466430664, 0.034661823272705075, 0.03495692825317383, 0.03497600173950195, 0.03508089447021484, 0.03521916961669922, 0.03523990249633789, 0.035135807037353514, 0.035055103302001955, 0.035016384124755856, 0.03497452926635742, 0.034729984283447264, 0.034871295928955076, 0.034697216033935545, 0.034680385589599606, 0.03469500732421875, 0.0348391342163086, 0.03574716949462891, 0.04293289566040039, 0.03515596771240234, 0.03511705780029297, 0.03496345520019531, 0.03492659378051758, 0.03489712142944336, 0.03495811080932617, 0.03523571014404297, 0.03487347030639648, 0.034985633850097654, 0.035293537139892577, 0.036468734741210936, 0.03541196823120117, 0.03518873596191406, 0.035151870727539065, 0.03571712112426758, 
0.03545609664916992, 0.03518352127075195, 0.03547657775878906, 0.03527683258056641, 0.03563151931762695, 0.03567843246459961, 0.036155647277832034, 0.03561471939086914, 0.03563449478149414, 0.03531164932250976, 0.035969696044921874, 0.0353361930847168, 0.035356670379638674, 0.035373054504394534, 0.03539148712158203, 0.035418113708496096, 0.035272705078125, 0.0352174072265625, 0.03528476715087891, 0.03555964660644531, 0.03512089538574219, 0.03570937728881836, 0.0360447998046875, 0.03572265625, 0.03576278305053711, 0.03580083084106445, 0.03595836639404297, 0.03561743927001953, 0.03559564971923828, 0.0362400016784668, 0.0354238395690918, 0.03549020767211914, 0.03541312026977539, 0.035672958374023435, 0.03523971176147461, 0.035377376556396486, 0.03539712142944336, 0.035354526519775394, 0.03523644638061523, 0.035334144592285156, 0.03510265731811523, 0.03497785568237305, 0.03546486282348633, 0.035360385894775394, 0.03533488082885742, 0.035053569793701174, 0.03487731170654297, 0.034842750549316404, 0.0352624626159668, 0.03580313491821289, 0.035588096618652344, 0.03571712112426758, 0.03585228729248047, 0.035698528289794924, 0.03559404754638672, 0.03584975814819336, 0.03570156860351562, 0.036251487731933596, 0.03552854537963867, 0.035565887451171875, 0.0353361930847168, 0.03571494293212891, 0.03540185546875, 0.036713600158691406, 0.035255168914794924, 0.03515727996826172, 0.03499900817871094, 0.03482624053955078, 0.03489129638671875, 0.034724288940429685, 0.03495529556274414, 0.03479347229003906, 0.03538723373413086, 0.035497856140136716, 0.03532524871826172, 0.03544905471801758, 0.035388160705566406, 0.035383296966552735, 0.035522560119628906, 0.0363474235534668, 0.035452991485595706, 0.03544294357299805, 0.035417472839355466, 0.03538204956054687, 0.03548310470581055, 0.03530806350708008, 0.035359870910644534, 0.035103614807128904, 0.03514102554321289, 0.03508899307250977, 0.03507814407348633, 0.035356670379638674, 0.03519868850708008, 0.03584201431274414, 0.0354901123046875, 0.034852863311767575, 0.03524185562133789, 0.03504140853881836, 0.035817470550537106, 0.035436447143554685, 0.03568854522705078, 0.03542630386352539, 0.03586777496337891, 0.035418399810791014, 0.03551087951660156, 0.03546931076049804, 0.03523331069946289, 0.035434207916259765, 0.036165985107421875, 0.03574620819091797, 0.03547475051879883, 0.035321758270263674, 0.03532060623168945, 0.035547134399414065, 0.03562271881103515, 0.03872940826416016, 0.035694847106933596, 0.0353337287902832, 0.03530614471435547, 0.03527475357055664, 0.03536281585693359, 0.035409889221191405, 0.035223583221435546, 0.03500851058959961, 0.03552441787719727, 0.03519302368164062, 0.03513679885864258, 0.03509116744995117, 0.03528905487060547, 0.03563100814819336, 0.03521855926513672, 0.03508291244506836, 0.03502425765991211, 0.03507299041748047, 0.03529024124145508, 0.03559478378295899, 0.03614755249023437, 0.03545702362060547, 0.03540777587890625, 0.0354931526184082, 0.03532265472412109, 0.036428897857666016, 0.036653984069824216, 0.03551023864746094, 0.03532806396484375, 0.035280895233154294, 0.03515596771240234]",tokens/s,28.33044563895192,,, 
4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4287.602688,5792.202752,0.0,5389.68064,5000.446464,s,1,11.3664853515625,11.3664853515625,0.0,11.3664853515625,11.3664853515625,11.3664853515625,11.3664853515625,[11.3664853515625],,kWh,0.00012416470628333324,1.3688769109469043e-05,5.3929209810010814e-05,0.0001917826852028131,,MB,1430.171648,5811.07712,0.0,5393.874944,4706.596864,s,10,30.639380859375,3.0639380859375,0.0034788540541329013,3.064858154296875,3.0671171142578126,3.067231042480469,3.0673221850585937,"[3.05508642578125, 3.061507568359375, 3.062655517578125, 3.0633115234375, 3.064808837890625, 3.064907470703125, 3.067344970703125, 3.066302490234375, 3.067091796875, 3.0663642578125]",tokens/s,83.55260218049393,kWh,8.938060316500317e-05,9.858649946495967e-06,5.9529658734802646e-05,0.00015876891184630178,tokens/kWh,1612406.339648054,MB,1451.3152,5811.07712,0.0,5393.874944,4876.091904,s,10,19.602708618164062,1.9602708618164062,0.010871673097357119,1.9636372680664063,1.968801672363281,1.9745240661621093,1.9791019812011719,"[1.9802464599609375, 1.967530029296875, 1.9585501708984374, 1.9653387451171875, 1.9665380859375, 1.9641160888671876, 1.94756982421875, 1.944111572265625, 1.9455491943359375, 1.963158447265625]",tokens/s,32.13841578077816,kWh,5.716235474249366e-05,6.3052307525339865e-06,3.7719724620194356e-05,0.00010118731011522203,tokens/kWh,622607.715614358,,s,630,19.599750560760494,0.031110715175810315,0.0005143318263257343,0.03100860786437988,0.0314914342880249,0.03198289937973022,0.033081656494140625,"[0.032950401306152344, 0.03148614311218262, 0.031431840896606444, 0.031492671966552734, 0.031018272399902343, 0.031559968948364256, 0.031265024185180665, 0.03207827377319336, 0.0311582088470459, 0.03131510353088379, 0.03134969520568848, 0.03096076774597168, 0.031181695938110352, 0.031053823471069338, 0.031102975845336913, 0.0313691520690918, 0.031764543533325196, 0.03172515106201172, 0.03150467109680176, 0.03152704048156738, 0.03175628852844238, 0.03150364875793457, 0.03141705513000488, 0.03122092819213867, 0.031198015213012697, 0.031406080245971676, 0.03134668731689453, 0.03152067184448242, 0.03118060874938965, 0.03122537612915039, 0.030923519134521484, 0.031038496017456056, 0.031241184234619142, 0.031087936401367186, 0.031009344100952147, 0.033099903106689456, 0.031547391891479495, 0.0314204158782959, 0.03249094390869141, 0.03157792091369629, 0.031531776428222656, 0.03177881622314453, 0.03165388870239258, 0.03146668815612793, 0.03140060806274414, 0.031279264450073244, 0.03164121627807617, 0.03128767967224121, 0.031297536849975584, 0.031479551315307615, 0.031449344635009764, 0.0313384952545166, 0.031366304397583006, 0.031220575332641602, 0.031238143920898437, 0.03125200080871582, 0.03126115226745606, 0.031325279235839845, 0.031208351135253908, 0.031221759796142577, 0.03149129676818847, 0.03130447959899902, 0.030971551895141603, 
0.032557056427001956, 0.032110591888427735, 0.0315228157043457, 0.031426559448242186, 0.031112512588500976, 0.03122774314880371, 0.031072383880615236, 0.031187679290771483, 0.030956607818603515, 0.03127187156677246, 0.031036415100097657, 0.03120025634765625, 0.031070207595825194, 0.03128639984130859, 0.031028095245361327, 0.03115007972717285, 0.03098419189453125, 0.031228927612304686, 0.03097702407836914, 0.031186687469482423, 0.030967456817626953, 0.03137187194824219, 0.031021055221557618, 0.0310230712890625, 0.0309467830657959, 0.03118060874938965, 0.030979936599731445, 0.03119532775878906, 0.03142070388793945, 0.031193536758422853, 0.03102662467956543, 0.03136905670166015, 0.031270944595336914, 0.03126956748962402, 0.03127836799621582, 0.03131670379638672, 0.031297536849975584, 0.03153446388244629, 0.031179391860961914, 0.031294591903686525, 0.031370111465454105, 0.03124412727355957, 0.031197248458862306, 0.031336544036865234, 0.031188991546630858, 0.031175935745239258, 0.031148799896240233, 0.031297536849975584, 0.031252479553222655, 0.03128694343566894, 0.031128992080688478, 0.031402912139892575, 0.030996511459350586, 0.03121968078613281, 0.03100048065185547, 0.031174495697021486, 0.030957855224609376, 0.031148031234741212, 0.031088640213012695, 0.03136038398742676, 0.030964351654052733, 0.03114188766479492, 0.031410175323486327, 0.03224371337890625, 0.03156879997253418, 0.03147369575500488, 0.0322191047668457, 0.031160255432128907, 0.031062080383300782, 0.031086591720581053, 0.03095347213745117, 0.030935007095336912, 0.030868736267089844, 0.031025951385498046, 0.03091177558898926, 0.030952159881591796, 0.030964927673339845, 0.030856000900268556, 0.030930944442749023, 0.03105996894836426, 0.03085456085205078, 0.0308701114654541, 0.031070207595825194, 0.030787519454956055, 0.030827743530273437, 0.030794591903686525, 0.03074662399291992, 0.030742528915405274, 0.03077120018005371, 0.03076652717590332, 0.030726720809936523, 0.030932287216186523, 0.031210176467895506, 0.03139993667602539, 0.031113216400146484, 0.031113216400146484, 0.032745471954345705, 0.03156172752380371, 0.03143475151062012, 0.031815103530883786, 0.031273536682128907, 0.03101900863647461, 0.03139993667602539, 0.030838783264160157, 0.03077939224243164, 0.03101900863647461, 0.030871263504028322, 0.03086319923400879, 0.030849472045898437, 0.03080601692199707, 0.030887359619140624, 0.030872127532958985, 0.03395379257202148, 0.030910463333129884, 0.03077030372619629, 0.03082316780090332, 0.030905792236328125, 0.03075142478942871, 0.030691328048706053, 0.03069878387451172, 0.03081612777709961, 0.030812736511230468, 0.030816543579101564, 0.030691328048706053, 0.030824384689331054, 0.030758975982666015, 0.03215769577026367, 0.03137075233459473, 0.030922367095947267, 0.031029727935791014, 0.030937023162841797, 0.03081590461730957, 0.031007040023803712, 0.03105638313293457, 0.031036544799804687, 0.030933887481689452, 0.030901248931884766, 0.030958591461181642, 0.03217107009887695, 0.031187904357910155, 0.03128428840637207, 0.0312259521484375, 0.03147407913208008, 0.031107519149780275, 0.031645696640014646, 0.03170099258422852, 0.03127699279785156, 0.03116838455200195, 0.03161887931823731, 0.030844703674316406, 0.03069808006286621, 0.031157440185546875, 0.031284032821655275, 0.031531007766723636, 0.030870655059814452, 0.03089676856994629, 0.030711328506469727, 0.03068796730041504, 0.030955520629882813, 0.03140998458862305, 0.03145747184753418, 0.03218022537231445, 0.031442655563354495, 0.03107049560546875, 0.03107551956176758, 
0.03092953681945801, 0.031174848556518555, 0.03174399948120117, 0.030849023818969725, 0.031041536331176758, 0.031000160217285157, 0.030982559204101562, 0.03105177688598633, 0.031086175918579102, 0.03101532745361328, 0.031025152206420898, 0.03114988708496094, 0.031231168746948243, 0.031127840042114257, 0.031070144653320312, 0.031093536376953126, 0.03102060890197754, 0.030939584732055665, 0.031106752395629884, 0.031096736907958986, 0.030886144638061525, 0.030818464279174805, 0.033051712036132816, 0.03126777648925781, 0.03253247833251953, 0.03195699119567871, 0.031457279205322264, 0.03115519905090332, 0.031070560455322267, 0.031578784942626954, 0.031275007247924806, 0.03129270362854004, 0.031383712768554686, 0.031222335815429686, 0.031188991546630858, 0.031264768600463864, 0.03129343986511231, 0.031227840423583984, 0.031282623291015624, 0.03120560073852539, 0.031289087295532224, 0.031322784423828125, 0.03111631965637207, 0.031167455673217773, 0.031172607421875, 0.031156223297119142, 0.03121561622619629, 0.031144960403442383, 0.031099903106689454, 0.031123455047607423, 0.03113369560241699, 0.031072032928466796, 0.03121379280090332, 0.03258272171020508, 0.03140703964233398, 0.032215038299560544, 0.03139785575866699, 0.031313119888305666, 0.03139462471008301, 0.031131647109985353, 0.03095756721496582, 0.03100262451171875, 0.031029247283935548, 0.031143936157226562, 0.03109401512145996, 0.031046400070190428, 0.03090790367126465, 0.03101708793640137, 0.031193471908569335, 0.031280384063720704, 0.031015167236328123, 0.03144140815734863, 0.030963455200195313, 0.030879871368408203, 0.031024959564208983, 0.03102547264099121, 0.030930944442749023, 0.03112550354003906, 0.030877151489257813, 0.030902111053466796, 0.030894784927368163, 0.031074304580688477, 0.03081216049194336, 0.03100032043457031, 0.030910720825195314, 0.03085055923461914, 0.03079363250732422, 0.03224787139892578, 0.031668832778930664, 0.03120332717895508, 0.031174335479736328, 0.03104595184326172, 0.03098419189453125, 0.030871040344238283, 0.031150304794311523, 0.03139779281616211, 0.03143718338012695, 0.03146310424804687, 0.031321952819824216, 0.03128163146972656, 0.03122528076171875, 0.03130339241027832, 0.03126358413696289, 0.031389696121215824, 0.03123200035095215, 0.030912511825561522, 0.030810111999511718, 0.03102448081970215, 0.03094700813293457, 0.0309334716796875, 0.030958080291748048, 0.03086739158630371, 0.03099247932434082, 0.030889951705932617, 0.03080134391784668, 0.030853023529052736, 0.030882463455200196, 0.03077280044555664, 0.031142335891723633, 0.03146751976013184, 0.03122790336608887, 0.03090176010131836, 0.030939136505126953, 0.030775104522705078, 0.030864063262939452, 0.03092889595031738, 0.03074246406555176, 0.030763071060180665, 0.030930944442749023, 0.030883840560913086, 0.03082444763183594, 0.030877344131469725, 0.03096406364440918, 0.031002399444580078, 0.030742752075195313, 0.030971263885498045, 0.03118115234375, 0.03408060836791992, 0.03126931190490723, 0.03116646385192871, 0.031023103713989256, 0.030918655395507814, 0.030900224685668946, 0.03087129592895508, 0.03107046318054199, 0.030889984130859374, 0.03401318359375, 0.032066913604736326, 0.03118556785583496, 0.030920703887939452, 0.03233276748657227, 0.032004096984863284, 0.030815616607666015, 0.030800319671630858, 0.030761152267456054, 0.030885215759277343, 0.031079072952270508, 0.030683135986328124, 0.030661792755126954, 0.030786399841308595, 0.030835712432861328, 0.030897151947021483, 0.030769151687622072, 0.03089980888366699, 0.030888351440429687, 
0.030883712768554686, 0.030847103118896484, 0.030674272537231446, 0.03078620719909668, 0.031007871627807618, 0.03142307281494141, 0.030964000701904297, 0.03089161682128906, 0.03061801528930664, 0.030621696472167968, 0.030516895294189453, 0.03071830368041992, 0.03065385627746582, 0.03077347183227539, 0.0307142391204834, 0.03179475212097168, 0.03201875305175781, 0.03128537559509277, 0.031160320281982422, 0.031135744094848632, 0.03107948875427246, 0.03085548782348633, 0.030904096603393556, 0.031021535873413084, 0.03081999969482422, 0.03070844841003418, 0.03073801612854004, 0.030808000564575194, 0.03082828712463379, 0.030810848236083984, 0.03073347282409668, 0.030820512771606447, 0.03079648017883301, 0.030769151687622072, 0.030748479843139647, 0.030817792892456054, 0.030829248428344728, 0.030719999313354493, 0.03089142417907715, 0.03107254409790039, 0.03095583915710449, 0.030810111999511718, 0.030792991638183595, 0.030728063583374023, 0.030644351959228516, 0.03069206428527832, 0.030695072174072267, 0.03059542465209961, 0.0323837776184082, 0.031119295120239258, 0.03080182456970215, 0.03091059112548828, 0.030631967544555664, 0.030668800354003906, 0.03094870376586914, 0.030988224029541017, 0.030884576797485352, 0.030926847457885744, 0.0307589111328125, 0.030723264694213867, 0.030614336013793944, 0.03069094467163086, 0.030738815307617188, 0.03078348731994629, 0.03075481605529785, 0.03060495948791504, 0.030606752395629884, 0.030810111999511718, 0.03059337615966797, 0.03057315254211426, 0.030664064407348632, 0.030869407653808592, 0.03064057540893555, 0.030785823822021486, 0.030803232192993163, 0.030613536834716796, 0.03072060775756836, 0.03072127914428711, 0.03064713668823242, 0.03133235168457031, 0.030959072113037108, 0.03063417625427246, 0.030590335845947267, 0.030612415313720703, 0.03074051284790039, 0.03068707275390625, 0.03060089683532715, 0.030857568740844728, 0.03062182426452637, 0.030691328048706053, 0.030678655624389647, 0.030826879501342774, 0.031194400787353516, 0.030836448669433594, 0.030552703857421874, 0.030628223419189454, 0.03072204780578613, 0.030883840560913086, 0.030679040908813477, 0.030734336853027344, 0.030672895431518556, 0.03062099266052246, 0.0305948486328125, 0.030802240371704103, 0.030591583251953124, 0.03057663917541504, 0.031703039169311525, 0.03398860931396484, 0.03133807945251465, 0.030973344802856444, 0.030897151947021483, 0.03231830215454102, 0.031307775497436525, 0.031246335983276367, 0.031126943588256836, 0.0311845760345459, 0.030972063064575197, 0.03083955192565918, 0.030700672149658204, 0.030658527374267577, 0.03076380729675293, 0.03080380821228027, 0.030695711135864258, 0.030650367736816408, 0.030665855407714843, 0.030698368072509766, 0.030971839904785158, 0.030740543365478514, 0.030750335693359374, 0.030735807418823244, 0.030961984634399413, 0.030618240356445312, 0.030721887588500977, 0.030572704315185547, 0.030707168579101562, 0.030758815765380858, 0.030671104431152344, 0.030783872604370117, 0.030650016784667968, 0.030744928359985352, 0.03234956741333008, 0.03138569641113281, 0.031316383361816406, 0.03147174453735352, 0.0310164794921875, 0.030728672027587892, 0.030897567749023438, 0.030962272644042967, 0.030842880249023437, 0.03059507179260254, 0.03058073616027832, 0.030668800354003906, 0.03080806350708008, 0.03070691108703613, 0.030562847137451173, 0.0305850887298584, 0.03275548934936524, 0.03073593521118164, 0.030657184600830077, 0.03070057678222656, 0.0306779842376709, 0.030803743362426757, 0.03091254425048828, 0.030748863220214844, 0.030552064895629883, 
0.030447616577148437, 0.03064419174194336, 0.030604320526123045, 0.030607616424560547, 0.03081497573852539, 0.03079987144470215, 0.0307957763671875, 0.03075071907043457, 0.030756736755371095, 0.032376670837402345, 0.03128268814086914, 0.031185024261474608, 0.03080451202392578, 0.031014272689819336, 0.030853759765625, 0.030813312530517577, 0.03078780746459961, 0.030868127822875978, 0.030822368621826173, 0.0310960636138916, 0.030806304931640625, 0.030787071228027343, 0.030749696731567383, 0.030711328506469727, 0.03069340705871582, 0.03058118438720703, 0.03125846481323242, 0.031508544921875, 0.03131372833251953, 0.030845216751098633, 0.030810111999511718, 0.03121471977233887, 0.030727039337158202, 0.030785535812377928, 0.030846975326538087, 0.03076300811767578, 0.031405311584472656, 0.030923263549804687, 0.030986495971679687, 0.03141222381591797, 0.03137126350402832, 0.03139583969116211, 0.03122790336608887, 0.031236095428466795, 0.03123923110961914, 0.03106502342224121, 0.03093708801269531, 0.03076857566833496, 0.030898752212524413, 0.030756256103515626, 0.030818912506103517, 0.031041343688964843, 0.031129791259765626, 0.030889984130859374, 0.0309815673828125, 0.030923168182373048, 0.031146144866943358, 0.030947328567504883, 0.03094528007507324, 0.031086591720581053, 0.031213375091552736, 0.030992576599121094, 0.030930944442749023, 0.030999872207641603, 0.03098876762390137, 0.03102854347229004, 0.03309388732910156, 0.03697478485107422, 0.03130201530456543, 0.03106412887573242, 0.031078432083129885, 0.03134259223937988]",tokens/s,32.1432662138714,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,11038.277632,14904.328192,0.0,14501.80608,13634.065408,s,1,18.67803515625,18.67803515625,0.0,18.67803515625,18.67803515625,18.67803515625,18.67803515625,[18.67803515625],,kWh,0.0003357929400874961,3.703303754438427e-05,0.000154732068229968,0.0005275580458618484,,MB,2100.539392,14919.008256,0.0,14501.80608,12898.96192,s,10,6.8922728881835935,0.6892272888183594,0.0004406917007922054,0.6892185974121094,0.6898374572753907,0.6898863189697265,0.6899254083251952,"[0.688307373046875, 0.6891243286132812, 0.6891480712890625, 0.6889052734375, 0.688982177734375, 0.6899351806640625, 0.6892891235351563, 0.6898265991210938, 0.6893224487304688, 0.6894323120117187]",tokens/s,371.4304470429447,kWh,2.0141807482225178e-05,2.2207032715105486e-06,1.3322251398532417e-05,3.568476215226815e-05,tokens/kWh,7173930.39941359,MB,2103.410688,14919.008256,0.0,14501.80608,13241.259008,s,10,39.45968505859375,3.945968505859375,0.0021274817924527545,3.946323974609375,3.947997680664063,3.9487859497070312,3.9494165649414064,"[3.94348779296875, 3.941757568359375, 3.947822509765625, 3.94523583984375, 3.94957421875, 3.94626123046875, 3.947715087890625, 3.94638671875, 3.946417724609375, 
3.9450263671875]",tokens/s,15.965662145161879,kWh,0.00011498648193277277,1.2683835308882214e-05,7.65287371488695e-05,0.00020419905439052446,tokens/kWh,308522.4864925889,,s,630,39.432441539764355,0.06259117704724508,0.0004674883761991664,0.06259697532653809,0.06317264442443848,0.06325404319763184,0.06366601219177247,"[0.06346547317504883, 0.06186332702636719, 0.061628063201904296, 0.06209008026123047, 0.061900894165039064, 0.06187212753295898, 0.06162745666503906, 0.06224687957763672, 0.06183612823486328, 0.061810817718505856, 0.06183116912841797, 0.062091262817382815, 0.062107646942138675, 0.06182406234741211, 0.06207788848876953, 0.06190694427490234, 0.06229811096191406, 0.0625684471130371, 0.0626688003540039, 0.06257664108276367, 0.06205235290527344, 0.0621539192199707, 0.0618150405883789, 0.06221676635742188, 0.06256582260131836, 0.06259267044067383, 0.06236048126220703, 0.06257664108276367, 0.062330238342285155, 0.062345249176025394, 0.06252361679077148, 0.06273020935058594, 0.06261337661743165, 0.06258652877807618, 0.06252140808105469, 0.0625814094543457, 0.06265795135498047, 0.06271257781982421, 0.06393353652954102, 0.06278972625732422, 0.0627146873474121, 0.06288793563842773, 0.06265036773681641, 0.06279167938232422, 0.06272614288330078, 0.06281011199951173, 0.06279987335205078, 0.0630456314086914, 0.06285311889648437, 0.06295756912231446, 0.06271337509155274, 0.06295727920532226, 0.06285388946533203, 0.06308659362792969, 0.06296575927734376, 0.06311116790771484, 0.06308150482177734, 0.0633372802734375, 0.06311542510986329, 0.06320537567138672, 0.0631841926574707, 0.0634477424621582, 0.06332191848754883, 0.06258617782592774, 0.06196908950805664, 0.06159487915039062, 0.06164352035522461, 0.06197564697265625, 0.06180752182006836, 0.06219980621337891, 0.06187007904052735, 0.06209331130981445, 0.06181273651123047, 0.062070335388183594, 0.06196223831176758, 0.062083518981933594, 0.062033920288085936, 0.062083072662353515, 0.06222201538085938, 0.062093631744384765, 0.06230835342407227, 0.06204406356811523, 0.06217308807373047, 0.06206073760986328, 0.062054206848144534, 0.06215068817138672, 0.06228364944458008, 0.06222774505615234, 0.062491649627685546, 0.0622775993347168, 0.06211996841430664, 0.06302905654907226, 0.06242108917236328, 0.06231449508666992, 0.0626852798461914, 0.0625, 0.06276796722412109, 0.06264217758178711, 0.06267689514160156, 0.06261708831787109, 0.06277590560913086, 0.06265628814697266, 0.06284086227416992, 0.06275091171264649, 0.06273606491088868, 0.06275856018066406, 0.06266742324829101, 0.06268928146362304, 0.06283161544799805, 0.06295654296875, 0.0629043197631836, 0.06295072174072265, 0.06323635101318359, 0.06299488067626953, 0.06298406219482422, 0.06290547180175782, 0.06354374313354492, 0.063023681640625, 0.06310047912597656, 0.06298422241210938, 0.06306038284301758, 0.06320851135253906, 0.06307526397705078, 0.06301900863647461, 0.06322175979614258, 0.06330723190307617, 0.0627251853942871, 0.06189276885986328, 0.06220265579223633, 0.06192127990722656, 0.06217318344116211, 0.06193945693969727, 0.06199087905883789, 0.061964576721191406, 0.06211174392700195, 0.061973697662353514, 0.062290752410888675, 0.0622611198425293, 0.06289945602416992, 0.06199980926513672, 0.061962432861328125, 0.062475616455078126, 0.0621677131652832, 0.062223552703857425, 0.06224569702148437, 0.062220287322998044, 0.062134273529052736, 0.06237593460083008, 0.06222547149658203, 0.062343551635742185, 0.06242060852050781, 0.06244038391113281, 0.062371841430664064, 0.06251110458374023, 
0.062371841430664064, 0.06259507369995117, 0.06263939285278321, 0.06370275115966798, 0.06266976165771485, 0.06268320083618165, 0.0625802879333496, 0.06255865478515625, 0.06268310546875, 0.06284902572631836, 0.06257667160034179, 0.06278496170043946, 0.06266876983642578, 0.06279228973388672, 0.06283059310913086, 0.06387302398681641, 0.06281011199951173, 0.06290841674804687, 0.06283171081542968, 0.06282534408569336, 0.06286134338378906, 0.06314393615722656, 0.06324371337890625, 0.06303715133666993, 0.0629277114868164, 0.06305168151855468, 0.06306371307373047, 0.06314140701293945, 0.0629666862487793, 0.06315539169311524, 0.06322646331787109, 0.06309913635253907, 0.0629708480834961, 0.06337417602539062, 0.06318201446533203, 0.06258294296264648, 0.06211955261230469, 0.06166988754272461, 0.06179020690917969, 0.061999103546142575, 0.06171443176269531, 0.061814144134521486, 0.061919456481933595, 0.06168822479248047, 0.06224486541748047, 0.06200627136230469, 0.06214713668823242, 0.062042560577392575, 0.062289920806884766, 0.062333984375, 0.062303199768066406, 0.06214041519165039, 0.06237590408325195, 0.06232681655883789, 0.06224486541748047, 0.062281726837158206, 0.06238617706298828, 0.062437374114990236, 0.06266252899169922, 0.06233510589599609, 0.06237731170654297, 0.06262031936645508, 0.06265849685668945, 0.06231046295166016, 0.06254118347167968, 0.06262847900390625, 0.06255820846557616, 0.0625458869934082, 0.06255379104614257, 0.06242303848266602, 0.06250121688842773, 0.06256979370117187, 0.06278828811645508, 0.06262169647216796, 0.06264115142822266, 0.0626402244567871, 0.06270659255981445, 0.06277439880371094, 0.0627570571899414, 0.06298652648925782, 0.06301859283447266, 0.06317548751831055, 0.06303436660766601, 0.06290943908691406, 0.06307411193847656, 0.06310521697998046, 0.06310255813598632, 0.06308291244506836, 0.06315369415283204, 0.0629252815246582, 0.0630681266784668, 0.06311110305786133, 0.06319113540649414, 0.06330563354492187, 0.06317248153686524, 0.0632056007385254, 0.06316032028198242, 0.06357606506347656, 0.06273257446289063, 0.061881534576416014, 0.062053119659423825, 0.06172796630859375, 0.06202598571777344, 0.06198313522338867, 0.06212396621704101, 0.061904129028320314, 0.06173353576660156, 0.062099807739257815, 0.06186393737792969, 0.062114974975585935, 0.06192822265625, 0.06214652633666992, 0.062199424743652344, 0.06218595123291016, 0.06272204971313476, 0.06249859237670898, 0.06235696029663086, 0.062323009490966794, 0.06208966445922852, 0.062356639862060546, 0.06252767944335938, 0.06245238494873047, 0.06288383865356445, 0.06262752151489258, 0.06246022415161133, 0.06251871871948242, 0.06229663848876953, 0.0624576301574707, 0.062486751556396485, 0.06277939224243165, 0.06269952011108398, 0.06264611053466797, 0.06258470535278321, 0.06266006469726562, 0.0625302734375, 0.06303043365478515, 0.06278035354614257, 0.06266387176513671, 0.06266553497314453, 0.06271385574340821, 0.06259916687011718, 0.06503952026367188, 0.0627633934020996, 0.06287203216552735, 0.06278963088989258, 0.06299443054199219, 0.06297395324707031, 0.06308249664306641, 0.06295676803588868, 0.0630013771057129, 0.06291987228393554, 0.06343967819213867, 0.06322585678100585, 0.06309273529052735, 0.06301491165161133, 0.06318262481689453, 0.0632465591430664, 0.06330284881591797, 0.06308448028564453, 0.06429785919189453, 0.0633733139038086, 0.06244966506958008, 0.06201728057861328, 0.0616240005493164, 0.06173126220703125, 0.062023807525634765, 0.06179616165161133, 0.06252758407592773, 0.062281822204589846, 
0.06216294479370117, 0.06218547058105469, 0.06182297515869141, 0.06216019058227539, 0.061993663787841796, 0.06221823883056641, 0.062066207885742186, 0.06218390274047852, 0.06224486541748047, 0.062148609161376954, 0.062048255920410154, 0.0623325424194336, 0.062081409454345704, 0.062391998291015625, 0.062399967193603516, 0.062339424133300785, 0.06239039993286133, 0.062376319885253904, 0.062328830718994144, 0.06263398361206055, 0.06247350311279297, 0.062494686126708984, 0.06242790222167969, 0.06249676895141602, 0.0624780158996582, 0.06257244873046874, 0.06258524703979493, 0.06277119827270508, 0.0624640007019043, 0.06278758239746093, 0.06279484939575196, 0.06281494522094727, 0.06270995330810547, 0.06288793563842773, 0.06258224105834961, 0.06268982315063476, 0.06291791915893555, 0.06315900802612305, 0.06299647903442383, 0.062814208984375, 0.06309478378295899, 0.0631885108947754, 0.06295804977416992, 0.06306201553344727, 0.06297395324707031, 0.06336051177978516, 0.06309529495239258, 0.06327407836914062, 0.06308956909179687, 0.06328870391845703, 0.06326256179809571, 0.06322665786743165, 0.06340607833862305, 0.06398976135253906, 0.06328524780273438, 0.06270800018310548, 0.062007198333740236, 0.06216540908813477, 0.062443519592285154, 0.06172467041015625, 0.062127361297607424, 0.06195072174072266, 0.06221558380126953, 0.061973087310791014, 0.06210492706298828, 0.0621409912109375, 0.062140159606933594, 0.062117919921875, 0.062091583251953124, 0.06213187026977539, 0.06238652801513672, 0.06215679931640625, 0.062305473327636716, 0.06263891220092774, 0.0625022087097168, 0.061942462921142576, 0.06211155319213867, 0.06229119873046875, 0.062263774871826175, 0.06224899291992188, 0.06260924911499023, 0.06259286499023438, 0.06276121520996093, 0.06251776123046875, 0.06259862518310547, 0.06249526214599609, 0.06262124633789062, 0.062336544036865234, 0.06251993560791015, 0.0627100486755371, 0.06295705413818359, 0.06296755218505859, 0.06267571258544923, 0.06269740676879883, 0.06285113525390625, 0.06268313598632813, 0.06293628692626953, 0.06273923110961914, 0.06282553482055664, 0.06269785690307617, 0.06277731323242187, 0.06289059066772461, 0.06295676803588868, 0.0628067512512207, 0.06304569625854492, 0.06295068740844727, 0.06318124771118164, 0.0629271354675293, 0.06298796844482422, 0.06282803344726562, 0.06434899139404297, 0.0631838722229004, 0.06316339111328124, 0.06319014358520508, 0.06338444900512695, 0.06321152114868164, 0.06322700881958007, 0.06314812850952148, 0.0628674545288086, 0.06222438430786133, 0.06184511947631836, 0.06212441635131836, 0.06176483154296875, 0.062093887329101566, 0.06168115234375, 0.06187491226196289, 0.062144287109375, 0.06221343994140625, 0.06176860809326172, 0.06193503952026367, 0.06209088134765625, 0.062106559753417966, 0.06199017715454101, 0.06250979232788086, 0.06242057418823242, 0.06246236801147461, 0.06219161605834961, 0.0623267822265625, 0.06236569595336914, 0.06225244903564453, 0.06226739120483398, 0.062478240966796876, 0.06262160110473633, 0.06274121475219727, 0.06254188919067383, 0.06247219085693359, 0.062222335815429686, 0.062424510955810544, 0.06251686477661132, 0.06257759857177735, 0.06262579345703125, 0.0626767692565918, 0.06265235137939452, 0.06284467315673828, 0.06265478515625, 0.06275206375122071, 0.06282732772827149, 0.06284912109375, 0.062470142364501956, 0.0627886734008789, 0.0626077766418457, 0.06259766387939453, 0.06274415969848633, 0.06284064102172851, 0.06313840103149414, 0.06288179016113281, 0.06300227355957032, 0.0633716163635254, 0.0629695053100586, 
0.0631330223083496, 0.06321049499511719, 0.06312960052490234, 0.06312076950073242, 0.06319107055664062, 0.0631363525390625, 0.06317411041259766, 0.06305759811401367, 0.06314681625366211, 0.06324758529663085, 0.06322412872314453, 0.06319769668579102, 0.06253772735595703, 0.061704193115234375, 0.06157721710205078, 0.06156601715087891, 0.061860801696777344, 0.062015487670898435, 0.06230335998535156, 0.06241155242919922, 0.06189065551757812, 0.06229103851318359, 0.06219868850708008, 0.0624304313659668, 0.062178081512451175, 0.0625459213256836, 0.06254796981811524, 0.06229971313476562, 0.06217772674560547, 0.06237308883666992, 0.06205110549926758, 0.0621952018737793, 0.0623559684753418, 0.06292480087280274, 0.06257868957519531, 0.06254796981811524, 0.06253945541381836, 0.06242950439453125, 0.062255104064941405, 0.06243942260742188, 0.06250495910644531, 0.06256435012817382, 0.06259097671508788, 0.06267612838745117, 0.06267375946044922, 0.06256995010375976, 0.06250902557373048, 0.062454334259033205, 0.062443519592285154, 0.06266662216186523, 0.06282652664184571, 0.06307030487060547, 0.06272614288330078, 0.06284902572631836, 0.06273401641845704, 0.06272022247314453, 0.06291455841064453, 0.06302320098876953, 0.06268300628662109, 0.06322166442871094, 0.06299875259399414, 0.06290636825561523, 0.06264620971679688, 0.06307420730590821, 0.06291417694091797, 0.06315411376953126, 0.06297455978393554, 0.06311936187744141, 0.06303324890136719, 0.06324233627319335, 0.06302467346191407, 0.06311094284057617, 0.06325932693481445, 0.06330307388305664, 0.06313328170776367, 0.06259628677368165, 0.06185452651977539, 0.061599712371826175, 0.06218345642089844, 0.06168127822875977, 0.061809024810791015, 0.062361598968505856, 0.06214246368408203, 0.06182297515869141, 0.062184799194335935, 0.06254972839355469, 0.062023681640625, 0.062281822204589846, 0.06201020812988281, 0.06192876815795898, 0.061903553009033205, 0.062328830718994144, 0.062336158752441403, 0.062111808776855466, 0.06231119918823242, 0.06239215850830078, 0.062277793884277344, 0.06230767822265625, 0.062257823944091795, 0.06242236709594726, 0.062341407775878904, 0.062343551635742185, 0.06243721771240234, 0.06254198455810547, 0.06277497482299804, 0.06264233779907226, 0.06274473571777343, 0.06268108749389649, 0.06263593673706054, 0.06257263946533204, 0.06269664001464843, 0.06259164810180665, 0.06264438247680663, 0.0627581443786621, 0.06273920059204102, 0.06273567962646484, 0.0627632942199707, 0.06272019195556641, 0.0627652816772461, 0.06323404693603515, 0.0629309425354004, 0.06309174346923828, 0.06269641494750977, 0.06288179016113281, 0.06303283309936523, 0.06290819168090821, 0.06311180877685547, 0.06284297561645508, 0.06307561492919922, 0.06299516677856445, 0.06299359893798828, 0.06300707244873047, 0.06320099258422851, 0.06329420852661133, 0.06312515258789063, 0.06314364624023437, 0.06336511993408203, 0.06339238357543946]",tokens/s,15.976692677390934,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,917.942272,645.791744,0.0,260.046848,253.520896,s,1,7.6398896484375,7.6398896484375,0.0,7.6398896484375,7.6398896484375,7.6398896484375,7.6398896484375,[7.6398896484375],,kWh,1.4925311129203085e-05,1.6391860425746726e-06,4.769448260000253e-06,2.1333945431778012e-05,,MB,1314.820096,752.746496,0.0,335.54432,312.653824,s,11,0.17338416004180907,0.01576219636743719,0.00010828755403994382,0.015726911544799806,0.015928192138671873,0.015972047805786132,0.01600713233947754,"[0.015928192138671873, 0.01576998424530029, 0.015753120422363282, 0.015666848182678224, 0.015726911544799806, 0.01577996826171875, 0.01601590347290039, 0.015715167999267577, 0.015694592475891115, 0.015691871643066405, 0.015641599655151366]",tokens/s,16241.391366552527,kWh,4.691879787915708e-07,5.174288871720302e-08,3.1127525795172883e-07,8.322061254605026e-07,tokens/kWh,307616096.74327016,MB,1354.0352,777.91232,0.0,360.710144,313.442816,s,11,10.047428588867188,0.9134025989879262,0.004345663893589249,0.9123449096679688,0.9187431030273437,0.9213524780273438,0.9234399780273438,"[0.9100906982421875, 0.9187431030273437, 0.9104745483398438, 0.9103081665039062, 0.9143639526367188, 0.9239618530273438, 0.9102621459960938, 0.9148922729492187, 0.9084573364257813, 0.9135296020507813, 0.9123449096679688]",tokens/s,68.97287140391941,kWh,2.670209927727025e-05,2.9447908383669755e-06,1.0052899700045936e-05,3.969978981568316e-05,tokens/kWh,1586910.164826924,,s,693,10.041998024940497,0.014490617640606768,0.00037698296868806237,0.014425888061523437,0.014630841636657715,0.014777849578857423,0.01585579162597657,"[0.014133760452270508, 0.014444928169250488, 0.01440278434753418, 0.014394144058227538, 0.01437491226196289, 0.014693375587463378, 0.014393728256225586, 0.014377823829650879, 0.01475984001159668, 0.014436223983764648, 0.014702400207519532, 0.014491840362548828, 0.01446243190765381, 0.014415648460388184, 0.014488320350646972, 0.014489695549011231, 0.014511455535888671, 0.014706656455993653, 0.014398048400878907, 0.014368767738342286, 0.01444863986968994, 0.014398688316345215, 0.014403679847717284, 0.01441862392425537, 0.014358528137207031, 0.014567744255065919, 0.014409407615661621, 0.014446816444396972, 0.014539615631103515, 0.01437337589263916, 0.014397855758666991, 0.014482975959777832, 0.014565759658813477, 0.01458188819885254, 0.014736960411071778, 0.014565823554992675, 0.01447321605682373, 0.014516096115112304, 0.014422143936157227, 0.01447321605682373, 0.01446224021911621, 0.014435040473937987, 0.014320832252502441, 0.014344544410705566, 0.014332608222961426, 0.014365504264831543, 0.014367712020874024, 0.014290623664855957, 0.014322336196899415, 0.014298784255981445, 0.014306528091430664, 0.014439200401306152, 0.014419967651367188, 0.014407008171081544, 0.014440352439880372, 0.014390015602111816, 0.014388704299926757, 0.014375455856323242, 0.014439871788024902, 0.014312352180480957, 0.014315168380737304, 0.01439129638671875, 0.014405632019042968, 0.014103903770446777, 0.014360896110534668, 0.015357024192810058, 0.015840160369873048, 0.019570144653320312, 0.014544480323791504, 0.014433152198791504, 0.014898336410522461, 0.014449824333190918, 0.014433279991149902, 0.014445119857788086, 0.014473407745361329, 0.014467424392700195, 0.014517919540405273, 0.014469120025634765, 0.014518272399902344, 0.014446592330932618, 0.014419967651367188, 0.014452320098876953, 
0.014378399848937988, 0.014405823707580567, 0.014465503692626953, 0.014710687637329101, 0.014370431900024414, 0.014375328063964844, 0.014481311798095703, 0.014392095565795899, 0.014499551773071289, 0.014344191551208496, 0.014415871620178223, 0.014411775588989258, 0.014444543838500976, 0.014495200157165527, 0.014502655982971192, 0.014359840393066406, 0.014488063812255859, 0.01439948844909668, 0.014398943901062012, 0.014333760261535645, 0.014568160057067871, 0.014428352355957031, 0.014558239936828614, 0.015718591690063476, 0.014407551765441895, 0.014573951721191407, 0.014480031967163087, 0.014419648170471191, 0.014405887603759765, 0.014393088340759277, 0.014417920112609863, 0.014458880424499512, 0.014370816230773926, 0.014374496459960938, 0.014272671699523926, 0.01434995174407959, 0.014385791778564453, 0.014442496299743653, 0.014319616317749024, 0.014354528427124024, 0.014511296272277832, 0.01434928035736084, 0.014384896278381347, 0.014446144104003905, 0.01407910442352295, 0.014537440299987794, 0.014577823638916016, 0.014446304321289063, 0.014412991523742676, 0.01445356845855713, 0.01438310432434082, 0.014392864227294922, 0.014454848289489746, 0.014379424095153808, 0.014438400268554688, 0.014409728050231933, 0.014446784019470214, 0.014423359870910644, 0.014948736190795899, 0.014483584403991698, 0.01444649600982666, 0.014383551597595214, 0.014532256126403808, 0.014417119979858398, 0.01456003189086914, 0.014415871620178223, 0.014370047569274902, 0.014410112380981445, 0.014387359619140625, 0.01463094425201416, 0.014686400413513184, 0.01449728012084961, 0.014534655570983887, 0.01435916805267334, 0.01434556770324707, 0.014451519966125488, 0.014399104118347168, 0.01438044834136963, 0.014424544334411621, 0.014387264251708984, 0.014376799583435058, 0.01460649585723877, 0.014502112388610839, 0.014479488372802734, 0.014581567764282226, 0.014454431533813476, 0.014451040267944335, 0.014401247978210449, 0.014562591552734375, 0.014373567581176758, 0.014401951789855956, 0.01436355209350586, 0.014484479904174804, 0.014641471862792969, 0.014431679725646972, 0.014506239891052247, 0.014380576133728028, 0.014369248390197754, 0.014388416290283203, 0.014302047729492187, 0.014395359992980958, 0.014389599800109863, 0.014405440330505372, 0.014337087631225586, 0.014342944145202636, 0.014452735900878906, 0.014316800117492676, 0.014110624313354492, 0.014438400268554688, 0.014502143859863282, 0.014524160385131836, 0.01446713638305664, 0.014353695869445801, 0.014355104446411132, 0.014376959800720214, 0.014406975746154784, 0.014398143768310548, 0.014401535987854003, 0.014317567825317384, 0.014314720153808594, 0.01436297607421875, 0.014321824073791505, 0.014388704299926757, 0.014321567535400391, 0.014361120223999024, 0.014407999992370605, 0.01446713638305664, 0.014575615882873535, 0.01446224021911621, 0.014471808433532714, 0.014493503570556641, 0.014522111892700196, 0.014508223533630371, 0.014481535911560059, 0.014471136093139648, 0.014509344100952148, 0.014426624298095703, 0.01439948844909668, 0.014514304161071778, 0.014360159873962402, 0.014323679924011231, 0.014361375808715821, 0.014366623878479003, 0.01430742359161377, 0.014385151863098144, 0.014432255744934081, 0.014370112419128419, 0.014451040267944335, 0.014420543670654297, 0.014395071983337402, 0.01448748779296875, 0.014352543830871582, 0.014387136459350585, 0.014436256408691407, 0.014438559532165527, 0.014495743751525878, 0.014705727577209473, 0.014467935562133789, 0.014487648010253906, 0.014452735900878906, 0.01454694366455078, 0.014677824020385742, 
0.014560959815979003, 0.01466204833984375, 0.01468835163116455, 0.01449728012084961, 0.014460895538330078, 0.01448374366760254, 0.014460639953613281, 0.014449503898620606, 0.01406771183013916, 0.014417920112609863, 0.014618559837341309, 0.014340096473693848, 0.014468735694885253, 0.01446940803527832, 0.014503168106079102, 0.014453408241271972, 0.014485343933105469, 0.014537216186523438, 0.014415295600891113, 0.014391776084899902, 0.014438400268554688, 0.014462976455688477, 0.01447321605682373, 0.014442303657531737, 0.01450169563293457, 0.01447539234161377, 0.014471648216247558, 0.014386015892028809, 0.014426272392272949, 0.014416671752929688, 0.014327808380126953, 0.01438265609741211, 0.014376959800720214, 0.014560864448547364, 0.014521183967590333, 0.014622719764709472, 0.014876607894897462, 0.014564640045166015, 0.014746591567993165, 0.014579648017883301, 0.014527551651000977, 0.014511327743530273, 0.014361856460571289, 0.01438918399810791, 0.014462623596191406, 0.014597184181213379, 0.014485183715820313, 0.014577024459838868, 0.014538751602172852, 0.014613120079040528, 0.014622719764709472, 0.014618047714233398, 0.014532832145690918, 0.014647104263305665, 0.014512672424316406, 0.014544672012329101, 0.014573792457580566, 0.01460428810119629, 0.014835712432861328, 0.014630816459655761, 0.014788031578063965, 0.014621024131774902, 0.014685695648193359, 0.014484288215637207, 0.014411775588989258, 0.014423839569091797, 0.014336223602294923, 0.014454015731811523, 0.014410655975341797, 0.014424127578735351, 0.014438176155090332, 0.01421343994140625, 0.014446399688720702, 0.014352255821228028, 0.014388768196105958, 0.014349056243896484, 0.014321439743041993, 0.015272992134094238, 0.014435232162475586, 0.01454694366455078, 0.014429759979248048, 0.014363072395324706, 0.014913536071777344, 0.014357728004455566, 0.01445353603363037, 0.01445683193206787, 0.014407679557800293, 0.014482912063598632, 0.014444576263427735, 0.014858016014099121, 0.014662367820739746, 0.014659584045410156, 0.01477462387084961, 0.014721983909606934, 0.014729951858520508, 0.01459164810180664, 0.01454319953918457, 0.014858240127563477, 0.014512127876281738, 0.014698623657226562, 0.014842816352844238, 0.014738656044006348, 0.014733344078063966, 0.014767200469970702, 0.014689087867736816, 0.014677599906921386, 0.014970144271850587, 0.014733695983886718, 0.014630847930908204, 0.014449248313903809, 0.014378560066223145, 0.014360256195068359, 0.01481107234954834, 0.014505087852478027, 0.01433903980255127, 0.014438752174377442, 0.014346783638000488, 0.014393183708190918, 0.014377984046936035, 0.014400511741638184, 0.014444543838500976, 0.014537759780883789, 0.014398048400878907, 0.014364031791687012, 0.014358559608459472, 0.01433676815032959, 0.014373087882995605, 0.014366720199584961, 0.014351807594299316, 0.014470911979675294, 0.014408512115478516, 0.01443603229522705, 0.016736576080322266, 0.020052288055419924, 0.014106623649597168, 0.014399423599243164, 0.014426400184631347, 0.01451353645324707, 0.01445894432067871, 0.014480064392089844, 0.014597663879394531, 0.014497920036315918, 0.014438400268554688, 0.014421088218688965, 0.014388128280639649, 0.014547136306762696, 0.014421695709228516, 0.014434432029724121, 0.014499615669250489, 0.014364352226257324, 0.014520863533020019, 0.014440383911132813, 0.014344256401062011, 0.014339360237121582, 0.014357215881347656, 0.01439782428741455, 0.014380672454833984, 0.014401727676391602, 0.014419584274291993, 0.014352543830871582, 0.014352416038513184, 0.014466400146484376, 
0.014326432228088378, 0.014352191925048828, 0.01436691188812256, 0.01440287971496582, 0.014402239799499512, 0.014450048446655274, 0.014426591873168946, 0.014380736351013184, 0.014369248390197754, 0.014810591697692872, 0.014491840362548828, 0.014688608169555663, 0.014530719757080078, 0.01450278377532959, 0.014453472137451172, 0.014432512283325195, 0.014428159713745118, 0.014403583526611329, 0.014452927589416504, 0.014740991592407226, 0.014441856384277344, 0.014453856468200684, 0.014423808097839356, 0.014608480453491212, 0.014591487884521484, 0.014359040260314941, 0.014384672164916993, 0.014449119567871094, 0.014368864059448242, 0.014478752136230469, 0.014383168220520019, 0.01436518383026123, 0.014526559829711913, 0.014358367919921875, 0.014385151863098144, 0.01406499195098877, 0.015106783866882325, 0.016192832946777345, 0.014572159767150879, 0.01569705581665039, 0.014598591804504395, 0.0145382719039917, 0.014432543754577637, 0.014404191970825195, 0.014573568344116212, 0.014494976043701173, 0.014466912269592284, 0.014388128280639649, 0.014534655570983887, 0.014561280250549317, 0.014558624267578125, 0.01603555107116699, 0.014412672042846679, 0.0144650239944458, 0.014409728050231933, 0.01443660831451416, 0.014453727722167969, 0.01444534397125244, 0.014344191551208496, 0.0143189115524292, 0.014387904167175293, 0.014356639862060547, 0.01432969570159912, 0.014306559562683105, 0.014370880126953124, 0.01447107219696045, 0.01436684799194336, 0.014420127868652344, 0.014313983917236327, 0.014294495582580566, 0.014328672409057617, 0.014318495750427247, 0.014324031829833984, 0.014541279792785645, 0.014517567634582519, 0.01452236843109131, 0.014526944160461425, 0.014420191764831542, 0.014436415672302246, 0.015015872001647949, 0.014544896125793457, 0.014493696212768555, 0.014468928337097169, 0.014391008377075196, 0.014343903541564942, 0.014361087799072265, 0.014359871864318847, 0.01440783977508545, 0.01445471954345703, 0.014385024070739745, 0.014302176475524902, 0.014319616317749024, 0.014375167846679688, 0.014365632057189942, 0.014414624214172363, 0.014407967567443848, 0.014458720207214355, 0.014444448471069337, 0.01408409595489502, 0.014411904335021973, 0.014393343925476074, 0.01437887954711914, 0.014415871620178223, 0.014370623588562012, 0.014378399848937988, 0.014385951995849609, 0.014378623962402344, 0.014348959922790528, 0.014355808258056641, 0.014401920318603515, 0.014377120018005372, 0.014370112419128419, 0.014396096229553223, 0.014384736061096191, 0.014432512283325195, 0.014254079818725587, 0.014344191551208496, 0.014345855712890624, 0.014342880249023438, 0.014881855964660644, 0.014370688438415527, 0.014411968231201172, 0.014336480140686034, 0.014305248260498046, 0.01439680004119873, 0.014339936256408692, 0.014379327774047851, 0.014309856414794922, 0.014348383903503417, 0.014425888061523437, 0.014351743698120117, 0.014416735649108886, 0.014336383819580079, 0.014461600303649902, 0.014463359832763673, 0.014347935676574707, 0.014445119857788086, 0.014413472175598144, 0.014412256240844726, 0.01437887954711914, 0.014399807929992675, 0.014332096099853515, 0.01438092803955078, 0.014430208206176758, 0.014448384284973144, 0.014772480010986327, 0.014573568344116212, 0.014626079559326172, 0.014477375984191895, 0.014535615921020508, 0.014470879554748534, 0.01442732810974121, 0.014498592376708985, 0.014442527770996094, 0.014360671997070312, 0.014573247909545898, 0.014397664070129395, 0.014430208206176758, 0.014395423889160156, 0.014448351860046386, 0.014443872451782226, 0.014007840156555176, 
0.014376416206359863, 0.014461055755615234, 0.014338944435119629, 0.014336000442504883, 0.014381055831909179, 0.01445900821685791, 0.01444159984588623, 0.014453503608703613, 0.014517824172973632, 0.014494144439697265, 0.01457151985168457, 0.01451580810546875, 0.014575807571411133, 0.014544832229614257, 0.014569760322570801, 0.014503711700439454, 0.015067359924316407, 0.016132223129272462, 0.015384223937988281, 0.01457529640197754, 0.014513952255249023, 0.014430975914001465, 0.014467071533203125, 0.014376447677612305, 0.01439145565032959, 0.014386752128601073, 0.014401887893676758, 0.014572992324829101, 0.01460035228729248, 0.014443360328674316, 0.014412896156311035, 0.014403648376464843, 0.015786848068237304, 0.014356479644775391, 0.014417920112609863, 0.014745599746704101, 0.01447856044769287, 0.014394271850585937, 0.01431116771697998, 0.014315072059631348, 0.01432192039489746, 0.014354751586914063, 0.014366751670837402, 0.01431494426727295, 0.014396160125732423, 0.014291808128356934, 0.014461440086364746, 0.014340543746948242, 0.014470399856567383, 0.014404352188110352, 0.014364831924438477, 0.014362719535827637, 0.014278400421142578, 0.014333344459533692, 0.014313664436340332, 0.014327936172485352, 0.014514399528503417, 0.014294591903686524, 0.014323648452758788, 0.014330400466918945, 0.01431980800628662, 0.014647135734558106, 0.013995552062988281, 0.014330623626708984, 0.014316896438598632, 0.014360608100891114, 0.014340448379516601, 0.014279935836791992, 0.014307392120361329, 0.014311552047729492, 0.014318143844604492, 0.014319616317749024, 0.01430303955078125, 0.014420255661010743, 0.01433574390411377, 0.014358400344848633, 0.014440735816955567, 0.014308896064758301, 0.01437161636352539, 0.014552672386169434, 0.014443903923034669, 0.014332639694213866, 0.014280320167541504, 0.014293375968933106, 0.014251903533935547, 0.01435865592956543, 0.014325375556945802, 0.014416383743286134, 0.014368639945983887, 0.014382623672485351, 0.014331647872924805, 0.014266143798828125, 0.014317631721496582, 0.014375840187072754, 0.01454691219329834, 0.014467328071594239, 0.01690812873840332, 0.015411104202270508, 0.014534496307373046, 0.014522527694702149, 0.015048704147338866, 0.014444479942321778, 0.015030400276184082, 0.014782688140869141, 0.014397151947021484, 0.014331456184387206, 0.014383551597595214, 0.014327808380126953, 0.01439129638671875, 0.01442307186126709, 0.014380000114440917, 0.014352383613586426, 0.014393343925476074, 0.014358528137207031, 0.014327872276306152, 0.014372703552246093, 0.01450812816619873, 0.014390399932861328, 0.014492511749267578, 0.014861920356750487, 0.014534943580627442, 0.014684320449829102, 0.014522047996520996, 0.014452383995056153, 0.014545568466186523]",tokens/s,69.01017091208865,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3758.690304,4378.722304,0.0,3992.977408,3875.045888,s,1,9.8147177734375,9.8147177734375,0.0,9.8147177734375,9.8147177734375,9.8147177734375,9.8147177734375,[9.8147177734375],,kWh,7.752485757918445e-05,8.544551186845526e-06,2.5716965018005022e-05,0.000111786373784035,,MB,1973.571584,4603.117568,0.0,4188.012544,4099.58912,s,10,3.364724914550781,0.3364724914550781,0.0032697928118991084,0.33655128479003904,0.33938257751464845,0.3408152359008789,0.3419613626098633,"[0.3422478942871094, 0.336137451171875, 0.3356319580078125, 0.3364901123046875, 0.33589376831054685, 0.3284207458496094, 0.339064208984375, 0.33661245727539063, 0.337208251953125, 0.33701806640625]",tokens/s,760.8348572358051,kWh,1.0237454220402637e-05,1.1290065341091085e-06,6.817476718344834e-06,1.8183937472856578e-05,tokens/kWh,14078359.012295045,MB,1983.574016,4710.07232,0.0,4292.870144,4198.6176,s,10,27.749000488281254,2.7749000488281252,0.00607473265624769,2.7731916503906247,2.7813903076171873,2.7839662719726563,2.786027043457031,"[2.786542236328125, 2.78081787109375, 2.772927001953125, 2.771801513671875, 2.780426025390625, 2.770079345703125, 2.7762587890625, 2.773456298828125, 2.772892333984375, 2.763799072265625]",tokens/s,22.703520448098907,kWh,8.029023154209456e-05,8.856118171040925e-06,5.0364485693854304e-05,0.00013951083540698976,tokens/kWh,451577.8277451529,,s,630,27.745571228027355,0.04404058925083706,0.0005169595490615367,0.043934864044189455,0.04442426261901856,0.04467755146026611,0.04625859310150147,"[0.044488800048828124, 0.04408115386962891, 0.04404019165039062, 0.04410300827026367, 0.04422313690185547, 0.044077152252197264, 0.04412745666503906, 0.044355838775634766, 0.044069313049316404, 0.04461299133300781, 0.044296833038330076, 0.04427926254272461, 0.04457731246948242, 0.04441420745849609, 0.04398771286010742, 0.04400128173828125, 0.04396543884277344, 0.044047359466552735, 0.04446540832519531, 0.043981121063232424, 0.043905471801757814, 0.04482441711425781, 0.0464365119934082, 0.04413030242919922, 0.0442347526550293, 0.043935264587402344, 0.043944416046142576, 0.04388249588012695, 0.04393574523925781, 0.04412211227416992, 0.04406175994873047, 0.04401379013061523, 0.04447449493408203, 0.04433673477172852, 0.04426601409912109, 0.04421065521240235, 0.0442081298828125, 0.04409171295166016, 0.04399481582641602, 0.04412815856933594, 0.04408531188964844, 0.04402588653564453, 0.04389273452758789, 0.04419952011108398, 0.04411638259887695, 0.04439820861816406, 0.04404870223999023, 0.04525881576538086, 0.044119808197021486, 0.0440629768371582, 0.04401152038574219, 0.043954177856445314, 0.04391321563720703, 0.04386969757080078, 0.04415923309326172, 0.04444931030273438, 0.044276256561279294, 0.04467526245117188, 0.04465628814697266, 0.04432112121582031, 0.04409084701538086, 0.0441492805480957, 0.0441258544921875, 0.044217025756835934, 0.044077056884765625, 0.04390835189819336, 0.043940608978271484, 0.04405833435058594, 0.04391759872436524, 0.04399827194213867, 0.04407107162475586, 0.04584223937988281, 0.046282878875732424, 0.04427222442626953, 0.04417318344116211, 0.04415497589111328, 0.044263328552246094, 0.04404572677612305, 0.044207073211669924, 0.044106975555419925, 0.04402438354492187, 0.04423907089233398, 0.04400918579101563, 0.04405072021484375, 0.04411801528930664, 0.04389273452758789, 0.0442429428100586, 0.044075008392333984, 0.04396236801147461, 0.04402985763549805, 0.04391254425048828, 0.04396464157104492, 0.04446665573120117, 0.04396976089477539, 
0.044174175262451175, 0.04492902374267578, 0.044453887939453124, 0.0439766731262207, 0.044001312255859376, 0.04382720184326172, 0.043853599548339846, 0.0438389778137207, 0.04389913558959961, 0.04385225677490234, 0.0437841911315918, 0.04380780792236328, 0.04548246383666992, 0.044025409698486326, 0.043856800079345705, 0.04371660614013672, 0.043681793212890625, 0.04386406326293945, 0.043687934875488284, 0.04356876754760742, 0.046020992279052736, 0.044197887420654294, 0.0441646728515625, 0.04392291259765625, 0.04375651168823242, 0.04391459274291992, 0.04407910537719727, 0.04406095886230469, 0.043980289459228515, 0.04383628845214844, 0.04399718475341797, 0.04380847930908203, 0.044109825134277345, 0.043812862396240236, 0.04404633712768555, 0.04397260665893555, 0.0442531852722168, 0.04412211227416992, 0.04432028961181641, 0.044577247619628904, 0.044184703826904294, 0.044163326263427734, 0.04417747116088867, 0.04395475387573242, 0.04389651107788086, 0.04382751846313476, 0.04393369674682617, 0.04384707260131836, 0.043727455139160154, 0.043842750549316405, 0.04368057632446289, 0.0438947525024414, 0.043810272216796876, 0.043952350616455076, 0.043880062103271486, 0.04384841537475586, 0.04408278274536133, 0.04405699157714844, 0.04392918395996094, 0.04410204696655273, 0.04395212936401367, 0.04417331314086914, 0.04422246551513672, 0.0444661750793457, 0.04420608139038086, 0.04430160140991211, 0.044060768127441405, 0.043958911895751955, 0.043911167144775394, 0.04398662567138672, 0.043902305603027346, 0.04517577743530273, 0.043885921478271486, 0.043791007995605466, 0.04390092849731445, 0.0437841911315918, 0.04392755126953125, 0.043824958801269534, 0.04394412612915039, 0.04378214263916016, 0.043810272216796876, 0.04377788925170899, 0.043813568115234375, 0.04385817718505859, 0.04382080078125, 0.04445731353759766, 0.04411663818359375, 0.043993087768554685, 0.043974655151367184, 0.04408428955078125, 0.04389779281616211, 0.04392550277709961, 0.04386544036865234, 0.04390134429931641, 0.044148990631103516, 0.045158977508544924, 0.043732864379882816, 0.04467942428588867, 0.04398387145996094, 0.04407542419433594, 0.044485214233398435, 0.0440294075012207, 0.044055072784423825, 0.04398489761352539, 0.043921409606933595, 0.04386761474609375, 0.04366732788085938, 0.04380508804321289, 0.04413056182861328, 0.044300289154052735, 0.043640830993652346, 0.043869281768798826, 0.04379331207275391, 0.04387184143066406, 0.04373750305175781, 0.043886592864990234, 0.043640830993652346, 0.04382838439941406, 0.0436715202331543, 0.04369087982177734, 0.04393369674682617, 0.04387430572509766, 0.0443526725769043, 0.043875167846679684, 0.04381491088867188, 0.04391321563720703, 0.04398899078369141, 0.04401356887817383, 0.044050430297851564, 0.044036094665527346, 0.04414035034179688, 0.04490627288818359, 0.04561756896972656, 0.044543998718261715, 0.0441712646484375, 0.044381248474121095, 0.04394851303100586, 0.04405705642700195, 0.04384105682373047, 0.04393212890625, 0.044058624267578124, 0.043837440490722655, 0.04404633712768555, 0.04388016128540039, 0.04360630416870117, 0.04332339096069336, 0.04346879959106445, 0.04378214263916016, 0.043921409606933595, 0.043956031799316404, 0.043888832092285154, 0.043888641357421876, 0.04386816024780273, 0.04397260665893555, 0.04379033660888672, 0.04381827163696289, 0.04367228698730469, 0.043797664642333985, 0.044219615936279294, 0.044257984161376954, 0.044717727661132814, 0.04441670227050781, 0.044192703247070315, 0.044379806518554686, 0.04417366409301758, 0.044283905029296876, 0.04398899078369141, 
0.04434534454345703, 0.04415078353881836, 0.04401087951660156, 0.04392819213867188, 0.04776959991455078, 0.04415078353881836, 0.04393164825439453, 0.04375056076049805, 0.043987297058105466, 0.04387830352783203, 0.04391996765136719, 0.043705886840820315, 0.04382076644897461, 0.044106273651123046, 0.044019935607910156, 0.04799692916870117, 0.04552499389648437, 0.04393891143798828, 0.04404316711425781, 0.04424828720092774, 0.04423350524902344, 0.04391843032836914, 0.04398102569580078, 0.043755966186523436, 0.044206336975097654, 0.043720703125, 0.043822463989257814, 0.043845375061035155, 0.04403081512451172, 0.043808799743652344, 0.043812862396240236, 0.0437608642578125, 0.04363087844848633, 0.043620864868164064, 0.04367488098144531, 0.04415359878540039, 0.0440909423828125, 0.0437825927734375, 0.04431257629394531, 0.04386537551879883, 0.043770111083984375, 0.04370275115966797, 0.044158367156982424, 0.04420463943481445, 0.043819007873535154, 0.043689342498779295, 0.0438524169921875, 0.043993087768554685, 0.04389590454101563, 0.043652000427246096, 0.04398854446411133, 0.04388889694213867, 0.043798015594482424, 0.04381151962280273, 0.04450067138671875, 0.044564800262451174, 0.04413235092163086, 0.04390265655517578, 0.044079425811767575, 0.04398854446411133, 0.04383379364013672, 0.04389625549316406, 0.043900608062744144, 0.04408115386962891, 0.043872928619384764, 0.0440588493347168, 0.04379852676391602, 0.04387635040283203, 0.0439846076965332, 0.043939167022705075, 0.04388755035400391, 0.0437369270324707, 0.04386012649536133, 0.04414025497436523, 0.043993343353271486, 0.044074878692626954, 0.04424867248535156, 0.04395065689086914, 0.04383334350585937, 0.04381206512451172, 0.04376041412353516, 0.04396799850463867, 0.04396895980834961, 0.04444508743286133, 0.043839969635009766, 0.04396051025390625, 0.04397260665893555, 0.04453567886352539, 0.04396364974975586, 0.044046657562255856, 0.04384758377075195, 0.04395894241333008, 0.043905025482177736, 0.043974655151367184, 0.043853729248046876, 0.04389078521728516, 0.04398694229125977, 0.04474259185791016, 0.043948097229003905, 0.04392451095581055, 0.04396335983276367, 0.043740928649902346, 0.04377356719970703, 0.04386268615722656, 0.04380054473876953, 0.04355276870727539, 0.04357324981689453, 0.04366745758056641, 0.0446005744934082, 0.04355337524414062, 0.04378844833374024, 0.044030975341796875, 0.043840511322021485, 0.043796478271484376, 0.04381491088867188, 0.04393075180053711, 0.044036991119384764, 0.044332225799560546, 0.04409225463867188, 0.04427881622314453, 0.044149696350097654, 0.044083198547363284, 0.043853824615478515, 0.04382268905639648, 0.04383785629272461, 0.04374518585205078, 0.043855968475341796, 0.043835391998291014, 0.043915199279785155, 0.04373481750488281, 0.04363292694091797, 0.04410163116455078, 0.04383334350585937, 0.04386111831665039, 0.04380966567993164, 0.04423680114746094, 0.0441794548034668, 0.044867584228515625, 0.043872001647949216, 0.04409779357910156, 0.0437691535949707, 0.04368371200561524, 0.04386284637451172, 0.04404396820068359, 0.043769889831542966, 0.04383158493041992, 0.04394803237915039, 0.045817344665527344, 0.04619913482666015, 0.04395801544189453, 0.04392377471923828, 0.043921504974365234, 0.043788288116455076, 0.043870208740234375, 0.04410367965698242, 0.04392959976196289, 0.04384953689575195, 0.043882686614990236, 0.044060672760009766, 0.04424844741821289, 0.04445043182373047, 0.04381411361694336, 0.04369241714477539, 0.04391977691650391, 0.04378620910644531, 0.043923263549804685, 0.04370438385009766, 
0.04383452987670899, 0.044336128234863284, 0.04705855941772461, 0.04397225570678711, 0.0437314567565918, 0.043952350616455076, 0.0437760009765625, 0.04386556625366211, 0.043923999786376955, 0.04411180877685547, 0.04388665771484375, 0.04375680160522461, 0.04396723175048828, 0.0445370864868164, 0.04424576187133789, 0.04415871810913086, 0.04376355361938476, 0.043859870910644534, 0.04381932830810547, 0.043735233306884766, 0.04368313598632813, 0.043743358612060544, 0.0440489616394043, 0.04388988876342773, 0.04364777755737305, 0.04377376174926758, 0.04379391860961914, 0.043923873901367184, 0.04443510437011719, 0.044007328033447264, 0.044520160675048825, 0.04400128173828125, 0.04459929656982422, 0.043829246520996096, 0.043911167144775394, 0.043646175384521486, 0.043932449340820315, 0.04377190399169922, 0.04371865463256836, 0.04365107345581055, 0.04368323135375977, 0.04534105682373047, 0.04503359985351563, 0.04392764663696289, 0.04374323272705078, 0.04407484817504883, 0.04457283020019531, 0.04394364929199219, 0.04410780715942383, 0.04396467208862305, 0.044076416015625, 0.04384832000732422, 0.0441354866027832, 0.044979072570800784, 0.04522195053100586, 0.04408684921264648, 0.043856319427490235, 0.043904159545898436, 0.043899742126464844, 0.04410572814941406, 0.04427727890014648, 0.04370857620239258, 0.0437391357421875, 0.043568607330322265, 0.04369699096679688, 0.04372889709472656, 0.04382304000854492, 0.04383929443359375, 0.04387456130981445, 0.04399686431884765, 0.04382137680053711, 0.04358553695678711, 0.04374528121948242, 0.04382041549682617, 0.04412678527832031, 0.04456403350830078, 0.04443411254882813, 0.0439846076965332, 0.04400156784057617, 0.04375961685180664, 0.04389068984985352, 0.0438493766784668, 0.043848033905029296, 0.04379238510131836, 0.04389273452758789, 0.04386406326293945, 0.04391657638549805, 0.04572335815429687, 0.04414361572265625, 0.04406272125244141, 0.04400310516357422, 0.04428752136230469, 0.04394054412841797, 0.04386816024780273, 0.04382662582397461, 0.0438476791381836, 0.04384588623046875, 0.04398473739624023, 0.04400585556030273, 0.04396851348876953, 0.04375961685180664, 0.04384697723388672, 0.043985023498535156, 0.043962944030761716, 0.04382515335083008, 0.043851680755615234, 0.04386368179321289, 0.043837921142578125, 0.043875423431396485, 0.04455516815185547, 0.04417536163330078, 0.04419359970092773, 0.04413043212890625, 0.044670368194580076, 0.04491535949707031, 0.04391676712036133, 0.04378268814086914, 0.04394803237915039, 0.0438903694152832, 0.043899200439453126, 0.04401356887817383, 0.044052448272705075, 0.04392963027954101, 0.04386991882324219, 0.044550079345703125, 0.04388671875, 0.04378646469116211, 0.04404435348510742, 0.043710399627685546, 0.044423168182373046, 0.04394921493530273, 0.043991905212402344, 0.0439007682800293, 0.04401894378662109, 0.04386908721923828, 0.04372860717773437, 0.04354076766967773, 0.043584991455078125, 0.043897377014160154, 0.04397244644165039, 0.04366739273071289, 0.04363695907592773, 0.04370636749267578, 0.043665409088134766, 0.04379238510131836, 0.04353023910522461, 0.04366745758056641, 0.04372480010986328, 0.04829747009277344, 0.04383180618286133, 0.043523681640625, 0.044329376220703126, 0.04354646301269531, 0.04368108749389649, 0.044198753356933594, 0.04367148971557617, 0.043736385345458983, 0.043638782501220705, 0.04404633712768555, 0.043934463500976566, 0.04361625671386719, 0.043474945068359375, 0.04343603134155274, 0.04340326309204102, 0.04335766220092773, 0.043502113342285154, 0.043407360076904294, 0.04337635040283203, 
0.04334147262573242, 0.0432151985168457, 0.043391265869140626, 0.04571884918212891, 0.045773502349853515, 0.04374937438964844, 0.04383910369873047, 0.04366144180297851, 0.04363699340820312, 0.04362236785888672, 0.04359990310668945, 0.043560321807861326, 0.043425697326660156, 0.04343881607055664, 0.044205760955810545, 0.04366553497314453, 0.04420390319824219, 0.04351212692260742, 0.04353638458251953, 0.04353023910522461, 0.043943489074707034, 0.04343558502197266, 0.04354956817626953, 0.043603073120117186, 0.043694976806640626, 0.043374591827392575, 0.043351104736328125, 0.0480777587890625, 0.04410572814941406, 0.04415283203125, 0.043720703125, 0.04347036743164062, 0.04350556945800781, 0.043498046875]",tokens/s,22.706326527658653,,, 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in 
_call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,2221.73184,2487.0912,0.0,2101.346304,1978.345472,s,1,8.6955439453125,8.6955439453125,0.0,8.6955439453125,8.6955439453125,8.6955439453125,8.6955439453125,[8.6955439453125],,kWh,4.702572449174719e-05,5.180081841825118e-06,1.5379456748010334e-05,6.758526308158264e-05,,MB,2219.225088,2824.732672,0.0,2409.627648,2250.334208,s,10,1.735576477050781,0.17355764770507814,0.000830941837277008,0.1737763214111328,0.1743269760131836,0.17459412918090822,0.1748078517150879,"[0.17418492126464843, 0.17372563171386718, 0.17382701110839843, 0.1726878662109375, 0.1721871337890625, 0.17323350524902345, 0.17248374938964844, 0.17486128234863282, 0.17426760864257812, 0.17411776733398437]",tokens/s,1475.0142294796133,kWh,5.296739780207341e-06,5.841332861885616e-07,3.5216645236775334e-06,9.402537590073437e-06,tokens/kWh,27226692.53354195,MB,2233.30304,2868.772864,0.0,2451.570688,2333.38624,s,10,23.732358154296872,2.3732358154296875,0.008737109544356372,2.3728175048828124,2.3830573730468747,2.3872508544921875,2.3906056396484376,"[2.368879150390625, 2.38212548828125, 2.369197998046875, 2.376924560546875, 2.3765166015625, 2.36181103515625, 2.36426513671875, 2.37643701171875, 2.3647568359375, 2.3914443359375]",tokens/s,26.546034570354525,kWh,6.9370192682292e-05,7.65146764306109e-06,3.514136640672504e-05,0.00011216302673207816,tokens/kWh,561682.4174198419,,s,630,23.72861008453371,0.037664460451640784,0.0005523026267992928,0.03757385635375977,0.03805219650268555,0.03828668251037598,0.040355060234069824,"[0.03778355026245117, 0.03743743896484375, 0.03737180709838867, 0.037185054779052734, 0.03736787033081055, 0.037318878173828125, 0.03757289505004883, 0.03787353515625, 0.0380530891418457, 0.03793398284912109, 0.0376627197265625, 0.03732089614868164, 0.03741676712036133, 0.037394432067871096, 0.03722854232788086, 0.037236736297607424, 0.03715071868896484, 0.03751935958862305, 0.03719372940063476, 0.03734921646118164, 0.03721231842041016, 0.04070195388793945, 0.03788800048828125, 0.03774998474121094, 0.037327648162841794, 0.03724697494506836, 0.03725107192993164, 0.03719987106323242, 0.03738768005371094, 0.037319263458251956, 0.03736883163452148, 0.0374015998840332, 0.03737702560424805, 0.03738467025756836, 0.037781471252441405, 0.03784761428833008, 0.03781411361694336, 0.03776028823852539, 0.0376943359375, 0.03764416122436524, 0.03776934432983398, 0.03764828872680664, 0.0373526725769043, 0.037432193756103516, 0.03769343948364258, 0.03742252731323242, 0.037450302124023435, 0.03724867248535156, 0.03756886291503906, 0.037718017578125, 0.037703678131103514, 0.03800064086914062, 0.03750912094116211, 0.037449726104736326, 0.03789814376831055, 0.03748387145996094, 0.03740934371948242, 0.03745801544189453, 0.03750092697143555, 0.03758675384521484, 0.03792460632324219, 0.03874256134033203, 0.037883678436279294, 0.03789744186401367, 
0.037688255310058594, 0.0376563835144043, 0.03790156936645508, 0.03781932830810547, 0.03999087905883789, 0.03781264114379883, 0.03759308624267578, 0.03767276763916016, 0.03760115051269531, 0.03792736053466797, 0.037887039184570315, 0.037653247833251954, 0.037603263854980466, 0.037507198333740235, 0.040052734375, 0.03901801681518555, 0.037720542907714844, 0.03762176132202148, 0.03746515274047851, 0.03758156967163086, 0.037669055938720705, 0.037580448150634764, 0.03740835189819336, 0.037412670135498045, 0.037465023040771483, 0.03753094482421875, 0.03744348907470703, 0.03761875152587891, 0.037607070922851565, 0.03761772918701172, 0.03748659133911133, 0.03751116943359375, 0.037541889190673826, 0.03757833480834961, 0.03746572875976562, 0.03744364929199219, 0.03751804733276367, 0.037757984161376955, 0.03755311965942383, 0.037596160888671876, 0.037618686676025394, 0.03993804931640625, 0.03831398391723633, 0.037600639343261716, 0.03789683151245117, 0.038016094207763675, 0.03829238510131836, 0.03818636703491211, 0.03772998428344727, 0.037739456176757814, 0.03765043258666992, 0.038653953552246094, 0.037612991333007814, 0.03749753570556641, 0.03743270492553711, 0.037818878173828126, 0.03746201705932617, 0.037533695220947266, 0.03751443099975586, 0.03759596633911133, 0.03758601760864258, 0.03766159820556641, 0.03802627182006836, 0.03797091293334961, 0.037822463989257815, 0.037625473022460935, 0.037935489654541014, 0.038043296813964844, 0.03798422241210937, 0.038279712677001955, 0.03780387115478516, 0.03759513473510742, 0.037615230560302734, 0.03757503890991211, 0.03757212829589844, 0.03746249771118164, 0.03737395095825195, 0.03742105484008789, 0.037326847076416016, 0.03741219329833984, 0.03749955368041992, 0.03730636978149414, 0.03790835189819336, 0.03755747222900391, 0.03743017578125, 0.03727769470214844, 0.037594593048095704, 0.037620254516601566, 0.03745974349975586, 0.03749910354614258, 0.037566463470458986, 0.037160736083984375, 0.03751139068603516, 0.03764217758178711, 0.03791999816894531, 0.038380126953125, 0.03753801727294922, 0.03736576080322265, 0.037275646209716795, 0.03731455993652344, 0.037195201873779296, 0.0371894416809082, 0.037248863220214846, 0.0370431022644043, 0.038752254486083985, 0.03716684722900391, 0.037265663146972654, 0.03723263931274414, 0.037326847076416016, 0.037394432067871096, 0.037521343231201175, 0.037249088287353516, 0.038742015838623044, 0.03741900634765625, 0.03786137771606445, 0.0375002555847168, 0.03740124893188477, 0.037418014526367185, 0.037481441497802734, 0.0372795524597168, 0.037513408660888675, 0.038163841247558596, 0.03807910537719727, 0.03788934326171875, 0.037878463745117184, 0.03803955078125, 0.0378787841796875, 0.03812374496459961, 0.03745462417602539, 0.03761356735229492, 0.03733913421630859, 0.03751731109619141, 0.037378047943115236, 0.03751059341430664, 0.037411392211914064, 0.037369857788085936, 0.03736166381835938, 0.03768115234375, 0.03740444946289063, 0.037525726318359376, 0.037580799102783204, 0.03800678253173828, 0.03833651351928711, 0.03823961639404297, 0.03804428863525391, 0.03803257751464844, 0.037921470642089845, 0.03801007843017578, 0.0376610221862793, 0.03789494323730469, 0.037272960662841796, 0.037282207489013675, 0.037396320343017576, 0.0404317741394043, 0.03825254440307617, 0.03779321670532226, 0.03771039962768555, 0.0380579833984375, 0.03785103988647461, 0.0376607666015625, 0.03757056045532227, 0.037628097534179686, 0.037695297241210936, 0.03784294509887695, 0.037760257720947266, 0.03768396759033203, 0.037598880767822265, 
0.0381115837097168, 0.037598880767822265, 0.03741088104248047, 0.03752783966064453, 0.03786751937866211, 0.037771263122558595, 0.037768192291259765, 0.037571582794189456, 0.037515262603759765, 0.03747580718994141, 0.03738473510742187, 0.03765619277954101, 0.03761910247802734, 0.0374496955871582, 0.03744211196899414, 0.037386207580566405, 0.03764204788208008, 0.03750883102416992, 0.037622718811035155, 0.037375167846679686, 0.03783327865600586, 0.038159904479980467, 0.0377042236328125, 0.038621536254882814, 0.03942931365966797, 0.03824723052978515, 0.03792006301879883, 0.038055999755859375, 0.03808240127563477, 0.03804998397827149, 0.03758147048950195, 0.03771590423583984, 0.037760704040527344, 0.037805919647216794, 0.03804140853881836, 0.03755894470214844, 0.03775619125366211, 0.040358623504638674, 0.03788800048828125, 0.03782451248168945, 0.0374128646850586, 0.03759308624267578, 0.03762172698974609, 0.03724006271362305, 0.03786783981323242, 0.03732236862182617, 0.037191967010498046, 0.03760569763183594, 0.03785958480834961, 0.03763558578491211, 0.03760784149169922, 0.037518943786621094, 0.03747001647949219, 0.03768131256103516, 0.03743388748168945, 0.03739980697631836, 0.03742118453979492, 0.037366401672363284, 0.03751321411132812, 0.03757660675048828, 0.03796780776977539, 0.037322910308837894, 0.0373185920715332, 0.037298240661621095, 0.03727974319458008, 0.03745382308959961, 0.037428478240966796, 0.037623584747314455, 0.03864675140380859, 0.037541889190673826, 0.037648384094238284, 0.03750044631958008, 0.03726777648925781, 0.037488800048828125, 0.03748803329467774, 0.03788246536254883, 0.03790217590332031, 0.03781206512451172, 0.03760316848754883, 0.037515487670898434, 0.0373221435546875, 0.03731455993652344, 0.037257888793945315, 0.03743939208984375, 0.03798444747924805, 0.03785324859619141, 0.03779331207275391, 0.03750384140014648, 0.037498878479003905, 0.037343231201171875, 0.03782860946655273, 0.03730022430419922, 0.03750502395629883, 0.03708480072021485, 0.04037980651855469, 0.038213920593261716, 0.03767337417602539, 0.03736604690551758, 0.03785871887207031, 0.03779769515991211, 0.037825313568115235, 0.037658401489257816, 0.037650367736816404, 0.03745737457275391, 0.037343360900878905, 0.03737260818481445, 0.037466110229492186, 0.03744742584228516, 0.03744179153442383, 0.037539230346679685, 0.037169761657714843, 0.03724662399291992, 0.03724227142333984, 0.03729401779174805, 0.037341182708740234, 0.0371599349975586, 0.03731167984008789, 0.03765126419067383, 0.037335041046142575, 0.03728998565673828, 0.03737190246582031, 0.03736502456665039, 0.04034633636474609, 0.037577823638916014, 0.03749776077270508, 0.037612545013427735, 0.037563392639160156, 0.037220352172851565, 0.037099582672119144, 0.03723244857788086, 0.03726963043212891, 0.037131649017333984, 0.03710630416870117, 0.03707904052734375, 0.037182464599609374, 0.03730944061279297, 0.0371486701965332, 0.037078113555908204, 0.037112545013427735, 0.03702726364135742, 0.03715375900268555, 0.03715049743652344, 0.036999168395996096, 0.03712204742431641, 0.03750028610229492, 0.036997024536132815, 0.037066814422607425, 0.038044574737548825, 0.037950782775878905, 0.038109886169433595, 0.03804774475097656, 0.03752755355834961, 0.03740377426147461, 0.03747929763793945, 0.03733206558227539, 0.037394657135009765, 0.0371739501953125, 0.03732646560668945, 0.03741734313964844, 0.037377918243408206, 0.037179519653320316, 0.037322208404541014, 0.037423648834228516, 0.037414016723632815, 0.03733388900756836, 0.03720185470581055, 
0.03755014419555664, 0.03739766311645508, 0.037251937866210935, 0.03743046569824219, 0.03709215927124023, 0.03726131057739258, 0.03729817581176758, 0.03744764709472656, 0.03788969421386719, 0.038709632873535155, 0.03767091369628906, 0.03784003067016602, 0.037505088806152345, 0.03749478530883789, 0.037370655059814455, 0.03722991943359375, 0.03723740768432617, 0.03740671920776367, 0.03757648086547852, 0.037414497375488284, 0.037182079315185544, 0.03712204742431641, 0.037101024627685546, 0.03732284927368164, 0.037193313598632816, 0.03734783935546875, 0.03772412872314453, 0.03821158218383789, 0.037574817657470706, 0.037542110443115236, 0.03769139099121094, 0.03733913421630859, 0.03745177459716797, 0.03725513458251953, 0.03754396820068359, 0.03821673583984375, 0.0378480339050293, 0.03785318374633789, 0.037746688842773435, 0.037730304718017575, 0.03759718322753906, 0.03759823989868164, 0.03746915054321289, 0.037508255004882814, 0.03808844757080078, 0.038052097320556644, 0.03796188735961914, 0.0376607666015625, 0.03790233612060547, 0.03766886520385742, 0.03776716613769531, 0.03760051345825195, 0.037513374328613285, 0.03741900634765625, 0.037472862243652344, 0.037343231201171875, 0.03774259185791016, 0.03838566589355469, 0.03824835205078125, 0.03802326583862305, 0.03797420883178711, 0.03817043304443359, 0.03803955078125, 0.03771187210083008, 0.0374552001953125, 0.03730294418334961, 0.03744768142700195, 0.03831145477294922, 0.04061846542358399, 0.038191104888916014, 0.037612895965576175, 0.037429920196533205, 0.037318622589111325, 0.03758406448364258, 0.037359745025634765, 0.037405406951904294, 0.037588897705078124, 0.03765663909912109, 0.037452926635742186, 0.03756880187988281, 0.037470272064208984, 0.03768172836303711, 0.037746688842773435, 0.03788800048828125, 0.03810713577270508, 0.037953536987304685, 0.037804031372070314, 0.03781222534179687, 0.03796377563476563, 0.03782451248168945, 0.037564414978027344, 0.03760287857055664, 0.03757894515991211, 0.037582687377929684, 0.03754025650024414, 0.0373587532043457, 0.03726627349853515, 0.03748979187011719, 0.0377209587097168, 0.0374060173034668, 0.03742544174194336, 0.037298591613769534, 0.03725503921508789, 0.037265087127685545, 0.03764035034179687, 0.03724889755249024, 0.03730883026123047, 0.03802374267578125, 0.037455806732177736, 0.03749689483642578, 0.03760332870483398, 0.037736446380615234, 0.03747225570678711, 0.037414241790771484, 0.03718368148803711, 0.03740003204345703, 0.03749548721313477, 0.037337406158447266, 0.03973673629760742, 0.03813846588134766, 0.037400577545166014, 0.03753696060180664, 0.037351585388183596, 0.03735590362548828, 0.03758927917480469, 0.03791462326049805, 0.037195777893066405, 0.03716908645629883, 0.03723203277587891, 0.03734185409545898, 0.03711385726928711, 0.037252159118652345, 0.03706771087646484, 0.03737334442138672, 0.037314273834228515, 0.037208736419677736, 0.037116096496582034, 0.0373535041809082, 0.03760947036743164, 0.037812000274658204, 0.03760355377197266, 0.03783580780029297, 0.037565727233886716, 0.037397727966308594, 0.038126049041748045, 0.03738796615600586, 0.03745196914672851, 0.03742892837524414, 0.037434974670410154, 0.03728060913085938, 0.037238784790039066, 0.03726697540283203, 0.03755465698242187, 0.037379585266113284, 0.037292545318603515, 0.03722963333129883, 0.03708614349365234, 0.03726335906982422, 0.03767295837402344, 0.038182464599609375, 0.03784265518188477, 0.03768707275390625, 0.037348190307617185, 0.0374411506652832, 0.03761577606201172, 0.037806400299072264, 0.03773440170288086, 
0.03782851028442383, 0.03776316833496094, 0.03791257476806641, 0.03778121566772461, 0.03766726303100586, 0.03751935958862305, 0.037338592529296874, 0.03746412658691406, 0.03725769424438476, 0.044390144348144533, 0.038891777038574216, 0.03830294418334961, 0.03804444885253906, 0.03793289566040039, 0.03827907180786133, 0.03772246551513672, 0.037672222137451174, 0.03775961685180664, 0.03765654373168945, 0.03766479873657227, 0.03782451248168945, 0.03805155181884766, 0.03792310333251953, 0.037817344665527344, 0.03799456024169922, 0.03760224151611328, 0.03768854522705078, 0.03777609634399414, 0.03764595031738281, 0.03760355377197266, 0.03761929702758789, 0.03772275161743164, 0.037617374420166015, 0.03821187210083008, 0.037746688842773435, 0.037804031372070314, 0.03832940673828125, 0.04110841751098633, 0.0381952018737793, 0.03809632110595703, 0.03773875045776367, 0.03749100875854492, 0.037779457092285154, 0.03758703994750977, 0.037506977081298826, 0.037644351959228516, 0.03763756942749023, 0.03753420639038086, 0.037469566345214846, 0.03796031951904297, 0.03847919845581055, 0.037889793395996095, 0.037620033264160156, 0.03774508666992187, 0.03777552032470703, 0.03759715270996094, 0.037464096069335935, 0.03767670440673828, 0.03762825775146485, 0.03755417633056641, 0.037746337890625, 0.03771551895141602, 0.03767350387573242, 0.038569793701171876, 0.03770796966552734, 0.03822003173828125]",tokens/s,26.55022766843954,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1534.140416,1331.560448,0.0,945.815552,943.480832,s,1,8.2804453125,8.2804453125,0.0,8.2804453125,8.2804453125,8.2804453125,8.2804453125,[8.2804453125],,kWh,3.378422795416706e-05,3.7193951844002356e-06,1.0622508497995509e-05,4.81261316365628e-05,,MB,1627.922432,1495.138304,0.0,1080.03328,1046.519808,s,10,0.8413082656860351,0.08413082656860352,0.0005411269151147151,0.08411937713623047,0.08498404083251954,0.08500063018798829,0.08501390167236328,"[0.08501721954345703, 0.08403056335449219, 0.08425718688964844, 0.08399298858642579, 0.08385699462890625, 0.08420819091796874, 0.08329564666748047, 0.08337535858154296, 0.08429376220703125, 0.08498035430908203]",tokens/s,3042.879886497344,kWh,2.5345507926365313e-06,2.794329652731288e-07,1.6871493382239957e-06,4.5011330961336565e-06,tokens/kWh,56874567.92155216,MB,1632.063488,1583.218688,0.0,1166.016512,1082.823168,s,10,12.217605834960937,1.2217605834960938,0.0038837035037777566,1.2212030029296876,1.225930285644531,1.2272507873535157,1.228307188720703,"[1.22072802734375, 1.22065771484375, 1.2285712890625, 1.2246650390625, 1.2195157470703124, 1.2256368408203124, 1.221677978515625, 1.222939208984375, 1.219701171875, 
1.2135128173828125]",tokens/s,51.56493084735486,kWh,3.572005324652893e-05,3.9389814592167994e-06,1.849861681037646e-05,5.815765151612217e-05,tokens/kWh,1083262.4488376298,,s,630,12.214905199050905,0.01938873841119191,0.0003129599496396208,0.01932523250579834,0.019679600715637208,0.019840338706970214,0.020372791442871095,"[0.019585599899291994, 0.01939356803894043, 0.019151840209960937, 0.019055679321289064, 0.01914361572265625, 0.01902592086791992, 0.020246208190917967, 0.019177696228027345, 0.019220415115356444, 0.019433631896972656, 0.019599008560180663, 0.019446111679077147, 0.01922662353515625, 0.019271167755126953, 0.020680416107177736, 0.01923971176147461, 0.019093471527099608, 0.019105823516845703, 0.01921353530883789, 0.019155296325683593, 0.019247488021850587, 0.019242719650268556, 0.01921449661254883, 0.019075263977050783, 0.019228351593017577, 0.019177791595458984, 0.01919532775878906, 0.019282495498657227, 0.019385631561279298, 0.019308319091796877, 0.01932179260253906, 0.019252416610717773, 0.01943747138977051, 0.01928451156616211, 0.01926134490966797, 0.019317216873168945, 0.01935468864440918, 0.019207103729248047, 0.019820735931396483, 0.019562303543090822, 0.019621183395385742, 0.01956729507446289, 0.020631744384765626, 0.01951708793640137, 0.01932700729370117, 0.01921241569519043, 0.019148031234741212, 0.019104223251342773, 0.01913065528869629, 0.019130367279052735, 0.019311775207519533, 0.019304607391357424, 0.01939731216430664, 0.019337215423583985, 0.019281856536865233, 0.019556415557861327, 0.019373632431030272, 0.0194085750579834, 0.019726560592651366, 0.01946015930175781, 0.019313312530517577, 0.019332576751708984, 0.019485023498535155, 0.01968182373046875, 0.01952511978149414, 0.019245855331420897, 0.019355648040771483, 0.01943510437011719, 0.019354015350341796, 0.01918511962890625, 0.019212799072265627, 0.019378208160400392, 0.01932044792175293, 0.01977177619934082, 0.01925324821472168, 0.019533824920654298, 0.019355167388916017, 0.019235519409179686, 0.01919753646850586, 0.019224767684936524, 0.019043840408325196, 0.019175935745239257, 0.01906892776489258, 0.01925939178466797, 0.01953721618652344, 0.020122304916381836, 0.01984511947631836, 0.020023296356201172, 0.01986790466308594, 0.019831840515136718, 0.019590911865234376, 0.019566751480102538, 0.019456832885742188, 0.019352575302124024, 0.019352575302124024, 0.01936534309387207, 0.01936172866821289, 0.019194463729858398, 0.019083263397216797, 0.01903001594543457, 0.019021087646484375, 0.019081951141357422, 0.019130720138549804, 0.019152256011962892, 0.019244800567626952, 0.019845663070678712, 0.019367935180664063, 0.01919817543029785, 0.019268415451049806, 0.019474624633789062, 0.019563295364379882, 0.01955958366394043, 0.019513631820678713, 0.019341888427734374, 0.01936787223815918, 0.01922640037536621, 0.01921567916870117, 0.01928057670593262, 0.01920755195617676, 0.019268512725830078, 0.019209280014038085, 0.019182527542114258, 0.01924505615234375, 0.019381664276123048, 0.019249759674072265, 0.01941433525085449, 0.019816959381103515, 0.019554655075073243, 0.019424640655517578, 0.01929587173461914, 0.019323871612548827, 0.019355648040771483, 0.019331071853637697, 0.01966249656677246, 0.01957855987548828, 0.01943846321105957, 0.01948575973510742, 0.019563232421875, 0.01945801544189453, 0.019460128784179687, 0.01922047996520996, 0.0193919677734375, 0.019244991302490234, 0.019435903549194337, 0.0194682559967041, 0.01943987274169922, 0.019353504180908202, 0.01930793571472168, 0.019195680618286134, 
0.01917430305480957, 0.019262496948242187, 0.0193154239654541, 0.0198002872467041, 0.019492639541625976, 0.020259199142456056, 0.019939104080200196, 0.0202258243560791, 0.019575103759765625, 0.019679231643676756, 0.01955011177062988, 0.019814495086669923, 0.019777536392211914, 0.019757055282592775, 0.01948057556152344, 0.019388416290283202, 0.019300352096557616, 0.019400447845458985, 0.01941926383972168, 0.01949625587463379, 0.019494752883911132, 0.01948975944519043, 0.01948057556152344, 0.019533760070800783, 0.019602848052978517, 0.019546783447265625, 0.0194550724029541, 0.01940777587890625, 0.019673088073730468, 0.01948876762390137, 0.019539968490600586, 0.01958016014099121, 0.019409984588623048, 0.019666624069213868, 0.0195850887298584, 0.019373344421386718, 0.01938809585571289, 0.019241504669189453, 0.019146751403808594, 0.019313087463378908, 0.019685247421264648, 0.01944972801208496, 0.01932262420654297, 0.01911497688293457, 0.019287616729736327, 0.019265407562255858, 0.01926406478881836, 0.019189151763916015, 0.019252031326293946, 0.019099103927612306, 0.019568960189819337, 0.01913772773742676, 0.019229503631591798, 0.0192491512298584, 0.019376127243041993, 0.01943142318725586, 0.019396608352661132, 0.019931104660034178, 0.01943356704711914, 0.019320768356323244, 0.019341440200805665, 0.019339487075805663, 0.019260480880737306, 0.01930723190307617, 0.019254207611083984, 0.019182144165039064, 0.01922915267944336, 0.019310495376586915, 0.019277631759643556, 0.01938387107849121, 0.01930950355529785, 0.019476160049438477, 0.019378015518188477, 0.01933737564086914, 0.01943756866455078, 0.019298303604125978, 0.01928985595703125, 0.01914729690551758, 0.019128032684326172, 0.01927190399169922, 0.019322656631469728, 0.01957683181762695, 0.02001919937133789, 0.019798368453979493, 0.02002195167541504, 0.019943487167358397, 0.01980099105834961, 0.019505151748657225, 0.019418336868286132, 0.019753759384155273, 0.019736576080322265, 0.019914623260498046, 0.019710208892822264, 0.019660671234130858, 0.019490816116333007, 0.01922012710571289, 0.01924540710449219, 0.019324928283691405, 0.019474431991577147, 0.019590816497802734, 0.01961199951171875, 0.019578880310058593, 0.019737663269042968, 0.019834495544433593, 0.02036297607421875, 0.020793439865112305, 0.019466079711914063, 0.019190303802490233, 0.01929644775390625, 0.019347455978393553, 0.019288063049316406, 0.01903206443786621, 0.019140031814575194, 0.01923286437988281, 0.01943337631225586, 0.019680896759033204, 0.019524543762207032, 0.019558591842651366, 0.01981420707702637, 0.019556224822998045, 0.019277023315429687, 0.019176351547241212, 0.019331071853637697, 0.01923276710510254, 0.0191628475189209, 0.019179807662963868, 0.01922172737121582, 0.019202848434448243, 0.019173376083374022, 0.019198272705078127, 0.019363616943359373, 0.019277727127075196, 0.019184703826904296, 0.019190719604492187, 0.01925529670715332, 0.019171327590942384, 0.01936787223815918, 0.01932521629333496, 0.019250335693359374, 0.01915763282775879, 0.019261280059814454, 0.01910099220275879, 0.019186527252197265, 0.01911993598937988, 0.019143999099731444, 0.019204992294311524, 0.019228288650512695, 0.019251327514648437, 0.019365760803222658, 0.019377952575683595, 0.0193337287902832, 0.019386367797851564, 0.019367935180664063, 0.019304447174072266, 0.019232160568237306, 0.019244928359985352, 0.019434207916259764, 0.01960710334777832, 0.019327423095703126, 0.019367103576660157, 0.019352672576904296, 0.01939462471008301, 0.019365503311157228, 0.01944169616699219, 
0.0194150390625, 0.01921023941040039, 0.01967945671081543, 0.019340991973876953, 0.019074304580688477, 0.01918262481689453, 0.01934832000732422, 0.01922719955444336, 0.01914713668823242, 0.019036191940307617, 0.019174560546875, 0.019458911895751954, 0.019499135971069337, 0.019789695739746094, 0.019490816116333007, 0.019445344924926757, 0.019311296463012696, 0.019312128067016602, 0.019302495956420897, 0.019323007583618164, 0.01951263999938965, 0.01947001647949219, 0.019254016876220702, 0.01935590362548828, 0.019361343383789063, 0.01931881523132324, 0.019378591537475585, 0.019336288452148437, 0.01930441665649414, 0.019231679916381837, 0.01942118453979492, 0.019374080657958984, 0.019458047866821288, 0.01935094451904297, 0.019461952209472656, 0.019545984268188477, 0.01928652763366699, 0.01930486488342285, 0.019395839691162108, 0.019443775177001955, 0.01940550422668457, 0.01982054328918457, 0.019381248474121093, 0.019309568405151366, 0.019212223052978514, 0.019267648696899415, 0.01927395248413086, 0.019342559814453125, 0.019513919830322267, 0.019358816146850585, 0.019377119064331056, 0.019291391372680666, 0.019349567413330077, 0.01933990478515625, 0.019468032836914062, 0.0193702392578125, 0.019820863723754883, 0.022863552093505858, 0.01946950340270996, 0.01929689598083496, 0.019219648361206054, 0.019100671768188478, 0.01978982353210449, 0.019536991119384766, 0.020376800537109375, 0.019630592346191408, 0.01927961540222168, 0.019400768280029297, 0.01955344009399414, 0.019430015563964845, 0.019392768859863282, 0.019273183822631837, 0.019235200881958008, 0.019054079055786134, 0.019184127807617187, 0.019007551193237306, 0.019253183364868164, 0.019441343307495116, 0.019404863357543944, 0.01968374443054199, 0.019353599548339845, 0.019230495452880858, 0.019265247344970704, 0.019184127807617187, 0.019230592727661134, 0.01916041564941406, 0.01926793670654297, 0.019411392211914062, 0.01949286460876465, 0.01944166374206543, 0.019266719818115233, 0.019267744064331054, 0.019392480850219728, 0.019260128021240233, 0.01934067153930664, 0.01939686393737793, 0.02258572769165039, 0.020189184188842774, 0.01943071937561035, 0.01918841552734375, 0.019382272720336914, 0.019373344421386718, 0.01938505554199219, 0.01921433639526367, 0.01924300765991211, 0.019293567657470704, 0.019161727905273436, 0.019173376083374022, 0.019183616638183593, 0.020316352844238283, 0.019255264282226563, 0.01941497611999512, 0.01928793525695801, 0.019414239883422852, 0.019157791137695314, 0.01908847999572754, 0.01911065673828125, 0.019890367507934572, 0.019388416290283202, 0.019289440155029296, 0.01928054428100586, 0.01925529670715332, 0.019169279098510742, 0.019208192825317383, 0.019163135528564454, 0.019168863296508788, 0.019419008255004883, 0.019171775817871092, 0.0196146240234375, 0.019394527435302733, 0.01955023956298828, 0.019325248718261717, 0.01939187240600586, 0.019337408065795897, 0.019351423263549803, 0.019266944885253906, 0.019900703430175783, 0.01935174369812012, 0.01930825614929199, 0.019266239166259767, 0.019195552825927734, 0.01924239921569824, 0.019194816589355467, 0.019214048385620117, 0.019261472702026366, 0.019368192672729493, 0.019619840621948242, 0.0194150390625, 0.01937171173095703, 0.01930067253112793, 0.019184640884399414, 0.019264511108398438, 0.019159040451049804, 0.019275199890136718, 0.01916166305541992, 0.019544063568115236, 0.01961577606201172, 0.019636192321777345, 0.019597312927246095, 0.01951945686340332, 0.01956252861022949, 0.019539392471313477, 0.019582975387573243, 0.019740224838256836, 
0.01971887969970703, 0.0195598087310791, 0.01943404769897461, 0.01931660842895508, 0.019441120147705077, 0.01939148712158203, 0.019448928833007813, 0.01952422332763672, 0.019447904586791992, 0.019459840774536132, 0.019560895919799804, 0.019617759704589843, 0.019481727600097656, 0.019473663330078127, 0.019417951583862305, 0.01958304023742676, 0.019321056365966798, 0.019343904495239258, 0.01926553535461426, 0.019215776443481446, 0.019233631134033202, 0.019195648193359376, 0.020008256912231445, 0.019122880935668947, 0.01918479919433594, 0.019190847396850588, 0.019122079849243166, 0.019563488006591797, 0.01942156791687012, 0.019305023193359375, 0.019204160690307618, 0.019261375427246093, 0.019285312652587892, 0.019341440200805665, 0.01955686378479004, 0.019858783721923828, 0.019729440689086913, 0.01973017692565918, 0.019582368850708007, 0.019505376815795897, 0.01958745574951172, 0.019621376037597657, 0.019999231338500977, 0.019984159469604492, 0.01985977554321289, 0.019741920471191405, 0.019400896072387694, 0.019235231399536132, 0.01913235282897949, 0.019122335433959962, 0.019200000762939453, 0.019120128631591796, 0.019171104431152344, 0.019212480545043945, 0.01913654327392578, 0.01909667205810547, 0.019221376419067383, 0.019103776931762694, 0.01910281562805176, 0.019085535049438475, 0.019079519271850587, 0.019097951889038085, 0.019576608657836916, 0.019372032165527343, 0.019203615188598634, 0.019313087463378908, 0.01926972770690918, 0.019148767471313475, 0.019404928207397462, 0.01922627258300781, 0.01917487907409668, 0.019221183776855468, 0.019421695709228515, 0.019635072708129885, 0.01944051170349121, 0.01924710464477539, 0.019273408889770506, 0.019293760299682616, 0.01920195198059082, 0.019270528793334962, 0.019312128067016602, 0.019257823944091798, 0.01926348876953125, 0.019189119338989258, 0.019395200729370118, 0.019318784713745117, 0.019636224746704102, 0.019283967971801756, 0.01923023986816406, 0.01921891212463379, 0.019742271423339845, 0.020064096450805664, 0.019638879776000977, 0.019380319595336915, 0.019371936798095703, 0.019246463775634767, 0.01932147216796875, 0.019306495666503908, 0.019417087554931642, 0.01944329643249512, 0.01951785659790039, 0.019273727416992188, 0.019095552444458007, 0.019148767471313475, 0.0190196475982666, 0.01912188720703125, 0.019372480392456055, 0.018976768493652343, 0.018984960556030273, 0.019158304214477537, 0.019086048126220702, 0.020938880920410158, 0.019531551361083983, 0.019056735992431642, 0.018970495223999025, 0.01900796890258789, 0.019175071716308594, 0.019175424575805664, 0.01922483253479004, 0.01913372802734375, 0.019205631256103514, 0.019237184524536134, 0.019198623657226563, 0.01921798324584961, 0.019124671936035155, 0.019072383880615235, 0.019089567184448243, 0.019085056304931642, 0.019108575820922853, 0.019115264892578126, 0.019245824813842773, 0.019062784194946288, 0.019173408508300783, 0.01934332847595215, 0.01942019271850586, 0.019119071960449218, 0.019062591552734376, 0.01927587127685547, 0.019170848846435547, 0.019185344696044923, 0.019323776245117188, 0.019482624053955077, 0.019124223709106446, 0.019349504470825195, 0.019050624847412108, 0.019119424819946287, 0.019086944580078126, 0.019131488800048828, 0.019174720764160155, 0.019233343124389648, 0.019218656539916994, 0.019197727203369142, 0.019351327896118164]",tokens/s,51.57633151741128,,, 
4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7425.867776,7815.954432,0.0,7430.209536,7414.23104,s,1,11.5578759765625,11.5578759765625,0.0,11.5578759765625,11.5578759765625,11.5578759765625,11.5578759765625,[11.5578759765625],,kWh,0.00013001373898338593,1.4334036416258362e-05,4.264253411401242e-05,0.00018699030951365673,,MB,1757.855744,8591.900672,0.0,8176.795648,8052.041728,s,10,8.388756896972657,0.8388756896972657,0.005068092329457814,0.8398027343750001,0.8426118225097656,0.8426351959228515,0.8426538946533203,"[0.8244771118164063, 0.8389241333007813, 0.8390857543945313, 0.8404760131835938, 0.8415714721679688, 0.8377677612304687, 0.8420599975585937, 0.8391294555664063, 0.8426585693359375, 0.8426066284179687]",tokens/s,305.17036450583703,kWh,2.4399284272217553e-05,2.6908122011379806e-06,1.6139642541333304e-05,4.322973901468884e-05,tokens/kWh,5921849.306400274,MB,1765.298176,8740.798464,0.0,8323.596288,8263.496192,s,10,37.441457275390626,3.7441457275390624,0.006499362104962373,3.742072265625,3.752938061523438,3.7563946899414065,3.7591599926757815,"[3.737705322265625, 3.740154541015625, 3.73918017578125, 3.739918701171875, 3.743107666015625, 3.741036865234375, 3.744752685546875, 3.743580078125, 3.759851318359375, 3.752169921875]",tokens/s,16.826268148865132,kWh,0.00010964002129111094,1.209397140198706e-05,7.295534540126732e-05,0.0001946893380943653,tokens/kWh,323592.4504990823,,s,630,37.43461421966556,0.05942002257089766,0.000583698622059506,0.059415279388427736,0.0599520565032959,0.0601386157989502,0.06203461723327637,"[0.06208425521850586, 0.05944313430786133, 0.05856892776489258, 0.058562816619873045, 0.05849055862426758, 0.058687454223632814, 0.058455936431884764, 0.058755073547363285, 0.05866998291015625, 0.0588144645690918, 0.058652481079101565, 0.05874911880493164, 0.05876326370239258, 0.05908396911621094, 0.05917068862915039, 0.05957523345947266, 0.059694942474365235, 0.059674175262451175, 0.05948271942138672, 0.059150337219238285, 0.05974425506591797, 0.05929276657104492, 0.05867161560058594, 0.058648990631103515, 0.058654720306396485, 0.05887372970581055, 0.05893654251098633, 0.05880720138549805, 0.058679039001464844, 0.058824958801269533, 0.05903564834594727, 0.0594752311706543, 0.05958115386962891, 0.059957248687744144, 0.05990195083618164, 0.05974118423461914, 0.05940531158447265, 0.059572097778320315, 0.05909660720825195, 0.059421279907226565, 0.05931574249267578, 0.0591794548034668, 0.059105312347412106, 0.059009025573730466, 0.05916876983642578, 0.05938585662841797, 0.05928879928588867, 0.0597608642578125, 0.05965046310424805, 0.0598120002746582, 0.05997772979736328, 0.059815937042236325, 0.059735870361328124, 0.05970044708251953, 0.05954630279541016, 0.06085036849975586, 0.05976617431640625, 0.05972230529785156, 0.059752288818359374, 0.05980393600463867, 0.059426815032958984, 0.059283073425292966, 0.059462017059326175, 
0.061862464904785155, 0.0589552001953125, 0.058692161560058596, 0.05886716842651367, 0.058690078735351564, 0.05875888061523438, 0.058652961730957034, 0.05877350234985351, 0.05850316619873047, 0.05907436752319336, 0.05859347152709961, 0.05919948959350586, 0.058943489074707034, 0.05886566543579102, 0.05875491333007812, 0.05909110260009766, 0.059746112823486325, 0.05970083236694336, 0.05958646392822266, 0.0592350082397461, 0.058848350524902344, 0.058681697845458985, 0.05879046249389648, 0.05884508895874024, 0.05876851272583008, 0.05907759857177734, 0.05909299087524414, 0.05915596771240234, 0.05901299285888672, 0.05912387084960938, 0.0593895378112793, 0.059635902404785154, 0.0597551040649414, 0.05999375915527344, 0.05967507171630859, 0.05977027130126953, 0.05920419311523437, 0.05941657638549805, 0.05952259063720703, 0.059519454956054686, 0.05927526473999024, 0.059146240234375, 0.05909708786010742, 0.05963334274291992, 0.05958073425292969, 0.05952713775634766, 0.059616447448730465, 0.05996835327148437, 0.059660385131835934, 0.0597828483581543, 0.060206497192382816, 0.059878143310546875, 0.05991635131835937, 0.059735103607177736, 0.05957727813720703, 0.0595753288269043, 0.059616222381591796, 0.0595968017578125, 0.05959619140625, 0.05955644989013672, 0.05971148681640625, 0.05941785430908203, 0.06000307083129883, 0.06209740829467773, 0.0594442253112793, 0.05873097610473633, 0.058780094146728516, 0.05845206451416016, 0.05869772720336914, 0.05854003143310547, 0.058568702697753904, 0.05866022491455078, 0.05857548904418945, 0.05870796966552734, 0.05892300796508789, 0.05900697708129883, 0.05903769683837891, 0.058894241333007816, 0.05920905685424805, 0.05966924667358398, 0.05962137603759766, 0.05938585662841797, 0.058845184326171876, 0.05873049545288086, 0.058689537048339846, 0.058817855834960936, 0.058813121795654295, 0.05873788833618164, 0.059113761901855466, 0.059122177124023435, 0.05926911926269531, 0.05912575912475586, 0.05930160140991211, 0.05944144058227539, 0.0594513931274414, 0.059568126678466796, 0.05974425506591797, 0.062150367736816405, 0.05968230438232422, 0.059224864959716794, 0.05932249450683594, 0.059165760040283205, 0.05910995101928711, 0.05922140884399414, 0.05925360107421875, 0.059184478759765624, 0.059346687316894534, 0.0592823371887207, 0.059396095275878906, 0.05939590454101563, 0.05984467315673828, 0.05967059326171875, 0.05967264175415039, 0.05968896102905273, 0.05979260635375976, 0.05960492706298828, 0.05952188873291016, 0.05949849700927735, 0.05961347198486328, 0.05944496154785156, 0.05957222366333008, 0.059469825744628904, 0.05954150390625, 0.05977088165283203, 0.06013747024536133, 0.06009241485595703, 0.0616118392944336, 0.059137950897216796, 0.058498943328857425, 0.05873651123046875, 0.058525856018066404, 0.05858956909179688, 0.0586690559387207, 0.05905408096313477, 0.05883670425415039, 0.058743072509765626, 0.058711265563964846, 0.05906224060058594, 0.05893180847167969, 0.058812641143798826, 0.0588144645690918, 0.059256832122802736, 0.059539295196533205, 0.05910953521728515, 0.0595599365234375, 0.0594411506652832, 0.05911059188842773, 0.058818687438964845, 0.05865043258666992, 0.05878054428100586, 0.05881856155395508, 0.05886495971679687, 0.059034305572509764, 0.05908889770507812, 0.05931414413452148, 0.059308032989501956, 0.05920131301879883, 0.05965990447998047, 0.06045759963989258, 0.05963776016235352, 0.05946332931518555, 0.059483711242675784, 0.059388256072998045, 0.059647678375244144, 0.05930636978149414, 0.05941900634765625, 0.05919948959350586, 0.05928550338745117, 
0.059327583312988284, 0.05964857482910156, 0.05972383880615234, 0.05967491149902344, 0.05964595031738281, 0.05978112030029297, 0.059828193664550784, 0.06013955307006836, 0.05981388854980469, 0.05992038345336914, 0.05977088165283203, 0.05986304092407226, 0.059842399597167965, 0.05933276748657226, 0.059377662658691405, 0.059875328063964846, 0.059563838958740234, 0.0595212173461914, 0.05941398239135742, 0.059748897552490236, 0.059831710815429685, 0.062189567565917966, 0.059297119140625, 0.05858575820922852, 0.05866495895385742, 0.0584595832824707, 0.05875094223022461, 0.05888399887084961, 0.058757183074951175, 0.05913459014892578, 0.05883084869384766, 0.05902694320678711, 0.05980006408691406, 0.05850726318359375, 0.058675006866455076, 0.058929344177246094, 0.05934080123901367, 0.05962492752075195, 0.05985744094848633, 0.05943091201782227, 0.059232257843017576, 0.059015167236328124, 0.0591912956237793, 0.05891049575805664, 0.05901308822631836, 0.05887196731567383, 0.059050079345703124, 0.05904383850097656, 0.05892060852050781, 0.05886806488037109, 0.05914214324951172, 0.05904492950439453, 0.05969795227050781, 0.0599758415222168, 0.06160934448242188, 0.05971212768554687, 0.05940822219848633, 0.059407745361328125, 0.059192096710205075, 0.0590909423828125, 0.058910144805908206, 0.05907855987548828, 0.059966110229492185, 0.05981919860839844, 0.05936742401123047, 0.05946451187133789, 0.05961481475830078, 0.05945183944702148, 0.05956784057617188, 0.05970355224609375, 0.06026233673095703, 0.059867198944091794, 0.05995929718017578, 0.05972323226928711, 0.059673118591308597, 0.059635711669921876, 0.05947574234008789, 0.05951663970947266, 0.05983411026000977, 0.05971635055541992, 0.059565502166748045, 0.05935683059692383, 0.05994998550415039, 0.059797439575195316, 0.062377983093261716, 0.059243648529052735, 0.05855936050415039, 0.05854412841796875, 0.05861171340942383, 0.05871308898925781, 0.05895868682861328, 0.05917712020874023, 0.05865881729125977, 0.05868134307861328, 0.058947582244873044, 0.05877347183227539, 0.05885955047607422, 0.058900478363037106, 0.05925068664550781, 0.059504638671875, 0.05984070587158203, 0.05982393646240235, 0.059338752746582034, 0.059211326599121095, 0.05901356887817383, 0.0592561264038086, 0.05913056182861328, 0.0586995849609375, 0.0588023681640625, 0.05901311874389648, 0.059187198638916014, 0.0591646728515625, 0.05908873748779297, 0.059020862579345704, 0.059288158416748046, 0.059463680267333986, 0.05999411010742187, 0.059920158386230465, 0.05953327941894531, 0.05945123291015625, 0.05915411376953125, 0.05945008087158203, 0.05955583953857422, 0.05948396682739258, 0.0593306884765625, 0.05921379089355469, 0.05950678253173828, 0.059176639556884764, 0.05906668853759765, 0.059291488647460935, 0.06028713607788086, 0.05980364990234375, 0.05970233535766602, 0.05997459030151367, 0.05967462539672851, 0.05984249496459961, 0.059788768768310546, 0.05967318344116211, 0.059619327545166016, 0.05969001770019531, 0.059603775024414066, 0.05961539077758789, 0.059633663177490234, 0.05932646560668945, 0.059659328460693356, 0.05960595321655274, 0.05954150390625, 0.06191308975219727, 0.05912284851074219, 0.05861385726928711, 0.058667552947998046, 0.05872867202758789, 0.05897760009765625, 0.05871603012084961, 0.05864940643310547, 0.058887294769287106, 0.058845409393310545, 0.05874867248535156, 0.05904585647583008, 0.05913859176635742, 0.059154590606689456, 0.059143905639648435, 0.0596383056640625, 0.059727008819580076, 0.05947401428222656, 0.059302528381347655, 0.059240577697753906, 
0.05874892807006836, 0.0587960319519043, 0.05934447860717774, 0.058882465362548826, 0.05887180709838867, 0.05904383850097656, 0.060295169830322265, 0.05896531295776367, 0.05924729537963867, 0.05930575942993164, 0.0594106559753418, 0.0595148811340332, 0.05988486480712891, 0.05957497787475586, 0.05966556930541992, 0.05983830261230469, 0.059463871002197265, 0.05951964950561523, 0.05968707275390625, 0.059625598907470705, 0.05956982421875, 0.0597608642578125, 0.059240447998046876, 0.05907660675048828, 0.05947103881835938, 0.05995193481445313, 0.059635711669921876, 0.05960908889770508, 0.05972943878173828, 0.05992265701293945, 0.05983462524414063, 0.06008329772949219, 0.05964892959594727, 0.05954969787597656, 0.05934656143188476, 0.05955417633056641, 0.05974176025390625, 0.05965372848510742, 0.05987347030639648, 0.05950067138671875, 0.059682369232177734, 0.05980022430419922, 0.060329856872558596, 0.06150457763671875, 0.05930928039550781, 0.058587615966796874, 0.058585025787353515, 0.05841468811035156, 0.058728927612304686, 0.058959873199462894, 0.05871206283569336, 0.05902336120605469, 0.05901926422119141, 0.0591278076171875, 0.05904291152954102, 0.05922246551513672, 0.05895420837402344, 0.05916662216186523, 0.05928559875488281, 0.05987692642211914, 0.059576766967773434, 0.05917491149902344, 0.05885747146606445, 0.058912769317626956, 0.059260032653808595, 0.058864513397216794, 0.05900838470458984, 0.0593230094909668, 0.05898649597167969, 0.059150337219238285, 0.059025409698486325, 0.05930105590820312, 0.05941126251220703, 0.05933670425415039, 0.05933465576171875, 0.05962956619262695, 0.05951062393188476, 0.059623584747314454, 0.059813087463378906, 0.05949635314941406, 0.05947071838378906, 0.059355136871337894, 0.0591912956237793, 0.05951897430419922, 0.05957017517089844, 0.05982313537597656, 0.05950332641601563, 0.05934924697875977, 0.05953104019165039, 0.05998767852783203, 0.059787456512451174, 0.059679039001464845, 0.05965619277954102, 0.05972377777099609, 0.060026592254638675, 0.05961142349243164, 0.059690689086914064, 0.05972614288330078, 0.0599285774230957, 0.05982617568969727, 0.059675678253173825, 0.05971862411499024, 0.059566078186035154, 0.06024192047119141, 0.0598466567993164, 0.05973331069946289, 0.06253977584838867, 0.05995315170288086, 0.05897216033935547, 0.058797569274902345, 0.058700096130371096, 0.058878143310546874, 0.05882265472412109, 0.061900798797607424, 0.0590643196105957, 0.05904313659667969, 0.059160736083984374, 0.05900294494628906, 0.0591583366394043, 0.059175582885742185, 0.059084320068359376, 0.05986556625366211, 0.06007603073120117, 0.05996505737304687, 0.05967500686645508, 0.05892406463623047, 0.05898748779296875, 0.059146240234375, 0.05933260726928711, 0.059131294250488284, 0.0617393913269043, 0.05930416107177734, 0.05912985610961914, 0.05923635101318359, 0.05936537551879883, 0.05969305419921875, 0.05943695831298828, 0.05972563171386719, 0.060125473022460935, 0.05986064147949219, 0.060270431518554685, 0.05994694519042969, 0.05988582229614258, 0.059617313385009765, 0.05946547317504883, 0.05937964630126953, 0.06073199844360352, 0.05956576156616211, 0.05954137420654297, 0.05932064056396484, 0.059297183990478515, 0.059781375885009765, 0.059675102233886716, 0.059883392333984375, 0.05967884826660156, 0.060129280090332034, 0.059957248687744144, 0.06019878387451172, 0.059964576721191404, 0.059807777404785156, 0.05959299087524414, 0.05958108901977539, 0.059741729736328124, 0.05996729660034179, 0.059671199798583985, 0.05963161468505859, 0.05975209426879883, 
0.06010095977783203, 0.0599920654296875, 0.06228678512573242, 0.059170814514160154, 0.05875302505493164, 0.058875553131103514, 0.058546432495117186, 0.0587039680480957, 0.058721343994140626, 0.05889529418945313, 0.05892095947265625, 0.05909299087524414, 0.058793983459472655, 0.05897830581665039, 0.05889811325073242, 0.058904895782470705, 0.05904751968383789, 0.059646366119384765, 0.0599733772277832, 0.05952451324462891, 0.05920761489868164, 0.05929257583618164, 0.05934428787231445, 0.05926934432983398, 0.05902924728393555, 0.05908092880249023, 0.05916048049926758, 0.0592281608581543, 0.05954406356811524, 0.059346687316894534, 0.05957247924804687, 0.05966460800170898, 0.059340576171875, 0.05999200057983398, 0.05956614303588867, 0.05971945571899414, 0.060690528869628904, 0.059619457244873046, 0.059668319702148434, 0.05966659164428711, 0.059719680786132816, 0.05951897430419922, 0.05931574249267578, 0.05943958282470703, 0.059504638671875, 0.059425792694091796, 0.05963625717163086, 0.06000883102416992, 0.05981398391723633, 0.06018368148803711, 0.060322689056396483, 0.06031155014038086, 0.06024188613891602, 0.06030303955078125, 0.05988185501098633, 0.059956768035888675, 0.0598798713684082, 0.06013523101806641, 0.05974556732177734, 0.05943795013427734, 0.05954358291625977, 0.059760478973388674, 0.05965020751953125, 0.0599444465637207, 0.059996673583984375]",tokens/s,16.829343994389077,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7423.381504,9464.315904,0.0,9061.793792,8463.626752,s,1,14.1642626953125,14.1642626953125,0.0,14.1642626953125,14.1642626953125,14.1642626953125,14.1642626953125,[14.1642626953125],,kWh,0.00020746710742908665,2.287786177515843e-05,9.54975763980126e-05,0.00032584254560225767,,MB,1743.0528,9478.995968,0.0,9061.793792,7991.22432,s,10,60.08788671874999,6.008788671875,0.011248043769296657,6.008897705078125,6.021964892578125,6.025627954101562,6.028558403320313,"[5.99210107421875, 5.9977314453125, 5.9971025390625, 6.0016982421875, 6.0068544921875, 6.01094091796875, 6.013810546875, 6.01720556640625, 6.02115087890625, 6.029291015625]",tokens/s,42.60426085514454,kWh,0.00017607391245208116,1.942152804380476e-05,0.0001169291490988017,0.0003124245895946876,tokens/kWh,819397.7315681588,MB,1750.224896,9478.995968,0.0,9061.793792,8265.583104,s,10,30.307095703124997,3.0307095703124998,0.002603455310761117,3.0300980224609373,3.0334981201171876,3.0349486450195315,3.036109064941406,"[3.03184423828125, 3.026663818359375, 3.03193212890625, 3.029801513671875, 3.028995361328125, 3.02874072265625, 3.03039453125, 3.03317578125, 3.0291484375, 
3.036399169921875]",tokens/s,20.78721122509406,kWh,8.807217654667506e-05,9.713401138696478e-06,5.8610769110797886e-05,0.00015639634679616952,tokens/kWh,402822.70839809027,,s,630,30.301531970977784,0.04809766979520283,0.0004766924224146047,0.04808817481994629,0.048582083892822266,0.048745139312744136,0.049788932876586915,"[0.049733695983886717, 0.05066342544555664, 0.0474521598815918, 0.048824321746826174, 0.047486976623535154, 0.04745625686645508, 0.047392223358154295, 0.047338302612304685, 0.047392478942871095, 0.047271934509277344, 0.04740016174316406, 0.047581985473632814, 0.04746352005004883, 0.047637088775634766, 0.04763292694091797, 0.04765657424926758, 0.04790464019775391, 0.04793740844726563, 0.04788265609741211, 0.048097278594970705, 0.04804991912841797, 0.04806886291503906, 0.047943519592285155, 0.050697952270507815, 0.04756934356689453, 0.04776262283325195, 0.04764057540893555, 0.04750982284545899, 0.04757059097290039, 0.047817569732666015, 0.04786175918579102, 0.04793753433227539, 0.04787164688110351, 0.04794812774658203, 0.048226207733154294, 0.04801340866088867, 0.04853760147094727, 0.04831612777709961, 0.04810688018798828, 0.04815539169311524, 0.04819283294677734, 0.0485302734375, 0.048510974884033206, 0.0481976318359375, 0.04839014434814453, 0.048191585540771485, 0.04851087951660156, 0.04814438247680664, 0.048089088439941405, 0.04806777572631836, 0.0478460807800293, 0.0482790412902832, 0.048454078674316406, 0.048256607055664064, 0.04848825454711914, 0.04815705490112305, 0.04810383987426758, 0.04846323013305664, 0.048629600524902346, 0.04851116943359375, 0.04859145736694336, 0.048492542266845705, 0.04857855987548828, 0.049631584167480466, 0.048287487030029295, 0.04773628616333008, 0.04726015853881836, 0.04741737747192383, 0.04729827117919922, 0.04717795181274414, 0.047433792114257814, 0.047513633728027346, 0.04758108901977539, 0.04755257415771484, 0.04759273529052734, 0.04764495849609375, 0.04751958465576172, 0.047505470275878904, 0.04810540771484375, 0.048237152099609375, 0.0477591667175293, 0.04787424087524414, 0.04803583908081055, 0.048159744262695314, 0.04815337753295899, 0.04870755386352539, 0.048631935119628905, 0.04806159973144531, 0.04782521438598633, 0.04757161712646484, 0.04759257507324219, 0.04783139038085937, 0.048078369140625, 0.04790361785888672, 0.0479579849243164, 0.04805587387084961, 0.04788809585571289, 0.047714496612548826, 0.04833555221557617, 0.04829529571533203, 0.048142974853515624, 0.04807884979248047, 0.04820172882080078, 0.048156288146972655, 0.04833638381958008, 0.048448383331298826, 0.048330753326416016, 0.04815871810913086, 0.047982593536376954, 0.047865856170654295, 0.04793958282470703, 0.04807475280761719, 0.048010238647460936, 0.048021759033203125, 0.048187744140625, 0.04818691253662109, 0.04802864074707031, 0.04808806228637695, 0.04841308975219726, 0.048374271392822264, 0.04853081512451172, 0.04850342559814453, 0.04879747009277344, 0.04873648071289063, 0.048452766418457034, 0.04837222290039062, 0.04947740936279297, 0.04789424133300781, 0.04754460906982422, 0.04713238525390625, 0.04736614227294922, 0.04748457717895508, 0.0474568977355957, 0.0474439697265625, 0.047529983520507815, 0.047531776428222657, 0.04789254379272461, 0.04780051040649414, 0.047527393341064456, 0.04776752090454101, 0.047694400787353514, 0.04793484878540039, 0.04770675277709961, 0.047587329864501954, 0.04757708740234375, 0.047804161071777346, 0.047765758514404295, 0.04797625732421875, 0.048519359588623044, 0.05069209671020508, 0.04781430435180664, 0.0482327995300293, 
0.04750131225585937, 0.047672447204589845, 0.04783808135986328, 0.04787177658081055, 0.047589599609375, 0.048056320190429686, 0.047908863067626956, 0.048021759033203125, 0.048329536437988284, 0.048395198822021486, 0.04800483322143555, 0.04796854400634765, 0.047872161865234374, 0.04834214401245117, 0.048242752075195315, 0.04839190292358399, 0.048605918884277344, 0.04847750473022461, 0.04924726486206055, 0.04818227386474609, 0.04823289489746094, 0.04821440124511719, 0.048256385803222654, 0.04807955169677734, 0.048215423583984375, 0.04826297760009766, 0.048470848083496096, 0.04839014434814453, 0.048304126739501956, 0.04843724822998047, 0.04852326583862305, 0.048525310516357424, 0.04875823974609375, 0.04894787216186523, 0.048780479431152345, 0.048537662506103516, 0.04862630462646484, 0.05010150527954101, 0.04814825439453125, 0.047502304077148436, 0.04739481735229492, 0.04733459091186523, 0.0476781120300293, 0.04747699356079101, 0.04740662384033203, 0.04735539245605469, 0.04757968139648437, 0.047511329650878904, 0.047880352020263674, 0.047702110290527344, 0.04783500671386719, 0.04810105514526367, 0.04787071990966797, 0.04793724822998047, 0.047624095916748044, 0.04771587371826172, 0.04825993728637695, 0.048377151489257815, 0.048155326843261716, 0.04806041717529297, 0.048016929626464845, 0.047663551330566406, 0.047595039367675784, 0.0475687370300293, 0.047944351196289064, 0.04777987289428711, 0.04766086578369141, 0.04770611190795898, 0.04785382461547852, 0.04809292984008789, 0.048310432434082035, 0.04795808029174805, 0.04807468795776367, 0.0478410873413086, 0.04812819290161133, 0.04839014434814453, 0.04836912155151367, 0.04862416076660156, 0.04855161666870117, 0.04841046524047852, 0.049316318511962894, 0.04813347244262695, 0.048138145446777345, 0.04826393508911133, 0.048242462158203124, 0.048035999298095704, 0.048158782958984375, 0.04837376022338867, 0.04822220611572266, 0.048216064453125, 0.04840985488891601, 0.04848307037353516, 0.04845568084716797, 0.0483221435546875, 0.04828201675415039, 0.048261119842529294, 0.04836956787109375, 0.04869744110107422, 0.048790782928466794, 0.04873497772216797, 0.04970089721679687, 0.047960033416748045, 0.04748291015625, 0.04731084823608398, 0.04729977416992188, 0.04745702362060547, 0.04748704147338867, 0.04755865478515625, 0.04749311828613281, 0.047484031677246095, 0.04743974304199219, 0.047750144958496096, 0.04762009429931641, 0.047793441772460935, 0.0476638412475586, 0.0478023681640625, 0.04792943954467774, 0.047832992553710936, 0.04777353668212891, 0.04816326522827148, 0.048019168853759765, 0.04832255935668945, 0.048054271697998044, 0.04791276931762695, 0.04767881774902344, 0.04776227188110352, 0.047851520538330077, 0.04764876937866211, 0.04766463851928711, 0.047935775756835934, 0.04788041687011719, 0.047799392700195314, 0.04762422561645508, 0.048429950714111325, 0.04885504150390625, 0.04820089721679687, 0.04836617660522461, 0.048394462585449216, 0.04848230361938476, 0.04825088119506836, 0.04835737609863281, 0.04833792114257812, 0.04813312149047851, 0.04801740646362305, 0.04807424163818359, 0.04812179183959961, 0.048323360443115235, 0.048264961242675784, 0.04807273483276367, 0.04808499145507812, 0.04826931381225586, 0.04817919921875, 0.0482242546081543, 0.048227424621582034, 0.04831036758422851, 0.04838800048828125, 0.048548095703125, 0.04868521499633789, 0.04860502243041992, 0.04875151824951172, 0.04893788909912109, 0.04859580612182617, 0.048639999389648435, 0.04973497772216797, 0.04809603118896484, 0.047462303161621096, 0.04737567901611328, 
0.04724396896362305, 0.0474439697265625, 0.0474315185546875, 0.04734518432617187, 0.047540863037109374, 0.0474804801940918, 0.04749142456054688, 0.04744761657714844, 0.047648353576660155, 0.047690238952636715, 0.04767574310302734, 0.047683582305908204, 0.047889854431152346, 0.04785619354248047, 0.04785561752319336, 0.04805420684814453, 0.048432865142822266, 0.048201889038085935, 0.04817763137817383, 0.04789424133300781, 0.04788422393798828, 0.047873950958251955, 0.04783734512329101, 0.04778188705444336, 0.04794319915771485, 0.04800150299072266, 0.04802969741821289, 0.04795961761474609, 0.04808070373535156, 0.048138942718505856, 0.04831343841552734, 0.048459903717041015, 0.048296672821044925, 0.04846956634521484, 0.04824518585205078, 0.048261119842529294, 0.04820742416381836, 0.04816707229614258, 0.048352928161621095, 0.0483559684753418, 0.04832460784912109, 0.048285694122314454, 0.04801670455932617, 0.04804678344726562, 0.047922367095947264, 0.048334785461425785, 0.04830502319335937, 0.048320510864257815, 0.0482242546081543, 0.04827059173583984, 0.04909056091308594, 0.04835567855834961, 0.048581024169921876, 0.04845772933959961, 0.048328224182128905, 0.04868963241577148, 0.04844134521484375, 0.04815158462524414, 0.04845257568359375, 0.04980780792236328, 0.0479027214050293, 0.0475463981628418, 0.04749414443969727, 0.047477470397949216, 0.04741145706176758, 0.04749852752685547, 0.047956703186035156, 0.04759961700439453, 0.047896160125732425, 0.04773302459716797, 0.047860862731933594, 0.047459327697753906, 0.04759334564208984, 0.048102752685546875, 0.047772319793701175, 0.04786505508422852, 0.04771868896484375, 0.04779248046875, 0.04824422454833984, 0.048156673431396485, 0.048115936279296875, 0.04818499374389648, 0.048042911529541016, 0.04809862518310547, 0.04783993530273437, 0.04768684768676758, 0.04796448135375977, 0.04782115173339844, 0.04797481536865234, 0.047922046661376956, 0.047790721893310545, 0.04799622344970703, 0.04813449478149414, 0.04802825546264648, 0.048218273162841795, 0.04820975875854492, 0.04829788970947266, 0.04816835021972656, 0.04865708923339844, 0.04839744186401367, 0.04849107360839844, 0.04825734329223633, 0.04815014266967774, 0.04832499313354492, 0.04820377731323242, 0.04813619232177734, 0.04820377731323242, 0.04808828735351563, 0.048158561706542966, 0.04823545455932617, 0.04814438247680664, 0.04844134521484375, 0.04848003387451172, 0.04840166473388672, 0.04823139190673828, 0.04833612823486328, 0.04835001754760742, 0.04845356750488281, 0.04867891311645508, 0.048594593048095706, 0.048506912231445314, 0.04837817764282227, 0.049742721557617185, 0.048055393218994144, 0.04775363159179687, 0.04741971206665039, 0.0475096321105957, 0.04750038528442383, 0.04742838287353516, 0.04748287963867188, 0.047480960845947266, 0.047654270172119144, 0.0476759033203125, 0.047588897705078126, 0.04789823913574219, 0.047842144012451175, 0.047702014923095705, 0.04788019180297851, 0.04774121475219727, 0.04782419204711914, 0.04787651062011719, 0.048051551818847654, 0.04886140823364258, 0.048462528228759766, 0.04777267074584961, 0.04767206573486328, 0.04760335922241211, 0.047661407470703125, 0.047924545288085936, 0.048462528228759766, 0.04807475280761719, 0.047922977447509764, 0.04770163345336914, 0.048107776641845706, 0.04778019332885742, 0.04794367980957031, 0.04801945495605469, 0.04822220611572266, 0.048260673522949216, 0.04824313735961914, 0.04847811126708984, 0.0483513298034668, 0.048263168334960936, 0.04847811126708984, 0.04842627334594726, 0.04836435317993164, 0.048187168121337894, 
0.048225536346435546, 0.04810812759399414, 0.04813782501220703, 0.04838063812255859, 0.048375137329101564, 0.048349918365478514, 0.04850646209716797, 0.04855177688598633, 0.04863033676147461, 0.04834409713745117, 0.04843824005126953, 0.048545150756835936, 0.04851980972290039, 0.04887347030639649, 0.04905804824829101, 0.048868480682373046, 0.04867910385131836, 0.048529918670654294, 0.049939712524414065, 0.047915008544921874, 0.047370529174804686, 0.047411678314208984, 0.0473620491027832, 0.04740726470947266, 0.04751043319702149, 0.047350303649902344, 0.047556896209716794, 0.04747257614135742, 0.047391998291015626, 0.047507614135742185, 0.04746630477905273, 0.04762518310546875, 0.04759251022338867, 0.047634719848632816, 0.047994911193847654, 0.047850112915039066, 0.04763852691650391, 0.048386177062988284, 0.04834028625488281, 0.04805484771728516, 0.047874046325683595, 0.04793468856811523, 0.047908737182617185, 0.04788102340698242, 0.04762361526489258, 0.04778255844116211, 0.0478201904296875, 0.04791356658935547, 0.04800044631958008, 0.047850048065185544, 0.047948863983154295, 0.04790572738647461, 0.04781465530395508, 0.048332607269287106, 0.04817103958129883, 0.04821343994140625, 0.04821475219726563, 0.04844876861572266, 0.048588737487792966, 0.04846169662475586, 0.04834819030761719, 0.04844038391113281, 0.048153438568115235, 0.04842086410522461, 0.04822016143798828, 0.04901068878173828, 0.04812192153930664, 0.048455615997314454, 0.048465377807617185, 0.0483221435546875, 0.04827846527099609, 0.0482344970703125, 0.048146430969238284, 0.04828160095214844, 0.04823859024047852, 0.04860518264770508, 0.04873011016845703, 0.04851007843017578, 0.04858134460449219, 0.04870697784423828, 0.04868531036376953, 0.04995862579345703, 0.04835971069335938, 0.04760985565185547, 0.04754227066040039, 0.047718399047851565, 0.04742339324951172, 0.04756649780273438, 0.04794585418701172, 0.04789075088500976, 0.04764876937866211, 0.04791030502319336, 0.04789718246459961, 0.047862815856933597, 0.04768355178833008, 0.04765708923339844, 0.04794047927856445, 0.04800921630859375, 0.04788633728027344, 0.048793888092041014, 0.048101089477539063, 0.04827932739257813, 0.04804207992553711, 0.047926750183105465, 0.048005760192871096, 0.04793289566040039, 0.04781318283081055, 0.04782883071899414, 0.04797251129150391, 0.047865856170654295, 0.04797212982177734, 0.04794800186157227, 0.047900672912597655, 0.04817932891845703, 0.04846681594848633, 0.048084224700927734, 0.04810931015014648, 0.04825088119506836, 0.04845568084716797, 0.04839180755615234, 0.04830451202392578, 0.04825526428222656, 0.04850249481201172, 0.04836556625366211, 0.04861273574829102, 0.04828432083129883, 0.04841062545776367, 0.04823036956787109, 0.04815801620483398, 0.0480775032043457, 0.04849868774414062, 0.0483568000793457, 0.048142913818359376, 0.048516159057617185, 0.04857942581176758, 0.04847536087036133, 0.04859529495239258, 0.04848284912109375, 0.04863180923461914, 0.04870963287353516, 0.04879564666748047, 0.048737342834472654, 0.048460193634033207, 0.048691200256347655]",tokens/s,20.791028011501258,,, 
4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,916.205568,645.791744,0.0,260.046848,253.883392,s,1,7.66553662109375,7.66553662109375,0.0,7.66553662109375,7.66553662109375,7.66553662109375,7.66553662109375,[7.66553662109375],,kWh,1.5644842033331466e-05,1.7184331225910374e-06,4.820003856043087e-06,2.218327901196559e-05,,MB,1280.917504,756.9408,0.0,341.835776,312.754176,s,16,0.1817112636566162,0.011356953978538514,8.128927567747061e-05,0.011335920333862305,0.011469647884368897,0.011498096227645874,0.011538301038742066,"[0.011202591896057129, 0.01128172779083252, 0.011407039642333984, 0.011354463577270508, 0.01148134422302246, 0.011346400260925293, 0.011324704170227051, 0.011457951545715332, 0.01132367992401123, 0.011303775787353516, 0.011548352241516114, 0.011348159790039062, 0.011304863929748535, 0.011325440406799316, 0.011385472297668457, 
0.011315296173095703]",tokens/s,22541.255382716954,kWh,3.408080381126275e-07,3.757389801499991e-08,2.243985977735976e-07,6.02780533901225e-07,tokens/kWh,424698518.95043045,MB,1294.155776,784.203776,0.0,369.098752,313.412096,s,16,9.891184814453126,0.6181990509033203,0.0019364419070296783,0.617803466796875,0.6210430297851562,0.62130908203125,0.6217072265625,"[0.6187214965820312, 0.618590576171875, 0.6177598266601563, 0.62073095703125, 0.62094287109375, 0.6178471069335938, 0.6160676879882813, 0.6211431884765625, 0.6167020874023438, 0.6159141845703126, 0.616677978515625, 0.6218067626953125, 0.61667236328125, 0.6188052978515625, 0.6166756591796875, 0.6161267700195312]",tokens/s,101.90892384571539,kWh,1.7999287462669538e-05,1.9850220942598222e-06,7.257395582101198e-06,2.724170513903056e-05,tokens/kWh,2312630.5669367495,,s,1008,9.8833883190155,0.009804948729182045,0.00018407207501759405,0.00977515172958374,0.009890349197387696,0.009977855682373048,0.01046766662597656,"[0.00944428825378418, 0.009783231735229492, 0.00975382423400879, 0.009761216163635254, 0.009820544242858886, 0.009760704040527344, 0.009776415824890137, 0.009807840347290039, 0.009919296264648438, 0.009799136161804198, 0.009820416450500488, 0.009752863883972169, 0.009760767936706542, 0.009780511856079102, 0.009899104118347168, 0.009794239997863769, 0.00985593605041504, 0.009784640312194825, 0.009751551628112793, 0.009719488143920898, 0.01001910400390625, 0.009844351768493652, 0.00978662395477295, 0.009756511688232421, 0.009759679794311523, 0.009867327690124512, 0.009760767936706542, 0.009815775871276855, 0.00976915168762207, 0.009772512435913086, 0.009773823738098144, 0.009902239799499512, 0.009762592315673828, 0.009756832122802735, 0.009799455642700195, 0.00973145580291748, 0.00978332805633545, 0.009818943977355957, 0.009789600372314453, 0.009813823699951173, 0.009776224136352539, 0.010026975631713867, 0.009865983963012696, 0.010234880447387695, 0.009943327903747558, 0.009887776374816894, 0.010070719718933106, 0.00987945556640625, 0.009853023529052735, 0.009834495544433594, 0.009801024436950684, 0.00980348777770996, 0.009846879959106445, 0.009802623748779297, 0.009815903663635253, 0.009816224098205567, 0.009781248092651367, 0.00974556827545166, 0.009730303764343261, 0.009716320037841796, 0.00972809600830078, 0.009826272010803223, 0.009801664352416993, 0.00943449592590332, 0.009838144302368164, 0.00973043155670166, 0.0096976957321167, 0.009767040252685547, 0.00974835205078125, 0.009766912460327149, 0.009836416244506836, 0.00982643222808838, 0.009793536186218262, 0.009762816429138184, 0.00974240016937256, 0.009778464317321777, 0.009805695533752441, 0.00972879981994629, 0.009736448287963868, 0.009985183715820313, 0.009899616241455078, 0.009883872032165527, 0.009744895935058593, 0.009727359771728516, 0.009733023643493653, 0.00974028778076172, 0.009789440155029297, 0.009784928321838379, 0.009755392074584961, 0.009772255897521972, 0.009784799575805664, 0.009860320091247559, 0.009750271797180176, 0.009721823692321777, 0.00979958438873291, 0.00971993637084961, 0.00974028778076172, 0.009773056030273437, 0.009794943809509278, 0.009766752243041993, 0.009747072219848633, 0.009805472373962402, 0.00968563175201416, 0.009714688301086426, 0.00976153564453125, 0.009720128059387207, 0.009812895774841308, 0.009962400436401368, 0.009789440155029297, 0.011431967735290528, 0.009793184280395507, 0.009856991767883301, 0.009824064254760742, 0.009867808341979981, 0.009832448005676269, 0.009834848403930663, 0.009797535896301269, 0.009766464233398437, 
0.009773407936096191, 0.009742239952087402, 0.009911840438842773, 0.009847359657287598, 0.009857024192810059, 0.009766400337219238, 0.009933152198791505, 0.009754624366760254, 0.009451168060302734, 0.009794079780578613, 0.009783647537231445, 0.009856672286987304, 0.009699423789978028, 0.009803775787353516, 0.009764767646789551, 0.009744223594665527, 0.009750687599182128, 0.00974608039855957, 0.00978774356842041, 0.00975273609161377, 0.009838015556335448, 0.00981049633026123, 0.009850720405578613, 0.01002086353302002, 0.010026304244995118, 0.009845343589782715, 0.009807968139648437, 0.00982630443572998, 0.010075584411621093, 0.009901920318603515, 0.009792223930358886, 0.009821503639221192, 0.009751456260681152, 0.009802623748779297, 0.009714591979980468, 0.009768320083618163, 0.009731807708740235, 0.009806591987609864, 0.009981087684631348, 0.009736767768859863, 0.00972435188293457, 0.009756671905517577, 0.00974828815460205, 0.009707712173461914, 0.009783295631408692, 0.009695039749145509, 0.00976915168762207, 0.009817631721496582, 0.009726431846618653, 0.009807680130004883, 0.009697312355041504, 0.00977894401550293, 0.009727999687194825, 0.009787520408630372, 0.009761119842529297, 0.009766016006469726, 0.009798368453979493, 0.009754655838012695, 0.00981817626953125, 0.009813183784484864, 0.009775424003601074, 0.0097510404586792, 0.009752799987792968, 0.010123040199279785, 0.009797727584838867, 0.00979462432861328, 0.009833312034606934, 0.009797535896301269, 0.00976300811767578, 0.009863072395324708, 0.009947327613830567, 0.009510463714599609, 0.009767711639404297, 0.009839872360229492, 0.009777183532714845, 0.009789376258850098, 0.009749279975891114, 0.009793536186218262, 0.011833087921142578, 0.011063615798950195, 0.009787327766418457, 0.009826399803161622, 0.009766976356506347, 0.009809215545654297, 0.00975107192993164, 0.00977894401550293, 0.009760448455810547, 0.009944992065429687, 0.009715904235839843, 0.009711487770080567, 0.009756352424621582, 0.00974947166442871, 0.009764800071716309, 0.009840736389160156, 0.009750432014465332, 0.009771007537841797, 0.00990601634979248, 0.009807744026184083, 0.009773344039916991, 0.009750783920288086, 0.009771103858947755, 0.0097457275390625, 0.009905759811401366, 0.009816191673278808, 0.009742976188659668, 0.009785599708557129, 0.009752320289611817, 0.009766847610473632, 0.009824543952941895, 0.009776927947998048, 0.009749631881713867, 0.00980288028717041, 0.009742048263549805, 0.00975062370300293, 0.009801664352416993, 0.0097259521484375, 0.009727328300476075, 0.009722528457641601, 0.00970956802368164, 0.009834879875183106, 0.00973583984375, 0.009779168128967284, 0.009676799774169922, 0.009776191711425781, 0.009751487731933593, 0.009950528144836426, 0.00978809642791748, 0.009778976440429688, 0.010473695755004883, 0.009998175621032715, 0.009945247650146484, 0.009768959999084472, 0.009762816429138184, 0.009760767936706542, 0.009527551651000976, 0.009717503547668456, 0.009759103775024414, 0.00975759983062744, 0.009706432342529298, 0.00973091220855713, 0.009734720230102538, 0.009814335823059081, 0.009747936248779296, 0.009898112297058106, 0.009793343544006348, 0.009868000030517578, 0.009825535774230957, 0.009773759841918946, 0.009754591941833497, 0.009767135620117188, 0.009774911880493163, 0.009697279930114745, 0.009756704330444337, 0.009799648284912109, 0.009872639656066894, 0.009745152473449707, 0.009778719902038574, 0.009764575958251953, 0.00975494384765625, 0.009942591667175293, 0.009745120048522949, 0.009775263786315918, 0.010086400032043457, 
0.009857024192810059, 0.009805760383605956, 0.009870944023132324, 0.010280799865722657, 0.009886336326599122, 0.009766912460327149, 0.010063199996948243, 0.012452768325805665, 0.00985251235961914, 0.00984489631652832, 0.009785344123840332, 0.009846464157104492, 0.009866656303405762, 0.00978012752532959, 0.009752767562866211, 0.009780447959899902, 0.009888352394104004, 0.009854240417480469, 0.009769696235656739, 0.009737631797790528, 0.009774751663208008, 0.009882047653198242, 0.009748991966247558, 0.009754400253295898, 0.009781311988830567, 0.009751839637756348, 0.0097227201461792, 0.00976211166381836, 0.009714048385620117, 0.009902432441711425, 0.009803808212280274, 0.009844832420349121, 0.009811840057373046, 0.009799679756164551, 0.009496288299560547, 0.00972652816772461, 0.009902048110961913, 0.009875071525573731, 0.010246047973632813, 0.009853792190551759, 0.009868639945983887, 0.009769632339477538, 0.009797632217407226, 0.00978553581237793, 0.009787199974060058, 0.00980953598022461, 0.009836959838867187, 0.009801664352416993, 0.009728032112121582, 0.009810208320617676, 0.00976252841949463, 0.009727999687194825, 0.009823552131652832, 0.009681056022644043, 0.009742624282836914, 0.009773632049560547, 0.009989055633544922, 0.009825023651123047, 0.009858719825744629, 0.009842944145202637, 0.009903360366821289, 0.009953920364379882, 0.009857439994812011, 0.009868608474731446, 0.009847519874572754, 0.00981158447265625, 0.009835776329040527, 0.00982038402557373, 0.009689791679382324, 0.009781248092651367, 0.009731328010559082, 0.009945856094360352, 0.009725215911865235, 0.009761504173278809, 0.009791104316711426, 0.009744768142700195, 0.009748000144958496, 0.009722240447998047, 0.009713664054870605, 0.009687071800231933, 0.00982153606414795, 0.009929439544677735, 0.009756671905517577, 0.009843935966491699, 0.009734527587890625, 0.009717568397521973, 0.00974499225616455, 0.009766847610473632, 0.009713855743408203, 0.009801600456237793, 0.00975830364227295, 0.009750656127929688, 0.00988803195953369, 0.009754624366760254, 0.00979580783843994, 0.009787296295166016, 0.00974630355834961, 0.009445504188537597, 0.009820480346679688, 0.009816032409667969, 0.0097193603515625, 0.009904512405395508, 0.00974835205078125, 0.009773311614990234, 0.009743552207946777, 0.00976979160308838, 0.00974028778076172, 0.009820159912109374, 0.009758496284484863, 0.009740032196044923, 0.009732576370239257, 0.009852928161621094, 0.00974028778076172, 0.009755776405334473, 0.00976576042175293, 0.009747936248779296, 0.009797375679016113, 0.009710368156433105, 0.0098121919631958, 0.009723008155822754, 0.009691807746887207, 0.009783391952514648, 0.009694687843322754, 0.009799872398376465, 0.009804096221923828, 0.009745471954345703, 0.009717727661132812, 0.009741375923156739, 0.009762880325317382, 0.009674528121948241, 0.009828351974487304, 0.009699328422546387, 0.009801728248596191, 0.009806079864501953, 0.009702367782592774, 0.009724703788757325, 0.009729215621948242, 0.009736512184143066, 0.009789183616638183, 0.009791935920715332, 0.010111424446105957, 0.009863327980041504, 0.009856736183166504, 0.009776191711425781, 0.009787615776062011, 0.009738719940185547, 0.009783552169799805, 0.00974233627319336, 0.009844544410705567, 0.00972812843322754, 0.009755999565124511, 0.009773951530456543, 0.009742176055908203, 0.009803775787353516, 0.00973420810699463, 0.009865152359008788, 0.009739871978759765, 0.009783103942871093, 0.009781248092651367, 0.00985968017578125, 0.009516608238220215, 0.010185471534729004, 0.009977855682373048, 
0.009867263793945312, 0.010278464317321778, 0.009918784141540527, 0.009907584190368652, 0.009896832466125488, 0.009879232406616211, 0.009862400054931641, 0.009819071769714355, 0.009819519996643067, 0.0098056001663208, 0.00978825569152832, 0.009842687606811524, 0.009818112373352051, 0.009780287742614746, 0.009751487731933593, 0.00977843189239502, 0.009831168174743653, 0.009730048179626465, 0.009807871818542481, 0.009792767524719238, 0.009873888015747071, 0.00971190357208252, 0.009772831916809083, 0.009849151611328126, 0.009911968231201171, 0.009795519828796386, 0.009775424003601074, 0.00974403190612793, 0.009744511604309083, 0.009828831672668457, 0.009798912048339844, 0.009783583641052246, 0.009754847526550293, 0.009799967765808106, 0.009773887634277344, 0.009808799743652345, 0.009961471557617188, 0.009744000434875489, 0.009904512405395508, 0.009756352424621582, 0.009750847816467284, 0.009693183898925782, 0.009737855911254882, 0.00970796775817871, 0.00975391960144043, 0.009746368408203126, 0.009730751991271974, 0.009831744194030762, 0.009673407554626465, 0.010473407745361329, 0.010301695823669434, 0.010957663536071777, 0.009800736427307128, 0.009800864219665526, 0.009814528465270997, 0.009791775703430176, 0.009750687599182128, 0.009846464157104492, 0.00975641632080078, 0.009967679977416993, 0.00940447998046875, 0.00984598445892334, 0.009743488311767579, 0.009721343994140624, 0.009834815979003906, 0.009762816429138184, 0.00972390365600586, 0.009741408348083496, 0.009931679725646972, 0.009861120223999023, 0.009774463653564453, 0.009807616233825684, 0.00974732780456543, 0.009752575874328612, 0.009752256393432617, 0.009770400047302246, 0.010263615608215333, 0.010391391754150391, 0.00983846378326416, 0.009977855682373048, 0.009800000190734863, 0.009776032447814942, 0.009720576286315919, 0.009795392036437988, 0.009700863838195802, 0.009728863716125488, 0.009834495544433594, 0.009754112243652344, 0.009769280433654786, 0.009810272216796876, 0.009753631591796876, 0.009700448036193847, 0.009751903533935546, 0.009716256141662598, 0.009764703750610351, 0.00974351978302002, 0.009789376258850098, 0.009729984283447266, 0.009759200096130372, 0.009805631637573243, 0.009802656173706055, 0.00979535961151123, 0.009736191749572755, 0.00972390365600586, 0.00972390365600586, 0.009745439529418946, 0.009729184150695801, 0.009717184066772461, 0.009798015594482422, 0.009752575874328612, 0.009781087875366211, 0.009768192291259765, 0.009736512184143066, 0.009728863716125488, 0.009668352127075196, 0.00972390365600586, 0.009670495986938477, 0.009788607597351074, 0.009837120056152344, 0.009802144050598144, 0.009784768104553223, 0.009809663772583008, 0.009751359939575195, 0.009479647636413574, 0.009721664428710938, 0.009720576286315919, 0.009798720359802246, 0.009802687644958495, 0.00972003173828125, 0.009749407768249512, 0.009739392280578614, 0.009809472084045411, 0.00977023983001709, 0.009770015716552734, 0.009768863677978516, 0.009785344123840332, 0.009880640029907227, 0.009751487731933593, 0.009836895942687988, 0.009723487854003907, 0.009737728118896484, 0.009732128143310547, 0.009742048263549805, 0.009749312400817872, 0.009754207611083985, 0.009728416442871094, 0.00969059181213379, 0.009769503593444823, 0.009686431884765624, 0.009730655670166016, 0.009795583724975587, 0.009788703918457032, 0.009781439781188965, 0.009722335815429688, 0.00972208023071289, 0.009700832366943359, 0.009724063873291016, 0.009701727867126465, 0.009729920387268066, 0.009820159912109374, 0.00968073558807373, 0.010020319938659667, 
0.00969593620300293, 0.009778304100036621, 0.0097074556350708, 0.009687071800231933, 0.009717696189880371, 0.009753567695617676, 0.009707263946533203, 0.00972969627380371, 0.009732383728027344, 0.009784640312194825, 0.009906656265258789, 0.009749119758605956, 0.009759967803955077, 0.010134207725524902, 0.010030912399291993, 0.009916607856750487, 0.009778528213500977, 0.009792384147644042, 0.009756319999694825, 0.009803071975708007, 0.009759455680847169, 0.00984278392791748, 0.00978320026397705, 0.00976643180847168, 0.009515199661254883, 0.009830400466918946, 0.009773056030273437, 0.009797823905944825, 0.009865023612976075, 0.009834688186645507, 0.009797439575195313, 0.00972009563446045, 0.00974614429473877, 0.009750528335571289, 0.009791040420532226, 0.009768383979797363, 0.0098088960647583, 0.010220864295959472, 0.009834719657897948, 0.00984876823425293, 0.010017312049865723, 0.009880864143371582, 0.009757408142089844, 0.009787391662597657, 0.009738368034362793, 0.00982630443572998, 0.009742207527160644, 0.00976089572906494, 0.00972812843322754, 0.009750368118286133, 0.009791487693786622, 0.009734047889709472, 0.009888928413391113, 0.009744832038879395, 0.009732512474060059, 0.009756671905517577, 0.009730143547058106, 0.009709055900573731, 0.009776960372924805, 0.00973680019378662, 0.009747712135314942, 0.009840607643127441, 0.009751328468322754, 0.009754912376403808, 0.009779999732971192, 0.009736960411071777, 0.009739968299865722, 0.009765376091003418, 0.009760767936706542, 0.00971571159362793, 0.009718976020812987, 0.009751359939575195, 0.00971168041229248, 0.009774463653564453, 0.009736767768859863, 0.009828351974487304, 0.009885503768920898, 0.009893664360046387, 0.009769599914550781, 0.009709343910217286, 0.009719807624816895, 0.009719807624816895, 0.009846879959106445, 0.009729951858520507, 0.009801695823669434, 0.00973417568206787, 0.009778816223144532, 0.009447168350219727, 0.009758912086486816, 0.00979702377319336, 0.009783712387084961, 0.009791487693786622, 0.009768959999084472, 0.009769023895263672, 0.009744319915771484, 0.00974841594696045, 0.009737600326538086, 0.00973091220855713, 0.009742143630981446, 0.009684608459472657, 0.009803744316101074, 0.00967238426208496, 0.009741056442260742, 0.009785599708557129, 0.009703167915344238, 0.009717439651489258, 0.009720128059387207, 0.009754143714904785, 0.009881600379943848, 0.00977558422088623, 0.009767264366149902, 0.009721504211425782, 0.009762911796569825, 0.010298656463623048, 0.011973247528076172, 0.010305055618286133, 0.009823840141296386, 0.009821184158325195, 0.009776991844177246, 0.009807904243469239, 0.009966848373413086, 0.009872127532958984, 0.01033574390411377, 0.010844672203063965, 0.010565664291381836, 0.009811327934265137, 0.010027615547180176, 0.009922592163085938, 0.009914336204528808, 0.009844544410705567, 0.009789631843566895, 0.009730048179626465, 0.009764543533325195, 0.009773216247558593, 0.009699487686157226, 0.00970956802368164, 0.009684896469116211, 0.009744000434875489, 0.009746272087097168, 0.009742783546447755, 0.0098220157623291, 0.009732480049133302, 0.009836544036865234, 0.009721856117248535, 0.009831487655639648, 0.009745344161987305, 0.009733216285705566, 0.009743103981018067, 0.009733407974243165, 0.009822688102722167, 0.009404255867004394, 0.009722335815429688, 0.009703104019165038, 0.009687487602233886, 0.009772192001342774, 0.009738112449645996, 0.009773216247558593, 0.009808704376220704, 0.009954527854919433, 0.009756959915161133, 0.009868895530700684, 0.009796671867370606, 
0.00987939167022705, 0.009916447639465331, 0.009852895736694336, 0.009846783638000489, 0.009833919525146484, 0.009802207946777344, 0.009829536437988281, 0.00979859161376953, 0.009760767936706542, 0.009955327987670898, 0.009806079864501953, 0.009854496002197266, 0.00986134433746338, 0.00982630443572998, 0.009793600082397462, 0.009950495719909669, 0.009806719779968262, 0.009735967636108398, 0.009755743980407714, 0.009733119964599609, 0.009798975944519043, 0.00972383975982666, 0.009818079948425294, 0.009722559928894044, 0.009698944091796875, 0.00976300811767578, 0.009722047805786133, 0.009747520446777343, 0.009991264343261719, 0.00999407958984375, 0.009756511688232421, 0.009844287872314453, 0.009679488182067871, 0.009741920471191406, 0.009755200386047363, 0.00975443172454834, 0.009731904029846191, 0.009676192283630371, 0.009752511978149414, 0.009777440071105956, 0.009761568069458007, 0.00969644832611084, 0.00975648021697998, 0.00971241569519043, 0.009715423583984376, 0.009732383728027344, 0.009813023567199706, 0.009788576126098633, 0.009709376335144043, 0.009748671531677246, 0.009725791931152344, 0.009413887977600097, 0.010082783699035645, 0.010035296440124512, 0.009818304061889648, 0.009752832412719726, 0.00981497573852539, 0.009804287910461425, 0.00975494384765625, 0.009797727584838867, 0.009860320091247559, 0.009826815605163575, 0.009803104400634765, 0.009775551795959472, 0.009740832328796387, 0.009853023529052735, 0.009840096473693848, 0.009781567573547364, 0.009789440155029297, 0.009817376136779785, 0.009836832046508788, 0.009743040084838867, 0.009757439613342285, 0.009767744064331054, 0.009725215911865235, 0.009928735733032226, 0.009874303817749024, 0.009754207611083985, 0.010055871963500976, 0.009803423881530762, 0.009811936378479004, 0.009771615982055663, 0.009807871818542481, 0.009758015632629395, 0.009781951904296876, 0.009785375595092773, 0.009938912391662598, 0.009776512145996093, 0.009708127975463866, 0.009723775863647461, 0.00975436782836914, 0.009740703582763672, 0.009833600044250488, 0.009767807960510255, 0.009762080192565917, 0.009722271919250488, 0.009785183906555176, 0.009849311828613282, 0.009795583724975587, 0.009826592445373535, 0.009804672241210937, 0.009847647666931152, 0.009823424339294434, 0.010565471649169923, 0.009808032035827637, 0.009814816474914551, 0.009760671615600586, 0.009791232109069825, 0.00978553581237793, 0.009843232154846192, 0.009790271759033202, 0.00979132843017578, 0.009822367668151855, 0.009772095680236816, 0.00945199966430664, 0.009734272003173828, 0.009789567947387695, 0.009719552040100098, 0.009807871818542481, 0.0097259521484375, 0.009840928077697753, 0.009797344207763672, 0.009747648239135742, 0.009896415710449219, 0.009781599998474121, 0.009785311698913574, 0.009765055656433106, 0.009754400253295898, 0.009748543739318848, 0.009742464065551758, 0.009770912170410156, 0.009746399879455567, 0.009752127647399902, 0.009718015670776367, 0.009816255569458008, 0.009738431930541993, 0.009867327690124512, 0.009766655921936035, 0.009792736053466797, 0.009740544319152831, 0.009789792060852051, 0.009713151931762695, 0.009700032234191894, 0.009788960456848144, 0.009722335815429688, 0.009741567611694335, 0.009759039878845214, 0.009742783546447755, 0.00974847984313965, 0.009761792182922363, 0.009772031784057618, 0.009685215950012207, 0.00972480010986328, 0.009710495948791503, 0.009896096229553222, 0.009860960006713868, 0.009703424453735352, 0.009733440399169922, 0.009722559928894044, 0.009816160202026367, 0.009735263824462891, 0.009828319549560547, 
0.009810784339904784, 0.00981164836883545, 0.00977337646484375, 0.009731679916381837, 0.009759136199951172, 0.009764096260070802, 0.009818880081176758, 0.009752575874328612, 0.009777152061462402, 0.009758079528808594, 0.010092384338378906, 0.00998684787750244, 0.00991436767578125, 0.009963520050048828, 0.009992192268371582, 0.009516096115112305, 0.009816767692565918, 0.009887999534606933, 0.009807007789611816, 0.009859264373779297, 0.009799712181091309, 0.009798272132873535, 0.009951231956481933, 0.009871520042419434, 0.00977724838256836, 0.009782400131225586, 0.009757311820983886, 0.009748064041137695, 0.009768511772155762, 0.009861472129821778, 0.009803359985351562, 0.009765376091003418, 0.009746848106384277, 0.009827936172485351, 0.009851648330688476, 0.009778847694396972, 0.009799519538879394, 0.009814240455627442, 0.009775039672851562, 0.009795583724975587, 0.009741439819335937, 0.009771679878234863, 0.00977337646484375, 0.009760671615600586, 0.00975596809387207, 0.009790240287780761, 0.009781151771545411, 0.009751968383789063, 0.009797727584838867, 0.009757280349731445, 0.00978048038482666, 0.009777728080749512, 0.009811039924621581, 0.009734496116638184, 0.009679519653320312, 0.009699328422546387, 0.009706624031066894, 0.009707679748535156, 0.00971337604522705, 0.009763839721679688, 0.009746463775634765, 0.009779135704040527, 0.00971168041229248, 0.009722111701965332, 0.009786432266235351, 0.009749152183532716, 0.009776736259460449, 0.009718144416809082, 0.009717791557312012, 0.00980399990081787, 0.00971497631072998, 0.009810720443725586, 0.0097543363571167, 0.009759872436523438, 0.009795968055725097, 0.009746944427490235, 0.009766912460327149, 0.009773056030273437]",tokens/s,101.98931454110955,,, 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = 
backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,841.887744,545.128448,0.0,159.383552,141.760512,s,1,7.9829306640625,7.9829306640625,0.0,7.9829306640625,7.9829306640625,7.9829306640625,7.9829306640625,[7.9829306640625],,kWh,1.1520401229199706e-05,1.2633210803813114e-06,3.7986141500634396e-06,1.658233645964446e-05,,MB,1292.886016,614.334464,0.0,199.22944,184.771584,s,30,0.19476470327377318,0.0064921567757924405,0.00010932250531919672,0.0064503359794616695,0.0066313791275024415,0.0067772047996521,0.006817365641593933,"[0.006820223808288574, 0.0064410557746887205, 0.006444320201873779, 0.006736671924591065, 0.006433407783508301, 0.006421440124511719, 0.006481919765472412, 0.006619679927825928, 0.006386079788208008, 0.006454912185668946, 0.00644649600982666, 0.006409215927124024, 0.006545407772064209, 0.006381760120391846, 0.0064479680061340335, 0.006497856140136719, 0.00646608018875122, 0.0064297599792480465, 0.0064419198036193845, 0.006450463771820068, 0.006490464210510254, 0.006810368061065674, 0.006450208187103271, 0.006430335998535156, 0.006447231769561768, 0.0064626879692077635, 0.0064486398696899415, 0.006463136196136475, 0.006477183818817138, 0.00652780818939209]",tokens/s,39432.19623940032,kWh,1.905411043163144e-07,2.1013270092877177e-08,1.1222231200004048e-07,3.2377668640923206e-07,tokens/kWh,790668416.6766509,MB,1306.390528,616.431616,0.0,201.326592,184.774144,s,30,10.150693634033203,0.33835645446777346,0.00305314062611328,0.33731991577148435,0.3416425628662109,0.3430384872436523,0.34882457305908204,"[0.35100250244140624, 0.34248370361328123, 0.34349240112304685, 0.33788888549804685, 0.3356640625, 0.338087890625, 0.3367860107421875, 0.339640380859375, 0.33927737426757815, 0.3375888977050781, 0.3368311767578125, 0.3371366271972656, 0.337141845703125, 0.33606500244140625, 0.338419921875, 0.3362176513671875, 0.33584613037109373, 0.3364391174316406, 0.3415491027832031, 0.33623696899414063, 0.3369935302734375, 0.33624237060546874, 0.3363418884277344, 0.3391838073730469, 0.33949880981445313, 0.3360239868164063, 0.3382908020019531, 0.33749798583984375, 0.33685055541992187, 0.3399742431640625]",tokens/s,186.19417235322874,kWh,9.675515677074287e-06,1.0670399091473843e-06,3.738651139066676e-06,1.4481206725288347e-05,tokens/kWh,4350466.17282135,,s,1890,10.137191549777999,0.005363593412580944,0.0001639975976887082,0.005326560020446777,0.005433769416809082,0.005530254411697388,0.006151267066001889,"[0.005814271926879883, 0.0063077759742736815, 0.00563750410079956, 0.005646495819091797, 0.005517951965332031, 0.005682208061218262, 0.005876607894897461, 0.005509215831756592, 0.0055313920974731446, 0.005501152038574219, 0.00546563196182251, 0.005504576206207276, 0.005425248146057129, 0.005503744125366211, 0.005473631858825684, 0.0055220799446105955, 0.005502143859863281, 0.005375936031341553, 0.005417856216430664, 0.0054007678031921386, 0.005345056056976319, 
0.005404704093933106, 0.005386496067047119, 0.005416704177856445, 0.005352543830871582, 0.0054137282371521, 0.005416800022125244, 0.005450208187103272, 0.005690271854400635, 0.0060076479911804195, 0.007196576118469238, 0.007116672039031983, 0.00719484806060791, 0.006051807880401611, 0.0054325442314147945, 0.005373023986816407, 0.005923583984375, 0.005380383968353272, 0.005388383865356445, 0.005403488159179687, 0.005350783824920654, 0.005350944042205811, 0.005391456127166748, 0.005368896007537842, 0.005388735771179199, 0.005394879817962646, 0.005304128170013428, 0.0053463678359985355, 0.005334335803985596, 0.00538812780380249, 0.005372704029083252, 0.005659647941589355, 0.005294079780578613, 0.005318560123443603, 0.005365856170654297, 0.005422143936157226, 0.00529094409942627, 0.005361279964447021, 0.005294591903686524, 0.005460959911346435, 0.005337984085083008, 0.005340384006500244, 0.0054336957931518555, 0.005288159847259522, 0.005347392082214355, 0.005345119953155518, 0.00536521577835083, 0.005488128185272217, 0.0053554878234863285, 0.005376959800720215, 0.0052921600341796875, 0.005334943771362305, 0.0054273600578308104, 0.005349472045898438, 0.005295839786529541, 0.005510208129882813, 0.005332064151763916, 0.005309823989868164, 0.005337567806243897, 0.005418752193450928, 0.0066910719871520995, 0.005990655899047852, 0.006598400115966797, 0.006061471939086914, 0.006177440166473389, 0.005390272140502929, 0.005471263885498047, 0.005313471794128418, 0.005340352058410645, 0.005464928150177002, 0.005345344066619873, 0.005308351993560791, 0.005318880081176758, 0.0053491201400756834, 0.005319839954376221, 0.005303328037261963, 0.00529750394821167, 0.005302783966064453, 0.005328896045684814, 0.005296127796173096, 0.00529807996749878, 0.005324031829833984, 0.005284543991088867, 0.0053597760200500486, 0.00533292818069458, 0.0053218560218811035, 0.005298751831054688, 0.005382688045501709, 0.005309343814849854, 0.0053414077758789065, 0.005397312164306641, 0.005336639881134034, 0.005445024013519287, 0.005375103950500488, 0.0053348479270935055, 0.005332543849945069, 0.005323200225830078, 0.00532480001449585, 0.005324831962585449, 0.005343200206756592, 0.005334976196289062, 0.005328095912933349, 0.005360479831695557, 0.005419007778167725, 0.005361248016357422, 0.005597599983215332, 0.005204607963562011, 0.005319839954376221, 0.005337952136993408, 0.005345280170440674, 0.005320672035217285, 0.0053350720405578615, 0.005325056076049804, 0.005318336009979248, 0.005390751838684082, 0.005328256130218506, 0.005287871837615967, 0.0053067197799682615, 0.005320703983306885, 0.005297247886657715, 0.006054815769195557, 0.005411168098449707, 0.005330751895904541, 0.0053574080467224125, 0.005283008098602295, 0.005302559852600097, 0.00532534408569336, 0.005296319961547852, 0.005322495937347412, 0.005330175876617432, 0.00528275203704834, 0.005547904014587403, 0.005525504112243652, 0.006708864212036133, 0.00686953592300415, 0.006286367893218994, 0.005326720237731933, 0.005345344066619873, 0.0053747200965881346, 0.005304224014282226, 0.00532089614868164, 0.0053719358444213865, 0.005519552230834961, 0.0054683198928833, 0.005500576019287109, 0.005328127861022949, 0.005338880062103271, 0.005336063861846924, 0.005299680233001709, 0.005337215900421142, 0.005335135936737061, 0.005300447940826416, 0.005436704158782959, 0.005532608032226562, 0.005459904193878174, 0.005379392147064209, 0.005358208179473877, 0.005322368144989014, 0.005418816089630127, 0.005323328018188476, 0.005509439945220947, 0.005527008056640625, 
0.005528863906860352, 0.0054479680061340335, 0.005392672061920166, 0.005443967819213867, 0.005670911788940429, 0.005472256183624268, 0.005339136123657226, 0.00523635196685791, 0.005300064086914063, 0.005307231903076172, 0.005355072021484375, 0.0053805761337280275, 0.00530841588973999, 0.005305888175964356, 0.0053003840446472165, 0.0052873601913452145, 0.005318943977355957, 0.005402463912963867, 0.005296127796173096, 0.005309184074401855, 0.005416959762573242, 0.005289023876190186, 0.005318624019622803, 0.005364704132080078, 0.005391424179077149, 0.005301568031311035, 0.0054206719398498535, 0.005322847843170166, 0.005341087818145752, 0.0053227200508117675, 0.0053125438690185545, 0.005339136123657226, 0.005477888107299805, 0.0053448319435119625, 0.005341728210449218, 0.006840735912322998, 0.005341184139251709, 0.005455840110778809, 0.005343264102935791, 0.005316703796386719, 0.0053472318649291995, 0.005302271842956543, 0.005351424217224121, 0.005368832111358642, 0.005303520202636719, 0.005340960025787354, 0.0053350400924682614, 0.00528329610824585, 0.005306111812591553, 0.00538105583190918, 0.00527350378036499, 0.0053591361045837406, 0.005320256233215332, 0.005288959980010987, 0.005282944202423096, 0.005306879997253418, 0.005279967784881592, 0.005281792163848877, 0.0053002238273620605, 0.00528985595703125, 0.005296319961547852, 0.005396416187286377, 0.005390367984771729, 0.005307360172271729, 0.005358560085296631, 0.005288320064544678, 0.005291264057159423, 0.00548905611038208, 0.005289696216583252, 0.005320352077484131, 0.005258272171020508, 0.005344672203063965, 0.005326848030090332, 0.00536729621887207, 0.005342080116271973, 0.005293824195861816, 0.005306399822235107, 0.005310688018798828, 0.005302271842956543, 0.005324096202850342, 0.005341119766235351, 0.005311232089996338, 0.005312511920928955, 0.005312511920928955, 0.005319712162017823, 0.005318687915802002, 0.00533190393447876, 0.00531763219833374, 0.005313536167144775, 0.005443295955657959, 0.005313983917236328, 0.005321248054504395, 0.005323296070098877, 0.005309599876403809, 0.005313055992126465, 0.0053105602264404295, 0.005311488151550293, 0.00531763219833374, 0.005314144134521484, 0.005304480075836181, 0.005341216087341309, 0.005326367855072021, 0.005290688037872315, 0.005291007995605469, 0.005292223930358887, 0.005284639835357666, 0.005320064067840576, 0.005333663940429688, 0.005306367874145508, 0.005330399990081787, 0.005335584163665772, 0.005316415786743164, 0.005306560039520264, 0.005347328186035156, 0.005275648117065429, 0.0053023362159729005, 0.005318592071533203, 0.005287168025970459, 0.00531328010559082, 0.005324512004852295, 0.005384575843811035, 0.005320608139038086, 0.005325952053070068, 0.005278592109680176, 0.0053861761093139645, 0.005312448024749756, 0.00528601598739624, 0.005344511985778809, 0.005352320194244385, 0.0053144640922546385, 0.005371871948242187, 0.005326240062713623, 0.005335648059844971, 0.005237023830413818, 0.005292031764984131, 0.0053309440612792965, 0.005293344020843506, 0.005339871883392334, 0.005361663818359375, 0.005318655967712403, 0.0054018239974975586, 0.005948256015777588, 0.005310175895690918, 0.005332896232604981, 0.00534768009185791, 0.005410783767700196, 0.005326591968536377, 0.005335296154022217, 0.005300159931182861, 0.0053463678359985355, 0.005323071956634521, 0.005314784049987793, 0.005298975944519043, 0.005287615776062011, 0.005275648117065429, 0.005312160015106201, 0.005306848049163818, 0.005283775806427002, 0.005304255962371826, 0.005339295864105225, 0.005288928031921387, 
0.005292928218841553, 0.0053043198585510255, 0.005312511920928955, 0.0052921919822692875, 0.005321951866149902, 0.005309216022491455, 0.005304160118103027, 0.00531388807296753, 0.005284512042999267, 0.005312352180480957, 0.005427135944366455, 0.005330368041992188, 0.005348031997680664, 0.005331039905548096, 0.005278719902038574, 0.005594111919403076, 0.005476287841796875, 0.00533519983291626, 0.005371232032775879, 0.00539081621170044, 0.005521471977233887, 0.005718048095703125, 0.005337088108062744, 0.00533897590637207, 0.005404831886291504, 0.0054130878448486325, 0.005441311836242676, 0.005605055809020996, 0.0053534722328186036, 0.0053558402061462404, 0.005389344215393066, 0.005352511882781983, 0.005306015968322754, 0.005320191860198975, 0.005286943912506103, 0.005206048011779785, 0.005306367874145508, 0.005324384212493896, 0.005279359817504883, 0.005319392204284668, 0.005330399990081787, 0.005299871921539307, 0.0053110399246215825, 0.005310848236083985, 0.005283967971801758, 0.0053925437927246096, 0.0053266558647155765, 0.0052932162284851075, 0.005292799949645996, 0.005296127796173096, 0.005281888008117676, 0.005320064067840576, 0.0053233919143676756, 0.0053565120697021485, 0.005292640209197998, 0.0053067197799682615, 0.005298208236694336, 0.005300191879272461, 0.005314911842346191, 0.005326496124267578, 0.0053944320678710935, 0.005484543800354004, 0.00536575984954834, 0.005345280170440674, 0.005361663818359375, 0.0053199357986450195, 0.005343904018402099, 0.005314176082611084, 0.005319136142730713, 0.005314400196075439, 0.005348832130432129, 0.005382400035858154, 0.005437024116516113, 0.005339903831481934, 0.005323967933654785, 0.005327775955200196, 0.005850560188293457, 0.005332863807678222, 0.005380671977996826, 0.00536572790145874, 0.005345439910888672, 0.0053937921524047855, 0.005374591827392578, 0.005326272010803223, 0.005314815998077393, 0.0052713918685913085, 0.0053846721649169925, 0.005324031829833984, 0.005268223762512207, 0.005285888195037842, 0.005503263950347901, 0.0053079681396484375, 0.005365503787994385, 0.0053290238380432125, 0.005284383773803711, 0.005299424171447754, 0.0053294401168823245, 0.005285888195037842, 0.0052128639221191405, 0.005287263870239258, 0.005320479869842529, 0.005296671867370605, 0.005310848236083985, 0.0053285760879516605, 0.005294303894042968, 0.005283840179443359, 0.005312640190124512, 0.0052856321334838864, 0.00527782392501831, 0.0053225278854370115, 0.005288159847259522, 0.005422880172729492, 0.005345856189727783, 0.005355167865753174, 0.0053311681747436526, 0.005357151985168457, 0.005306496143341065, 0.005273663997650146, 0.005338848114013672, 0.005291456222534179, 0.005286752223968506, 0.005318655967712403, 0.0054514241218566895, 0.005339488029479981, 0.0053556480407714845, 0.0053431038856506345, 0.005303808212280273, 0.005294591903686524, 0.005367839813232422, 0.005286943912506103, 0.005368896007537842, 0.005375840187072754, 0.0053350720405578615, 0.005352447986602784, 0.005354015827178955, 0.005326464176177979, 0.005303135871887207, 0.005292031764984131, 0.0052899842262268066, 0.005305920124053955, 0.005308320045471191, 0.005290143966674805, 0.005359807968139649, 0.005307936191558838, 0.005306464195251465, 0.006208352088928223, 0.006558784008026123, 0.006331039905548096, 0.005646336078643799, 0.005481696128845215, 0.005370240211486816, 0.005427616119384766, 0.0054988799095153805, 0.005324960231781006, 0.00533897590637207, 0.005326560020446777, 0.005318943977355957, 0.005326015949249268, 0.005321152210235595, 0.005301727771759033, 
0.005319583892822266, 0.005328256130218506, 0.005400383949279785, 0.0059359359741210935, 0.00533894395828247, 0.005310656070709229, 0.005320703983306885, 0.005346975803375244, 0.0053108158111572264, 0.005375487804412842, 0.00535968017578125, 0.005319104194641113, 0.005318655967712403, 0.005330912113189697, 0.005322879791259766, 0.005320735931396484, 0.005340159893035889, 0.005311359882354736, 0.0053043198585510255, 0.005346848011016846, 0.0053805761337280275, 0.005338143825531006, 0.005311456203460694, 0.005273600101470947, 0.005316480159759522, 0.005322432041168213, 0.005295584201812744, 0.005305632114410401, 0.00538592004776001, 0.005307487964630127, 0.005348256111145019, 0.005332992076873779, 0.0052912960052490235, 0.005308928012847901, 0.0053373122215271, 0.005314080238342285, 0.005386655807495117, 0.005386303901672363, 0.005379199981689453, 0.005612607955932617, 0.005377280235290527, 0.005674655914306641, 0.005487360000610351, 0.0054685440063476564, 0.005367584228515625, 0.005420032024383545, 0.005331776142120361, 0.005430655956268311, 0.005448512077331543, 0.005482560157775879, 0.005353407859802246, 0.005388256072998047, 0.005382016181945801, 0.0053638720512390135, 0.005392640113830566, 0.005303840160369873, 0.005354720115661621, 0.005313536167144775, 0.005322751998901368, 0.005330175876617432, 0.005579455852508545, 0.0053795199394226076, 0.005661312103271484, 0.005334496021270752, 0.005284192085266114, 0.00531283187866211, 0.005441952228546143, 0.005297535896301269, 0.005336703777313233, 0.005346975803375244, 0.005338047981262207, 0.0053534722328186036, 0.005380095958709717, 0.005312511920928955, 0.0053283839225769045, 0.005335360050201416, 0.005295839786529541, 0.005299871921539307, 0.00532969617843628, 0.0053144960403442385, 0.005326560020446777, 0.005374688148498535, 0.005338655948638916, 0.005302432060241699, 0.005339104175567627, 0.005351424217224121, 0.00532476806640625, 0.005339168071746826, 0.0052811517715454104, 0.005296703815460205, 0.005386303901672363, 0.005347104072570801, 0.00532316780090332, 0.005459775924682617, 0.005374080181121827, 0.005332831859588623, 0.005751967906951904, 0.005286399841308594, 0.005294464111328125, 0.005320703983306885, 0.005289760112762451, 0.005329184055328369, 0.005324704170227051, 0.005287040233612061, 0.00529091215133667, 0.005311615943908691, 0.005328896045684814, 0.005274687767028809, 0.005327807903289795, 0.005294879913330078, 0.005408864021301269, 0.005326687812805176, 0.005501344203948975, 0.005334784030914306, 0.00536575984954834, 0.005332992076873779, 0.005416927814483642, 0.0054596481323242185, 0.005396383762359619, 0.005365664005279541, 0.00544979190826416, 0.005647903919219971, 0.005387199878692627, 0.005310400009155274, 0.005292096138000488, 0.005332992076873779, 0.005296127796173096, 0.005226143836975098, 0.005280064105987549, 0.00530790376663208, 0.0052778878211975095, 0.0053043198585510255, 0.005320703983306885, 0.005302271842956543, 0.005378047943115235, 0.0053005762100219725, 0.005283648014068603, 0.005267295837402344, 0.00547430419921875, 0.0053208317756652836, 0.005293951988220215, 0.005444863796234131, 0.005798655986785889, 0.0053534722328186036, 0.005406720161437988, 0.00530185604095459, 0.005318880081176758, 0.005335231781005859, 0.0052646718025207515, 0.005333183765411377, 0.005313055992126465, 0.005308191776275634, 0.00531059217453003, 0.005322432041168213, 0.005293536186218262, 0.005292992115020752, 0.005357600212097168, 0.005362912178039551, 0.005301119804382325, 0.0053021440505981446, 0.005320703983306885, 
0.0052977919578552244, 0.005344768047332763, 0.005276544094085693, 0.005296000003814697, 0.0053024001121521, 0.0052715520858764645, 0.00530998420715332, 0.005332831859588623, 0.005281504154205322, 0.005297376155853272, 0.005293759822845459, 0.005406720161437988, 0.0052932162284851075, 0.005370719909667969, 0.00532044792175293, 0.005302527904510498, 0.005341184139251709, 0.0052899842262268066, 0.005309599876403809, 0.005302464008331299, 0.005315072059631347, 0.0052696638107299805, 0.005326848030090332, 0.005310207843780518, 0.005301504135131836, 0.005299424171447754, 0.005877535820007324, 0.005734399795532226, 0.00533241605758667, 0.005197824001312256, 0.005279488086700439, 0.005273280143737793, 0.005319231986999511, 0.0052705597877502446, 0.005291264057159423, 0.005326272010803223, 0.005256896018981934, 0.005272192001342773, 0.00532476806640625, 0.00530841588973999, 0.005302271842956543, 0.005351424217224121, 0.005275872230529785, 0.005332831859588623, 0.005310495853424072, 0.00529807996749878, 0.005318655967712403, 0.005359615802764893, 0.005287936210632324, 0.00532480001449585, 0.005343232154846191, 0.00539788818359375, 0.005317440032958984, 0.005370975971221924, 0.0053071041107177735, 0.005330656051635742, 0.005327136039733887, 0.005285696029663086, 0.005306816101074219, 0.005353216171264648, 0.0053003840446472165, 0.005330783843994141, 0.005333024024963379, 0.005289951801300049, 0.005395967960357666, 0.005855743885040283, 0.005351424217224121, 0.005326335906982422, 0.005334688186645508, 0.005360320091247559, 0.005461152076721191, 0.005483520030975342, 0.005505119800567627, 0.0053821120262146, 0.0053597760200500486, 0.005340960025787354, 0.005396480083465576, 0.0053136320114135745, 0.005723040103912353, 0.005328447818756104, 0.005306816101074219, 0.005328896045684814, 0.005426591873168946, 0.0052824001312255855, 0.005318880081176758, 0.005316383838653564, 0.0053002238273620605, 0.005322751998901368, 0.005330368041992188, 0.005305215835571289, 0.005316287994384766, 0.005339072227478027, 0.005357503890991211, 0.005307295799255371, 0.005337088108062744, 0.005338880062103271, 0.005306975841522217, 0.005328127861022949, 0.005335455894470215, 0.005314367771148682, 0.005331007957458496, 0.005374144077301026, 0.005340479850769043, 0.005329535961151123, 0.005402336120605469, 0.0053266239166259765, 0.005327360153198242, 0.005312511920928955, 0.0053078079223632816, 0.005306272029876709, 0.005311168193817138, 0.005323103904724121, 0.005308032035827637, 0.0053919677734375, 0.00530406379699707, 0.0053359360694885255, 0.005330751895904541, 0.005385439872741699, 0.00585200023651123, 0.005388224124908447, 0.005360864162445069, 0.005348127841949463, 0.005375775814056396, 0.005308703899383545, 0.0054531521797180176, 0.005319615840911865, 0.005289631843566895, 0.005277120113372803, 0.0053060479164123535, 0.005271488189697265, 0.005288896083831787, 0.005312511920928955, 0.0053162240982055665, 0.005322144031524658, 0.005557216167449951, 0.005324704170227051, 0.005351232051849365, 0.005302559852600097, 0.00530841588973999, 0.005319712162017823, 0.005323103904724121, 0.005307007789611817, 0.005353631973266601, 0.005390175819396973, 0.005437439918518067, 0.005325856208801269, 0.0052960958480834965, 0.005313536167144775, 0.005329184055328369, 0.005369855880737305, 0.005324512004852295, 0.00532480001449585, 0.005308608055114746, 0.005299583911895752, 0.005360000133514405, 0.005236544132232666, 0.0053821120262146, 0.005285088062286377, 0.005328991889953613, 0.005288640022277832, 0.005275648117065429, 
0.005347040176391602, 0.005304671764373779, 0.005271423816680909, 0.00529420804977417, 0.005326272010803223, 0.005271359920501709, 0.005290527820587158, 0.005310239791870117, 0.0053080000877380375, 0.005282783985137939, 0.005314367771148682, 0.005294079780578613, 0.005279744148254394, 0.005288191795349121, 0.005326591968536377, 0.005276703834533691, 0.005298975944519043, 0.005302464008331299, 0.00531660795211792, 0.0053137922286987304, 0.005294847965240479, 0.005332320213317871, 0.005396512031555176, 0.005300864219665528, 0.005320703983306885, 0.0053350400924682614, 0.005306367874145508, 0.005300352096557617, 0.005323935985565185, 0.005304959774017334, 0.005363743782043457, 0.0054028801918029785, 0.005304128170013428, 0.005283840179443359, 0.005292319774627686, 0.0054412479400634765, 0.005386559963226318, 0.005353151798248291, 0.005320384025573731, 0.005316544055938721, 0.005319327831268311, 0.005304031848907471, 0.005322751998901368, 0.0053127360343933104, 0.005293824195861816, 0.005326879978179932, 0.0053443841934204105, 0.005374847888946533, 0.005441696166992188, 0.005759967803955078, 0.005468192100524902, 0.005320864200592041, 0.005351359844207764, 0.005303040027618409, 0.005285280227661133, 0.005323359966278076, 0.005257215976715088, 0.00518393611907959, 0.005305952072143555, 0.0053043198585510255, 0.005267839908599853, 0.005312672138214111, 0.005288191795349121, 0.005273344039916993, 0.005298367977142334, 0.005322239875793457, 0.005291808128356934, 0.005303135871887207, 0.005308159828186035, 0.005397439956665039, 0.005323775768280029, 0.005341184139251709, 0.005295360088348388, 0.00530508804321289, 0.005322207927703857, 0.005296671867370605, 0.005303520202636719, 0.005350336074829102, 0.005322591781616211, 0.005332128047943115, 0.005370719909667969, 0.005308320045471191, 0.0053424320220947265, 0.005335872173309326, 0.005316671848297119, 0.00531388807296753, 0.005333663940429688, 0.005306496143341065, 0.005431168079376221, 0.00540883207321167, 0.0053842558860778805, 0.0054007678031921386, 0.005485472202301026, 0.005378528118133545, 0.0053927040100097655, 0.005720352172851563, 0.005367519855499267, 0.005371200084686279, 0.005331967830657959, 0.005356800079345703, 0.005407423973083496, 0.005342527866363526, 0.0054992961883544925, 0.005399775981903076, 0.0054380159378051756, 0.005363967895507813, 0.0053678078651428224, 0.0053673281669616695, 0.005382815837860108, 0.005369664192199707, 0.005317759990692139, 0.005335840225219726, 0.005857439994812011, 0.005724095821380616, 0.005471231937408447, 0.005323775768280029, 0.00533296012878418, 0.005352735996246338, 0.005311232089996338, 0.0053002238273620605, 0.0052451519966125485, 0.005358496189117432, 0.005323520183563232, 0.0052936959266662595, 0.005345791816711426, 0.00529807996749878, 0.005368288040161133, 0.005328095912933349, 0.005312928199768066, 0.00531660795211792, 0.005310463905334473, 0.005311744213104248, 0.00530463981628418, 0.005318816184997559, 0.0052943677902221676, 0.005305535793304443, 0.005318655967712403, 0.0052600960731506346, 0.0053227200508117675, 0.005304351806640625, 0.005279776096343994, 0.005298367977142334, 0.005339200019836426, 0.005303520202636719, 0.005302432060241699, 0.0053333439826965335, 0.005331295967102051, 0.005348703861236572, 0.005388160228729248, 0.005310688018798828, 0.005379551887512207, 0.005353631973266601, 0.005337183952331543, 0.005298304080963135, 0.0054102401733398435, 0.005307328224182129, 0.0053678078651428224, 0.0054122557640075685, 0.0053192639350891115, 0.005758975982666016, 
0.005353600025177002, 0.005301951885223389, 0.00532860803604126, 0.005322944164276123, 0.0053062400817871095, 0.005353663921356201, 0.005480671882629394, 0.005305920124053955, 0.005326272010803223, 0.005337503910064698, 0.005286784172058106, 0.005299935817718506, 0.005326399803161621, 0.0052941122055053715, 0.005300640106201172, 0.005304224014282226, 0.005276063919067383, 0.005291711807250976, 0.005306464195251465, 0.005291232109069824, 0.005278463840484619, 0.005379968166351319, 0.005297632217407227, 0.005234399795532227, 0.005310175895690918, 0.0053367681503295895, 0.005286752223968506, 0.005302303791046142, 0.005332191944122314, 0.005309216022491455, 0.005330719947814941, 0.005337535858154297, 0.005316383838653564, 0.005314559936523438, 0.00532425594329834, 0.005312928199768066, 0.005308576107025146, 0.005309631824493408, 0.005294655799865723, 0.005319168090820312, 0.005324512004852295, 0.005302495956420898, 0.0053225278854370115, 0.005316480159759522, 0.0053060798645019535, 0.005296832084655762, 0.005408095836639404, 0.00529036808013916, 0.0053002238273620605, 0.0053043198585510255, 0.005285280227661133, 0.005284448146820068, 0.005291232109069824, 0.005388768196105957, 0.005314815998077393, 0.00530847978591919, 0.005291359901428223, 0.005300928115844727, 0.005306335926055908, 0.005347551822662354, 0.005324575901031494, 0.005293407917022705, 0.005272223949432373, 0.005309599876403809, 0.005350240230560303, 0.005283872127532959, 0.005300191879272461, 0.005339136123657226, 0.005316864013671875, 0.0054126400947570805, 0.005320672035217285, 0.005283840179443359, 0.005272928237915039, 0.005977920055389404, 0.005311327934265137, 0.005307648181915283, 0.00533132791519165, 0.005302656173706055, 0.005367455959320068, 0.005321407794952392, 0.005262080192565918, 0.005360544204711914, 0.005316512107849121, 0.005282048225402832, 0.005314400196075439, 0.005293983936309815, 0.005211328029632568, 0.005263743877410888, 0.0052841281890869144, 0.005314720153808594, 0.005271423816680909, 0.005267392158508301, 0.0053311362266540525, 0.005305920124053955, 0.0052904319763183594, 0.005318272113800049, 0.005289792060852051, 0.005277535915374756, 0.005310431957244873, 0.005286880016326904, 0.005315904140472412, 0.00529363203048706, 0.0053769278526306155, 0.005330624103546143, 0.0053207998275756835, 0.005314784049987793, 0.005275648117065429, 0.005318655967712403, 0.00533465576171875, 0.00530675220489502, 0.005299424171447754, 0.005497119903564453, 0.005349728107452392, 0.005560480117797852, 0.005361663818359375, 0.005501215934753418, 0.005369056224822998, 0.005511007785797119, 0.005360511779785157, 0.005377503871917724, 0.005337696075439453, 0.005363391876220703, 0.005394015789031982, 0.005363647937774658, 0.005356160163879395, 0.0053474240303039555, 0.005326111793518066, 0.0053499841690063474, 0.005414271831512451, 0.0053153600692749025, 0.005329919815063477, 0.005337088108062744, 0.005272319793701172, 0.005295328140258789, 0.005320672035217285, 0.005413695812225342, 0.005308320045471191, 0.005394527912139893, 0.005261023998260498, 0.00530675220489502, 0.00543939208984375, 0.005285855770111084, 0.005269536018371582, 0.005316832065582275, 0.005297952175140381, 0.005304575920104981, 0.005306208133697509, 0.00529417610168457, 0.005272992134094238, 0.00519375991821289, 0.005388576030731201, 0.0052740797996521, 0.005305535793304443, 0.005325471878051758, 0.005255199909210205, 0.005280064105987549, 0.005310304164886474, 0.005280863761901855, 0.005292799949645996, 0.005314559936523438, 0.005299615859985352, 
0.005304704189300537, 0.005316351890563965, 0.005310719966888428, 0.005306591987609863, 0.00531385612487793, 0.005273407936096192, 0.005300896167755127, 0.005302495956420898, 0.0052856321334838864, 0.005288095951080323, 0.0053105602264404295, 0.005287519931793213, 0.005267039775848388, 0.005310336112976074, 0.005319712162017823, 0.005287839889526367, 0.005320703983306885, 0.005342944145202637, 0.0053966398239135745, 0.00550105619430542, 0.005316832065582275, 0.005313663959503174, 0.005607295989990235, 0.0054505281448364255, 0.005297887802124024, 0.005394847869873047, 0.005453695774078369, 0.005563551902770996, 0.005600096225738526, 0.006012928009033203, 0.005603328227996827, 0.005358751773834229, 0.006624192237854004, 0.007079840183258057, 0.005354559898376464, 0.005301184177398681, 0.0054085440635681156, 0.005347551822662354, 0.005320352077484131, 0.005319007873535156, 0.0053350400924682614, 0.005296127796173096, 0.0053301119804382326, 0.005370687961578369, 0.0053043198585510255, 0.005336063861846924, 0.005344255924224854, 0.006211423873901367, 0.005331103801727295, 0.005342495918273926, 0.005313248157501221, 0.005199135780334473, 0.005350399971008301, 0.005301727771759033, 0.005304736137390137, 0.005314527988433838, 0.00525929594039917, 0.005318143844604492, 0.005329376220703125, 0.005309631824493408, 0.005331776142120361, 0.005312511920928955, 0.005338367938995362, 0.005332831859588623, 0.0053253121376037596, 0.0053060479164123535, 0.005392640113830566, 0.005343711853027343, 0.005275551795959473, 0.005300320148468017, 0.005306367874145508, 0.005297567844390869, 0.005326496124267578, 0.005313759803771973, 0.005280608177185058, 0.005290880203247071, 0.005312511920928955, 0.005396480083465576, 0.005307392120361328, 0.005335552215576172, 0.00530182409286499, 0.005351840019226074, 0.005335872173309326, 0.00530944013595581, 0.005306367874145508, 0.005327936172485352, 0.005272799968719482, 0.0053004159927368165, 0.005304575920104981, 0.005300543785095215, 0.005307199954986572, 0.005308896064758301, 0.005283552169799805, 0.0053357439041137695, 0.0053002238273620605, 0.005283135890960694, 0.005886655807495117, 0.005351424217224121, 0.00527945613861084, 0.005290048122406006, 0.005327072143554688, 0.005298175811767578, 0.0059695677757263185, 0.005347807884216308, 0.005310336112976074, 0.005317887783050537, 0.005309184074401855, 0.005291840076446533, 0.005303552150726318, 0.005311423778533935, 0.005287936210632324, 0.0052899842262268066, 0.005303936004638672, 0.005279520034790039, 0.005198527812957763, 0.005295904159545899, 0.005314815998077393, 0.00527891206741333, 0.0052973442077636716, 0.005312128067016602, 0.005275551795959473, 0.005301887989044189, 0.005330560207366944, 0.005382527828216552, 0.005570303916931152, 0.005315296173095703, 0.005314559936523438, 0.005326303958892822, 0.005322303771972656, 0.005372896194458008, 0.005299295902252197, 0.005313471794128418, 0.0053405117988586425, 0.005322815895080567, 0.005315008163452148, 0.0052880640029907226, 0.0053043198585510255, 0.005621503829956054, 0.005386720180511475, 0.005478176116943359, 0.005545983791351319, 0.005347328186035156, 0.005351776123046875, 0.005326496124267578, 0.0053209919929504395, 0.005357279777526856, 0.00530185604095459, 0.0053285760879516605, 0.005331391811370849, 0.005300511837005615, 0.005305856227874756, 0.005358399868011475, 0.005289663791656494, 0.005292031764984131, 0.005279744148254394, 0.005295648097991944, 0.00534281587600708, 0.005777599811553955, 0.005306848049163818, 0.005425375938415527, 0.005299295902252197, 
0.005272480010986328, 0.005322751998901368, 0.005342847824096679, 0.00529036808013916, 0.005332352161407471, 0.0054216961860656734, 0.005326848030090332, 0.005298175811767578, 0.0053161921501159665, 0.005282368183135986, 0.005318048000335693, 0.005329343795776367, 0.00537721586227417, 0.005344128131866455, 0.005320767879486084, 0.005289408206939697, 0.005323872089385986, 0.00533187198638916, 0.005359360218048096, 0.005340544223785401, 0.005347487926483155, 0.005322559833526612, 0.005343743801116943, 0.005341919898986816, 0.005315743923187256, 0.005331488132476807, 0.00534335994720459, 0.005328767776489258, 0.005307487964630127, 0.00532908821105957, 0.00541539192199707, 0.0052902398109436035, 0.005348703861236572, 0.005319200038909912, 0.0052921600341796875, 0.00535148811340332, 0.005302207946777344, 0.005301631927490234, 0.005300864219665528, 0.00534335994720459, 0.005283872127532959, 0.005459807872772217, 0.005341184139251709, 0.005332608222961426, 0.005367775917053222, 0.005376416206359863, 0.0053435201644897465, 0.005371583938598633, 0.005363743782043457, 0.005322144031524658, 0.005441567897796631, 0.005535744190216065, 0.0053010878562927246, 0.005347040176391602, 0.005293759822845459, 0.005282048225402832, 0.005305984020233154, 0.0054133119583129885, 0.005328896045684814, 0.005295263767242432, 0.005317471981048584, 0.005287936210632324, 0.005295519828796387, 0.005335328102111817, 0.005286208152770996, 0.005311744213104248, 0.00530508804321289, 0.005279359817504883, 0.0053067197799682615, 0.005336319923400879, 0.005286431789398193, 0.005318687915802002, 0.00532092809677124, 0.0053281598091125485, 0.0052779197692871095, 0.005308928012847901, 0.0052633600234985355, 0.005314559936523438, 0.005281023979187012, 0.005230591773986816, 0.005298272132873535, 0.00528988790512085, 0.0053344001770019535, 0.005263455867767334, 0.005280191898345947, 0.005318943977355957, 0.005287807941436767, 0.005295904159545899, 0.005306272029876709, 0.005275904178619385, 0.005341311931610107, 0.00533900785446167, 0.005316671848297119, 0.005357759952545166, 0.005338304042816162, 0.005302847862243653, 0.005318655967712403, 0.005335135936737061, 0.005316512107849121, 0.00532480001449585, 0.005328927993774414, 0.005293087959289551, 0.005303135871887207, 0.0054068160057067875, 0.005284895896911621, 0.005286880016326904, 0.0053043198585510255, 0.005298336029052734, 0.005393631935119629, 0.005357471942901612, 0.005285696029663086, 0.005300896167755127, 0.005322751998901368, 0.005410560131072998, 0.005313024044036865, 0.005318655967712403, 0.005434432029724121, 0.005311359882354736, 0.005496223926544189, 0.005765664100646973, 0.005316383838653564, 0.005339488029479981, 0.005314208030700683, 0.005303711891174317, 0.005319615840911865, 0.005294079780578613, 0.005315775871276855, 0.005331935882568359, 0.005289279937744141, 0.00533673620223999, 0.005327744007110596, 0.005359744071960449, 0.005403584003448487, 0.0053290238380432125, 0.005320703983306885, 0.005307487964630127, 0.005332640171051025, 0.00530841588973999, 0.005345344066619873, 0.005339136123657226, 0.005345280170440674, 0.005326848030090332, 0.005229184150695801, 0.005379839897155762, 0.005292031764984131, 0.005326848030090332, 0.005341023921966553, 0.005300127983093262, 0.005353439807891846, 0.0053476161956787106, 0.0053225278854370115, 0.005352831840515136, 0.005343071937561035, 0.005333248138427734, 0.005355807781219482, 0.0054932479858398435, 0.0053554878234863285, 0.005343232154846191, 0.005462016105651855, 0.005345280170440674, 0.005531680107116699, 
0.005668479919433594, 0.005502495765686035, 0.005578688144683838, 0.005358687877655029, 0.005504799842834473, 0.005427519798278809, 0.005367487907409668, 0.005418303966522217, 0.005354144096374512, 0.005388319969177246, 0.005406496047973633, 0.005347551822662354, 0.0053944320678710935, 0.005431295871734619, 0.005361279964447021, 0.005448383808135986, 0.0053480958938598635, 0.005356480121612548, 0.005358784198760986, 0.005345344066619873, 0.005339903831481934, 0.00533622407913208, 0.005452640056610107, 0.005363327980041504, 0.00533132791519165, 0.005345280170440674, 0.005356895923614502, 0.005307040214538574, 0.005310463905334473, 0.00532480001449585, 0.005323840141296387, 0.005344223976135254, 0.0053209919929504395, 0.0053695359230041505, 0.005457151889801026, 0.005343999862670899, 0.005338143825531006, 0.005360288143157959, 0.005335360050201416, 0.0053578557968139645, 0.005381760120391846, 0.005372000217437744, 0.005361599922180176, 0.005361728191375733, 0.005264383792877197, 0.005305408000946045, 0.005334943771362305, 0.0053443841934204105, 0.005294496059417724, 0.005325088024139404, 0.005329279899597168, 0.005340544223785401, 0.005396959781646729, 0.005352543830871582, 0.005374752044677735, 0.005376255989074707, 0.005352575778961182, 0.005366528034210205, 0.0053637118339538575, 0.005325024127960205, 0.005356959819793701, 0.005356192111968994, 0.0053266558647155765, 0.005459936141967774, 0.005337056159973145, 0.00533519983291626, 0.005312320232391358, 0.005327072143554688, 0.005308063983917236, 0.005326272010803223, 0.005323103904724121, 0.005306111812591553, 0.005354144096374512, 0.006148032188415527, 0.0053407039642333985, 0.005364192008972168, 0.0054967041015625, 0.005355135917663574, 0.005380608081817627, 0.005375520229339599, 0.005478879928588867, 0.005369120121002198, 0.005315296173095703, 0.005351039886474609, 0.00533951997756958, 0.0053916797637939455, 0.005922592163085937, 0.005421984195709229, 0.005537343978881836, 0.005660192012786865, 0.005726912021636963, 0.005336832046508789, 0.005343711853027343, 0.0053331198692321774, 0.005331007957458496, 0.005383999824523926, 0.00528934383392334, 0.005329535961151123, 0.005339231967926026, 0.005326752185821533, 0.005319776058197022, 0.0053309440612792965, 0.00527452802658081, 0.005337088108062744, 0.005321919918060303, 0.005294303894042968, 0.005309343814849854, 0.005253024101257324, 0.005308512210845947, 0.0052789440155029295, 0.00531331205368042, 0.0053023681640625, 0.005290976047515869, 0.00531935977935791, 0.005332255840301513, 0.005284671783447266, 0.005310239791870117, 0.00533132791519165, 0.0052871999740600585, 0.0053108158111572264, 0.0053211841583251955, 0.0053144640922546385, 0.005320703983306885, 0.005310463905334473, 0.005322207927703857, 0.005368288040161133, 0.005334303855895996, 0.00528879976272583, 0.005312448024749756, 0.005332608222961426, 0.005294432163238525, 0.005304351806640625, 0.005326848030090332, 0.005312511920928955, 0.005296448230743408, 0.005303999900817871, 0.005275648117065429, 0.0052731199264526366, 0.005290463924407959, 0.005303391933441162, 0.005383071899414063, 0.006014080047607422, 0.0053211841583251955, 0.005351903915405273, 0.005339072227478027, 0.005310751914978027, 0.005285600185394287, 0.005314591884613037, 0.005294047832489014, 0.005303904056549073, 0.0053637118339538575, 0.005296256065368652, 0.005331232070922852, 0.0053385281562805175, 0.005309023857116699, 0.0053309440612792965, 0.00531660795211792, 0.005307839870452881, 0.0053060479164123535, 0.005333055973052978, 0.005299168109893799, 
0.005319615840911865, 0.00534006404876709, 0.005273600101470947, 0.005327871799468994, 0.005311168193817138, 0.005306303977966309, 0.0053784317970275875, 0.0053405117988586425, 0.0053992319107055665, 0.005223936080932618, 0.005319327831268311, 0.005326560020446777, 0.0053161921501159665, 0.005313055992126465, 0.005318655967712403, 0.005299200057983398, 0.005335135936737061, 0.005334976196289062, 0.005319647789001465, 0.005345280170440674, 0.00534876823425293, 0.005337408065795898, 0.005370143890380859, 0.005336991786956787, 0.005636159896850586, 0.00539961576461792, 0.005559264183044433, 0.00532480001449585, 0.005345280170440674, 0.005369855880737305, 0.005316639900207519, 0.005390304088592529, 0.005509119987487793, 0.00530841588973999, 0.005334432125091553, 0.005327455997467041, 0.005292031764984131, 0.005318655967712403, 0.005341184139251709, 0.005317696094512939, 0.006202112197875977, 0.005609536170959472, 0.005366079807281494, 0.005429183959960938, 0.005416351795196533, 0.005360159873962402, 0.005347328186035156, 0.005293439865112304, 0.005374527931213379, 0.005332896232604981, 0.005298272132873535, 0.005292255878448487, 0.0053350400924682614, 0.0053491520881652835, 0.00530841588973999, 0.005342336177825928, 0.00532912015914917, 0.00545801591873169, 0.005347648143768311, 0.0053573760986328125, 0.005329055786132813, 0.005306528091430664, 0.005274943828582764, 0.005408736228942871, 0.005301152229309082, 0.005277632236480713, 0.0052984957695007325, 0.005297311782836914, 0.0053367681503295895, 0.005299039840698242, 0.005329919815063477, 0.005290559768676758, 0.0052204160690307614, 0.0053554558753967285, 0.005322815895080567, 0.005284832000732422, 0.005290048122406006, 0.005306367874145508, 0.005333920001983642, 0.005293119907379151, 0.005370240211486816, 0.005294655799865723, 0.005337088108062744, 0.005341536045074463, 0.005310111999511718, 0.005425151824951172, 0.005358751773834229, 0.005331999778747558, 0.005332255840301513, 0.005304351806640625, 0.005328832149505615, 0.005313087940216064, 0.005467167854309082, 0.005300960063934326, 0.005330463886260986, 0.006036223888397217, 0.005440959930419922, 0.00535811185836792, 0.005326272010803223, 0.005390944004058838, 0.005429215908050537, 0.0053207998275756835, 0.005392288208007813, 0.00537395191192627, 0.005347583770751953, 0.005342976093292236, 0.0053862080574035645, 0.005316351890563965, 0.005337183952331543, 0.005324543952941895, 0.005278336048126221, 0.0053143038749694825, 0.005395936012268066, 0.005301152229309082, 0.005326528072357178, 0.005332799911499023, 0.0053002238273620605, 0.005351935863494873, 0.005404416084289551, 0.005291232109069824, 0.005441472053527832, 0.005384895801544189, 0.005304416179656982, 0.00535920000076294, 0.005317024230957031, 0.005303647994995117, 0.005337759971618652, 0.005314559936523438, 0.00530614423751831, 0.005304543972015381, 0.005335328102111817, 0.0053056640625, 0.005310880184173584, 0.005345376014709473, 0.005397664070129394, 0.005213344097137451, 0.005318528175354004, 0.0053350720405578615, 0.005318624019622803, 0.005303264141082764, 0.005310751914978027, 0.005301439762115478, 0.005310688018798828, 0.005314400196075439, 0.005290080070495606, 0.00533951997756958, 0.005322815895080567, 0.00537721586227417, 0.005309023857116699, 0.005319104194641113, 0.005355423927307129, 0.005345439910888672, 0.005314208030700683, 0.005312416076660156, 0.005322847843170166, 0.005361663818359375, 0.005328896045684814, 0.00532480001449585, 0.005339136123657226, 0.005314559936523438, 0.0053266239166259765, 
0.005344768047332763, 0.005317344188690186, 0.006053184032440186, 0.005341440200805664, 0.005352960109710693, 0.005368768215179443, 0.0053043198585510255, 0.00532480001449585, 0.005347328186035156, 0.005304351806640625, 0.00530838394165039, 0.005457664012908935, 0.005371551990509033, 0.005321311950683594, 0.005319839954376221, 0.0053005762100219725, 0.0053276801109313964, 0.005360671997070313, 0.005333856105804443, 0.005344096183776855, 0.0053415679931640625, 0.0052941122055053715, 0.005325376033782959, 0.00531660795211792, 0.005308703899383545, 0.005316319942474365, 0.005330431938171387, 0.005321311950683594, 0.005337279796600342, 0.005361375808715821, 0.005326208114624024, 0.005321343898773193, 0.005338240146636963, 0.005337696075439453, 0.005336639881134034, 0.0053439679145812985, 0.005310400009155274, 0.005235551834106445, 0.005611519813537597, 0.005395936012268066, 0.005331391811370849, 0.005337183952331543, 0.005378047943115235, 0.0053578557968139645, 0.006516096115112305, 0.005344927787780762, 0.005405375957489014, 0.005369855880737305, 0.0056258559226989744, 0.005402336120605469, 0.005491040229797363, 0.005393919944763184, 0.005388576030731201, 0.005337247848510742, 0.0053407039642333985, 0.005365568161010742, 0.005337728023529053, 0.005340864181518554, 0.005381792068481445, 0.005318655967712403, 0.005336959838867187, 0.005333824157714844, 0.00537171220779419, 0.005718463897705078, 0.005367519855499267, 0.005330976009368897, 0.005349376201629639, 0.005351424217224121, 0.005318624019622803, 0.0053220481872558595, 0.005347775936126709, 0.0053376321792602535, 0.005346464157104492, 0.0053458881378173825, 0.005296127796173096, 0.005336063861846924, 0.005331967830657959, 0.005374176025390625, 0.005351295948028564, 0.005363135814666748, 0.0053079681396484375, 0.0053805441856384275, 0.005368288040161133, 0.005307456016540527, 0.0054158720970153805, 0.005342912197113037, 0.005316927909851075, 0.005339136123657226, 0.005695487976074219, 0.005332159996032715, 0.0053645439147949215, 0.005310463905334473, 0.005330495834350586, 0.005353919982910156, 0.005422143936157226, 0.00531712007522583, 0.005364160060882569, 0.005318655967712403, 0.005363296031951905, 0.005335455894470215]",tokens/s,186.44217096217275,,, 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most 
recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in 
_flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1577.795584,1553.85856,0.0,1168.113664,1154.613248,s,1,8.20472265625,8.20472265625,0.0,8.20472265625,8.20472265625,8.20472265625,8.20472265625,[8.20472265625],,kWh,3.413696830827272e-05,3.758214207544938e-06,1.1367786871996355e-05,4.9262969387814016e-05,,MB,1540.128768,1805.5168,0.0,1388.314624,1334.065152,s,10,0.8514541091918945,0.08514541091918945,0.0008195607626122215,0.08506641769409179,0.0857202651977539,0.0864238914489746,0.08698679244995117,"[0.0871275177001953, 0.08425894165039062, 0.08463334655761719, 0.08503977966308594, 0.08556390380859374, 0.08551907348632813, 0.08410380554199219, 0.08466307067871094, 0.08509305572509765, 0.08545161437988281]",tokens/s,3006.621228746746,kWh,2.688878274159692e-06,2.9653418308604944e-07,1.779586031210989e-06,4.764998488456731e-06,tokens/kWh,53725095.74140752,MB,1542.934528,1805.5168,0.0,1388.314624,1372.847616,s,10,17.940097290039066,1.7940097290039063,0.00800117290884937,1.7964407348632812,1.8032906127929689,1.8041688049316407,1.8048713586425782,"[1.796681640625, 1.7994681396484375, 1.8050469970703125, 1.803095458984375, 1.7961998291015624, 1.7929622802734375, 1.7841248779296874, 1.786051513671875, 1.797322021484375, 1.77914453125]",tokens/s,35.11686641464296,kWh,5.204412376750844e-05,5.740185549824232e-06,2.393282342759194e-05,8.17171327449246e-05,tokens/kWh,770952.1600158309,,s,630,17.937439983367927,0.028472126957726863,0.00043208029031914833,0.028374128341674805,0.02881567668914795,0.029108592319488525,0.030494490337371852,"[0.028861215591430664, 0.02930838394165039, 0.030855583190917968, 0.030705888748168944, 0.028741247177124025, 0.0284769287109375, 0.02847830390930176, 0.028288063049316407, 0.028375999450683594, 0.028215295791625978, 0.028270591735839845, 0.028260351181030274, 0.028309503555297853, 0.02843836784362793, 0.02847350311279297, 0.028434431076049805, 0.028284927368164063, 0.028569536209106447, 0.028319807052612306, 0.028294784545898437, 0.02849420738220215, 0.02853206443786621, 0.028406431198120117, 0.02830723190307617, 0.028485376358032225, 
0.0284564151763916, 0.028427263259887696, 0.02838937568664551, 0.028443872451782228, 0.028439327239990233, 0.028301311492919923, 0.028450815200805665, 0.02874505615234375, 0.028318368911743164, 0.028612512588500977, 0.02880316734313965, 0.02875596809387207, 0.02931443214416504, 0.028811904907226564, 0.02854092788696289, 0.02834841537475586, 0.028274688720703125, 0.028452512741088867, 0.02824742317199707, 0.028224479675292968, 0.028291040420532227, 0.02824006462097168, 0.02822127914428711, 0.028356351852416993, 0.028455135345458984, 0.028241567611694336, 0.028393856048583986, 0.02846892738342285, 0.028397727966308593, 0.02825641632080078, 0.02828441619873047, 0.028259872436523437, 0.028455904006958008, 0.028487680435180664, 0.02838528060913086, 0.028276479721069336, 0.028296703338623046, 0.02803740882873535, 0.028772159576416014, 0.028879104614257814, 0.02859116744995117, 0.028600288391113282, 0.028570335388183595, 0.029250848770141603, 0.028920543670654296, 0.029220863342285155, 0.028462528228759765, 0.02838380813598633, 0.02876620864868164, 0.02831974411010742, 0.02833612823486328, 0.028282880783081055, 0.028192127227783203, 0.028210912704467773, 0.029766559600830078, 0.02864921569824219, 0.02892131233215332, 0.028617504119873047, 0.028882240295410155, 0.028944480895996095, 0.028901472091674804, 0.02875651168823242, 0.028783775329589843, 0.028637184143066406, 0.028754751205444337, 0.028955904006958008, 0.02894086456298828, 0.028510400772094727, 0.0284117431640625, 0.02827280044555664, 0.028278783798217775, 0.028205055236816406, 0.028184576034545897, 0.028222976684570314, 0.028459423065185546, 0.028211296081542967, 0.028338176727294922, 0.02834022331237793, 0.028321535110473632, 0.02850966453552246, 0.02922758483886719, 0.028432607650756836, 0.028420095443725587, 0.028297216415405273, 0.028375040054321288, 0.02828803253173828, 0.028650463104248045, 0.02833203125, 0.02840166473388672, 0.028368896484375, 0.02836070442199707, 0.028617984771728517, 0.028496736526489257, 0.028398975372314453, 0.028695072174072266, 0.02836479949951172, 0.028416000366210937, 0.028475168228149415, 0.028492000579833983, 0.0282476806640625, 0.02832534408569336, 0.028791296005249024, 0.028684160232543946, 0.02876851272583008, 0.0285034236907959, 0.028675968170166016, 0.028377376556396484, 0.02850886344909668, 0.028383232116699218, 0.028505952835083007, 0.029196447372436523, 0.02914508819580078, 0.029746688842773438, 0.029000192642211913, 0.028846080780029298, 0.02863747215270996, 0.02845039939880371, 0.02850624084472656, 0.028280832290649413, 0.0285347843170166, 0.028428064346313477, 0.02857318305969238, 0.028424928665161133, 0.028618335723876953, 0.02861033630371094, 0.02959564781188965, 0.03169343948364258, 0.028733440399169922, 0.028759807586669923, 0.028547487258911132, 0.028358432769775392, 0.028604192733764647, 0.02852236747741699, 0.028529056549072264, 0.028507871627807616, 0.028741920471191406, 0.02858143997192383, 0.028719551086425783, 0.028475008010864257, 0.028399999618530274, 0.028461055755615236, 0.02870681571960449, 0.028508159637451173, 0.02880851173400879, 0.028502527236938476, 0.028539295196533202, 0.028688159942626953, 0.02905641555786133, 0.0282589111328125, 0.02834636878967285, 0.02834432029724121, 0.02827692794799805, 0.028434240341186523, 0.028319232940673827, 0.028367103576660155, 0.028387584686279298, 0.028323423385620116, 0.028385696411132814, 0.028493824005126952, 0.028358655929565428, 0.028600223541259767, 0.028424095153808594, 0.028532703399658202, 0.028697055816650392, 0.0284770565032959, 
0.028448480606079102, 0.028437183380126952, 0.028568704605102538, 0.02869286346435547, 0.02854515266418457, 0.028455583572387696, 0.028348320007324217, 0.028361600875854494, 0.02837785530090332, 0.02841823959350586, 0.0285347843170166, 0.028770303726196288, 0.02900979232788086, 0.02908582305908203, 0.029628416061401368, 0.02891756820678711, 0.02862303924560547, 0.02856707191467285, 0.028413408279418944, 0.028483903884887696, 0.02848633575439453, 0.028339231491088867, 0.028558303833007812, 0.028341983795166014, 0.028307743072509765, 0.02820425605773926, 0.02835043144226074, 0.0283123836517334, 0.028434591293334963, 0.02827199935913086, 0.031094240188598632, 0.028877824783325196, 0.028618751525878908, 0.028712959289550782, 0.028553216934204102, 0.028413951873779295, 0.028467199325561524, 0.028336095809936523, 0.02830303955078125, 0.028640928268432616, 0.028512800216674804, 0.028356767654418944, 0.02842624092102051, 0.028503904342651366, 0.028813119888305663, 0.02849622344970703, 0.028639232635498047, 0.028907392501831056, 0.029050912857055664, 0.02959779167175293, 0.028888416290283204, 0.02866377639770508, 0.028577823638916016, 0.02856617546081543, 0.02854412841796875, 0.028701568603515627, 0.02837321662902832, 0.028524320602416994, 0.028585472106933595, 0.028452512741088867, 0.028434656143188478, 0.0284453125, 0.028739551544189453, 0.028778432846069336, 0.02857708740234375, 0.0285316162109375, 0.02851353645324707, 0.028366464614868164, 0.028545055389404297, 0.028416000366210937, 0.0284105281829834, 0.028293407440185547, 0.02852659225463867, 0.028638687133789063, 0.02829779243469238, 0.028263616561889648, 0.028492128372192383, 0.028316032409667968, 0.028465248107910155, 0.028696575164794923, 0.02858188819885254, 0.02849705505371094, 0.028535648345947264, 0.02832793617248535, 0.028335744857788087, 0.02829759979248047, 0.028483583450317384, 0.02842153549194336, 0.028430944442749025, 0.028305408477783203, 0.02838300704956055, 0.028321088790893553, 0.02851113510131836, 0.028370880126953126, 0.03247702407836914, 0.028681663513183592, 0.028367647171020506, 0.028436479568481447, 0.028398719787597657, 0.028285823822021484, 0.02831702423095703, 0.028281503677368165, 0.028245695114135744, 0.028353952407836915, 0.028349344253540038, 0.028204383850097655, 0.02885856056213379, 0.02832841682434082, 0.028682207107543944, 0.029687551498413085, 0.02927644729614258, 0.02880512046813965, 0.028393152236938477, 0.028344640731811522, 0.0286246395111084, 0.028190975189208985, 0.02826153564453125, 0.028237663269042968, 0.02815692710876465, 0.028138944625854492, 0.028179008483886717, 0.028180479049682617, 0.028264448165893553, 0.028094463348388672, 0.02817955207824707, 0.028683103561401368, 0.028506111145019532, 0.02853273582458496, 0.028395391464233397, 0.02835433578491211, 0.02830691146850586, 0.02840435218811035, 0.028420543670654295, 0.02827039909362793, 0.028310943603515625, 0.028332639694213867, 0.028370943069458008, 0.02822947120666504, 0.02811238479614258, 0.028240543365478515, 0.02819590377807617, 0.028203968048095704, 0.028487071990966797, 0.02855779266357422, 0.028471200942993165, 0.028471519470214843, 0.029085376739501952, 0.02915564727783203, 0.028777984619140624, 0.03201871871948242, 0.02865897560119629, 0.028554176330566405, 0.028504064559936523, 0.028473312377929688, 0.028330015182495116, 0.02838118362426758, 0.02834217643737793, 0.028239616394042967, 0.02814601516723633, 0.028331232070922852, 0.028258975982666017, 0.0283239688873291, 0.028419679641723632, 0.02818908882141113, 0.028235424041748048, 
0.02834841537475586, 0.028180831909179686, 0.028121088027954103, 0.02815580749511719, 0.028244064331054686, 0.028177728652954103, 0.028334783554077148, 0.02816147232055664, 0.028039007186889647, 0.028033504486083983, 0.028069440841674804, 0.028158655166625978, 0.028493824005126952, 0.029245439529418944, 0.028640768051147462, 0.028520959854125977, 0.028602239608764648, 0.028573087692260742, 0.02892425537109375, 0.02862508773803711, 0.028427711486816408, 0.028136192321777345, 0.028223487854003908, 0.02835273551940918, 0.028528480529785155, 0.02816009521484375, 0.02822159957885742, 0.02813327980041504, 0.02809507179260254, 0.028102176666259766, 0.028080223083496093, 0.028098432540893555, 0.028082847595214844, 0.028122976303100587, 0.028182016372680665, 0.028079967498779296, 0.028003135681152345, 0.028115999221801757, 0.028179424285888672, 0.02810256004333496, 0.02819206428527832, 0.02803936004638672, 0.02821180725097656, 0.0281395206451416, 0.028278783798217775, 0.02833612823486328, 0.028610048294067384, 0.02853318405151367, 0.02846112060546875, 0.028807136535644533, 0.028271839141845702, 0.028971839904785156, 0.02836841583251953, 0.028269023895263673, 0.02829088020324707, 0.02840729522705078, 0.02836345672607422, 0.028511327743530275, 0.02843110466003418, 0.02847148895263672, 0.029570751190185547, 0.02823504066467285, 0.028406784057617186, 0.028262271881103515, 0.028272544860839844, 0.02818275260925293, 0.028211200714111328, 0.028252159118652344, 0.028270591735839845, 0.028044416427612306, 0.028149791717529297, 0.028158527374267578, 0.028071487426757812, 0.028340192794799806, 0.028280767440795898, 0.028185600280761718, 0.0282063045501709, 0.028265056610107423, 0.028258304595947265, 0.028411487579345703, 0.028236192703247072, 0.02836092758178711, 0.028172063827514648, 0.028415935516357422, 0.028809247970581056, 0.029114208221435546, 0.028552864074707033, 0.02855900764465332, 0.028402015686035155, 0.02868614387512207, 0.028234272003173827, 0.028274591445922852, 0.028472448348999025, 0.028255231857299806, 0.028303327560424803, 0.02820467185974121, 0.02827097511291504, 0.028235296249389648, 0.028228063583374024, 0.02835660743713379, 0.02833203125, 0.028192384719848633, 0.02828121566772461, 0.028309663772583007, 0.028079679489135742, 0.02818275260925293, 0.028353759765625, 0.028144319534301757, 0.02813327980041504, 0.02811110305786133, 0.0282576961517334, 0.028348640441894533, 0.02813283157348633, 0.028353439331054688, 0.028676095962524413, 0.028446720123291015, 0.028419647216796875, 0.028723648071289062, 0.028368896484375, 0.028221439361572266, 0.028378496170043944, 0.02822809600830078, 0.028219488143920897, 0.028215328216552735, 0.028313600540161132, 0.028217344284057616, 0.028114336013793945, 0.028193023681640624, 0.028270944595336914, 0.02815804862976074, 0.029310880661010744, 0.029101728439331054, 0.0299769287109375, 0.028631040573120117, 0.028444671630859376, 0.028291072845458985, 0.028272640228271483, 0.02833612823486328, 0.028196863174438477, 0.02814361572265625, 0.028102655410766602, 0.028460287094116212, 0.02819558334350586, 0.028167999267578125, 0.028208736419677735, 0.028174943923950195, 0.0281943359375, 0.028098848342895506, 0.02807200050354004, 0.02873756790161133, 0.028863935470581054, 0.02912719917297363, 0.02889632034301758, 0.028838687896728516, 0.02867625617980957, 0.02898739242553711, 0.029018239974975588, 0.02874969673156738, 0.028516351699829103, 0.028474655151367188, 0.028383359909057618, 0.028233695983886718, 0.02835049629211426, 0.028270816802978514, 0.02827097511291504, 
0.02824928092956543, 0.0283492488861084, 0.028424192428588867, 0.028289024353027343, 0.028256256103515624, 0.028239871978759764, 0.02837887954711914, 0.02826470375061035, 0.028231679916381838, 0.02817228889465332, 0.02807151985168457, 0.028026975631713868, 0.028153791427612304, 0.028202880859375, 0.028333663940429688, 0.028205984115600585, 0.02937638473510742, 0.02828300857543945, 0.028554975509643556, 0.0292838077545166, 0.029124448776245117, 0.028910303115844728, 0.028780416488647462, 0.028764543533325197, 0.02824928092956543, 0.028238624572753907, 0.028421888351440428, 0.02843267250061035, 0.02829644775390625, 0.02843110466003418, 0.02834841537475586, 0.02819651222229004, 0.028350175857543944, 0.028586496353149415, 0.028434560775756835, 0.028203008651733398, 0.02900774383544922, 0.030896255493164063, 0.028647167205810547, 0.028223743438720705, 0.029618175506591796, 0.028151008605957033, 0.02808438491821289, 0.028252639770507813, 0.028137632369995117, 0.02819811248779297, 0.028361631393432618, 0.02863545608520508, 0.028741439819335936, 0.02815795135498047, 0.028225696563720704, 0.02823561668395996, 0.028339775085449218, 0.028243520736694335, 0.028150144577026366, 0.028453088760375975, 0.0283786563873291, 0.028134143829345704, 0.02811494445800781, 0.028302976608276367, 0.0281329288482666, 0.028088479995727538, 0.028125215530395507, 0.02823027229309082, 0.028155872344970703, 0.028026912689208986, 0.028014591217041016, 0.02796134376525879, 0.02800230407714844, 0.028250112533569335, 0.02835456085205078, 0.028143232345581054, 0.0287010555267334, 0.02861414337158203, 0.028219263076782228, 0.028116607666015626, 0.028562112808227538, 0.028072256088256836, 0.028141183853149412, 0.028224960327148437, 0.028097248077392577, 0.028221567153930663, 0.028100223541259767, 0.02828745651245117, 0.02815123176574707, 0.028106592178344728, 0.02839353561401367, 0.028463775634765626, 0.028238975524902343, 0.028164991378784178, 0.028121088027954103, 0.028165376663208008, 0.028297983169555663, 0.028198816299438476, 0.02820515251159668, 0.028256223678588866, 0.028131359100341796, 0.028159584045410156, 0.028135839462280272, 0.02820425605773926, 0.02820790481567383, 0.028146751403808595, 0.028459808349609376, 0.028119199752807616, 0.02830745506286621, 0.02832147216796875, 0.028334400177001954, 0.028209152221679686, 0.028444671630859376, 0.028006399154663086]",tokens/s,35.12206873356248,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,11040.99328,14904.328192,0.0,14501.80608,13634.065408,s,1,18.603384765625,18.603384765625,0.0,18.603384765625,18.603384765625,18.603384765625,18.603384765625,[18.603384765625],,kWh,0.0003357025777583279,3.702333328084543e-05,0.00015452151250600066,0.000527247423545174,,MB,2137.76384,14919.008256,0.0,14501.80608,12898.830848,s,10,6.491704650878907,0.6491704650878907,0.0010156237259086448,0.6491363525390625,0.6504643310546875,0.6504889404296875,0.6505086279296874,"[0.6493602905273438, 0.6505135498046875, 0.650320068359375, 0.6504588623046875, 0.6479478149414063, 0.64727490234375, 0.6489307250976563, 0.648851806640625, 0.6493419799804687, 0.6487046508789063]",tokens/s,394.3494255631922,kWh,1.89531661666687e-05,2.0894943817466006e-06,1.2588517015249198e-05,3.36311775636645e-05,tokens/kWh,7611984.430678552,MB,2142.007296,14919.008256,0.0,14501.80608,13243.63264,s,10,36.25201391601562,3.625201391601563,0.002351755937623522,3.6258980712890625,3.6275467529296876,3.6281932983398435,3.6287105346679684,"[3.621019287109375, 3.621991943359375, 3.622748291015625, 3.626355224609375, 3.62588720703125, 3.62883984375, 3.626087158203125, 3.62577294921875, 3.625908935546875, 3.627403076171875]",tokens/s,17.378344868219166,kWh,0.00010576162410333874,1.1666748967240213e-05,7.026768815855157e-05,0.00018769606122913054,tokens/kWh,335649.02527758724,,s,630,36.22841629409786,0.05750542268904428,0.0005003790075829649,0.05751103973388672,0.05817350959777832,0.05826274871826172,0.058385601806640625,"[0.05736159896850586, 0.056646465301513675, 0.05662105560302735, 0.056589344024658206, 0.05670601654052734, 0.0566715202331543, 0.0567136001586914, 0.05674428939819336, 0.056882686614990234, 0.05680393600463867, 0.0568089599609375, 0.05695119857788086, 0.057013919830322266, 0.05694879913330078, 0.0571366081237793, 0.05713987350463867, 0.057169696807861325, 0.05705740737915039, 0.05706099319458008, 0.05709664154052734, 0.05706975936889649, 0.057059326171875, 0.057011775970458985, 0.057223617553710936, 0.05723081588745117, 0.057635360717773435, 0.05733171081542969, 0.057229312896728515, 0.05729228973388672, 0.05730752182006836, 0.057444225311279296, 0.05764326477050781, 0.0575711669921875, 0.05765884780883789, 0.057522239685058596, 0.0575494384765625, 0.0576523208618164, 0.057653919219970706, 0.05762892913818359, 0.05758156967163086, 0.0576286735534668, 0.05774060821533203, 0.057723583221435545, 0.05773311996459961, 0.05781302261352539, 0.0578191032409668, 0.0580272331237793, 0.05793667221069336, 0.05801356887817383, 0.057895038604736326, 0.0579431037902832, 0.058194881439208986, 0.05810995101928711, 0.05810176086425781, 0.05815091323852539, 0.058103809356689455, 0.058085376739501954, 0.058090721130371094, 0.058180385589599606, 0.05822623825073242, 0.05815750503540039, 0.05818982315063476, 0.05837612915039062, 0.057190719604492186, 0.056682880401611326, 0.05657833480834961, 0.05652384185791016, 0.056571872711181644, 0.05658272171020508, 0.05687871932983399, 0.05673372650146485, 0.0568430061340332, 0.05682486343383789, 0.05692720031738281, 0.05701545715332031, 0.05702912139892578, 0.05701871871948242, 0.05710835266113281, 0.057325695037841795, 0.057210880279541014, 0.0571514892578125, 0.05717177581787109, 0.05702880096435547, 0.05700147247314453, 0.05837158584594727, 0.05710502243041992, 0.05720307159423828, 0.0572407341003418, 0.0573645133972168, 0.05738086318969727, 0.05732758331298828, 0.057367393493652344, 0.05737628936767578, 0.05748489761352539, 
0.05747596740722656, 0.057545822143554685, 0.05762700653076172, 0.05754220962524414, 0.05752252960205078, 0.057495521545410155, 0.05759043121337891, 0.05760204696655274, 0.05756460952758789, 0.05757519912719727, 0.0577520637512207, 0.057833759307861325, 0.05774310302734375, 0.05794815826416016, 0.05774755096435547, 0.05784182357788086, 0.057927711486816406, 0.058078239440917966, 0.058022335052490236, 0.05807564926147461, 0.05805055999755859, 0.05800688171386719, 0.05803484725952148, 0.05808332824707031, 0.058036224365234375, 0.05794815826416016, 0.05803417587280273, 0.05809971237182617, 0.05820998382568359, 0.05835603332519531, 0.05832089614868164, 0.058265598297119144, 0.057756832122802734, 0.05675299072265625, 0.056543231964111325, 0.05666611099243164, 0.056759326934814454, 0.056830078125, 0.056699745178222655, 0.05673574447631836, 0.056856094360351564, 0.056914142608642575, 0.05692646408081055, 0.05689545440673828, 0.056970977783203126, 0.05698918533325195, 0.05703558349609375, 0.056943710327148435, 0.057097118377685545, 0.05722521591186523, 0.05713100814819336, 0.0570450553894043, 0.05712211227416992, 0.0572248649597168, 0.05723043060302734, 0.05712883377075195, 0.057112575531005856, 0.05732662582397461, 0.05751052856445313, 0.05750175857543945, 0.057374526977539066, 0.057511550903320316, 0.057410400390625, 0.057363937377929684, 0.05745126342773438, 0.05761206436157226, 0.05773632049560547, 0.05775244903564453, 0.05777423858642578, 0.057630142211914065, 0.05773968124389649, 0.05777407836914063, 0.0577116813659668, 0.05770489501953125, 0.05768454360961914, 0.05794758224487305, 0.05782694244384766, 0.0578548469543457, 0.05786624145507813, 0.05784371185302734, 0.057870334625244144, 0.05790719985961914, 0.05806284713745117, 0.05816454315185547, 0.05810655975341797, 0.058068031311035155, 0.058065536499023435, 0.058190113067626954, 0.05819590377807617, 0.058191967010498044, 0.0580423698425293, 0.05806387329101562, 0.058355777740478516, 0.05818054580688477, 0.05841312026977539, 0.0574029426574707, 0.0566255989074707, 0.0566396484375, 0.056909664154052735, 0.05695699310302734, 0.05694867324829102, 0.05677471923828125, 0.05675942230224609, 0.0569486083984375, 0.05695379257202148, 0.05692432022094727, 0.057105438232421875, 0.05726496124267578, 0.05692009735107422, 0.05695836639404297, 0.057016895294189456, 0.05724985504150391, 0.057148448944091795, 0.057125312805175785, 0.05708848190307617, 0.05713919830322266, 0.0572149772644043, 0.057235454559326174, 0.05722316741943359, 0.05723366546630859, 0.057204479217529296, 0.05733555221557617, 0.057446849822998046, 0.05743999862670898, 0.05755526351928711, 0.05736627197265625, 0.05735120010375976, 0.05740848159790039, 0.05762819290161133, 0.057600608825683595, 0.057621726989746096, 0.057573726654052734, 0.057597888946533206, 0.05764873504638672, 0.05777695846557617, 0.05769622421264648, 0.05764435195922851, 0.05778204727172852, 0.057696895599365236, 0.057827617645263674, 0.05799935913085937, 0.05793382263183594, 0.05792505645751953, 0.05798150253295899, 0.058093120574951175, 0.058096065521240234, 0.05804851150512695, 0.05807923126220703, 0.05818777465820312, 0.05812838363647461, 0.05813977432250977, 0.05830131149291992, 0.058136577606201174, 0.058193920135498046, 0.05834726333618164, 0.058284255981445314, 0.05823081588745117, 0.05828403091430664, 0.057417728424072265, 0.056672191619873045, 0.05656582260131836, 0.0566080322265625, 0.056775390625, 0.05674803161621094, 0.05674185562133789, 0.05678425598144531, 0.0568900146484375, 0.057040897369384766, 
0.05692364883422851, 0.05693801498413086, 0.05689254379272461, 0.05697622299194336, 0.057097408294677736, 0.05713286590576172, 0.0571514892578125, 0.05726617431640625, 0.057356128692626955, 0.05714876937866211, 0.05715846252441406, 0.057210880279541014, 0.057204734802246096, 0.05726959991455078, 0.057180831909179684, 0.05713715362548828, 0.05732556915283203, 0.05732969665527344, 0.05747273635864258, 0.05741791915893555, 0.05740969467163086, 0.05736246490478516, 0.05746265411376953, 0.057603294372558594, 0.057723678588867185, 0.05788240051269531, 0.057651424407958986, 0.05754470443725586, 0.057659393310546876, 0.05775155258178711, 0.05767168045043945, 0.057642719268798825, 0.05782556915283203, 0.05775155258178711, 0.0578284797668457, 0.05789785766601562, 0.05792086410522461, 0.057971359252929684, 0.05794815826416016, 0.058103809356689455, 0.058052574157714844, 0.058148414611816406, 0.05809344100952148, 0.05817414474487305, 0.058101665496826174, 0.058011646270751956, 0.05805670547485352, 0.05824716949462891, 0.05816844940185547, 0.05821664047241211, 0.058302783966064455, 0.05836019134521484, 0.05835327911376953, 0.05802934265136719, 0.05681340789794922, 0.056829761505126954, 0.05666118240356445, 0.0567468147277832, 0.056755905151367185, 0.056798561096191406, 0.0567982063293457, 0.056853855133056644, 0.056836673736572266, 0.05701375961303711, 0.057037406921386716, 0.05701420974731446, 0.057126335144042965, 0.05707241439819336, 0.05701980972290039, 0.05707820892333984, 0.05731033706665039, 0.057315582275390624, 0.05718080139160156, 0.05709823989868164, 0.0571412467956543, 0.0573111686706543, 0.05731436920166016, 0.057381889343261716, 0.05723900985717773, 0.057371166229248045, 0.05745865631103515, 0.05735184097290039, 0.057468639373779294, 0.057565601348876956, 0.057521728515625, 0.05748601531982422, 0.057505950927734376, 0.05763792037963867, 0.057696414947509767, 0.05824332809448242, 0.05803459167480469, 0.05755910491943359, 0.05760403060913086, 0.057664608001708986, 0.057662593841552735, 0.057794334411621094, 0.057758880615234376, 0.057832481384277344, 0.05792953491210937, 0.05793382263183594, 0.05793382263183594, 0.058071041107177736, 0.05796790313720703, 0.05798153686523438, 0.05805459213256836, 0.05816912078857422, 0.05807318496704102, 0.05875948715209961, 0.058049888610839845, 0.05807174301147461, 0.05806409454345703, 0.05820828628540039, 0.05822332763671875, 0.05828335952758789, 0.05828879928588867, 0.05844172668457031, 0.05740851211547852, 0.056796287536621096, 0.056632225036621096, 0.056645599365234375, 0.056840190887451174, 0.05686067199707031, 0.05692006301879883, 0.05672140884399414, 0.056763614654541016, 0.05682681655883789, 0.056906848907470706, 0.05695759963989258, 0.05702579116821289, 0.056904319763183595, 0.05697558212280274, 0.057017822265625, 0.057119041442871096, 0.05715990447998047, 0.05722019195556641, 0.05710496139526367, 0.0571088638305664, 0.05707977676391601, 0.05721427154541016, 0.05719929504394531, 0.05741910552978516, 0.05727932739257813, 0.05730080032348633, 0.05734000015258789, 0.057554847717285154, 0.05749292755126953, 0.05743859100341797, 0.05733804702758789, 0.05738300704956055, 0.057589439392089846, 0.05762796783447265, 0.05767875289916992, 0.057675777435302736, 0.05759507369995117, 0.057627071380615236, 0.05773152160644531, 0.05771379089355469, 0.05774185562133789, 0.05781283187866211, 0.05780332946777344, 0.05791136169433594, 0.0581036148071289, 0.058244960784912106, 0.05785820770263672, 0.05784985733032227, 0.05790105438232422, 0.05801574325561523, 
0.058011646270751956, 0.05815500640869141, 0.05822841644287109, 0.0580590705871582, 0.05799321746826172, 0.05805456161499024, 0.058229022979736325, 0.05819363021850586, 0.05823065567016601, 0.05836003112792969, 0.05837363052368164, 0.058550334930419924, 0.057223678588867184, 0.05665568161010742, 0.056662208557128904, 0.0567193603515625, 0.0567193603515625, 0.0566927375793457, 0.056747806549072265, 0.057011680603027345, 0.05700412750244141, 0.0569002571105957, 0.056807422637939455, 0.05687091064453125, 0.057012222290039063, 0.05704908752441406, 0.05697043228149414, 0.05704560089111328, 0.056981376647949215, 0.057045246124267576, 0.05724716949462891, 0.057174686431884766, 0.057204383850097656, 0.05717449569702148, 0.05717964935302734, 0.057253440856933596, 0.057246528625488284, 0.05729075241088867, 0.05766963195800781, 0.05730508804321289, 0.05731257629394531, 0.05740409469604492, 0.0574890251159668, 0.057624961853027346, 0.0575239372253418, 0.057415008544921875, 0.057465599060058596, 0.057659584045410155, 0.057616382598876956, 0.05759795379638672, 0.057775840759277344, 0.05770387268066406, 0.05767808151245117, 0.05770915222167969, 0.0578392333984375, 0.05776588821411133, 0.057845375061035154, 0.057976734161376955, 0.057936641693115236, 0.05790729522705078, 0.05793382263183594, 0.05800723266601562, 0.05803452682495117, 0.057904449462890625, 0.05803484725952148, 0.058173439025878904, 0.058298366546630856, 0.05816672134399414, 0.0581732177734375, 0.05819881439208984, 0.05820620727539062, 0.058169345855712894, 0.05826969528198242, 0.05837625503540039, 0.05851679992675781, 0.057386016845703124, 0.056648670196533205, 0.056627071380615235, 0.05666604614257813, 0.056672542572021485, 0.056739742279052735, 0.05689750289916992, 0.0569134407043457, 0.05691625595092773, 0.05691209411621094, 0.05688716888427734, 0.05690585708618164, 0.05704294586181641, 0.05700403213500976, 0.057012222290039063, 0.05700566482543945, 0.057092510223388675, 0.0571262092590332, 0.057068225860595706, 0.057266368865966796, 0.05711561584472656, 0.05710729598999023, 0.057036224365234374, 0.05729337692260742, 0.05731244659423828, 0.057315521240234375, 0.057332351684570314, 0.057339614868164065, 0.0573130874633789, 0.05760457611083984, 0.057565086364746096, 0.05751171112060547, 0.057583934783935545, 0.05745663833618164, 0.05747286224365234, 0.05760121536254883, 0.057619422912597654, 0.05768115234375, 0.05776460647583008, 0.057692161560058595, 0.05770627212524414, 0.05786236953735351, 0.05782527923583984, 0.05774256134033203, 0.057769760131835934, 0.057744384765625, 0.057956321716308594, 0.05805449676513672, 0.058164894104003904, 0.05810393524169922, 0.058103233337402346, 0.05795119857788086, 0.0579420166015625, 0.058060798645019535, 0.058132606506347655, 0.058044288635253904, 0.05833523178100586, 0.05816521453857422, 0.058191776275634766, 0.05829439926147461, 0.05828812789916992, 0.05821644973754883, 0.05850931167602539, 0.05732470321655273, 0.05671305465698242, 0.056648704528808595, 0.056630271911621094, 0.056578048706054686, 0.05674950408935547, 0.05678303909301758, 0.05682624053955078, 0.05699174499511719, 0.05697292709350586, 0.05686924743652344, 0.05690083312988281, 0.05698831939697266, 0.057005985260009766, 0.05697967910766601, 0.05706668853759766, 0.05713318252563476, 0.05715564727783203, 0.05729548645019531, 0.05715967941284179, 0.05713510513305664, 0.05712076950073242, 0.05721443176269531, 0.05723814392089844, 0.05727632141113281, 0.05735833740234375, 0.05750543975830078, 0.05753071975708008, 0.05747711944580078, 
0.05748873519897461, 0.05750032043457031, 0.05749350357055664, 0.057691551208496096, 0.05769686508178711, 0.057586910247802735, 0.05762847900390625, 0.05780166244506836, 0.057777503967285156, 0.05777705764770508, 0.0576673583984375, 0.057708545684814455, 0.05772233581542969, 0.057784385681152343, 0.05785260772705078, 0.05786707305908203, 0.057893856048583985, 0.05794521713256836, 0.0580186882019043, 0.05807513427734375, 0.05794815826416016, 0.058060062408447265, 0.05806358337402344, 0.05818755340576172, 0.058167518615722655, 0.05810281753540039, 0.05803945541381836, 0.05811590576171875, 0.05829017639160156, 0.05821459197998047, 0.0582592658996582, 0.0582371826171875, 0.05838617706298828, 0.05838419342041016]",tokens/s,17.389664369696327,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,2219.937792,2914.910208,0.0,2512.388096,2240.694784,s,1,9.68597265625,9.68597265625,0.0,9.68597265625,9.68597265625,9.68597265625,9.68597265625,[9.68597265625],,kWh,6.452215431671296e-05,7.110281926860325e-06,2.6827799239959038e-05,9.846023548353233e-05,,MB,2236.964864,2931.687424,0.0,2514.485248,2227.003904,s,10,12.32526086425781,1.2325260864257812,0.01063930289644262,1.2293075561523437,1.2344844604492187,1.249265850830078,1.2610909631347658,"[1.2292568359375, 1.228900390625, 1.2287049560546874, 1.2278560791015625, 1.224905029296875, 1.2293582763671875, 1.2302276611328125, 1.2308046875, 1.2640472412109376, 1.23119970703125]",tokens/s,207.703514610695,kWh,3.6112697719578743e-05,3.982736337017925e-06,2.40260747763954e-05,6.412150883299208e-05,tokens/kWh,3992420.0889714835,MB,2248.204288,2931.687424,0.0,2514.485248,2334.959104,s,10,18.859075927734374,1.8859075927734374,0.006407550417403678,1.8867675170898437,1.8942435302734375,1.8958975219726562,1.8972207153320313,"[1.8772825927734376, 1.8764168701171875, 1.8805714111328125, 1.88649658203125, 1.8834573974609374, 1.8870384521484376, 1.897551513671875, 1.8877025146484374, 1.8938759765625, 1.8886826171875]",tokens/s,33.40566645015277,kWh,5.488706481709716e-05,6.0544829961801046e-06,3.0390607645808255e-05,9.133215545908555e-05,tokens/kWh,689789.9177275235,,s,630,18.855960075378427,0.029930095357743523,0.0004194383459578958,0.02984047985076904,0.030339401817321778,0.030551728248596192,0.03165947565078737,"[0.030560064315795898, 0.030034400939941405, 0.02949283218383789, 0.03340697479248047, 0.029604255676269533, 0.029632511138916014, 0.029575168609619142, 0.029857088088989257, 0.02954870414733887, 0.029597408294677736, 0.029698495864868165, 0.0296690559387207, 0.029434528350830078, 0.029579296112060546, 0.029659135818481445, 0.029568511962890624, 0.029999616622924805, 0.030301567077636718, 0.02986057662963867, 0.029980575561523438, 0.0298023681640625, 0.029905183792114258, 0.029726367950439453, 0.029786304473876955, 0.02953011131286621, 0.02963046455383301, 0.029533952713012696, 0.0296114559173584, 
0.029659456253051757, 0.029680288314819336, 0.02958870315551758, 0.029637216567993164, 0.029517824172973633, 0.029990079879760743, 0.029397119522094728, 0.02953241539001465, 0.029576831817626954, 0.029644800186157227, 0.02990320014953613, 0.029798912048339843, 0.029796192169189453, 0.029779712677001954, 0.029569759368896484, 0.02970591926574707, 0.029755136489868165, 0.02967478370666504, 0.029531103134155273, 0.029739007949829102, 0.02958083152770996, 0.029558303833007813, 0.029837696075439454, 0.030269311904907226, 0.03051590347290039, 0.03040057563781738, 0.029857696533203124, 0.029818111419677735, 0.029776287078857423, 0.029763999938964843, 0.029578815460205077, 0.029542240142822265, 0.029434431076049806, 0.029494943618774413, 0.02952022361755371, 0.030636959075927735, 0.030080127716064452, 0.02962112045288086, 0.02969603157043457, 0.029521888732910156, 0.029715808868408203, 0.02968828773498535, 0.02977948760986328, 0.029565792083740234, 0.029723968505859375, 0.029597280502319336, 0.02962281608581543, 0.0295468807220459, 0.029777887344360352, 0.03011382484436035, 0.030224384307861327, 0.029984960556030272, 0.02983302307128906, 0.029552255630493164, 0.029600128173828125, 0.02951571273803711, 0.029631711959838866, 0.02961043167114258, 0.029648479461669923, 0.029468511581420897, 0.029692895889282228, 0.029555871963500978, 0.029560800552368163, 0.029385215759277345, 0.032237056732177735, 0.03038617515563965, 0.029948160171508788, 0.029690496444702147, 0.0297606086730957, 0.029422496795654295, 0.02960755157470703, 0.029475135803222655, 0.02984556770324707, 0.029491199493408202, 0.029722623825073242, 0.029595903396606445, 0.029861440658569337, 0.02955228805541992, 0.03122230339050293, 0.02956707191467285, 0.029601247787475585, 0.029475423812866212, 0.029793983459472657, 0.030072959899902343, 0.029964000701904296, 0.029897247314453125, 0.02996406364440918, 0.029636608123779298, 0.02978201675415039, 0.029678592681884764, 0.029733856201171874, 0.029480127334594725, 0.02965999984741211, 0.029537919998168946, 0.02964518356323242, 0.029480960845947264, 0.029741056442260744, 0.029444095611572265, 0.03065452766418457, 0.030089151382446288, 0.029865983963012696, 0.029980255126953126, 0.02982748794555664, 0.0299233283996582, 0.029953632354736328, 0.03104150390625, 0.029709760665893554, 0.029831552505493165, 0.029569568634033202, 0.029898399353027343, 0.029819328308105467, 0.029826623916625977, 0.02958790397644043, 0.02970140838623047, 0.029534944534301756, 0.029697952270507814, 0.02957321548461914, 0.029892608642578124, 0.030074880599975585, 0.02960508728027344, 0.029631263732910158, 0.02977791976928711, 0.029519872665405275, 0.02958131217956543, 0.029508928298950195, 0.029697792053222656, 0.02946656036376953, 0.02962076759338379, 0.02953264045715332, 0.029838399887084963, 0.02992633628845215, 0.02993561553955078, 0.029660255432128906, 0.02964588737487793, 0.02972585678100586, 0.0308569278717041, 0.029557056427001953, 0.02975155258178711, 0.029807008743286133, 0.02985196876525879, 0.029656768798828125, 0.029846912384033204, 0.029851680755615236, 0.030708415985107422, 0.030625696182250976, 0.030224384307861327, 0.029841119766235352, 0.029743392944335936, 0.029523935317993164, 0.02990083122253418, 0.029529279708862304, 0.029708831787109376, 0.02972496032714844, 0.02969599914550781, 0.029873567581176756, 0.03037049674987793, 0.02966927909851074, 0.02977177619934082, 0.029735008239746095, 0.029987743377685547, 0.029720767974853516, 0.030810111999511718, 0.03033670425415039, 0.030138847351074218, 
0.029951839447021483, 0.02991231918334961, 0.03000396728515625, 0.030040128707885742, 0.03042092704772949, 0.030114912033081056, 0.030589855194091797, 0.029894367218017578, 0.02980611228942871, 0.029565696716308595, 0.029692127227783204, 0.029550079345703126, 0.02981430435180664, 0.029836032867431642, 0.02997859191894531, 0.029653024673461915, 0.02976153564453125, 0.030113792419433592, 0.029739168167114256, 0.029598880767822265, 0.029739423751831053, 0.029583648681640626, 0.029894655227661132, 0.029601472854614258, 0.03302953720092774, 0.029741920471191407, 0.029763008117675783, 0.029548864364624023, 0.029828927993774415, 0.029640928268432617, 0.029720863342285155, 0.029626623153686523, 0.029631967544555663, 0.029544736862182615, 0.029665279388427734, 0.029519296646118163, 0.029775840759277344, 0.030265727996826173, 0.0301693115234375, 0.029882368087768556, 0.030535327911376954, 0.030277984619140625, 0.030111743927001954, 0.029871583938598633, 0.029917728424072264, 0.029685760498046877, 0.029869312286376952, 0.029594400405883788, 0.030100608825683595, 0.0296615047454834, 0.030224927902221678, 0.030373567581176757, 0.02990300750732422, 0.029585311889648438, 0.029884927749633788, 0.02953558349609375, 0.02976304054260254, 0.029615039825439452, 0.03059916877746582, 0.029623552322387694, 0.03248102569580078, 0.030267391204833984, 0.030215679168701173, 0.03021001625061035, 0.02989926338195801, 0.03042419242858887, 0.030368223190307617, 0.03017568016052246, 0.02983852767944336, 0.029841535568237303, 0.029813087463378907, 0.02980019187927246, 0.029764192581176758, 0.029792255401611328, 0.029646656036376954, 0.0298272647857666, 0.029749248504638674, 0.02982707214355469, 0.02973695945739746, 0.029869983673095703, 0.02970403289794922, 0.02988003158569336, 0.029674016952514648, 0.029679935455322267, 0.029728448867797852, 0.029668800354003905, 0.029351808547973632, 0.029636320114135743, 0.029417535781860352, 0.02968668746948242, 0.02981888008117676, 0.031171968460083008, 0.029723264694213866, 0.029929471969604493, 0.029620223999023438, 0.029574911117553712, 0.029583391189575196, 0.030339296340942384, 0.030195711135864257, 0.030240415573120117, 0.030480735778808592, 0.030066688537597655, 0.029730815887451172, 0.02965849685668945, 0.029575807571411133, 0.02972480010986328, 0.02956867218017578, 0.02974470329284668, 0.02960246467590332, 0.029712703704833983, 0.029454015731811525, 0.029888832092285156, 0.03141744041442871, 0.029978624343872072, 0.029762144088745116, 0.0298721923828125, 0.029437887191772462, 0.02955897521972656, 0.02946028709411621, 0.02958742332458496, 0.029402591705322265, 0.029716863632202148, 0.029605600357055666, 0.030742528915405274, 0.030529727935791017, 0.030309759140014648, 0.030228736877441407, 0.030217599868774415, 0.03031705665588379, 0.029901119232177736, 0.03016044807434082, 0.029841184616088867, 0.030076608657836915, 0.02980672073364258, 0.029947839736938476, 0.029746080398559572, 0.029868032455444334, 0.029667583465576172, 0.029763328552246095, 0.029633983612060547, 0.031113792419433593, 0.03021414375305176, 0.02998588752746582, 0.030001888275146483, 0.02998444747924805, 0.029514240264892577, 0.029857791900634766, 0.029683712005615235, 0.029769920349121095, 0.030565408706665038, 0.029837120056152345, 0.029689823150634766, 0.02977484893798828, 0.029607456207275392, 0.02982310485839844, 0.029960384368896486, 0.03018502426147461, 0.03033763122558594, 0.0303024959564209, 0.030051136016845705, 0.02997724723815918, 0.02990620803833008, 0.030032224655151367, 0.03021273612976074, 
0.0301527042388916, 0.02975129508972168, 0.02990675163269043, 0.02966281509399414, 0.029821887969970703, 0.029851520538330078, 0.02964454460144043, 0.029586847305297852, 0.029756256103515625, 0.029640480041503905, 0.030296287536621093, 0.02971625518798828, 0.029777088165283204, 0.029614912033081055, 0.02988787269592285, 0.029808576583862306, 0.029931488037109374, 0.029601823806762694, 0.029807296752929688, 0.029902847290039062, 0.02977996826171875, 0.029650943756103516, 0.030636032104492186, 0.03038960075378418, 0.03017353630065918, 0.03042131233215332, 0.03022425651550293, 0.030367935180664062, 0.02989254379272461, 0.030090656280517578, 0.029890464782714843, 0.030079456329345704, 0.02985385513305664, 0.030050527572631835, 0.029892192840576173, 0.029903104782104492, 0.02993312072753906, 0.02980828857421875, 0.02973513603210449, 0.029752992630004884, 0.02962940788269043, 0.029937824249267577, 0.029820863723754882, 0.02987945556640625, 0.029839839935302735, 0.03034035110473633, 0.029827327728271485, 0.029833984375, 0.029872095108032227, 0.029947999954223634, 0.02994780731201172, 0.03055820846557617, 0.03038412857055664, 0.03034419250488281, 0.030020288467407227, 0.030046527862548827, 0.029861087799072265, 0.029966623306274413, 0.029979135513305662, 0.03018454360961914, 0.029996128082275392, 0.030392255783081055, 0.02987606430053711, 0.030132255554199218, 0.02994380760192871, 0.029994144439697265, 0.02991923141479492, 0.029975391387939452, 0.029808639526367187, 0.030322656631469727, 0.029861791610717774, 0.029849184036254882, 0.033224449157714844, 0.03023276710510254, 0.02992598342895508, 0.030113792419433592, 0.029886335372924806, 0.030146879196166994, 0.0304815673828125, 0.03033344078063965, 0.030159072875976564, 0.030244800567626955, 0.0301977596282959, 0.03062579154968262, 0.030271488189697264, 0.030426496505737304, 0.030118719100952148, 0.02983225631713867, 0.03014713668823242, 0.03018288040161133, 0.030247167587280275, 0.029864288330078124, 0.029960639953613283, 0.029828384399414064, 0.030057056427001953, 0.02987740707397461, 0.029893760681152345, 0.029693376541137694, 0.029896991729736328, 0.029655071258544923, 0.029750879287719727, 0.030678720474243165, 0.029934623718261718, 0.029674272537231445, 0.029922176361083984, 0.029644800186157227, 0.029935007095336915, 0.029646879196166993, 0.029778144836425782, 0.030044511795043947, 0.030149856567382814, 0.030532480239868164, 0.030273439407348633, 0.02999513626098633, 0.030211360931396484, 0.03012054443359375, 0.030277824401855467, 0.03013430404663086, 0.03018435287475586, 0.02987612724304199, 0.030023712158203125, 0.0297126407623291, 0.029954015731811525, 0.030046335220336916, 0.03008777618408203, 0.03024300765991211, 0.029847103118896483, 0.029899007797241212, 0.029919424057006837, 0.029744192123413084, 0.029797119140625, 0.02972502326965332, 0.02982467269897461, 0.029605855941772462, 0.029794336318969727, 0.029603647232055663, 0.029785568237304688, 0.029666015625, 0.03035545539855957, 0.03005958366394043, 0.029766592025756836, 0.029600864410400392, 0.03004707145690918, 0.029724863052368163, 0.030388320922851562, 0.03008729553222656, 0.03004585647583008, 0.02962784004211426, 0.03056435203552246, 0.030141984939575196, 0.03041846466064453, 0.03066966438293457, 0.029978464126586914, 0.030112064361572266, 0.029804479598999022, 0.03013033676147461, 0.029988704681396486, 0.03013199996948242, 0.029950176239013672, 0.03011564826965332, 0.030013280868530272, 0.030035743713378905, 0.029991487503051757, 0.030155967712402344, 0.02986992073059082, 
0.029916160583496092, 0.029831199645996093, 0.03054380798339844, 0.030381568908691405, 0.030640960693359375, 0.030180927276611327, 0.03069900894165039, 0.03021433639526367, 0.030197280883789063, 0.03049769592285156, 0.030257343292236328, 0.030013439178466796, 0.03034297561645508, 0.030170272827148438, 0.030258079528808594, 0.030176959991455077, 0.030176864624023438, 0.029781728744506835, 0.029877023696899416, 0.029976608276367188, 0.032056926727294925, 0.030100000381469726, 0.030168544769287108, 0.029993696212768553, 0.030029312133789062, 0.02979635238647461, 0.02979257583618164, 0.029677568435668947, 0.02977123260498047, 0.029659231185913085, 0.02981228828430176, 0.02960438346862793, 0.030206399917602537, 0.029640480041503905, 0.029774303436279296, 0.02961782455444336, 0.02983660888671875, 0.02951238441467285, 0.029790048599243165, 0.029655008316040038, 0.029714624404907228, 0.029575168609619142, 0.029908992767333983, 0.02981068801879883, 0.03000102424621582, 0.029874303817749023, 0.030728864669799804, 0.030421152114868164, 0.03027743911743164, 0.030097471237182618, 0.02971455955505371, 0.029855327606201174, 0.029710752487182617, 0.029834911346435546, 0.02973731231689453, 0.029921152114868163, 0.03035148811340332, 0.03175833511352539, 0.03023686408996582, 0.03009516716003418, 0.02977824020385742, 0.03015443229675293, 0.03014041519165039, 0.030121919631958007, 0.03003772735595703, 0.030024383544921877, 0.02975324821472168, 0.02980793571472168, 0.029915584564208984, 0.02994175910949707, 0.029710336685180663, 0.029898847579956055, 0.030014623641967775, 0.03003264045715332, 0.029734912872314452, 0.029847551345825195, 0.030290239334106444, 0.029857471466064454, 0.02964896011352539, 0.029796287536621092, 0.02976880073547363, 0.02993961524963379, 0.030093536376953126, 0.030089887619018554, 0.02982310485839844, 0.030065759658813477, 0.02973401641845703, 0.029819807052612304, 0.029854591369628907, 0.030138208389282228, 0.029634464263916017, 0.029796607971191408, 0.029772800445556642, 0.029809663772583008, 0.02994380760192871, 0.03037164878845215, 0.029976192474365233, 0.030222911834716797, 0.029726240158081056, 0.03020841598510742, 0.029646272659301757, 0.029784095764160155, 0.02974166488647461, 0.0296529598236084, 0.029579296112060546, 0.03014169692993164, 0.03007369613647461, 0.029816448211669924, 0.029917631149291992]",tokens/s,33.41118656814702,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1526.386688,1868.43136,0.0,1465.909248,1358.169088,s,1,8.827353515625,8.827353515625,0.0,8.827353515625,8.827353515625,8.827353515625,8.827353515625,[8.827353515625],,kWh,4.83153412874041e-05,5.322241863624296e-06,1.7567514053962174e-05,7.120509720499058e-05,,MB,1634.312192,1889.40288,0.0,1472.200704,1356.544512,s,10,0.44906796646118163,0.044906796646118165,0.0001434592084082612,0.04489153480529785,0.044959188079833984,0.045130633163452144,0.04526778923034668,"[0.04530207824707031, 0.044893566131591796, 0.0448895034790039, 0.0447419204711914, 0.04492108917236328, 0.044770591735839846, 0.044905311584472654, 0.04486304092407226, 0.04487184143066406, 0.04490902328491211]",tokens/s,5700.696088776333,kWh,1.3670687055297076e-06,1.5076205554766556e-07,9.107769487664807e-07,2.428607709843854e-06,tokens/kWh,105410189.94642794,MB,1643.27424,1889.40288,0.0,1472.200704,1409.94816,s,10,11.1873271484375,1.11873271484375,0.004212379872111906,1.1176658325195312,1.1234790283203124,1.1259884155273439,1.1279959252929688,"[1.12292138671875, 1.120030517578125, 1.1182462158203126, 1.128497802734375, 1.11680224609375, 1.1130865478515626, 1.1148236083984375, 1.11708544921875, 1.1198770751953124, 1.115956298828125]",tokens/s,56.313719232568445,kWh,3.246614477906079e-05,3.5805692261708373e-06,1.5811791984830543e-05,5.1858505990062164e-05,tokens/kWh,1214844.099289573,,s,630,11.181116909027105,0.017747804617503332,0.00031120225536306935,0.017688928604125978,0.017971506881713867,0.018106043338775635,0.018938109149932863,"[0.01800886344909668, 0.01777663993835449, 0.017747392654418947, 0.01763542366027832, 0.017656288146972655, 0.01761484718322754, 0.01768009567260742, 0.017774463653564453, 0.017766176223754884, 0.017772960662841796, 0.017791200637817382, 0.017866016387939453, 0.017592960357666016, 0.017721439361572267, 0.01771843147277832, 0.017617759704589845, 0.01765171241760254, 0.01773107147216797, 0.01773526382446289, 0.01771612739562988, 0.017764352798461915, 0.01777663993835449, 0.017713151931762695, 0.01780121612548828, 0.0175861759185791, 0.017588191986083985, 0.017682079315185548, 0.017807743072509766, 0.017888511657714844, 0.017699583053588867, 0.017980640411376952, 0.01797609519958496, 0.018192224502563477, 0.018173952102661133, 0.01776041603088379, 0.017718463897705077, 0.017750848770141603, 0.017666112899780272, 0.0177192325592041, 0.017760255813598632, 0.018160768508911133, 0.01784294319152832, 0.01781353569030762, 0.017684511184692383, 0.017602720260620118, 0.017671199798583986, 0.017631616592407227, 0.017672704696655273, 0.017625087738037108, 0.017771520614624024, 0.01762611198425293, 0.018397184371948243, 0.017913856506347657, 0.018395135879516602, 0.01822857666015625, 0.018457088470458984, 0.018000032424926756, 0.017911808013916015, 0.017913856506347657, 0.01785219192504883, 0.018014432907104493, 0.017972543716430665, 0.01793673515319824, 0.017977535247802736, 0.017854272842407228, 0.017694719314575197, 0.017714719772338867, 0.017856992721557618, 0.01780928039550781, 0.017760383605957032, 0.017927967071533202, 0.01809619140625, 0.017774751663208008, 0.017721120834350585, 0.01766124725341797, 0.017621055603027343, 0.01767919921875, 0.01762713623046875, 0.01794867134094238, 0.01984476852416992, 0.018352479934692384, 0.018132448196411133, 0.01782806396484375, 0.017721664428710936, 0.01770515251159668, 0.017647584915161132, 0.017573728561401367, 0.017782272338867186, 0.01770252799987793, 0.017602975845336915, 0.017586559295654298, 
0.017576095581054687, 0.0175897274017334, 0.017695199966430663, 0.017729280471801757, 0.017678592681884767, 0.017579616546630858, 0.017577472686767577, 0.017603488922119142, 0.017638784408569336, 0.017599103927612304, 0.01759427261352539, 0.017581247329711915, 0.01755999946594238, 0.017576416015625, 0.017657440185546876, 0.017619359970092775, 0.01763260841369629, 0.017601280212402343, 0.0177161922454834, 0.01774687957763672, 0.017730592727661133, 0.017611743927001954, 0.017610143661499024, 0.017770944595336916, 0.017766559600830078, 0.017725536346435547, 0.017650720596313476, 0.017709056854248048, 0.01779974365234375, 0.017899839401245118, 0.017796960830688477, 0.01777414321899414, 0.017766944885253905, 0.017927648544311524, 0.01787129592895508, 0.018116287231445313, 0.017970912933349608, 0.017709503173828123, 0.017671392440795897, 0.01782172775268555, 0.01765456008911133, 0.017739776611328126, 0.017788991928100586, 0.017649599075317383, 0.018066560745239258, 0.017683328628540038, 0.01773980712890625, 0.017800832748413088, 0.017590208053588866, 0.017915456771850587, 0.017773408889770508, 0.017702463150024415, 0.01797983932495117, 0.017885343551635742, 0.01766579246520996, 0.017569183349609375, 0.01756844711303711, 0.017597728729248047, 0.017595104217529297, 0.01759846305847168, 0.017530399322509764, 0.017520288467407226, 0.017634111404418944, 0.01765100860595703, 0.017609407424926758, 0.01788313674926758, 0.017584192276000978, 0.017629119873046876, 0.017508352279663086, 0.017754112243652344, 0.017729280471801757, 0.017971391677856444, 0.018001983642578125, 0.01784832000732422, 0.017786880493164063, 0.017713151931762695, 0.017688575744628905, 0.017811040878295898, 0.017670207977294922, 0.017724767684936523, 0.01761782455444336, 0.017675392150878905, 0.01766703987121582, 0.01781056022644043, 0.017773439407348633, 0.017750240325927733, 0.017698591232299804, 0.01770911979675293, 0.017743871688842772, 0.017795007705688478, 0.018024351119995115, 0.017770591735839843, 0.017585311889648438, 0.017777311325073243, 0.01769696044921875, 0.017739776611328126, 0.017851648330688478, 0.017816320419311523, 0.017973503112792968, 0.01781328010559082, 0.017743616104125975, 0.01755763244628906, 0.017590591430664063, 0.017593408584594728, 0.017664960861206055, 0.01761075210571289, 0.01761689567565918, 0.017741504669189452, 0.01777452850341797, 0.01759833526611328, 0.01771776008605957, 0.017672191619873046, 0.01767628860473633, 0.017665279388427733, 0.018358144760131836, 0.019108736038208007, 0.018019584655761717, 0.01794534492492676, 0.017991680145263672, 0.01780121612548828, 0.01761075210571289, 0.017711103439331053, 0.017728639602661134, 0.017746816635131835, 0.017975296020507812, 0.017995008468627928, 0.017931007385253907, 0.017844255447387696, 0.01782371139526367, 0.017797311782836913, 0.01780659294128418, 0.017711872100830077, 0.017637184143066406, 0.017870304107666015, 0.017777183532714843, 0.01788051223754883, 0.018068031311035158, 0.01785843276977539, 0.01781705665588379, 0.017730207443237306, 0.017565696716308594, 0.017704896926879883, 0.01774928092956543, 0.01779587173461914, 0.018229248046875, 0.0180709114074707, 0.018048736572265626, 0.018260576248168944, 0.020799455642700197, 0.0181507511138916, 0.018093055725097656, 0.017999744415283202, 0.01814233589172363, 0.018131967544555663, 0.018181535720825197, 0.01812540817260742, 0.017713151931762695, 0.017702207565307618, 0.017682687759399414, 0.017701311111450194, 0.017573888778686524, 0.017772863388061524, 0.01776803207397461, 0.017975807189941406, 
0.017923744201660156, 0.017686784744262694, 0.017903392791748046, 0.017702911376953127, 0.017675680160522463, 0.017767040252685547, 0.017737695693969727, 0.017706144332885743, 0.018025312423706055, 0.01761689567565918, 0.017767520904541017, 0.017730464935302736, 0.017945856094360352, 0.017566463470458985, 0.017788127899169923, 0.017857311248779296, 0.01777862358093262, 0.017722528457641603, 0.017652639389038084, 0.01762713623046875, 0.01768448066711426, 0.01779654312133789, 0.017721920013427733, 0.017721343994140625, 0.017708351135253906, 0.01761350440979004, 0.017663007736206056, 0.01767865562438965, 0.017752735137939454, 0.017657855987548828, 0.018044288635253907, 0.017728096008300782, 0.017651199340820312, 0.017732128143310547, 0.01757702445983887, 0.017613759994506838, 0.01759846305847168, 0.017547264099121093, 0.01756492805480957, 0.017631488800048827, 0.01763587188720703, 0.017612607955932617, 0.017657312393188476, 0.017719999313354492, 0.01779257583618164, 0.01776479911804199, 0.01765273666381836, 0.01769068717956543, 0.01761964797973633, 0.0175467529296875, 0.017465215682983398, 0.017589120864868163, 0.01759654426574707, 0.01769049644470215, 0.01777008056640625, 0.017766847610473632, 0.01778396797180176, 0.01796998405456543, 0.017764352798461915, 0.01767628860473633, 0.0176843204498291, 0.017617183685302733, 0.017536895751953125, 0.017597951889038087, 0.017836544036865236, 0.017649663925170898, 0.017752063751220702, 0.01790118408203125, 0.017830272674560547, 0.01769875144958496, 0.017777727127075194, 0.017691776275634764, 0.017667007446289063, 0.017523296356201173, 0.01776166343688965, 0.017578975677490234, 0.017737728118896484, 0.017686784744262694, 0.017823488235473632, 0.017743871688842772, 0.017862655639648437, 0.017709056854248048, 0.01770086479187012, 0.017756160736083985, 0.017729536056518554, 0.017608640670776367, 0.017759967803955078, 0.017754400253295898, 0.017696672439575196, 0.017585376739501953, 0.017662912368774413, 0.017628671646118164, 0.017637887954711915, 0.017596416473388672, 0.017571840286254883, 0.01757788848876953, 0.017587936401367188, 0.01754764747619629, 0.017727487564086913, 0.01764352035522461, 0.01756480026245117, 0.017597312927246093, 0.01770086479187012, 0.01756300735473633, 0.017689216613769532, 0.017512351989746093, 0.017501983642578125, 0.01765718460083008, 0.017750335693359376, 0.01751945686340332, 0.017593376159667967, 0.01759872055053711, 0.017496671676635742, 0.017839872360229492, 0.017744064331054688, 0.017698816299438477, 0.01760870361328125, 0.017612064361572265, 0.017615583419799803, 0.017571840286254883, 0.017530303955078125, 0.017533023834228514, 0.017512928009033202, 0.018068639755249024, 0.018060127258300782, 0.01790096092224121, 0.017719903945922853, 0.017776384353637695, 0.017780927658081053, 0.017471328735351562, 0.01762940788269043, 0.017523839950561525, 0.017613183975219725, 0.017564159393310547, 0.017504255294799806, 0.017565696716308594, 0.01772755241394043, 0.019814336776733398, 0.017713151931762695, 0.017670143127441407, 0.01762099266052246, 0.017754112243652344, 0.017760255813598632, 0.01768780708312988, 0.017743839263916015, 0.017531295776367188, 0.017469823837280272, 0.017543167114257813, 0.017522687911987304, 0.01750649642944336, 0.017520448684692384, 0.017545120239257812, 0.01750783920288086, 0.01752115249633789, 0.017495872497558594, 0.01758236885070801, 0.017501663208007813, 0.017646047592163087, 0.017626815795898438, 0.017504640579223633, 0.017649248123168947, 0.017600160598754883, 0.017647680282592774, 
0.017504127502441406, 0.017625120162963866, 0.017608480453491213, 0.017521408081054686, 0.0175229434967041, 0.017674240112304687, 0.01772857666015625, 0.017808319091796875, 0.017675775527954102, 0.017666208267211915, 0.017750368118286133, 0.01764556884765625, 0.017819007873535155, 0.017817920684814453, 0.017774944305419923, 0.01764963150024414, 0.01800396728515625, 0.017655807495117186, 0.017686016082763673, 0.017623552322387694, 0.017559551239013673, 0.017723392486572266, 0.017534975051879884, 0.017722335815429688, 0.017535999298095704, 0.017695743560791014, 0.017539039611816406, 0.01772153663635254, 0.017645408630371093, 0.01761484718322754, 0.01761702346801758, 0.017602655410766603, 0.017643295288085937, 0.0177392635345459, 0.01767475128173828, 0.02075609588623047, 0.018649471282958983, 0.017694175720214842, 0.01767616081237793, 0.01762499237060547, 0.017643936157226564, 0.017641504287719725, 0.017706911087036134, 0.017795488357543944, 0.017678144454956055, 0.017659807205200197, 0.017752351760864257, 0.017552831649780273, 0.01760927963256836, 0.01762303924560547, 0.017526784896850587, 0.01763532829284668, 0.017511999130249024, 0.0177271671295166, 0.017780895233154296, 0.017615455627441406, 0.017749120712280273, 0.017639520645141602, 0.01763337516784668, 0.017484447479248048, 0.017659040451049806, 0.017724031448364257, 0.017843488693237305, 0.017778688430786133, 0.017635744094848634, 0.01764614486694336, 0.01792361640930176, 0.017555904388427735, 0.018014272689819335, 0.01791721534729004, 0.017834112167358397, 0.017506879806518556, 0.01758348846435547, 0.01753971290588379, 0.017614528656005858, 0.01751603126525879, 0.017597248077392578, 0.01753481674194336, 0.017632928848266602, 0.01752934455871582, 0.017612800598144532, 0.017583744049072265, 0.01753536033630371, 0.017592607498168947, 0.017673952102661133, 0.0176944637298584, 0.017880767822265626, 0.017811264038085937, 0.01792403221130371, 0.02104377555847168, 0.018911231994628908, 0.017688575744628905, 0.017831647872924804, 0.017769983291625976, 0.017705503463745116, 0.01759667205810547, 0.017670143127441407, 0.017858367919921875, 0.017603008270263672, 0.017540895462036132, 0.017561567306518554, 0.017756160736083985, 0.017583839416503905, 0.017633087158203126, 0.017575584411621093, 0.017631296157836915, 0.017560319900512697, 0.01755855941772461, 0.01754211235046387, 0.017678335189819337, 0.01763737678527832, 0.01757583999633789, 0.017605920791625977, 0.017624895095825197, 0.017762752532958986, 0.01760108757019043, 0.017608480453491213, 0.01761302375793457, 0.017801088333129884, 0.01810416030883789, 0.018062976837158202, 0.017766143798828123, 0.017759136199951172, 0.017690624237060547, 0.017657855987548828, 0.017588096618652342, 0.017684064865112304, 0.017545759201049806, 0.01781065559387207, 0.01762588882446289, 0.01767628860473633, 0.017811328887939452, 0.017670143127441407, 0.017747808456420898, 0.01769910430908203, 0.017804479598999022, 0.017621824264526367, 0.017753183364868166, 0.017738655090332033, 0.017866783142089844, 0.017695968627929687, 0.01769139289855957, 0.017590272903442384, 0.01777663993835449, 0.017732959747314453, 0.01765376091003418, 0.017596223831176757, 0.01749081611633301, 0.017552576065063476, 0.017569631576538087, 0.01770083236694336, 0.017877023696899415, 0.017571840286254883, 0.017569343566894532, 0.017566144943237303, 0.017477632522583008, 0.01746124839782715, 0.01756979179382324, 0.017516000747680664, 0.017549856185913086, 0.017555456161499023, 0.017624927520751954, 0.017592063903808595, 0.017541536331176756, 
0.017678239822387695, 0.01759769630432129, 0.017548128128051756, 0.017588224411010742, 0.01763737678527832, 0.01769209671020508, 0.017742399215698243, 0.017741056442260743, 0.017621471405029298, 0.017684768676757813, 0.01810758399963379, 0.01836115264892578, 0.018167936325073242, 0.018136896133422852, 0.01787910461425781, 0.01797020721435547, 0.017893375396728514, 0.017619935989379883, 0.01755897521972656, 0.017666624069213866, 0.01764761543273926, 0.01764556884765625, 0.017567743301391603, 0.017605728149414062, 0.017541568756103517, 0.017537248611450194, 0.017594623565673827, 0.017633279800415038, 0.017569664001464844, 0.017667200088500975, 0.01761587142944336, 0.017665855407714842, 0.01758425521850586, 0.01768864059448242, 0.01760256004333496, 0.017483776092529296, 0.01752176094055176, 0.01755801582336426, 0.017537439346313476, 0.017552383422851564, 0.018949087142944337, 0.018730911254882812, 0.017796255111694335, 0.017777631759643555, 0.01768448066711426, 0.017577215194702147, 0.017733951568603516, 0.01758172798156738]",tokens/s,56.34499711664477,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3756.392448,4378.722304,0.0,3992.977408,3875.045888,s,1,9.8596025390625,9.8596025390625,0.0,9.8596025390625,9.8596025390625,9.8596025390625,9.8596025390625,[9.8596025390625],,kWh,7.826191788330258e-05,8.625795021579568e-06,2.6490021191996616e-05,0.00011337773409687877,,MB,1925.443584,4603.117568,0.0,4188.012544,4099.58912,s,10,3.0957692565917965,0.30957692565917966,0.003570036092416834,0.31060455322265623,0.3130186370849609,0.31367777557373044,0.3142050863647461,"[0.30433880615234377, 0.31193292236328124, 0.30542306518554685, 0.3125439453125, 0.30390496826171876, 0.31012008666992186, 0.3143369140625, 0.3110890197753906, 0.3092073669433594, 0.31287216186523437]",tokens/s,826.935016086555,kWh,8.980704409091597e-06,9.90416978918037e-07,5.9876226352121344e-06,1.5958744023221768e-05,tokens/kWh,16041362.630260326,MB,1925.443584,4710.07232,0.0,4292.870144,4197.764096,s,10,22.939639648437502,2.2939639648437504,0.003656954076743619,2.2940338134765623,2.2972066162109375,2.298715368652344,2.299922370605469,"[2.296842529296875, 2.294723876953125, 2.29334375, 2.296871337890625, 2.291328857421875, 2.28712548828125, 2.30022412109375, 2.296737548828125, 2.2916650390625, 2.290777099609375]",tokens/s,27.463378224553384,kWh,6.727438088715911e-05,7.420358655816667e-06,4.42063341865872e-05,0.00011890107372956296,tokens/kWh,529852.2378636518,,s,630,22.936604812622093,0.036407309226384243,0.00047154736759290637,0.03631649589538574,0.036648781967163084,0.036917424011230465,0.03914371829986574,"[0.03883993530273438, 0.040167617797851565, 0.03654070281982422, 0.03626803207397461, 0.036257793426513675, 0.03620451354980469, 0.03615103912353516, 0.03663276672363281, 0.03627420806884766, 0.03629062271118164, 0.036245502471923825, 
0.03624915313720703, 0.03633359909057617, 0.03628819274902344, 0.036238048553466795, 0.03629859161376953, 0.036219200134277346, 0.036480510711669925, 0.036386497497558595, 0.03624736022949219, 0.036161376953125, 0.03618304061889648, 0.03608083343505859, 0.036203006744384765, 0.03613113784790039, 0.036155391693115234, 0.03583180618286133, 0.035770336151123044, 0.0357061767578125, 0.036165374755859375, 0.03597820663452148, 0.03593401718139649, 0.03618431854248047, 0.03928057479858398, 0.03636547088623047, 0.03632624053955078, 0.03607062530517578, 0.03619952011108398, 0.03618492889404297, 0.036391712188720705, 0.03730255889892578, 0.038101119995117186, 0.0364950065612793, 0.03633356857299805, 0.03621017456054688, 0.036186622619628905, 0.03619635009765625, 0.0361267204284668, 0.03689267349243164, 0.03634128189086914, 0.036123104095458984, 0.03639910507202149, 0.03632332611083984, 0.03641350555419922, 0.0361962890625, 0.03651939010620117, 0.036556735992431644, 0.03632598495483398, 0.03626559829711914, 0.036571231842041016, 0.036283775329589846, 0.03626076889038086, 0.03620249557495117, 0.03682867050170899, 0.03673548889160156, 0.03657727813720703, 0.03658956909179688, 0.036260959625244144, 0.03632835388183594, 0.03659775924682617, 0.03619798278808594, 0.0362889289855957, 0.03636633682250977, 0.03630419158935547, 0.0365299186706543, 0.03636281585693359, 0.03656070327758789, 0.03618668746948242, 0.03663811111450195, 0.03622329711914062, 0.03632735824584961, 0.03623766326904297, 0.03639436721801758, 0.03653900909423828, 0.03661344146728516, 0.036487392425537106, 0.03654006576538086, 0.03639788818359375, 0.036591392517089844, 0.03667987060546875, 0.03665900802612305, 0.03637433624267578, 0.036563358306884765, 0.03655680084228516, 0.036918975830078124, 0.036312736511230466, 0.03635660934448242, 0.03624275207519531, 0.03648803329467774, 0.036397056579589845, 0.03615667343139648, 0.03632521438598633, 0.0361512336730957, 0.03737699127197266, 0.036034561157226565, 0.03629868698120117, 0.03598262405395508, 0.0358834228515625, 0.036116447448730465, 0.03654288101196289, 0.036417472839355466, 0.03631520080566406, 0.03628646469116211, 0.03630694580078125, 0.036245502471923825, 0.03619023895263672, 0.03655632019042969, 0.03654899215698242, 0.036622047424316406, 0.03645065689086914, 0.03716505432128906, 0.03635200119018555, 0.03636371231079102, 0.03613980865478516, 0.03626345443725586, 0.036071422576904294, 0.036657054901123046, 0.03612681579589844, 0.03639068984985352, 0.03605321502685547, 0.0363581428527832, 0.03617305755615234, 0.03635481643676758, 0.03620377731323242, 0.036264415740966796, 0.036221183776855466, 0.03630198287963867, 0.03654345703125, 0.0367184944152832, 0.03664691162109375, 0.036429088592529295, 0.03676416015625, 0.03634783935546875, 0.03645241546630859, 0.03626208114624024, 0.03635200119018555, 0.03614108657836914, 0.0362762222290039, 0.03654246520996094, 0.036511745452880856, 0.03647078323364258, 0.036380672454833986, 0.036528129577636716, 0.03642675018310547, 0.03609657669067383, 0.03627257537841797, 0.03618815994262695, 0.03625574493408203, 0.0362342414855957, 0.03635315322875977, 0.036462337493896484, 0.036229248046875, 0.03624959945678711, 0.03641356658935547, 0.03646860885620117, 0.03670220947265625, 0.03659366226196289, 0.03681280136108398, 0.03643392181396484, 0.03630284881591797, 0.0361756477355957, 0.03620476913452148, 0.036087806701660154, 0.036240543365478516, 0.03616979217529297, 0.036430622100830076, 0.03710927963256836, 0.0382911376953125, 0.036539169311523435, 
0.03650559997558594, 0.036534271240234374, 0.03651583862304687, 0.03602022552490235, 0.036144832611083984, 0.03614262390136719, 0.03605535888671875, 0.03635862350463867, 0.036329471588134765, 0.036214431762695315, 0.036666431427001954, 0.036542369842529294, 0.0363006706237793, 0.036354175567626955, 0.03612803268432617, 0.03753647994995117, 0.03628179168701172, 0.036552734375, 0.03648761749267578, 0.03635823822021484, 0.036261886596679685, 0.03631689453125, 0.036089630126953126, 0.036186622619628905, 0.036314144134521484, 0.03626620864868164, 0.036247295379638673, 0.03622419357299805, 0.03618099212646484, 0.03630163192749023, 0.03627008056640625, 0.03628441619873047, 0.03685945510864258, 0.037724609375, 0.03649289703369141, 0.03656131362915039, 0.03679171371459961, 0.03671535873413086, 0.036404991149902345, 0.0363768310546875, 0.036601600646972654, 0.036442047119140626, 0.03645244979858398, 0.03649737548828125, 0.03611443328857422, 0.03618201446533203, 0.03778985595703125, 0.03690460968017578, 0.036354209899902346, 0.036171104431152346, 0.03634451293945312, 0.036107902526855466, 0.03623491287231445, 0.03633660888671875, 0.03620991897583008, 0.03627407836914062, 0.03613961410522461, 0.036173919677734374, 0.03620272064208984, 0.03608953475952149, 0.03617792129516602, 0.0362762222290039, 0.03621068954467774, 0.03684515380859375, 0.03632134246826172, 0.036351905822753904, 0.03626230239868164, 0.03625894546508789, 0.03627084732055664, 0.03631734466552734, 0.036313087463378906, 0.03754147338867188, 0.037722496032714846, 0.03648723220825195, 0.03609008026123047, 0.036106239318847655, 0.036149246215820316, 0.03618815994262695, 0.03636633682250977, 0.036714366912841793, 0.03662860870361328, 0.03637247848510742, 0.03658342361450195, 0.036404415130615236, 0.03637715148925781, 0.036182273864746095, 0.036241409301757815, 0.03634921646118164, 0.03633833694458008, 0.03707295989990234, 0.036380672454833986, 0.03622230529785156, 0.03621884918212891, 0.03605779266357422, 0.03628879928588867, 0.03623907089233398, 0.03619203186035156, 0.03608931350708008, 0.036227294921875, 0.03587702560424805, 0.036337982177734374, 0.0363947525024414, 0.03925628662109375, 0.036413536071777344, 0.03643910217285156, 0.036230079650878905, 0.036353824615478515, 0.0362757453918457, 0.03643462371826172, 0.036218334197998046, 0.03649967956542969, 0.03620671844482422, 0.036734943389892576, 0.03625558471679687, 0.036173824310302735, 0.03641996765136719, 0.03626393508911133, 0.03638211059570313, 0.036802913665771486, 0.036222591400146484, 0.03623984146118164, 0.036251808166503904, 0.03619123077392578, 0.0362239990234375, 0.03621478271484375, 0.036271518707275394, 0.03616214370727539, 0.03611852645874023, 0.036237087249755856, 0.03618838500976562, 0.03626118469238281, 0.03624825668334961, 0.036367935180664064, 0.036230945587158205, 0.03664889526367188, 0.03641449737548828, 0.036716766357421875, 0.036343006134033205, 0.03608992004394531, 0.03618684768676758, 0.03600716781616211, 0.03653696060180664, 0.036568256378173826, 0.036428382873535156, 0.03636054229736328, 0.036397377014160154, 0.03629228973388672, 0.036439201354980466, 0.03616032028198242, 0.036079647064208985, 0.03607353591918945, 0.03612255859375, 0.0362760009765625, 0.03630307388305664, 0.036305118560791015, 0.03640643310546875, 0.036178558349609376, 0.03626598358154297, 0.036300769805908205, 0.0362437744140625, 0.0364356803894043, 0.03618201446533203, 0.03615129470825195, 0.03621683120727539, 0.03624755096435547, 0.03751935958862305, 0.03613004684448242, 0.03610086441040039, 
0.03618406295776367, 0.03620771026611328, 0.03614963150024414, 0.036119071960449216, 0.036380672454833986, 0.03632128143310547, 0.03619430541992188, 0.03632332611083984, 0.036132865905761716, 0.03610569763183594, 0.036181537628173825, 0.0362690544128418, 0.03661603164672852, 0.036939743041992185, 0.03643795013427734, 0.03620870590209961, 0.0373551025390625, 0.036237918853759765, 0.03607305526733398, 0.03617753601074219, 0.036538944244384766, 0.0362825927734375, 0.036071422576904294, 0.03630611038208008, 0.036141887664794925, 0.036085025787353515, 0.03606572723388672, 0.0360219841003418, 0.036193153381347654, 0.03620832061767578, 0.036210750579833986, 0.03661414337158203, 0.03665580749511719, 0.03639494323730469, 0.03628867340087891, 0.03704115295410156, 0.0361767349243164, 0.03637452697753906, 0.03629260635375976, 0.03640310287475586, 0.03647318267822266, 0.03635993576049805, 0.0362960319519043, 0.03656140899658203, 0.03892444610595703, 0.03638681411743164, 0.036435966491699216, 0.036539520263671875, 0.0364901123046875, 0.03633356857299805, 0.03635363388061524, 0.036177505493164064, 0.0361923828125, 0.03637113571166992, 0.036278526306152345, 0.03602153778076172, 0.03610262298583984, 0.03590963363647461, 0.03593209457397461, 0.03652758407592773, 0.03650944137573242, 0.036397537231445315, 0.0364977912902832, 0.036337665557861325, 0.03621683120727539, 0.036364288330078126, 0.03625273513793945, 0.03634012985229492, 0.03656496047973633, 0.03678857421875, 0.04008777618408203, 0.0393072624206543, 0.03638447952270508, 0.03624512100219727, 0.03628713607788086, 0.0362471694946289, 0.036190174102783204, 0.03626825714111328, 0.036069568634033204, 0.036157150268554684, 0.03623123168945312, 0.03632121658325195, 0.036346145629882816, 0.03628851318359375, 0.036280319213867186, 0.03649030303955078, 0.036415454864501956, 0.036377567291259766, 0.03650124740600586, 0.036369983673095706, 0.03651017761230469, 0.03651606369018555, 0.036347904205322266, 0.03648076629638672, 0.03691670227050781, 0.036474689483642575, 0.036483806610107423, 0.03651379013061523, 0.03620044708251953, 0.03655487823486328, 0.03634569549560547, 0.03634179306030273, 0.036278175354003905, 0.03641968154907226, 0.03643164825439453, 0.036679359436035154, 0.036588062286376954, 0.0363581428527832, 0.03612182235717774, 0.036705375671386715, 0.0362061767578125, 0.03624713516235351, 0.036155296325683595, 0.03639766311645508, 0.03616067123413086, 0.03641206359863281, 0.03633990478515625, 0.03642777633666992, 0.03612057495117187, 0.037993919372558596, 0.03638943862915039, 0.0361923828125, 0.03608972930908203, 0.03639065551757813, 0.03615359878540039, 0.03641958236694336, 0.03663622283935547, 0.03645280075073242, 0.036060768127441405, 0.03647119903564453, 0.03661209487915039, 0.03651583862304687, 0.03628854370117188, 0.03629228973388672, 0.03619868850708008, 0.036185825347900394, 0.03640758514404297, 0.03619430541992188, 0.036238880157470704, 0.03618684768676758, 0.03646192169189453, 0.03620022583007813, 0.0361662712097168, 0.03639910507202149, 0.03630838394165039, 0.04035343933105469, 0.036330432891845704, 0.036462593078613284, 0.03623731231689453, 0.03661414337158203, 0.03640457534790039, 0.036357120513916014, 0.03655401611328125, 0.03671830368041992, 0.036151966094970706, 0.036310016632080076, 0.03616179275512695, 0.03664691162109375, 0.036318912506103515, 0.036407615661621096, 0.03630080032348633, 0.03643612670898438, 0.03652592086791992, 0.03631484985351562, 0.0365366096496582, 0.036477184295654295, 0.03640278244018555, 0.03621494293212891, 
0.036204353332519534, 0.037350719451904296, 0.036367103576660155, 0.03600352096557617, 0.0361569938659668, 0.036080513000488285, 0.03742665481567383, 0.039233280181884767, 0.03656572723388672, 0.03616979217529297, 0.036183681488037106, 0.03604220962524414, 0.035942657470703125, 0.03599017715454102, 0.036173824310302735, 0.03696230316162109, 0.03637968063354492, 0.036211681365966794, 0.03633513641357422, 0.03628195190429687, 0.03629660797119141, 0.036561119079589845, 0.036918014526367185, 0.0361448974609375, 0.0362289924621582, 0.036395393371582034, 0.03636633682250977, 0.0363581428527832, 0.03634995269775391, 0.036327423095703124, 0.03626598358154297, 0.03615129470825195, 0.036472129821777347, 0.03623555374145508, 0.03638518524169922, 0.036173824310302735, 0.03628396987915039, 0.03640323257446289, 0.036827392578125, 0.0368559684753418, 0.03619548797607422, 0.03588351821899414, 0.03617827224731445, 0.03591756820678711, 0.03625600051879883, 0.035724414825439456, 0.03604159927368164, 0.0359071044921875, 0.0361374397277832, 0.03626598358154297, 0.03614720153808594, 0.036075393676757814, 0.03661414337158203, 0.03637401580810547, 0.0362501106262207, 0.036563007354736325, 0.03628844833374024, 0.036428863525390626, 0.036316097259521486, 0.036519775390625, 0.03633926391601563, 0.03655107116699219, 0.03614944076538086, 0.036067329406738284, 0.03610211181640625, 0.03630492782592774, 0.03624755096435547, 0.03628236770629883, 0.036229118347167966, 0.03740492630004883, 0.03627088165283203, 0.03613180923461914, 0.036132865905761716, 0.03627423858642578, 0.03638880157470703, 0.03641763305664063, 0.03651484680175781, 0.03651264190673828, 0.03633152008056641, 0.036549633026123046, 0.036334945678710935, 0.03639590454101563, 0.036436767578125, 0.036495361328125, 0.036412960052490236, 0.036743648529052736, 0.03635103988647461, 0.03634067153930664, 0.03673507308959961, 0.03657308959960937, 0.03625164794921875, 0.03612627029418945, 0.03634969711303711, 0.03625235366821289, 0.036351646423339844, 0.03638051223754883, 0.036401054382324216, 0.036345630645751956, 0.03623980712890625, 0.03621305465698242, 0.036192256927490236, 0.03616159820556641, 0.036169441223144534, 0.03632944107055664, 0.036372798919677735, 0.03624755096435547, 0.03639247894287109, 0.03621100616455078, 0.036326847076416015, 0.03628739166259766, 0.03633119964599609, 0.0362657585144043, 0.03613033676147461, 0.036648769378662106, 0.03611273574829101]",tokens/s,27.467012016238314,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, 
worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,920.92416,698.220544,0.0,295.698432,277.263872,s,1,7.637330078125,7.637330078125,0.0,7.637330078125,7.637330078125,7.637330078125,7.637330078125,[7.637330078125],,kWh,1.7258002066743454e-05,1.8964579322763058e-06,5.811949093992297e-06,2.4966409093012054e-05,,MB,1301.106688,752.746496,0.0,335.54432,312.39168,s,17,0.6788023033142089,0.039929547253776995,0.001641969387338302,0.03968115234375,0.04038382110595703,0.041533716583251945,0.044981380157470705,"[0.04584329605102539, 0.04004419326782226, 0.04033548736572266, 0.040085983276367185, 0.04045632171630859, 0.03968115234375, 0.03963561630249023, 0.04014380645751953, 0.03944192123413086, 0.03909215927124023, 0.03880585479736328, 0.038991455078125, 0.04028947067260742, 0.04014211273193359, 0.03963916778564453, 0.0382562255859375, 0.0379180793762207]",tokens/s,6411.292328785034,kWh,1.4213595445794763e-06,1.5674503308794648e-07,9.488559370777636e-07,2.5269605147451864e-06,tokens/kWh,101307479.2843823,MB,1314.496512,780.009472,0.0,362.807296,312.39424,s,17,9.942083923339844,0.5848284660788143,0.0014426401086585952,0.584994873046875,0.5867971801757813,0.58686884765625,0.58688783203125,"[0.584994873046875, 0.5853904418945313, 0.585811767578125, 0.5840001220703125, 0.5867533569335938, 0.5836983642578125, 0.5844157104492187, 0.5854854736328124, 0.586892578125, 0.5830146484375, 0.582984619140625, 0.5819075927734375, 0.5837880249023437, 0.5841383666992187, 0.58593798828125, 0.586007080078125, 0.5868629150390625]",tokens/s,107.72389453339265,kWh,1.6692769911301947e-05,1.8406650757018702e-06,6.877490193276999e-06,2.5410925180280815e-05,tokens/kWh,2479248.5733218705,,s,1071,9.933967778205872,0.009275413425028824,0.00013982563431464266,0.009250816345214843,0.0093635196685791,0.009433551788330078,0.009863523101806637,"[0.00922214412689209, 0.00934502410888672, 0.009274944305419922, 0.009263551712036133, 0.009225215911865235, 0.009187775611877442, 0.00935910415649414, 0.00923731231689453, 0.009220383644104003, 0.009344096183776856, 0.00924460792541504, 0.00920851230621338, 0.009202879905700683, 0.00921878433227539, 0.009227904319763183, 0.009251296043395996, 0.009275391578674316, 0.009236479759216308, 0.009249792098999024, 0.009294848442077636, 0.00923033618927002, 0.009273183822631837, 0.009218175888061524, 0.00924454402923584, 0.009213151931762696, 0.009226847648620605, 0.009189375877380371, 0.009234687805175782, 0.009207903861999512, 0.009252448081970215, 0.009329055786132813, 0.009227744102478027, 0.009296416282653808, 0.00928179168701172, 0.009235775947570801, 0.009236991882324219, 0.009453375816345215, 0.009232512474060059, 0.009250271797180175, 0.009271840095520019, 0.009215999603271484, 0.009236319541931153, 0.009260448455810547, 0.009251263618469239, 
0.0092675199508667, 0.009527135848999023, 0.009283007621765137, 0.00962838363647461, 0.009369919776916504, 0.009313952445983887, 0.0092871675491333, 0.009337247848510742, 0.00959705638885498, 0.00933193588256836, 0.009409312248229981, 0.009278783798217774, 0.009235136032104492, 0.009345184326171875, 0.009254752159118652, 0.009166399955749511, 0.009223936080932617, 0.009288384437561035, 0.009211903572082519, 0.00900710391998291, 0.009269248008728028, 0.00927948760986328, 0.009217951774597168, 0.009181280136108399, 0.009225919723510742, 0.009285951614379883, 0.009291616439819336, 0.00921833610534668, 0.009229984283447266, 0.009226719856262207, 0.009200960159301759, 0.009207455635070801, 0.009233311653137207, 0.009179007530212403, 0.009217984199523925, 0.00919711971282959, 0.00921455955505371, 0.009168800354003906, 0.009242624282836913, 0.00962764835357666, 0.009332799911499023, 0.009305472373962403, 0.009404512405395507, 0.00974028778076172, 0.009746560096740723, 0.009275744438171387, 0.009342304229736328, 0.010326687812805175, 0.009332159996032714, 0.00931488037109375, 0.009286879539489746, 0.009470239639282226, 0.00930844783782959, 0.009256447792053223, 0.009286368370056152, 0.00935529613494873, 0.0092457275390625, 0.009288895606994628, 0.009295712471008301, 0.009242527961730957, 0.009267392158508301, 0.00925881576538086, 0.009278623580932618, 0.009135295867919922, 0.009182368278503418, 0.009177599906921387, 0.009177087783813476, 0.009179136276245118, 0.009178943634033203, 0.009242303848266601, 0.009224703788757324, 0.009177087783813476, 0.009221311569213866, 0.009220928192138671, 0.009201663970947266, 0.009236479759216308, 0.009273344039916993, 0.009226240158081055, 0.009275327682495118, 0.00918508815765381, 0.009297504425048828, 0.009220895767211914, 0.009065695762634277, 0.009249567985534668, 0.009258591651916503, 0.009187744140625, 0.009233983993530273, 0.009248736381530762, 0.009241056442260743, 0.009248543739318847, 0.009220319747924805, 0.00923852825164795, 0.009178879737854004, 0.009236927986145019, 0.009221247673034668, 0.00920195198059082, 0.009429696083068848, 0.009287391662597656, 0.009318528175354003, 0.009279359817504882, 0.009351167678833008, 0.009246975898742675, 0.009258975982666015, 0.009283359527587891, 0.00925209617614746, 0.009282272338867187, 0.00926483154296875, 0.009396160125732422, 0.009251232147216798, 0.009303232192993165, 0.009276224136352538, 0.009310208320617675, 0.009357024192810058, 0.009244959831237793, 0.00928767967224121, 0.009364959716796874, 0.009337375640869141, 0.009287360191345215, 0.009382207870483399, 0.00930406379699707, 0.009302176475524903, 0.009330880165100098, 0.009319840431213379, 0.009277695655822754, 0.009376895904541016, 0.009345184326171875, 0.00930288028717041, 0.009371616363525391, 0.009324671745300293, 0.009319968223571777, 0.009343232154846191, 0.009336223602294922, 0.00935756778717041, 0.009331199645996094, 0.00931004810333252, 0.00933894443511963, 0.009265151977539063, 0.009239808082580566, 0.009255776405334473, 0.009260607719421386, 0.009351455688476562, 0.009256287574768067, 0.009517727851867675, 0.009260031700134277, 0.009240768432617187, 0.00934499168395996, 0.009396127700805664, 0.009287808418273926, 0.009256959915161133, 0.009238592147827148, 0.009309727668762207, 0.009163167953491211, 0.009182880401611328, 0.009222847938537598, 0.009268896102905273, 0.009256128311157227, 0.009223039627075196, 0.00920364761352539, 0.009271231651306153, 0.009232447624206544, 0.00924783992767334, 0.009270175933837891, 0.009189375877380371, 
0.009218048095703125, 0.009207807540893554, 0.009203136444091797, 0.009165023803710938, 0.00919587230682373, 0.009273344039916993, 0.009174880027770995, 0.009197728157043457, 0.009255071640014649, 0.00918671989440918, 0.009224639892578126, 0.009289728164672852, 0.009176128387451172, 0.009179743766784668, 0.00924675178527832, 0.00924499225616455, 0.009172991752624511, 0.00921833610534668, 0.009260767936706544, 0.009209407806396484, 0.009259552001953124, 0.009256959915161133, 0.009237664222717285, 0.00933683204650879, 0.009319168090820312, 0.00922822380065918, 0.009248831748962402, 0.009263039588928223, 0.009248831748962402, 0.009240575790405273, 0.009263104438781738, 0.009244799613952637, 0.009388223648071289, 0.009250495910644531, 0.009318400382995605, 0.00924623966217041, 0.009300448417663575, 0.009283519744873047, 0.0093635196685791, 0.00936348819732666, 0.009359040260314942, 0.00934291172027588, 0.009559871673583985, 0.009384415626525878, 0.009326656341552735, 0.009446559906005859, 0.009450336456298829, 0.009385408401489258, 0.009302495956420898, 0.009281632423400878, 0.00920576000213623, 0.009248064041137696, 0.00918175983428955, 0.009254176139831543, 0.00941926383972168, 0.009251296043395996, 0.009217951774597168, 0.009289407730102539, 0.009200032234191895, 0.009250495910644531, 0.009294079780578614, 0.00930399990081787, 0.009263104438781738, 0.009293824195861817, 0.00924995231628418, 0.009190239906311036, 0.009210016250610352, 0.009218048095703125, 0.009144160270690918, 0.009209856033325196, 0.009299455642700195, 0.009130208015441895, 0.009214143753051757, 0.009201760292053223, 0.009191328048706055, 0.009197823524475098, 0.009158495903015137, 0.009188608169555664, 0.009388447761535645, 0.009275744438171387, 0.009242624282836913, 0.009234432220458985, 0.00918671989440918, 0.00920143985748291, 0.009173824310302734, 0.009170944213867188, 0.00925209617614746, 0.009356032371520996, 0.009260512351989747, 0.009242719650268554, 0.009282143592834472, 0.009379679679870606, 0.009801728248596191, 0.010192159652709962, 0.009980640411376952, 0.009399680137634278, 0.0093023681640625, 0.009306591987609863, 0.009239999771118165, 0.00926966381072998, 0.009280703544616698, 0.009242400169372559, 0.009939776420593262, 0.009412799835205078, 0.009243807792663573, 0.009231200218200683, 0.009223584175109864, 0.009218303680419922, 0.009002431869506836, 0.009201600074768066, 0.009244864463806152, 0.009205663681030273, 0.009221983909606933, 0.009301759719848634, 0.009196352005004883, 0.009226048469543457, 0.009262944221496582, 0.009245216369628906, 0.009271231651306153, 0.009226304054260254, 0.009192607879638671, 0.00921884822845459, 0.009336159706115723, 0.009244704246520996, 0.0092227840423584, 0.009168992042541504, 0.009279104232788086, 0.009187520027160645, 0.009145440101623535, 0.0091843843460083, 0.009234463691711426, 0.009555520057678223, 0.009195808410644532, 0.009176447868347168, 0.009173503875732422, 0.009232000350952148, 0.009252863883972168, 0.009322367668151855, 0.009332703590393067, 0.009319071769714355, 0.009378047943115234, 0.009526368141174316, 0.009389120101928711, 0.00926262378692627, 0.009230400085449219, 0.00924783992767334, 0.00929798412322998, 0.009347999572753906, 0.009240511894226075, 0.009242624282836913, 0.00927948760986328, 0.009289728164672852, 0.009184576034545899, 0.009265312194824219, 0.009239071846008301, 0.009286848068237305, 0.009210016250610352, 0.009290047645568848, 0.009365856170654297, 0.0092957763671875, 0.009254976272583008, 0.009184767723083496, 0.00923408031463623, 
0.00927996826171875, 0.009265888214111328, 0.00927289581298828, 0.009269311904907226, 0.009303359985351563, 0.009261823654174805, 0.009189536094665527, 0.009257087707519531, 0.009068799972534179, 0.009254783630371093, 0.009300095558166504, 0.009168895721435547, 0.009166399955749511, 0.009202112197875976, 0.009323776245117188, 0.009212672233581543, 0.009229408264160157, 0.009170144081115723, 0.009168576240539551, 0.009208127975463868, 0.009209535598754882, 0.009173184394836427, 0.00921782398223877, 0.009240703582763672, 0.00918723201751709, 0.009198816299438476, 0.00935536003112793, 0.009386688232421876, 0.0092543363571167, 0.009251263618469239, 0.00932470417022705, 0.0092258243560791, 0.00927782440185547, 0.00926211166381836, 0.00928985595703125, 0.009308032035827637, 0.009355936050415039, 0.0092675199508667, 0.0092958402633667, 0.00932153606414795, 0.00926204776763916, 0.009459391593933105, 0.009270976066589356, 0.009396832466125488, 0.009299615859985351, 0.009316991806030274, 0.009358847618103027, 0.009261311531066895, 0.009265376091003417, 0.009274399757385254, 0.009332511901855469, 0.009425408363342285, 0.009326272010803223, 0.009337632179260254, 0.009267040252685547, 0.00948969554901123, 0.009642239570617675, 0.009212575912475585, 0.00913526439666748, 0.009192255973815918, 0.009220095634460449, 0.00913980770111084, 0.009156959533691406, 0.009353280067443848, 0.009177087783813476, 0.009271295547485351, 0.009357312202453612, 0.009233471870422363, 0.009204671859741212, 0.009232159614562988, 0.009189599990844727, 0.009236479759216308, 0.009364576339721679, 0.009276320457458496, 0.009265151977539063, 0.00926262378692627, 0.0092741117477417, 0.009465567588806153, 0.009295040130615235, 0.009194175720214843, 0.009235712051391601, 0.00943126392364502, 0.00930992031097412, 0.009223103523254395, 0.009227616310119629, 0.009390399932861329, 0.009267071723937988, 0.009301888465881347, 0.009362015724182129, 0.009272543907165528, 0.009253600120544433, 0.00928159999847412, 0.009496576309204101, 0.009250816345214843, 0.009375264167785644, 0.009230879783630372, 0.0092325439453125, 0.0092260160446167, 0.009349120140075684, 0.009228287696838379, 0.009279775619506835, 0.009182687759399413, 0.00919484806060791, 0.009229215621948242, 0.009181183815002441, 0.00919961643218994, 0.00921126365661621, 0.00920639991760254, 0.009180288314819336, 0.009157504081726074, 0.009187328338623046, 0.00914742374420166, 0.009231328010559082, 0.0091627197265625, 0.009140255928039551, 0.009150336265563965, 0.009169183731079101, 0.009200927734375, 0.009243295669555665, 0.009173024177551269, 0.009189248085021973, 0.009263104438781738, 0.009211903572082519, 0.009177087783813476, 0.00918505573272705, 0.009148768424987793, 0.009195487976074218, 0.009215904235839843, 0.009172991752624511, 0.00963584041595459, 0.010733440399169921, 0.009786944389343261, 0.009300383567810058, 0.009375583648681641, 0.009133952140808105, 0.009277248382568359, 0.00923475170135498, 0.009250559806823731, 0.009236127853393554, 0.009203871726989747, 0.009269311904907226, 0.009178560256958007, 0.009224127769470215, 0.00925494384765625, 0.009256256103515626, 0.009244000434875489, 0.009213567733764649, 0.009170656204223632, 0.009221088409423829, 0.009252832412719727, 0.009207839965820313, 0.009179136276245118, 0.009147968292236328, 0.009266783714294433, 0.009196096420288086, 0.009209823608398438, 0.009208127975463868, 0.009371647834777832, 0.009197567939758301, 0.009195103645324706, 0.009247200012207031, 0.009273247718811035, 0.009203488349914551, 
0.009517312049865723, 0.009354463577270508, 0.01021020793914795, 0.009282560348510742, 0.009306400299072266, 0.00924899196624756, 0.009207327842712403, 0.009337696075439453, 0.009338656425476074, 0.00922646427154541, 0.009203424453735351, 0.00921340847015381, 0.009214783668518066, 0.009217663764953614, 0.009144703865051269, 0.010129247665405273, 0.011167872428894042, 0.009392160415649414, 0.009388031959533692, 0.009252863883972168, 0.009236479759216308, 0.009259167671203614, 0.00932140827178955, 0.009203904151916504, 0.009208288192749024, 0.009187583923339843, 0.009228287696838379, 0.009219840049743652, 0.009216256141662597, 0.00934217643737793, 0.009224831581115723, 0.009416799545288086, 0.009220416069030762, 0.009254976272583008, 0.009105952262878417, 0.009309599876403809, 0.009237152099609375, 0.009204959869384766, 0.009230815887451171, 0.009228544235229492, 0.009247967720031738, 0.009284511566162109, 0.00928553581237793, 0.009241663932800293, 0.00921014404296875, 0.00921833610534668, 0.009249376296997071, 0.009285183906555175, 0.00921782398223877, 0.009238944053649903, 0.009196736335754395, 0.009189663887023925, 0.009240127563476563, 0.009231328010559082, 0.00923356819152832, 0.009232255935668945, 0.009212512016296387, 0.009186880111694336, 0.00925068759918213, 0.009311296463012696, 0.009191007614135742, 0.009203871726989747, 0.010466496467590332, 0.009335200309753417, 0.009306431770324707, 0.009250080108642578, 0.00935756778717041, 0.009254783630371093, 0.009186112403869628, 0.009315615653991699, 0.00931062412261963, 0.009165184020996094, 0.009176608085632323, 0.009252639770507812, 0.009291808128356933, 0.009222528457641602, 0.009254912376403808, 0.00921945571899414, 0.00915772819519043, 0.009212767601013184, 0.009204640388488769, 0.0091810884475708, 0.009261152267456055, 0.00923033618927002, 0.009184672355651855, 0.009144927978515625, 0.009203295707702636, 0.009191583633422852, 0.00920803165435791, 0.00920195198059082, 0.009220895767211914, 0.00917967987060547, 0.009187616348266602, 0.00919961643218994, 0.009207967758178712, 0.009151519775390625, 0.009172127723693848, 0.008930368423461914, 0.009183712005615234, 0.009197952270507813, 0.00915385627746582, 0.00919814395904541, 0.009264800071716308, 0.009139776229858398, 0.009198464393615723, 0.009150079727172852, 0.009200127601623535, 0.009164799690246582, 0.009184927940368652, 0.009142623901367188, 0.009234272003173827, 0.009258912086486817, 0.009303744316101075, 0.009284159660339356, 0.009277664184570312, 0.009245696067810059, 0.009241375923156738, 0.00950704002380371, 0.009264960289001464, 0.009237824440002441, 0.009316991806030274, 0.009293600082397461, 0.009439488410949708, 0.009289216041564942, 0.009310720443725586, 0.009393183708190918, 0.009337056159973144, 0.00925772762298584, 0.00925391960144043, 0.009237248420715332, 0.009287296295166015, 0.009261664390563964, 0.009250816345214843, 0.009391839981079102, 0.009238240242004394, 0.009241344451904297, 0.00927337646484375, 0.009262720108032226, 0.009236639976501464, 0.009224255561828614, 0.009258015632629395, 0.009199584007263183, 0.009274432182312012, 0.009254783630371093, 0.009240511894226075, 0.009218111991882324, 0.009208959579467773, 0.009251359939575196, 0.009213824272155762, 0.009242527961730957, 0.009290304183959961, 0.00923852825164795, 0.009181344032287597, 0.009195199966430664, 0.009236639976501464, 0.009346976280212402, 0.009249055862426758, 0.009218111991882324, 0.00921395206451416, 0.009213503837585449, 0.008984992027282715, 0.009198592185974122, 0.009305088043212891, 
0.009244511604309082, 0.00919279956817627, 0.009203871726989747, 0.009234208106994629, 0.009165472030639648, 0.009222432136535645, 0.00922985553741455, 0.009214624404907227, 0.009193216323852539, 0.009224479675292968, 0.009147295951843262, 0.00923033618927002, 0.00926803207397461, 0.009291999816894532, 0.009236255645751952, 0.009239935874938964, 0.009226240158081055, 0.009233216285705566, 0.009379648208618164, 0.009263008117675782, 0.009201120376586915, 0.009210495948791505, 0.00922003173828125, 0.009195648193359374, 0.009240511894226075, 0.009266847610473633, 0.009230367660522461, 0.009232704162597655, 0.009184831619262696, 0.009238975524902344, 0.009227775573730468, 0.009232895851135254, 0.009221280097961426, 0.009212063789367677, 0.00917574405670166, 0.009211903572082519, 0.009241951942443847, 0.009165184020996094, 0.009125568389892579, 0.009156576156616211, 0.009153440475463867, 0.00920969581604004, 0.009217472076416015, 0.009185728073120117, 0.009197407722473145, 0.009246303558349609, 0.009187904357910157, 0.009197823524475098, 0.009289471626281739, 0.009289728164672852, 0.00921987247467041, 0.009245087623596191, 0.009321279525756836, 0.009284704208374024, 0.009305888175964356, 0.009293312072753907, 0.009257920265197754, 0.00928105640411377, 0.009228447914123535, 0.009493887901306152, 0.009281536102294922, 0.009381792068481446, 0.009322751998901367, 0.009391200065612794, 0.009344863891601562, 0.009251680374145508, 0.009332544326782226, 0.009203231811523438, 0.009213791847229005, 0.00934006404876709, 0.009240287780761719, 0.00920691204071045, 0.009188480377197265, 0.009254783630371093, 0.009248640060424805, 0.009220095634460449, 0.009207903861999512, 0.009492608070373535, 0.009153696060180664, 0.009258015632629395, 0.009284607887268067, 0.009250975608825684, 0.009257408142089843, 0.009228287696838379, 0.009209856033325196, 0.009276448249816894, 0.009276032447814941, 0.009224384307861329, 0.009347231864929199, 0.009226207733154296, 0.009193504333496094, 0.009207807540893554, 0.009246015548706054, 0.009190079689025878, 0.009203743934631348, 0.009234399795532226, 0.00922214412689209, 0.009260128021240235, 0.009206687927246094, 0.009381888389587402, 0.009197567939758301, 0.00919961643218994, 0.009261055946350098, 0.00919961643218994, 0.009185279846191406, 0.009248415946960449, 0.009202239990234375, 0.00929974365234375, 0.009269248008728028, 0.009218015670776368, 0.009320416450500489, 0.009269311904907226, 0.009223711967468263, 0.009234047889709473, 0.009266016006469726, 0.009291071891784668, 0.009220895767211914, 0.00927939224243164, 0.009378144264221192, 0.009221152305603027, 0.009362048149108886, 0.009277440071105958, 0.009229503631591796, 0.008969504356384278, 0.00923516845703125, 0.009265151977539063, 0.009190912246704102, 0.009759008407592773, 0.009279232025146484, 0.009226400375366211, 0.00926095962524414, 0.009257568359375, 0.009264512062072754, 0.009261216163635254, 0.009263392448425292, 0.009211584091186524, 0.00920150375366211, 0.009183263778686524, 0.009316800117492675, 0.009273344039916993, 0.009168607711791992, 0.00920195198059082, 0.00919164752960205, 0.009252639770507812, 0.009172991752624511, 0.009189375877380371, 0.009172991752624511, 0.009158656120300293, 0.009379839897155762, 0.009236479759216308, 0.009193663597106933, 0.009234047889709473, 0.00920800018310547, 0.009175040245056153, 0.009162752151489258, 0.009154879570007324, 0.009297599792480469, 0.009618720054626466, 0.009269887924194337, 0.009236703872680664, 0.009312000274658203, 0.00943932819366455, 
0.009254303932189942, 0.009267616271972656, 0.00935580825805664, 0.009840127944946288, 0.009236672401428222, 0.00926416015625, 0.009244895935058594, 0.009288448333740235, 0.009303711891174316, 0.009458016395568848, 0.009360896110534669, 0.009237152099609375, 0.009241727828979493, 0.00923516845703125, 0.009186400413513183, 0.009208415985107422, 0.00924227237701416, 0.009202336311340332, 0.009254495620727539, 0.009284159660339356, 0.009232224464416504, 0.009199456214904785, 0.009197440147399902, 0.009216287612915038, 0.00912384033203125, 0.009302016258239745, 0.009256959915161133, 0.009240511894226075, 0.009217503547668458, 0.009208415985107422, 0.009215359687805175, 0.009677439689636231, 0.009231871604919433, 0.009263615608215332, 0.00923635196685791, 0.009221920013427734, 0.009215904235839843, 0.009269696235656738, 0.00922214412689209, 0.0092293119430542, 0.009311136245727538, 0.009213727951049805, 0.00921993637084961, 0.009228768348693848, 0.009269248008728028, 0.009223615646362305, 0.009339360237121582, 0.009250911712646484, 0.009219648361206055, 0.009333184242248535, 0.00928275203704834, 0.009173151969909667, 0.009220767974853515, 0.009270848274230957, 0.009222687721252442, 0.009265055656433105, 0.00931430435180664, 0.009259008407592773, 0.00933631992340088, 0.009327424049377442, 0.009379520416259765, 0.009314463615417481, 0.009918111801147461, 0.009947327613830567, 0.009252863883972168, 0.009379648208618164, 0.009251008033752442, 0.009261216163635254, 0.00930521583557129, 0.009216447830200196, 0.009256704330444336, 0.009304863929748535, 0.00926899242401123, 0.009232352256774903, 0.00926518440246582, 0.009252863883972168, 0.009264927864074707, 0.009285920143127442, 0.009285696029663086, 0.00928713607788086, 0.009316960334777831, 0.00927500820159912, 0.00941433620452881, 0.009301664352416993, 0.009308095932006835, 0.009228416442871094, 0.009261504173278808, 0.009195615768432617, 0.00929372787475586, 0.009276736259460449, 0.009282015800476074, 0.009273504257202149, 0.009295007705688477, 0.00932975959777832, 0.00935529613494873, 0.009464735984802247, 0.009470815658569337, 0.009373984336853027, 0.0095797119140625, 0.009367520332336426, 0.009363360404968261, 0.009357600212097169, 0.009493087768554688, 0.009311455726623535, 0.00925551986694336, 0.009342975616455078, 0.009277503967285156, 0.00932652759552002, 0.009467391967773438, 0.009284255981445312, 0.009289536476135254, 0.00930361557006836, 0.009254624366760254, 0.009290495872497558, 0.009313471794128418, 0.0093438081741333, 0.009285696029663086, 0.009265088081359863, 0.009172191619873047, 0.009284640312194824, 0.009217984199523925, 0.009256768226623534, 0.009219327926635741, 0.009293919563293456, 0.009239295959472656, 0.009201727867126464, 0.009291616439819336, 0.009277440071105958, 0.009215999603271484, 0.009213503837585449, 0.009256896018981933, 0.009189472198486329, 0.009216383934020996, 0.009240896224975585, 0.009251872062683105, 0.009243071556091309, 0.009271552085876465, 0.00923852825164795, 0.00923852825164795, 0.009246975898742675, 0.009236127853393554, 0.009335200309753417, 0.009363136291503905, 0.009378815650939941, 0.009278464317321777, 0.00930185604095459, 0.009386143684387206, 0.009187552452087402, 0.009209407806396484, 0.009197088241577148, 0.009026080131530763, 0.009230303764343261, 0.009273664474487305, 0.00925814437866211, 0.009295424461364747, 0.009243647575378417, 0.009289055824279786, 0.00932953643798828, 0.009334560394287109, 0.00930611228942871, 0.009358688354492188, 0.009318367958068847, 0.009355968475341796, 
0.009338208198547363, 0.009347743988037109, 0.009408543586730957, 0.00953116798400879, 0.009347552299499512, 0.009307104110717774, 0.009289664268493652, 0.009212736129760743, 0.009302016258239745, 0.009301983833312988, 0.009248191833496094, 0.009235136032104492, 0.009260160446166992, 0.009222111701965331, 0.009261055946350098, 0.009247903823852538, 0.009254176139831543, 0.00928713607788086, 0.009249695777893066, 0.009268544197082519, 0.009435839653015136, 0.009277440071105958, 0.009244192123413085, 0.009250944137573242, 0.009276063919067382, 0.009287615776062012, 0.009422592163085938, 0.009327936172485351, 0.009227295875549317, 0.009345727920532227, 0.009309120178222656, 0.009343135833740234, 0.009333951950073242, 0.00937820816040039, 0.00934227180480957, 0.009653375625610352, 0.00931004810333252, 0.009283072471618652, 0.009541567802429199, 0.009402272224426269, 0.009472800254821776, 0.009332096099853516, 0.009292032241821289, 0.009248703956604003, 0.009283552169799805, 0.009371999740600587, 0.009228287696838379, 0.009183232307434081, 0.009181183815002441, 0.009267104148864747]",tokens/s,107.81190596869727,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1575.518208,1553.85856,0.0,1168.113664,1154.613248,s,1,8.24820703125,8.24820703125,0.0,8.24820703125,8.24820703125,8.24820703125,8.24820703125,[8.24820703125],,kWh,3.59541925457961e-05,3.958747977288742e-06,1.1608620397979275e-05,5.152156092106412e-05,,MB,1538.105344,1803.419648,0.0,1388.314624,1334.065152,s,10,0.822135154724121,0.0822135154724121,0.0011367801441087067,0.08241635131835937,0.08311858291625976,0.08346444282531738,0.08374113075256347,"[0.083810302734375, 0.08225411224365234, 0.08175830078125, 0.0830417251586914, 0.08301187133789062, 0.07933782196044922, 0.0817305908203125, 0.0825785903930664, 0.08199964904785156, 0.08261219024658203]",tokens/s,3113.8432474147685,kWh,2.531112080602997e-06,2.791374920217035e-07,1.6862824792930943e-06,4.4965320519177945e-06,tokens/kWh,56932764.41581567,MB,1542.299648,1805.5168,0.0,1388.314624,1372.847616,s,10,14.947659301757813,1.4947659301757814,0.003960315671429744,1.4949454956054686,1.5007934814453125,1.500963330078125,1.5010992089843749,"[1.4897203369140626, 1.4956087646484375, 1.5011331787109374, 1.492815185546875, 1.49494970703125, 1.4877900390625, 1.4949412841796874, 1.4940684814453125, 1.5007557373046876, 1.4958765869140624]",tokens/s,42.147067128156536,kWh,4.3327881192730614e-05,4.778721887669934e-06,2.128765256650716e-05,6.939425564690772e-05,tokens/kWh,907856.1245841009,,s,630,14.945071983337419,0.02372233648148794,0.00031892845745674984,0.023640640258789064,0.024035074996948243,0.02422165937423706,0.025016065750122076,"[0.02371571159362793, 0.023814271926879883, 0.023678560256958008, 0.023693727493286132, 0.023554048538208007, 0.023441408157348635, 0.02370150375366211, 0.023662591934204103, 0.023508991241455078, 0.023506784439086915, 
0.023638175964355468, 0.023502431869506835, 0.023499168395996094, 0.023613439559936524, 0.023676031112670897, 0.023722879409790038, 0.023815296173095704, 0.023626623153686525, 0.023541759490966797, 0.023858400344848634, 0.023734495162963866, 0.02356809616088867, 0.023387039184570312, 0.02357651138305664, 0.023392255783081056, 0.023544960021972657, 0.023507104873657227, 0.0236080322265625, 0.023836383819580077, 0.02373865509033203, 0.023541759490966797, 0.02352332878112793, 0.0240762882232666, 0.023654399871826173, 0.023639360427856446, 0.02352124786376953, 0.023646240234375, 0.02364076805114746, 0.02369126319885254, 0.023567968368530274, 0.02353571128845215, 0.023513408660888673, 0.023545856475830077, 0.023471391677856446, 0.023511775970458983, 0.023558143615722657, 0.02353971290588379, 0.023443424224853515, 0.02346735954284668, 0.023398239135742186, 0.023474143981933593, 0.023513696670532228, 0.02363216018676758, 0.023635967254638672, 0.023824384689331055, 0.023627199172973633, 0.02368911933898926, 0.023558816909790038, 0.023749727249145508, 0.023772064208984374, 0.024014368057250976, 0.024609247207641603, 0.02422153663635254, 0.023914560317993164, 0.02371993637084961, 0.023700511932373047, 0.02374700736999512, 0.02375017547607422, 0.0236943359375, 0.02367283248901367, 0.023770336151123048, 0.024240928649902342, 0.02385305595397949, 0.023771135330200196, 0.025247743606567383, 0.02410441589355469, 0.024229984283447265, 0.023873983383178712, 0.023734272003173826, 0.023625728607177734, 0.023625728607177734, 0.023568384170532225, 0.023576576232910155, 0.023545856475830077, 0.02354380798339844, 0.023584768295288085, 0.023635456085205078, 0.0235863037109375, 0.023602176666259765, 0.02355200004577637, 0.023560192108154295, 0.02361084747314453, 0.023668928146362303, 0.02365065574645996, 0.02368511962890625, 0.023525375366210938, 0.023821727752685547, 0.025319711685180664, 0.02368262481689453, 0.023696128845214843, 0.02364825630187988, 0.023893888473510743, 0.02368320083618164, 0.023627775192260742, 0.023556095123291015, 0.023525375366210938, 0.02351513671875, 0.02361452865600586, 0.023617855072021486, 0.02358950424194336, 0.023619104385375976, 0.023583200454711913, 0.02348646354675293, 0.023684543609619142, 0.023820320129394532, 0.02361961555480957, 0.023732736587524415, 0.023797344207763672, 0.0237674560546875, 0.023701120376586914, 0.02367862319946289, 0.02359084892272949, 0.023614240646362306, 0.023633920669555664, 0.02351692771911621, 0.023482624053955077, 0.023568607330322264, 0.023760543823242188, 0.023742847442626953, 0.023576736450195312, 0.02385308837890625, 0.02369900894165039, 0.023633567810058594, 0.0235467529296875, 0.023594911575317384, 0.023755903244018554, 0.02389081573486328, 0.023991743087768556, 0.02402339172363281, 0.024572128295898436, 0.024335519790649414, 0.024046432495117186, 0.023746559143066406, 0.023633920669555664, 0.023508991241455078, 0.02349660873413086, 0.02366214370727539, 0.02354435157775879, 0.023590911865234376, 0.023524511337280275, 0.02352601623535156, 0.023621856689453127, 0.023597055435180665, 0.02416640090942383, 0.023777280807495117, 0.02387763214111328, 0.02389593505859375, 0.023787647247314452, 0.02429724884033203, 0.027330751419067382, 0.023948736190795898, 0.023795488357543946, 0.02372425651550293, 0.023790176391601563, 0.023549280166625976, 0.023642784118652345, 0.02349260711669922, 0.0236889591217041, 0.023558399200439454, 0.02355129623413086, 0.02347452735900879, 0.023587167739868162, 0.023455488204956056, 0.023494239807128905, 0.02353014373779297, 
0.023500640869140624, 0.023560352325439453, 0.02366873550415039, 0.02392678451538086, 0.02410700798034668, 0.024010751724243166, 0.024061952590942383, 0.024459264755249024, 0.024598527908325195, 0.023834623336791993, 0.02369945526123047, 0.02366054344177246, 0.023611391067504883, 0.0237076473236084, 0.024208095550537108, 0.02403459167480469, 0.024056543350219728, 0.024162368774414064, 0.024018943786621092, 0.02372604751586914, 0.02349398422241211, 0.02355788803100586, 0.023666912078857422, 0.023717727661132813, 0.023556991577148436, 0.023508991241455078, 0.023541439056396486, 0.023607616424560548, 0.02352467155456543, 0.02350150489807129, 0.023564287185668945, 0.02354207992553711, 0.02348614311218262, 0.023758848190307616, 0.023535104751586915, 0.023549856185913084, 0.02346659278869629, 0.023640064239501952, 0.02358025550842285, 0.02354422378540039, 0.02350694465637207, 0.02362495994567871, 0.023685184478759766, 0.023925439834594726, 0.023881088256835936, 0.02377574348449707, 0.02386751937866211, 0.02373017692565918, 0.023608863830566405, 0.023577056884765624, 0.023529472351074218, 0.02353971290588379, 0.02353971290588379, 0.023655744552612306, 0.02367558479309082, 0.023578079223632812, 0.023549631118774415, 0.023628639221191405, 0.023537664413452147, 0.02389811134338379, 0.023800928115844725, 0.023886240005493165, 0.02439833641052246, 0.02442019271850586, 0.023709280014038086, 0.023566591262817384, 0.023777599334716796, 0.023773183822631837, 0.023663679122924806, 0.02359187126159668, 0.023617536544799804, 0.023641439437866212, 0.023595680236816408, 0.023537664413452147, 0.023570432662963867, 0.023537439346313478, 0.02356425666809082, 0.024268800735473633, 0.024022943496704103, 0.02372403144836426, 0.023672767639160156, 0.023710943222045897, 0.023419519424438477, 0.023601119995117187, 0.023783199310302733, 0.023625312805175783, 0.02355299186706543, 0.023613439559936524, 0.02352249526977539, 0.02364828872680664, 0.023552799224853517, 0.023723039627075195, 0.02362611198425293, 0.02363657569885254, 0.023582719802856447, 0.023735712051391602, 0.02359561538696289, 0.023668512344360352, 0.023775455474853515, 0.02405900764465332, 0.02373311996459961, 0.02376483154296875, 0.023629247665405275, 0.023552736282348632, 0.023545856475830077, 0.02346985626220703, 0.02346416091918945, 0.023575647354125977, 0.02366966438293457, 0.02347007942199707, 0.023486080169677733, 0.023472511291503906, 0.023469440460205077, 0.023603839874267576, 0.023683040618896485, 0.02380803108215332, 0.02384604835510254, 0.023888736724853515, 0.023900159835815428, 0.023781375885009767, 0.023836448669433595, 0.023984352111816407, 0.02393907165527344, 0.023928192138671874, 0.024221759796142578, 0.024220224380493163, 0.02429657554626465, 0.024103551864624023, 0.023867551803588866, 0.02376713562011719, 0.023582368850708007, 0.023518720626831056, 0.023752992630004882, 0.023804479598999024, 0.02375676727294922, 0.02422991943359375, 0.02353152084350586, 0.02351513671875, 0.02339958381652832, 0.023499616622924803, 0.025786687850952148, 0.024014528274536134, 0.02367897605895996, 0.023633184432983397, 0.02352934455871582, 0.023462751388549804, 0.023521280288696288, 0.02371993637084961, 0.023545856475830077, 0.023520288467407228, 0.0236713924407959, 0.023570816040039064, 0.023502111434936523, 0.023485151290893555, 0.023615327835083008, 0.023521440505981445, 0.02362691116333008, 0.02353420829772949, 0.02370787239074707, 0.023578527450561524, 0.023569536209106446, 0.023599424362182618, 0.023667360305786134, 0.023740415573120118, 
0.023790815353393554, 0.024097568511962892, 0.023704832077026367, 0.02370364761352539, 0.023452320098876954, 0.02343267250061035, 0.023437856674194336, 0.023605247497558594, 0.023512191772460937, 0.02361577606201172, 0.02343996810913086, 0.02342092704772949, 0.023375680923461914, 0.02353308868408203, 0.02338268852233887, 0.023508991241455078, 0.023871231079101562, 0.023781631469726564, 0.023578079223632812, 0.023519775390625, 0.023371007919311522, 0.023748512268066405, 0.023528287887573242, 0.023402015686035157, 0.023635456085205078, 0.023523935317993162, 0.023639808654785155, 0.023804895401000975, 0.023667423248291016, 0.023498815536499025, 0.02336764717102051, 0.02344438362121582, 0.023432479858398438, 0.023548639297485352, 0.02349260711669922, 0.023588096618652344, 0.023427839279174804, 0.02345779228210449, 0.023407648086547852, 0.02420102310180664, 0.02487936019897461, 0.024146623611450195, 0.02367897605895996, 0.02374617576599121, 0.02365068817138672, 0.02368876838684082, 0.02360767936706543, 0.023533632278442383, 0.0234899845123291, 0.023384191513061522, 0.023400863647460936, 0.024791072845458985, 0.023625728607177734, 0.02370150375366211, 0.023555999755859376, 0.023564384460449218, 0.023963647842407225, 0.023619583129882812, 0.02388991928100586, 0.02353561592102051, 0.023576576232910155, 0.023480319976806642, 0.02353500747680664, 0.023492959976196288, 0.02342937660217285, 0.023547903060913086, 0.023472127914428712, 0.023654399871826173, 0.02361737632751465, 0.02340060806274414, 0.023387327194213867, 0.023390144348144532, 0.023397247314453126, 0.023417888641357423, 0.023409631729125975, 0.02344550323486328, 0.02348646354675293, 0.02355948829650879, 0.02363052749633789, 0.023731327056884764, 0.02400499153137207, 0.02446710395812988, 0.02399728012084961, 0.02413363265991211, 0.024120927810668946, 0.023969280242919923, 0.023970272064208983, 0.023871103286743165, 0.023880512237548827, 0.023967744827270508, 0.023851007461547852, 0.02365644836425781, 0.023579904556274414, 0.02375347137451172, 0.023658336639404295, 0.02349203109741211, 0.02357321548461914, 0.023631872177124022, 0.02405580711364746, 0.024033023834228517, 0.023645599365234374, 0.02367145538330078, 0.0239237117767334, 0.023826431274414063, 0.02356982421875, 0.02341539192199707, 0.023774751663208006, 0.023665472030639647, 0.023655136108398436, 0.023565248489379884, 0.023425024032592775, 0.023482208251953126, 0.023349184036254883, 0.023644384384155274, 0.023597055435180665, 0.023668575286865234, 0.024053760528564453, 0.023721567153930666, 0.023763519287109375, 0.02373404884338379, 0.023625951766967773, 0.02369868850708008, 0.023611711502075194, 0.023550399780273436, 0.02366464042663574, 0.023558080673217772, 0.023552064895629884, 0.023570432662963867, 0.023752704620361328, 0.02374239921569824, 0.02362169647216797, 0.023513088226318358, 0.02349251174926758, 0.023439456939697265, 0.02349875259399414, 0.023576576232910155, 0.025169151306152344, 0.024423168182373046, 0.024840192794799806, 0.023805952072143553, 0.024707071304321288, 0.023975936889648438, 0.023828128814697265, 0.024023008346557618, 0.023802207946777343, 0.02385923194885254, 0.023483583450317383, 0.023503679275512696, 0.023410463333129884, 0.023587039947509766, 0.023592063903808594, 0.023702016830444338, 0.02360153579711914, 0.023646144866943358, 0.023543424606323242, 0.023417184829711914, 0.02337187194824219, 0.023441408157348635, 0.02353958320617676, 0.023719039916992188, 0.023852031707763673, 0.023633920669555664, 0.023738367080688477, 0.023625280380249022, 
0.023673280715942383, 0.02414112091064453, 0.0236976318359375, 0.02359343910217285, 0.02371788787841797, 0.023570335388183594, 0.02354185676574707, 0.023524864196777344, 0.023450111389160155, 0.023799808502197265, 0.023500799179077148, 0.023508991241455078, 0.023500032424926758, 0.024062015533447265, 0.023674623489379883, 0.023739328384399416, 0.02345155143737793, 0.023584447860717773, 0.02345523262023926, 0.02347715187072754, 0.023740415573120118, 0.023638015747070314, 0.023569408416748046, 0.023534048080444337, 0.02355868721008301, 0.02361459159851074, 0.023588960647583007, 0.02369001579284668, 0.02371174430847168, 0.02361136054992676, 0.023558176040649415, 0.0237076473236084, 0.023627775192260742, 0.02367840003967285, 0.023945695877075197, 0.024426240921020508, 0.024260351181030274, 0.025071903228759764, 0.02446963119506836, 0.024039424896240235, 0.024115392684936524, 0.02413542366027832, 0.023910655975341796, 0.023758848190307616, 0.023580064773559572, 0.023703296661376952, 0.023743328094482423, 0.023604799270629882, 0.02383030319213867, 0.023997087478637696, 0.02390220832824707, 0.024143871307373048, 0.023845056533813476, 0.023946527481079102, 0.02384499168395996, 0.024185247421264648, 0.024365055084228517, 0.025616384506225585, 0.023975936889648438, 0.0237774715423584, 0.023703359603881837, 0.023662591934204103, 0.023615488052368162, 0.02352681541442871, 0.024106847763061524, 0.02386345672607422, 0.023801504135131837, 0.023721824645996092, 0.02363033676147461, 0.02346143913269043, 0.023640512466430664, 0.023758848190307616, 0.02368511962890625, 0.02353152084350586, 0.023599103927612306, 0.02348543930053711, 0.023600128173828124, 0.023697376251220703, 0.023568416595458986, 0.023594655990600587, 0.023671136856079102, 0.023613407135009767, 0.023724063873291016, 0.023899679183959962, 0.02374835205078125, 0.023589088439941407, 0.023535680770874024, 0.02350739288330078, 0.023610368728637695, 0.0235914249420166, 0.02382464027404785, 0.023793920516967774, 0.023723199844360353, 0.02354185676574707, 0.02397052764892578, 0.024041471481323243, 0.023999711990356446, 0.023779296875, 0.02372006416320801, 0.02373084831237793, 0.023772544860839843, 0.023804576873779296, 0.023840831756591796, 0.023611328125, 0.023746463775634767, 0.023638111114501953, 0.02355200004577637, 0.023597055435180665, 0.023451648712158202, 0.02369033622741699, 0.023849727630615235, 0.023933055877685547, 0.023760223388671876, 0.02373446464538574, 0.023725696563720703, 0.023738752365112303, 0.02413747215270996, 0.023986944198608397, 0.023946304321289063, 0.024275903701782228, 0.023756799697875978, 0.02367487907409668, 0.02382569694519043, 0.023700191497802735, 0.02391004753112793, 0.023517536163330077, 0.024078336715698243]",tokens/s,42.15436370613679,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,897.273856,624.820224,0.0,222.298112,199.93344,s,1,8.7091513671875,8.7091513671875,0.0,8.7091513671875,8.7091513671875,8.7091513671875,8.7091513671875,[8.7091513671875],,kWh,1.8055900216654664e-05,1.9845430574386847e-06,5.7850046280005785e-06,2.5825447902093928e-05,,MB,1319.243776,679.346176,0.0,262.144,220.883456,s,15,0.6479503974914549,0.043196693166097,0.001218808551401371,0.0429516487121582,0.04334568862915039,0.044627900695800776,0.04699966766357422,"[0.04759260940551758, 0.04234908676147461, 0.042872833251953124, 0.04308063888549805, 0.04315852737426758, 0.04297331237792969, 0.0427674560546875, 0.042847423553466796, 0.042847328186035157, 0.0433573112487793, 0.04208860778808594, 0.04265043258666992, 0.0429516487121582, 0.04332825469970703, 0.04308492660522461]",tokens/s,5926.379572983657,kWh,1.4822706058798828e-06,1.6346693439720661e-07,9.805224990254076e-07,2.6262600393024972e-06,tokens/kWh,97477019.09518087,MB,1358.176256,694.02624,0.0,276.824064,220.886016,s,15,9.375625,0.6250416666666666,0.0029569561570136115,0.6252421875,0.6290924194335937,0.6293993041992187,0.6295799096679687,"[0.6251292114257813, 0.6263400268554687, 0.6257738647460938, 0.620337646484375, 0.623543701171875, 0.622717041015625, 0.6296250610351563, 0.6252421875, 0.6224072875976563, 0.6293025512695313, 0.6287772216796875, 0.6228433227539063, 0.6256033935546875, 0.62001025390625, 0.6279722290039063]",tokens/s,100.79328044797015,kWh,1.7865852637870233e-05,1.970129347146798e-06,7.337261932975592e-06,2.7173243917992618e-05,tokens/kWh,2318457.0892651095,,s,945,9.36760099315643,0.009912805283763421,0.00018937778885026632,0.009873663902282715,0.010094143676757813,0.01016392993927002,0.010472166633605957,"[0.009684991836547852, 0.010067071914672852, 0.00984169578552246, 0.009940832138061524, 0.009785280227661133, 0.009922080039978028, 0.009789119720458985, 0.009847647666931152, 0.00978553581237793, 0.009852831840515136, 0.009789343833923339, 0.009897791862487793, 0.009825759887695313, 0.009828096389770507, 0.009808223724365234, 0.009791744232177735, 0.00985852813720703, 0.009776032447814942, 0.009778752326965331, 0.009753024101257324, 0.009867008209228516, 0.00980134391784668, 0.009964159965515137, 0.009895008087158203, 0.009884575843811035, 0.010046655654907227, 0.009965663909912109, 0.010045727729797364, 0.010475968360900879, 0.010061696052551269, 0.009989312171936035, 0.01013856029510498, 0.009887743949890136, 0.009984000205993653, 0.00980998420715332, 0.010247360229492188, 0.009983872413635254, 0.00997980785369873, 0.009923583984375, 0.009941216468811034, 0.009921504020690917, 0.009910240173339843, 0.009886624336242676, 0.009928640365600585, 0.009844832420349121, 0.009878527641296387, 0.009923456192016601, 0.009854975700378419, 0.009807711601257324, 0.009797727584838867, 0.00971292781829834, 0.009698431968688965, 0.010116864204406738, 0.010066080093383788, 0.009944416046142579, 0.009885567665100098, 0.00990841579437256, 0.00990880012512207, 0.009875231742858887, 0.009915519714355468, 0.010010815620422364, 0.010150495529174805, 0.01009385585784912, 0.009956000328063964, 0.0102805118560791, 0.010191167831420898, 0.01021724796295166, 0.010017087936401367, 0.010103072166442871, 0.010198495864868163, 0.009984543800354004, 0.010125311851501465, 0.010033151626586915, 0.010053440093994141, 0.010199328422546387, 0.010126272201538085, 0.010046431541442871, 0.009967328071594238, 0.010037535667419434, 0.009967616081237793, 0.009840831756591797, 0.009832256317138672, 
0.009911295890808105, 0.009794336318969726, 0.009805855751037597, 0.009743680000305175, 0.009919136047363282, 0.009865440368652344, 0.010036895751953125, 0.009789504051208497, 0.009881888389587402, 0.00989094352722168, 0.009779487609863281, 0.009869919776916505, 0.009916031837463379, 0.00977888011932373, 0.009769663810729981, 0.009729920387268066, 0.009817215919494629, 0.00989510440826416, 0.009762944221496582, 0.00984659194946289, 0.009768832206726074, 0.009801376342773437, 0.00985852813720703, 0.009880288124084473, 0.009941408157348633, 0.009826047897338867, 0.009811776161193848, 0.00981328010559082, 0.009812895774841308, 0.009822239875793457, 0.009904095649719238, 0.009995936393737793, 0.009939295768737793, 0.009867263793945312, 0.010012639999389648, 0.009864831924438476, 0.009875871658325196, 0.00988479995727539, 0.009976703643798828, 0.010007583618164062, 0.010144288063049317, 0.00996348762512207, 0.009968255996704102, 0.010060832023620606, 0.010051584243774414, 0.01024614429473877, 0.010260479927062988, 0.01025443172454834, 0.01024623966217041, 0.010129216194152833, 0.010135007858276366, 0.01005238437652588, 0.010094335556030273, 0.009930432319641113, 0.01003551959991455, 0.010038496017456055, 0.010043871879577638, 0.009950624465942384, 0.009878687858581543, 0.009812000274658204, 0.009960991859436035, 0.009709759712219239, 0.009893216133117676, 0.0098372163772583, 0.009873408317565918, 0.009945088386535645, 0.009818112373352051, 0.009839712142944336, 0.009803744316101074, 0.009819071769714355, 0.009793536186218262, 0.009755743980407714, 0.009711520195007324, 0.009941951751708984, 0.009914463996887207, 0.010021151542663575, 0.009901344299316406, 0.009877920150756836, 0.009859071731567384, 0.009850879669189454, 0.009834464073181153, 0.00977513599395752, 0.009963135719299316, 0.010043359756469727, 0.009881600379943848, 0.009791904449462891, 0.009853983879089355, 0.009812159538269043, 0.010001503944396972, 0.009923520088195801, 0.00987350368499756, 0.009824928283691406, 0.009846783638000489, 0.009897952079772948, 0.009804863929748536, 0.009778304100036621, 0.010722751617431641, 0.009834272384643555, 0.009775744438171387, 0.009765983581542969, 0.009833344459533691, 0.009813023567199706, 0.009912991523742675, 0.009995712280273437, 0.009790335655212402, 0.009762847900390625, 0.009838591575622559, 0.009900032043457031, 0.009961055755615235, 0.009936927795410156, 0.00983910369873047, 0.009885696411132813, 0.009775008201599121, 0.009789024353027344, 0.009765248298645019, 0.009805631637573243, 0.009758912086486816, 0.009764320373535156, 0.0098472957611084, 0.009842559814453125, 0.009726431846618653, 0.00973964786529541, 0.009750464439392089, 0.009762432098388672, 0.009729984283447266, 0.009790271759033202, 0.009723808288574219, 0.009743840217590331, 0.009665247917175292, 0.009719327926635742, 0.009801216125488281, 0.009822560310363769, 0.009733728408813477, 0.009845439910888671, 0.009712032318115234, 0.009957088470458984, 0.009801664352416993, 0.00988588809967041, 0.010130720138549805, 0.009900768280029297, 0.01003929615020752, 0.009970784187316895, 0.010296223640441894, 0.010074080467224121, 0.009965375900268554, 0.00993507194519043, 0.00993228816986084, 0.009796287536621094, 0.009836352348327637, 0.009844736099243164, 0.009778623580932616, 0.009783424377441407, 0.009719648361206056, 0.00996623992919922, 0.009725888252258301, 0.009836544036865234, 0.00978275203704834, 0.009798175811767578, 0.009730112075805665, 0.009727168083190917, 0.010031744003295899, 0.009809439659118652, 
0.00978495979309082, 0.010045663833618164, 0.009796095848083495, 0.0097958402633667, 0.009789440155029297, 0.009807871818542481, 0.009809920310974121, 0.009805055618286132, 0.009864895820617677, 0.01003551959991455, 0.010465120315551758, 0.010090656280517578, 0.010208512306213379, 0.009937376022338866, 0.011066656112670898, 0.009913503646850586, 0.010063679695129394, 0.009955391883850098, 0.009907808303833008, 0.009928319931030273, 0.009927680015563965, 0.009897727966308594, 0.009879584312438966, 0.009842047691345215, 0.009935296058654786, 0.00995081615447998, 0.009965408325195312, 0.009816831588745117, 0.009873663902282715, 0.009805536270141602, 0.009858207702636719, 0.009769920349121095, 0.009747808456420898, 0.009939519882202149, 0.009777119636535644, 0.009791839599609374, 0.009729727745056153, 0.009836544036865234, 0.009676575660705566, 0.009851200103759766, 0.009754879951477051, 0.009828000068664551, 0.009751839637756348, 0.009753312110900879, 0.009767231941223144, 0.009750207901000977, 0.009789440155029297, 0.009830400466918946, 0.009970879554748536, 0.009821279525756836, 0.009831520080566406, 0.009849472045898438, 0.009797856330871582, 0.009826399803161622, 0.009770496368408203, 0.009856608390808106, 0.00984943962097168, 0.00982528018951416, 0.00986012840270996, 0.009787360191345214, 0.009737343788146972, 0.009831295967102052, 0.009731776237487793, 0.009762304306030273, 0.009765695571899415, 0.010032352447509765, 0.009935647964477538, 0.009819231986999511, 0.009866144180297852, 0.009975808143615723, 0.009965120315551758, 0.009814432144165039, 0.01008995246887207, 0.009941535949707031, 0.010088640213012695, 0.009905664443969727, 0.010116671562194824, 0.00989084815979004, 0.009954367637634277, 0.009806495666503906, 0.009875040054321289, 0.009890527725219726, 0.009790975570678711, 0.009768256187438965, 0.009880000114440918, 0.009918911933898926, 0.009898271560668945, 0.009880512237548829, 0.00991312026977539, 0.009864704132080078, 0.009785920143127442, 0.00987440013885498, 0.009854080200195313, 0.009978783607482911, 0.009867839813232422, 0.009867648124694824, 0.009817440032958985, 0.009814496040344239, 0.009781408309936523, 0.009746335983276367, 0.009781344413757324, 0.009701503753662109, 0.00982153606414795, 0.010336352348327636, 0.010387647628784179, 0.010022527694702148, 0.00990841579437256, 0.009832448005676269, 0.009849696159362794, 0.009860735893249511, 0.009822175979614258, 0.00997110366821289, 0.009863615989685059, 0.009996512413024902, 0.009831583976745606, 0.009846816062927246, 0.00980617618560791, 0.009754719734191895, 0.009728351593017577, 0.009953280448913575, 0.009844736099243164, 0.009850879669189454, 0.009778176307678223, 0.009837471961975097, 0.009863072395324708, 0.009840479850769043, 0.009749983787536621, 0.009789983749389648, 0.009820544242858886, 0.009783007621765137, 0.00979372787475586, 0.00980076789855957, 0.009820511817932129, 0.00985155200958252, 0.00974233627319336, 0.01004748821258545, 0.009813887596130371, 0.009842816352844239, 0.01011302375793457, 0.01023299217224121, 0.00978384017944336, 0.009916735649108886, 0.009783295631408692, 0.009871359825134277, 0.009789376258850098, 0.009895872116088868, 0.009836352348327637, 0.010103039741516114, 0.010051872253417969, 0.009895232200622559, 0.009836288452148437, 0.009923423767089844, 0.009825247764587403, 0.009931679725646972, 0.009842687606811524, 0.00981606388092041, 0.009805120468139649, 0.009804479598999024, 0.009992192268371582, 0.009970720291137696, 0.01020364761352539, 0.01005827236175537, 
0.010176128387451171, 0.00999062442779541, 0.010024800300598144, 0.010467328071594239, 0.010100576400756836, 0.010244383811950683, 0.010020735740661622, 0.01015833568572998, 0.010026752471923828, 0.010268671989440918, 0.010209312438964844, 0.010149791717529296, 0.01012332820892334, 0.01009990406036377, 0.010116095542907716, 0.0101528959274292, 0.010094783782958984, 0.01006486415863037, 0.010053343772888183, 0.009929920196533203, 0.009910079956054687, 0.00995366382598877, 0.009888383865356445, 0.00982425594329834, 0.009837759971618652, 0.009834976196289062, 0.009812319755554199, 0.009802751541137696, 0.009829376220703125, 0.010008864402770996, 0.010096351623535156, 0.009965920448303222, 0.010066816329956055, 0.010070816040039062, 0.009963520050048828, 0.010121343612670898, 0.010098688125610352, 0.01003551959991455, 0.009890496253967285, 0.009950207710266114, 0.009834336280822755, 0.010207391738891602, 0.009891839981079101, 0.010255392074584962, 0.010038080215454102, 0.010434720039367676, 0.010139391899108887, 0.010938624382019042, 0.010297344207763673, 0.00993398380279541, 0.009960000038146972, 0.009857312202453613, 0.009834495544433594, 0.010095999717712403, 0.010001024246215821, 0.009815199851989746, 0.009815199851989746, 0.009856351852416992, 0.009778847694396972, 0.009777536392211914, 0.009780608177185058, 0.009781920433044433, 0.009724191665649414, 0.009897983551025391, 0.009803071975708007, 0.009829376220703125, 0.009854656219482422, 0.009835712432861328, 0.00982534408569336, 0.009783040046691895, 0.009725472450256348, 0.009894687652587891, 0.009821696281433106, 0.009736384391784668, 0.009826560020446778, 0.009946784019470216, 0.009893152236938476, 0.009835552215576172, 0.009768287658691406, 0.00990982437133789, 0.009804672241210937, 0.009848383903503418, 0.0098472318649292, 0.009805536270141602, 0.009803296089172363, 0.009796287536621094, 0.009861184120178222, 0.00989568042755127, 0.009846816062927246, 0.009889472007751464, 0.00977564811706543, 0.009861087799072266, 0.009967552185058594, 0.009939040184020996, 0.009906175613403321, 0.009779199600219727, 0.009885696411132813, 0.009870623588562012, 0.00983187198638916, 0.00996617603302002, 0.01012553596496582, 0.009946880340576171, 0.010091615676879882, 0.009976639747619628, 0.010053728103637695, 0.009832448005676269, 0.00997532844543457, 0.009847264289855957, 0.009930751800537109, 0.009902144432067871, 0.009936832427978515, 0.009899999618530273, 0.009861184120178222, 0.010046527862548829, 0.009935775756835937, 0.010061823844909668, 0.00996678352355957, 0.009859904289245606, 0.009805248260498048, 0.009783807754516602, 0.009724127769470214, 0.00971555233001709, 0.009750528335571289, 0.009738271713256835, 0.009733311653137207, 0.009665311813354492, 0.009696415901184082, 0.009727968215942382, 0.009748448371887207, 0.00967363166809082, 0.00974176025390625, 0.009744000434875489, 0.010261664390563965, 0.00981174373626709, 0.0097260160446167, 0.009772095680236816, 0.009774111747741699, 0.009760160446166993, 0.009801440238952637, 0.009750528335571289, 0.009992095947265625, 0.009783679962158204, 0.009697728157043457, 0.009752575874328612, 0.009744383811950684, 0.009703424453735352, 0.009736031532287597, 0.0097609281539917, 0.009926464080810547, 0.00978332805633545, 0.009873567581176758, 0.009861087799072266, 0.010008607864379883, 0.009995712280273437, 0.010004480361938477, 0.010081184387207032, 0.010155167579650879, 0.010151424407958985, 0.009959648132324219, 0.009935647964477538, 0.010012672424316407, 0.009883296012878419, 0.01023855972290039, 
0.010162112236022949, 0.010104031562805175, 0.010056608200073243, 0.00998147201538086, 0.010043807983398437, 0.00999839973449707, 0.010059328079223632, 0.009914752006530761, 0.010075231552124024, 0.010117888450622559, 0.010084511756896973, 0.010141695976257324, 0.010310879707336426, 0.010164352416992187, 0.01006060791015625, 0.010044511795043945, 0.010103551864624024, 0.010051808357238769, 0.010027008056640625, 0.009962271690368653, 0.009987071990966797, 0.009975808143615723, 0.009977855682373048, 0.010045439720153808, 0.009981951713562011, 0.010381279945373535, 0.010080096244812011, 0.009926848411560059, 0.010034784317016602, 0.009900447845458984, 0.00989583969116211, 0.009804160118103027, 0.009928735733032226, 0.009907903671264649, 0.009862303733825684, 0.009814432144165039, 0.009783743858337402, 0.00977683162689209, 0.009881919860839843, 0.009900032043457031, 0.009820159912109374, 0.009857024192810059, 0.009797632217407226, 0.009789440155029297, 0.00978335952758789, 0.009885727882385254, 0.010258336067199707, 0.01009830379486084, 0.010047871589660645, 0.009988096237182617, 0.010135647773742675, 0.009987872123718261, 0.009885824203491211, 0.009873248100280762, 0.009829983711242676, 0.009849184036254883, 0.009930015563964844, 0.009866175651550292, 0.009834431648254394, 0.009883296012878419, 0.009934335708618165, 0.00991164779663086, 0.010090880393981934, 0.010010335922241211, 0.01002729606628418, 0.009879551887512206, 0.009936896324157715, 0.010225664138793946, 0.009883487701416015, 0.010141887664794923, 0.009990015983581542, 0.009990240097045898, 0.01004748821258545, 0.009852704048156738, 0.009920767784118652, 0.009955519676208497, 0.009963552474975585, 0.009922207832336426, 0.009928799629211426, 0.009867263793945312, 0.009885408401489259, 0.009932255744934082, 0.009929216384887696, 0.009872703552246094, 0.009865407943725585, 0.009810943603515625, 0.009780256271362305, 0.009966527938842774, 0.009858431816101075, 0.00982483196258545, 0.00986511993408203, 0.009834719657897948, 0.009881535530090331, 0.009737536430358887, 0.009775296211242675, 0.009769311904907227, 0.009871359825134277, 0.009770784378051758, 0.00982857608795166, 0.009960607528686523, 0.009781599998474121, 0.009753087997436523, 0.00985200023651123, 0.009789440155029297, 0.009735008239746094, 0.010481727600097657, 0.010228863716125488, 0.011033632278442384, 0.01064851188659668, 0.010125951766967773, 0.009924799919128417, 0.00988803195953369, 0.009903807640075684, 0.00986303997039795, 0.009860511779785155, 0.009890368461608887, 0.00980406379699707, 0.00982153606414795, 0.009968671798706055, 0.010883808135986328, 0.010075039863586426, 0.010061087608337403, 0.010128095626831054, 0.010132896423339843, 0.009779071807861328, 0.01004911994934082, 0.01000937557220459, 0.009969375610351562, 0.010083423614501954, 0.009880736351013183, 0.009945088386535645, 0.009796416282653809, 0.00996771240234375, 0.009842752456665039, 0.00991107177734375, 0.00993068790435791, 0.009852992057800294, 0.009807871818542481, 0.009799679756164551, 0.009856255531311035, 0.00983903980255127, 0.009814335823059081, 0.01002524757385254, 0.009973471641540527, 0.009883328437805176, 0.00993721580505371, 0.00981100845336914, 0.009761728286743165, 0.009887040138244629, 0.009988800048828125, 0.009870528221130371, 0.009900064468383788, 0.009802687644958495, 0.009776063919067383, 0.009712544441223145, 0.009881535530090331, 0.009775168418884277, 0.009815072059631348, 0.009860063552856445, 0.009862144470214844, 0.010088895797729492, 0.009892416000366212, 
0.009938943862915038, 0.00984284782409668, 0.009898143768310547, 0.010122943878173828, 0.010010880470275879, 0.009897088050842285, 0.00991055965423584, 0.009775456428527831, 0.009854975700378419, 0.00981827163696289, 0.009893728256225585, 0.009828415870666504, 0.00984876823425293, 0.00986911964416504, 0.009978048324584961, 0.009877504348754883, 0.009840640068054199, 0.009837823867797852, 0.009771776199340821, 0.009893888473510743, 0.009807359695434571, 0.00984115219116211, 0.009756671905517577, 0.009761983871459962, 0.009820032119750976, 0.009824192047119141, 0.009939616203308105, 0.009922911643981933, 0.009852928161621094, 0.009840640068054199, 0.009790687561035157, 0.009818047523498534, 0.009892127990722656, 0.010013248443603516, 0.009953280448913575, 0.009967616081237793, 0.009861120223999023, 0.009831935882568359, 0.009767200469970703, 0.009791711807250976, 0.010010496139526367, 0.009857151985168457, 0.00974028778076172, 0.010069120407104492, 0.009919679641723633, 0.00993062400817871, 0.00983568000793457, 0.010023584365844726, 0.009846783638000489, 0.009934847831726074, 0.009978976249694824, 0.010427295684814453, 0.010057056427001953, 0.010052127838134766, 0.010055808067321778, 0.010123488426208496, 0.010129247665405273, 0.009939071655273437, 0.009910079956054687, 0.009918463706970216, 0.009803423881530762, 0.009892191886901856, 0.009927935600280761, 0.010052351951599122, 0.009815232276916504, 0.009898816108703613, 0.009780991554260253, 0.009785920143127442, 0.009967264175415038, 0.009842752456665039, 0.009896096229553222, 0.009813823699951173, 0.009971424102783203, 0.00982636833190918, 0.009857248306274415, 0.009838591575622559, 0.009827775955200195, 0.009814592361450195, 0.009857024192810059, 0.00990396785736084, 0.010084575653076171, 0.010137536048889161, 0.009998496055603028, 0.010073408126831055, 0.010029727935791016, 0.009818047523498534, 0.009908160209655762, 0.00984499168395996, 0.009778847694396972, 0.0099966402053833, 0.009965727806091309, 0.009903167724609375, 0.009863967895507813, 0.009830335617065429, 0.010162240028381347, 0.009895936012268066, 0.00998147201538086, 0.009861599922180176, 0.009912320137023926, 0.010131072044372559, 0.009900608062744141, 0.009964544296264649, 0.009963968276977538, 0.00996390438079834, 0.009836416244506836, 0.009848959922790527, 0.009889792442321778, 0.010408063888549805, 0.009910143852233887, 0.00990822410583496, 0.00993231964111328, 0.009779680252075195, 0.009782591819763183, 0.009816767692565918, 0.009754624366760254, 0.009719296455383301, 0.009765376091003418, 0.009746432304382324, 0.009820159912109374, 0.0097871675491333, 0.009801759719848632, 0.009704928398132324, 0.009684864044189454, 0.009744864463806152, 0.00968239974975586, 0.00964675235748291, 0.009669919967651368, 0.009700511932373047, 0.009647616386413574, 0.009753055572509765, 0.009795424461364746, 0.009713664054870605, 0.009703167915344238, 0.009718015670776367, 0.00970137596130371, 0.009674112319946289, 0.009722111701965332, 0.0097009916305542, 0.009835264205932618, 0.009701120376586913, 0.009849087715148926, 0.009834272384643555, 0.009789536476135254, 0.00983839988708496, 0.009793631553649903, 0.009813823699951173, 0.009984416007995605, 0.009864224433898925, 0.009831263542175292, 0.009820287704467773, 0.009977184295654296, 0.009829824447631835, 0.010021087646484374, 0.010068544387817383, 0.010042688369750977, 0.00992505645751953, 0.009944607734680175, 0.00976912021636963, 0.0097423677444458, 0.009779295921325683, 0.009686367988586426, 0.009831551551818848, 
0.009800992012023925, 0.009888192176818848, 0.009891839981079101, 0.009895936012268066, 0.009969663619995118, 0.009938943862915038, 0.009836383819580078, 0.010211456298828124, 0.009902015686035156, 0.009841792106628418, 0.00981123161315918, 0.009829567909240724, 0.009845343589782715, 0.00995088005065918, 0.010062080383300781, 0.010032447814941406, 0.009951935768127442, 0.009990464210510254, 0.00995740795135498, 0.009915583610534668, 0.009904607772827149, 0.009822463989257813, 0.009824000358581542, 0.009835712432861328, 0.009847583770751954, 0.00994921588897705, 0.009873760223388673, 0.009821855545043945, 0.009842240333557128, 0.010219967842102051, 0.009904128074645996, 0.009822208404541016, 0.009775103569030762, 0.0097259521484375, 0.009772192001342774, 0.00974847984313965, 0.009747296333312989, 0.009862912178039552, 0.00989136028289795, 0.009822943687438964, 0.009773119926452637, 0.009817919731140136, 0.010016287803649902, 0.012435615539550781, 0.012026335716247559, 0.009973567962646484, 0.009902112007141113, 0.009965696334838868, 0.00982630443572998, 0.009858976364135743, 0.0097958402633667, 0.009887519836425782]",tokens/s,100.8796169574661,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1033.265152,874.381312,0.0,488.636416,482.553856,s,1,7.89728857421875,7.89728857421875,0.0,7.89728857421875,7.89728857421875,7.89728857421875,7.89728857421875,[7.89728857421875],,kWh,2.3134894824973646e-05,2.5447348538675583e-06,7.589172738020622e-06,3.3268802416861825e-05,,MB,1396.666368,1033.764864,0.0,616.562688,581.925888,s,10,0.35908691024780276,0.035908691024780275,0.0003067188453673132,0.035847999572753905,0.03620539093017578,0.03641253471374512,0.03657824974060059,"[0.036619678497314456, 0.035720256805419924, 0.03557392120361328, 0.035660991668701174, 0.03558425521850586, 0.03586268615722656, 0.03583331298828125, 0.036130847930908205, 0.03615935897827149, 0.03594160079956055]",tokens/s,7129.193314881253,kWh,1.1370931457683634e-06,1.2540116739264686e-07,7.533177232762526e-07,2.015812036437263e-06,tokens/kWh,126995967.56672475,MB,1435.611136,1046.347776,0.0,629.1456,597.192192,s,10,17.46274267578125,1.7462742675781247,0.006425132533619413,1.7491557006835938,1.7509768798828125,1.7527657836914061,1.7541969067382812,"[1.750579345703125, 1.7335897216796874, 1.7356512451171875, 1.749445068359375, 1.7451141357421875, 1.7445423583984374, 1.7503389892578125, 1.7545546875, 1.750060791015625, 1.7488663330078125]",tokens/s,36.076807160065144,kWh,5.073900541173617e-05,5.596186695961226e-06,1.962575413552414e-05,7.596094624322153e-05,tokens/kWh,829373.554645811,,s,630,17.457155151367196,0.02770977008153522,0.0004244085319440165,0.027613919258117674,0.02811825199127197,0.02831369466781616,0.02925644262313843,"[0.02726937675476074, 0.02787081527709961, 0.02762972831726074, 0.027647104263305664, 0.02765683174133301, 0.027629823684692384, 0.027533536911010743, 
0.027430240631103515, 0.02751532745361328, 0.027457120895385743, 0.027766176223754883, 0.02746675109863281, 0.027455488204956056, 0.027531232833862305, 0.027514368057250976, 0.02754819107055664, 0.0276278076171875, 0.027510208129882814, 0.027548959732055664, 0.02758684730529785, 0.027415264129638673, 0.02766044807434082, 0.027717567443847655, 0.02745743942260742, 0.027498432159423828, 0.027738176345825195, 0.027678720474243163, 0.028118143081665038, 0.028485919952392576, 0.028172895431518553, 0.02760220718383789, 0.02768124771118164, 0.027789567947387694, 0.02806387138366699, 0.028458879470825194, 0.028446880340576172, 0.028208927154541017, 0.028266239166259765, 0.02810086441040039, 0.028190784454345703, 0.028251968383789062, 0.02799430465698242, 0.027838464736938476, 0.027709280014038086, 0.027611072540283204, 0.027632192611694337, 0.02763536071777344, 0.02748569679260254, 0.027449855804443358, 0.027535360336303712, 0.02736457633972168, 0.027431711196899414, 0.02750873565673828, 0.02752921676635742, 0.02747536087036133, 0.02749235153198242, 0.02755187225341797, 0.0299399356842041, 0.02927168083190918, 0.028131744384765626, 0.02773219108581543, 0.02760121536254883, 0.027878911972045898, 0.027436864852905272, 0.028145023345947266, 0.02776959991455078, 0.027907712936401367, 0.027677120208740233, 0.027752447128295898, 0.02761657524108887, 0.027677312850952148, 0.0274268798828125, 0.027551744461059572, 0.027348352432250977, 0.027451072692871094, 0.0272923526763916, 0.027449151992797852, 0.027432960510253908, 0.027429311752319337, 0.027522943496704103, 0.0274781436920166, 0.027545312881469726, 0.027457664489746094, 0.027412639617919923, 0.027443456649780273, 0.027434175491333007, 0.027463584899902343, 0.027505311965942383, 0.027376672744750977, 0.02735817527770996, 0.027371519088745116, 0.027406496047973634, 0.027276639938354493, 0.027503103256225587, 0.02728044891357422, 0.027345855712890624, 0.02745782470703125, 0.02728700828552246, 0.02803033638000488, 0.028035743713378906, 0.0278427848815918, 0.027699199676513672, 0.027922655105590822, 0.027619136810302734, 0.027482080459594726, 0.02750700759887695, 0.027630783081054686, 0.027438880920410157, 0.027443872451782227, 0.027516672134399414, 0.0273636474609375, 0.02731795120239258, 0.027408447265625, 0.027435264587402343, 0.02758428764343262, 0.027322784423828125, 0.027497791290283204, 0.027508256912231445, 0.027385919570922852, 0.02723148727416992, 0.027471071243286134, 0.02732217597961426, 0.02749020767211914, 0.02747670364379883, 0.027467199325561523, 0.027314975738525392, 0.027041088104248046, 0.027279487609863283, 0.02744940757751465, 0.027537727355957033, 0.02757446479797363, 0.027725984573364258, 0.027792287826538087, 0.02773321533203125, 0.02787046432495117, 0.0280184326171875, 0.028007135391235352, 0.027785215377807617, 0.027883039474487305, 0.027707584381103517, 0.027713151931762697, 0.02767519950866699, 0.027477216720581055, 0.027429759979248045, 0.02752889633178711, 0.027324735641479494, 0.02737107276916504, 0.027390304565429686, 0.027500640869140624, 0.027465696334838866, 0.02749388885498047, 0.02757663917541504, 0.027569856643676758, 0.027429407119750976, 0.027422719955444336, 0.02751283264160156, 0.027389951705932617, 0.027416576385498048, 0.027650047302246093, 0.027666208267211913, 0.02737993621826172, 0.02752511978149414, 0.027361600875854493, 0.02740915107727051, 0.027293920516967773, 0.02727190399169922, 0.027410432815551757, 0.027246240615844727, 0.027617759704589843, 0.028200096130371093, 0.027876064300537108, 
0.02771968078613281, 0.02788688087463379, 0.02752102470397949, 0.02766441535949707, 0.027449024200439452, 0.027613855361938475, 0.02750931167602539, 0.027490272521972656, 0.02764406394958496, 0.0275882568359375, 0.027494400024414063, 0.027340799331665038, 0.027334623336791992, 0.027394399642944336, 0.027381439208984375, 0.027385856628417967, 0.027299135208129884, 0.02738412857055664, 0.026964927673339845, 0.027205631256103514, 0.02739344024658203, 0.02747452735900879, 0.027365375518798828, 0.027397247314453126, 0.027328927993774413, 0.02741891288757324, 0.028119232177734373, 0.027609088897705077, 0.027435007095336913, 0.027602336883544923, 0.02921913528442383, 0.03090460777282715, 0.027984031677246092, 0.027920223236083983, 0.028317279815673828, 0.027722143173217775, 0.027600896835327147, 0.027799072265625, 0.027408863067626955, 0.027289567947387697, 0.027842592239379883, 0.027637760162353517, 0.027635711669921875, 0.027880735397338867, 0.028039424896240235, 0.028086559295654297, 0.02800454330444336, 0.027978015899658204, 0.0279484806060791, 0.027857343673706056, 0.031035232543945312, 0.0281395206451416, 0.02795635223388672, 0.027730688095092774, 0.027529312133789063, 0.027572416305541993, 0.027518112182617186, 0.02738252830505371, 0.027519136428833007, 0.027473695755004884, 0.027663679122924806, 0.027496896743774413, 0.0276313591003418, 0.027508928298950196, 0.027510591506958008, 0.027624095916748047, 0.027735584259033202, 0.027474496841430666, 0.027496192932128908, 0.028030975341796875, 0.027647808074951173, 0.027463104248046873, 0.027654624938964843, 0.027359519958496094, 0.027437055587768554, 0.027282560348510742, 0.02731507110595703, 0.027576416015625, 0.027594751358032226, 0.027455167770385744, 0.027684864044189454, 0.027254528045654296, 0.02809267234802246, 0.028659711837768553, 0.028704063415527344, 0.027853504180908203, 0.0278056640625, 0.02787126350402832, 0.027648000717163085, 0.027648000717163085, 0.027586687088012696, 0.027495840072631835, 0.02745510482788086, 0.027429023742675782, 0.02783420753479004, 0.027415456771850585, 0.02744419288635254, 0.02744009590148926, 0.027449087142944337, 0.027451520919799806, 0.02754924774169922, 0.027498464584350586, 0.02804591941833496, 0.027826175689697266, 0.027914432525634764, 0.027692096710205078, 0.027663103103637697, 0.02782806396484375, 0.02747555160522461, 0.027479711532592772, 0.027314815521240234, 0.027436767578125, 0.027445823669433593, 0.027561983108520507, 0.02779052734375, 0.027663455963134766, 0.027883039474487305, 0.0279836483001709, 0.02766067123413086, 0.027584543228149416, 0.027813888549804686, 0.027958879470825194, 0.02758924865722656, 0.027757375717163087, 0.027712480545043945, 0.027471872329711915, 0.027656192779541015, 0.02762713623046875, 0.027578752517700197, 0.027617279052734374, 0.027623104095458983, 0.027492671966552733, 0.027483488082885744, 0.027465696334838866, 0.027499071121215822, 0.027469823837280274, 0.027459007263183594, 0.02767513656616211, 0.027631168365478517, 0.027645856857299804, 0.028326623916625975, 0.02819196891784668, 0.027985824584960937, 0.027969791412353517, 0.02736476707458496, 0.02768351936340332, 0.027522943496704103, 0.027459936141967775, 0.027539392471313477, 0.027807519912719725, 0.02803331184387207, 0.027875328063964845, 0.027820032119750978, 0.027658527374267577, 0.027733728408813475, 0.02780169677734375, 0.028198976516723633, 0.031145280838012695, 0.028170719146728515, 0.027652095794677735, 0.027686847686767577, 0.027738239288330076, 0.027752447128295898, 0.027825952529907227, 
0.027805919647216796, 0.027593952178955078, 0.027799583435058593, 0.027294687271118164, 0.027436832427978515, 0.02750054359436035, 0.02754351997375488, 0.027444608688354494, 0.02747417640686035, 0.02746342468261719, 0.027642431259155272, 0.02748633575439453, 0.027578336715698242, 0.02753331184387207, 0.027746368408203125, 0.027496320724487305, 0.027379776000976564, 0.0275742073059082, 0.027506752014160155, 0.027422496795654297, 0.027487808227539063, 0.027480735778808593, 0.027604927062988283, 0.02782022476196289, 0.027837663650512694, 0.027643648147583008, 0.02753561592102051, 0.027620000839233397, 0.027542688369750976, 0.02753990364074707, 0.027470239639282225, 0.027426816940307616, 0.027471807479858397, 0.027764799118041993, 0.0277258243560791, 0.027486207962036133, 0.02759071922302246, 0.02752067184448242, 0.028401952743530273, 0.027406015396118165, 0.02739558410644531, 0.027378143310546874, 0.02764806365966797, 0.027407808303833006, 0.027638751983642577, 0.02817184066772461, 0.028036832809448242, 0.027959999084472657, 0.027776096343994142, 0.027722463607788086, 0.02750022315979004, 0.0275830078125, 0.027467424392700196, 0.027455455780029298, 0.027422176361083985, 0.02732534408569336, 0.027303936004638672, 0.027465152740478515, 0.027472448348999024, 0.02788761520385742, 0.027852800369262694, 0.027636032104492187, 0.027798944473266602, 0.027799840927124023, 0.02767417526245117, 0.027706975936889647, 0.027636735916137696, 0.02760483169555664, 0.027583711624145506, 0.027613983154296876, 0.02788105583190918, 0.02760540771484375, 0.027407424926757813, 0.027573440551757814, 0.027473440170288087, 0.027571903228759766, 0.030475072860717774, 0.02823561668395996, 0.027992063522338868, 0.027678239822387696, 0.028099231719970703, 0.027908832550048827, 0.028060640335083008, 0.02771968078613281, 0.027838464736938476, 0.027661504745483397, 0.02775935935974121, 0.02773142433166504, 0.027781728744506837, 0.027674623489379883, 0.027636831283569335, 0.02759359931945801, 0.027790719985961915, 0.02796611213684082, 0.028128416061401366, 0.027999423980712892, 0.028090335845947265, 0.028124128341674805, 0.027984479904174804, 0.027738239288330076, 0.027836639404296874, 0.027825183868408203, 0.027829151153564453, 0.027557600021362306, 0.027584640502929688, 0.027480064392089845, 0.027101247787475587, 0.027443199157714843, 0.027846879959106445, 0.027557600021362306, 0.02759004783630371, 0.027757471084594726, 0.02788230323791504, 0.027849344253540038, 0.02796931266784668, 0.028227296829223633, 0.028733791351318358, 0.028694175720214845, 0.02830931282043457, 0.028602975845336914, 0.02874387168884277, 0.028350624084472656, 0.0281213436126709, 0.027889408111572266, 0.027781055450439452, 0.027667903900146486, 0.027980415344238282, 0.027447296142578126, 0.0276495361328125, 0.0280581111907959, 0.027844608306884764, 0.027691007614135742, 0.029201887130737306, 0.027768863677978515, 0.027533824920654298, 0.027374719619750975, 0.027627391815185545, 0.02751113510131836, 0.02735580825805664, 0.027364448547363283, 0.027399040222167968, 0.027504415512084962, 0.027463935852050782, 0.02736128044128418, 0.027475488662719726, 0.02734515190124512, 0.027369695663452147, 0.027323551177978515, 0.02748092842102051, 0.027561407089233398, 0.02769977569580078, 0.028006431579589843, 0.027863136291503908, 0.02791206359863281, 0.027807519912719725, 0.028508256912231446, 0.02788332748413086, 0.027878719329833983, 0.027593727111816405, 0.027551744461059572, 0.02815555191040039, 0.027637760162353517, 0.027799840927124023, 0.027844192504882813, 
0.028071903228759767, 0.02828339195251465, 0.02842416000366211, 0.028203039169311522, 0.0279836483001709, 0.02765283203125, 0.02764912033081055, 0.02745849609375, 0.027545375823974608, 0.027607263565063475, 0.027648000717163085, 0.027797504425048827, 0.027878944396972655, 0.027729984283447265, 0.027464096069335937, 0.027494400024414063, 0.02744723129272461, 0.027440736770629883, 0.027591136932373046, 0.027438880920410157, 0.02739142417907715, 0.02749875259399414, 0.027712032318115233, 0.02819491195678711, 0.028066944122314454, 0.027654560089111328, 0.027959680557250975, 0.028235776901245117, 0.027959007263183594, 0.02782441520690918, 0.027674528121948243, 0.02749807929992676, 0.027542015075683594, 0.027480127334594727, 0.02744633674621582, 0.027591552734375, 0.027525375366210938, 0.027682687759399412, 0.027635040283203124, 0.02743087959289551, 0.027607551574707033, 0.027496511459350587, 0.027836063385009765, 0.02758620834350586, 0.027585216522216797, 0.027447296142578126, 0.027553632736206056, 0.027367136001586915, 0.027482559204101562, 0.027352256774902342, 0.027592863082885742, 0.02750761604309082, 0.028731103897094726, 0.027872543334960937, 0.027685951232910157, 0.027688159942626953, 0.028173023223876954, 0.028366207122802734, 0.030794111251831055, 0.02832912063598633, 0.028575712203979493, 0.027993247985839843, 0.02807129669189453, 0.02796988868713379, 0.02791628837585449, 0.02774015998840332, 0.02775859260559082, 0.027604991912841798, 0.027407360076904298, 0.02762918472290039, 0.027528671264648436, 0.027633535385131837, 0.02755036735534668, 0.027463680267333986, 0.027604448318481446, 0.027505184173583986, 0.027570335388183594, 0.027588703155517577, 0.027717695236206055, 0.02752479934692383, 0.02758246421813965, 0.02747222328186035, 0.02756809616088867, 0.02762009620666504, 0.027743263244628905, 0.027719200134277342, 0.02743948745727539, 0.02739574432373047, 0.02759878349304199, 0.027378047943115234, 0.027345216751098633, 0.02746953582763672, 0.027490272521972656, 0.02805344009399414, 0.0282935676574707, 0.028601951599121093, 0.028329504013061522, 0.02830099105834961, 0.028275552749633788, 0.027889951705932618, 0.028144863128662108, 0.027975839614868166, 0.027810367584228515, 0.02771881675720215, 0.027892704010009765, 0.027565727233886717, 0.027652191162109374, 0.027551647186279296, 0.028565439224243164, 0.02823379135131836, 0.02837708854675293, 0.028059968948364256, 0.027914079666137695, 0.02775257682800293, 0.027695039749145507, 0.027883039474487305, 0.027739391326904297, 0.02774870491027832, 0.027849695205688477, 0.02763488006591797, 0.027557600021362306, 0.027643999099731444, 0.02749238395690918, 0.02770569610595703, 0.02770787239074707, 0.027588735580444335, 0.027639423370361328, 0.027684959411621093, 0.027967487335205078, 0.027502592086791993, 0.027762432098388672]",tokens/s,36.088354290112406,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,921.501696,704.512,0.0,301.989888,282.769408,s,1,7.7189326171875,7.7189326171875,0.0,7.7189326171875,7.7189326171875,7.7189326171875,7.7189326171875,[7.7189326171875],,kWh,1.9887378466728478e-05,2.1861412478310926e-06,6.742505394008447e-06,2.881602510856802e-05,,MB,1356.14464,759.037952,0.0,341.835776,318.94528,s,17,0.19501791763305665,0.011471642213709214,9.22493118743139e-05,0.011455615997314454,0.011610386848449707,0.01164992618560791,0.011690804481506348,"[0.011592543601989747, 0.011428863525390624, 0.011455615997314454, 0.011485823631286621, 0.011375712394714356, 0.01145644760131836, 0.011637151718139649, 0.011401760101318359, 0.011372639656066894, 0.011463295936584473, 0.011420767784118652, 0.011534784317016602, 0.01144108772277832, 0.011479455947875977, 0.011361536026000976, 0.01140940761566162, 0.011701024055480957]",tokens/s,22315.898214997203,kWh,3.3231862126991136e-07,3.6648848838381324e-08,1.8306044763787794e-07,5.520279177461706e-07,tokens/kWh,463744661.7649364,MB,1372.258304,784.203776,0.0,367.0016,318.94784,s,17,10.089940246582032,0.5935258968577666,0.0034069253726718607,0.5923961791992187,0.5963058715820313,0.5992733154296875,0.6038682373046875,"[0.59783740234375, 0.5922438354492188, 0.593423095703125, 0.59094921875, 0.6050169677734375, 0.5901730346679688, 0.591362548828125, 0.594068603515625, 0.5923961791992187, 0.592635498046875, 0.5921864624023437, 0.5949258422851562, 0.5917977294921875, 0.5952848510742188, 0.5910344848632813, 0.5915699462890625, 0.5930345458984375]",tokens/s,106.14532631774517,kWh,1.7213135020887523e-05,1.8983194664374817e-06,6.6824189009487795e-06,2.579387338827378e-05,tokens/kWh,2442440.460634369,,s,1071,10.08160049724578,0.00941325910106983,0.00027178040748129827,0.009376095771789552,0.009490048408508301,0.009557104110717774,0.010316518115997314,"[0.009251999855041503, 0.009315327644348144, 0.00934502410888672, 0.009328255653381348, 0.00933238410949707, 0.009373727798461915, 0.009325247764587402, 0.009326272010803223, 0.009322815895080567, 0.009313440322875977, 0.009313247680664062, 0.009368736267089844, 0.009355423927307129, 0.009447999954223633, 0.010276800155639648, 0.010768095970153808, 0.012027551651000976, 0.009537280082702637, 0.009483488082885742, 0.009434847831726075, 0.00939401626586914, 0.009506048202514649, 0.009438336372375488, 0.009379615783691406, 0.009453568458557129, 0.009412639617919922, 0.0094136323928833, 0.009448415756225586, 0.009418272018432617, 0.009390624046325683, 0.009396160125732422, 0.009357024192810058, 0.009511167526245118, 0.009401599884033203, 0.009421024322509765, 0.009416255950927734, 0.009525279998779296, 0.009446368217468262, 0.009502688407897949, 0.009449376106262206, 0.009465984344482422, 0.00954099178314209, 0.009528256416320801, 0.009532447814941405, 0.009426719665527343, 0.009343968391418458, 
0.009344927787780762, 0.009383968353271484, 0.009297823905944825, 0.00937171173095703, 0.00938815975189209, 0.009393376350402832, 0.00949295997619629, 0.009424511909484862, 0.009414208412170411, 0.00937286376953125, 0.00935916805267334, 0.009347295761108399, 0.009355039596557617, 0.00932204818725586, 0.009362079620361329, 0.009473024368286133, 0.009392736434936523, 0.009183199882507324, 0.009376095771789552, 0.009374784469604493, 0.00949238395690918, 0.00945638370513916, 0.00960211181640625, 0.009374655723571778, 0.009308352470397949, 0.009334591865539551, 0.009359199523925782, 0.009362624168395997, 0.009339872360229493, 0.009388031959533692, 0.009449248313903808, 0.00940668773651123, 0.009490048408508301, 0.0094334716796875, 0.00941055965423584, 0.009375359535217285, 0.00937123203277588, 0.009433856010437012, 0.009373984336853027, 0.009313280105590821, 0.009349120140075684, 0.009351936340332032, 0.009396224021911622, 0.009398271560668945, 0.009412639617919922, 0.009381855964660644, 0.009411904335021972, 0.009490303993225098, 0.009370431900024415, 0.00940009593963623, 0.009388256072998046, 0.009383935928344727, 0.009451519966125489, 0.009406559944152832, 0.009406399726867676, 0.009473664283752441, 0.009328991889953613, 0.009392000198364258, 0.009369791984558106, 0.009533087730407714, 0.009388319969177246, 0.00938361644744873, 0.009345120429992675, 0.009349344253540039, 0.009379839897155762, 0.00932812786102295, 0.009361344337463379, 0.009373472213745118, 0.009394304275512695, 0.009394847869873046, 0.009422687530517579, 0.009367487907409667, 0.00938742446899414, 0.009481023788452149, 0.009381888389587402, 0.00945321559906006, 0.009371808052062988, 0.009381183624267578, 0.009361599922180176, 0.009353280067443848, 0.009111807823181153, 0.009390015602111817, 0.00935366439819336, 0.009365280151367188, 0.00933017635345459, 0.009349184036254883, 0.009379520416259765, 0.009357215881347657, 0.009319168090820312, 0.009443455696105958, 0.00944048023223877, 0.009336671829223632, 0.009294431686401367, 0.009370240211486816, 0.009383647918701173, 0.00936905574798584, 0.009425439834594727, 0.009307647705078125, 0.009357503890991211, 0.009482560157775878, 0.009315360069274902, 0.009382911682128906, 0.009357279777526855, 0.009348896026611328, 0.009380064010620116, 0.009404095649719238, 0.009417023658752442, 0.009392127990722657, 0.00939417552947998, 0.009385984420776367, 0.009391231536865234, 0.009786432266235351, 0.009492287635803223, 0.009496000289916992, 0.00939680004119873, 0.009537856101989746, 0.0094901123046875, 0.009379839897155762, 0.00942899227142334, 0.009377792358398437, 0.009418272018432617, 0.009414943695068359, 0.009480287551879883, 0.009332544326782226, 0.00971190357208252, 0.009529215812683105, 0.009378111839294434, 0.009395584106445313, 0.009595168113708497, 0.009446592330932617, 0.009437567710876464, 0.009462143898010254, 0.009439455986022948, 0.009406463623046875, 0.009406463623046875, 0.00939724826812744, 0.009363743782043457, 0.009374176025390625, 0.009488639831542968, 0.00940236759185791, 0.009355232238769532, 0.009472031593322754, 0.009494527816772461, 0.00920691204071045, 0.009445504188537597, 0.009484512329101562, 0.00938271999359131, 0.009374560356140137, 0.009347935676574708, 0.009358688354492188, 0.009349823951721192, 0.009465920448303222, 0.009358559608459473, 0.009333696365356445, 0.009334560394287109, 0.00935859203338623, 0.00934992027282715, 0.009381471633911133, 0.009419136047363282, 0.009354656219482421, 0.009458271980285645, 0.009371328353881835, 0.009322976112365723, 
0.009385439872741699, 0.009382143974304199, 0.009328191757202148, 0.009339455604553223, 0.009316448211669923, 0.009293472290039063, 0.009328895568847657, 0.009326592445373535, 0.00943331241607666, 0.009333727836608886, 0.009329504013061523, 0.009302016258239745, 0.009340671539306641, 0.009416223526000977, 0.009343135833740234, 0.009332991600036622, 0.009416223526000977, 0.009344799995422363, 0.009366496086120606, 0.009380031585693359, 0.009377599716186524, 0.009398271560668945, 0.010053024291992188, 0.009351840019226074, 0.009394207954406738, 0.009422752380371094, 0.009353471755981444, 0.009346816062927246, 0.009312255859375, 0.009351008415222168, 0.009342592239379882, 0.009433183670043945, 0.009349344253540039, 0.00953775978088379, 0.009285632133483887, 0.00931948757171631, 0.009321120262145996, 0.009314687728881835, 0.009290752410888671, 0.009337759971618653, 0.009332736015319825, 0.00939417552947998, 0.009355135917663574, 0.009076831817626953, 0.00935747241973877, 0.009416159629821777, 0.009329024314880372, 0.00932249641418457, 0.00996294403076172, 0.009540160179138184, 0.010575200080871582, 0.009558719635009766, 0.00956003189086914, 0.009463808059692384, 0.00942204761505127, 0.009432095527648926, 0.009391936302185058, 0.00936956787109375, 0.009410528182983399, 0.009399392127990723, 0.009374624252319335, 0.00940236759185791, 0.009439231872558594, 0.009582207679748536, 0.009456000328063964, 0.009675935745239258, 0.009433952331542969, 0.009457152366638183, 0.00958672046661377, 0.009412351608276368, 0.009396896362304688, 0.00940447998046875, 0.009353280067443848, 0.009365440368652344, 0.009465855598449707, 0.009365663528442383, 0.009311136245727538, 0.009359487533569336, 0.009327424049377442, 0.009312543869018555, 0.009344736099243163, 0.009436384201049804, 0.009358367919921874, 0.009432928085327148, 0.009320192337036133, 0.010323328018188476, 0.011150336265563965, 0.01600022315979004, 0.009735936164855958, 0.009429856300354004, 0.009422368049621582, 0.009375807762145996, 0.009460351943969726, 0.00943238353729248, 0.009351615905761718, 0.009398271560668945, 0.009403871536254884, 0.009468159675598144, 0.009388319969177246, 0.009397695541381836, 0.00936956787109375, 0.009497183799743653, 0.009687264442443848, 0.009442751884460449, 0.00944480037689209, 0.009392095565795898, 0.009137855529785157, 0.009389823913574218, 0.00937052822113037, 0.00936086368560791, 0.009361791610717773, 0.00934432029724121, 0.009312959671020507, 0.009344799995422363, 0.00932636833190918, 0.009318240165710449, 0.00944803237915039, 0.009314080238342286, 0.0095131196975708, 0.009383999824523925, 0.009359359741210938, 0.009369343757629394, 0.009412863731384277, 0.009351391792297364, 0.009330464363098144, 0.009376992225646972, 0.009411359786987305, 0.0094269437789917, 0.009373696327209472, 0.00934227180480957, 0.009335328102111816, 0.00937507152557373, 0.009356096267700195, 0.009357312202453612, 0.009455391883850098, 0.009384160041809081, 0.009438719749450684, 0.009336383819580078, 0.009321311950683593, 0.009312383651733399, 0.009333919525146485, 0.009306943893432618, 0.0093306884765625, 0.009589887619018555, 0.009298815727233886, 0.009293824195861817, 0.009343999862670899, 0.009320799827575683, 0.009319071769714355, 0.009349120140075684, 0.00932044792175293, 0.009347071647644043, 0.009355199813842774, 0.009359423637390136, 0.00944495964050293, 0.009332511901855469, 0.009341279983520507, 0.009357600212097169, 0.00936774444580078, 0.009367551803588867, 0.009368736267089844, 0.00934499168395996, 0.0093570556640625, 
0.009317472457885742, 0.009368895530700683, 0.009386207580566406, 0.009382207870483399, 0.009352224349975586, 0.009378815650939941, 0.009197888374328613, 0.009390624046325683, 0.010253279685974122, 0.009746687889099121, 0.009417632102966308, 0.009437215805053712, 0.009316384315490723, 0.00935859203338623, 0.009365920066833497, 0.009401760101318359, 0.009355680465698242, 0.009365344047546386, 0.009307711601257325, 0.009356287956237793, 0.009398048400878906, 0.00933676815032959, 0.009390336036682129, 0.009357215881347657, 0.009344927787780762, 0.009388031959533692, 0.009375807762145996, 0.00939628791809082, 0.009399231910705566, 0.009392095565795898, 0.009370016098022462, 0.009390496253967285, 0.009381792068481446, 0.009368160247802734, 0.009344672203063965, 0.0094269437789917, 0.009367775917053223, 0.009307231903076172, 0.009277567863464356, 0.009382464408874512, 0.009340191841125488, 0.009337568283081054, 0.009385215759277343, 0.009378560066223144, 0.009389568328857421, 0.009390848159790039, 0.009375136375427246, 0.009412960052490234, 0.009318400382995605, 0.009326592445373535, 0.00930611228942871, 0.00931388759613037, 0.009314784049987793, 0.009297856330871582, 0.00930611228942871, 0.009328351974487305, 0.009344544410705567, 0.009421567916870117, 0.009409664154052734, 0.009339776039123535, 0.009352224349975586, 0.009423839569091796, 0.009328031539916992, 0.009310144424438477, 0.009431455612182617, 0.009336576461791993, 0.009308671951293946, 0.009359359741210938, 0.00932863998413086, 0.009142399787902832, 0.009356287956237793, 0.00940339183807373, 0.009405728340148925, 0.009444448471069336, 0.009405535697937012, 0.009377856254577637, 0.00942131233215332, 0.00937724781036377, 0.00939673614501953, 0.00940236759185791, 0.009385919570922852, 0.009393792152404785, 0.009398719787597657, 0.009405471801757813, 0.009403136253356934, 0.0093820161819458, 0.009356608390808105, 0.009387136459350586, 0.009336735725402832, 0.009344767570495606, 0.00937174415588379, 0.009421728134155273, 0.009378560066223144, 0.009298175811767577, 0.009307616233825684, 0.009338879585266113, 0.009344672203063965, 0.009304448127746582, 0.009323007583618164, 0.00934012794494629, 0.009345279693603516, 0.009398816108703613, 0.009449472427368164, 0.0094551362991333, 0.00945952033996582, 0.009457856178283692, 0.009525471687316895, 0.009459967613220216, 0.009455167770385742, 0.009484479904174805, 0.009679231643676758, 0.009426815986633301, 0.00946774387359619, 0.009488384246826171, 0.009523551940917968, 0.009461824417114257, 0.009495519638061523, 0.009542431831359863, 0.009549311637878418, 0.009468671798706054, 0.00972332763671875, 0.009751935958862305, 0.009440192222595215, 0.00941977596282959, 0.009423871994018555, 0.009391103744506836, 0.009405471801757813, 0.00946070384979248, 0.009387007713317871, 0.009416383743286133, 0.00935763168334961, 0.00943513584136963, 0.00912611198425293, 0.009406944274902344, 0.009410431861877441, 0.009377216339111328, 0.009329055786132813, 0.00938806438446045, 0.00939145565032959, 0.00938486385345459, 0.009397407531738282, 0.009386848449707032, 0.00936451244354248, 0.009382176399230956, 0.009388735771179199, 0.009316255569458008, 0.009406559944152832, 0.00953331184387207, 0.009412511825561524, 0.0094487361907959, 0.00936780834197998, 0.009400832176208495, 0.009402527809143066, 0.00939625644683838, 0.009398143768310547, 0.00939840030670166, 0.00936678409576416, 0.009421504020690918, 0.009434240341186523, 0.009366463661193848, 0.009449472427368164, 0.009408512115478516, 0.009356351852416992, 
0.009380031585693359, 0.009461983680725098, 0.009404704093933106, 0.009457216262817383, 0.009394720077514648, 0.009377056121826172, 0.009399168014526367, 0.009394240379333497, 0.009453503608703614, 0.009359135627746581, 0.009331007957458497, 0.009373791694641113, 0.009400128364562988, 0.009371839523315429, 0.009406271934509278, 0.00938316822052002, 0.009392255783081055, 0.00940886402130127, 0.0095316801071167, 0.009399519920349122, 0.009356032371520996, 0.009429023742675782, 0.009313983917236328, 0.009426719665527343, 0.00937446403503418, 0.009426719665527343, 0.009396224021911622, 0.009520768165588379, 0.009357983589172364, 0.009409664154052734, 0.009384575843811036, 0.00942086410522461, 0.009166848182678223, 0.00934502410888672, 0.009397919654846192, 0.009404800415039062, 0.009369503974914551, 0.009389568328857421, 0.009349727630615234, 0.009338848114013672, 0.009363455772399902, 0.009359359741210938, 0.009388256072998046, 0.009358943939208985, 0.009377984046936036, 0.009359359741210938, 0.009383232116699218, 0.009419072151184081, 0.009376128196716309, 0.009392127990722657, 0.009373408317565918, 0.009728287696838379, 0.009430944442749023, 0.009408384323120118, 0.010391776084899901, 0.009801728248596191, 0.009504128456115722, 0.009474687576293946, 0.009377792358398437, 0.009390080451965332, 0.00937168025970459, 0.00939401626586914, 0.009515135765075684, 0.00935321617126465, 0.009392160415649414, 0.009377408027648925, 0.009329183578491211, 0.009389216423034669, 0.009396927833557129, 0.009358816146850586, 0.009372384071350097, 0.009340959548950195, 0.009354016304016114, 0.009345343589782715, 0.009363615989685058, 0.009327103614807129, 0.009387680053710937, 0.009328000068664551, 0.009277600288391114, 0.009435999870300292, 0.009334752082824707, 0.00932249641418457, 0.0093306884765625, 0.009406047821044922, 0.00930406379699707, 0.0093024320602417, 0.009339136123657226, 0.00936297607421875, 0.009322527885437012, 0.009349311828613281, 0.009329952239990235, 0.009459872245788575, 0.00938265609741211, 0.009336544036865234, 0.009419967651367187, 0.009110336303710937, 0.009313471794128418, 0.009364447593688964, 0.009348959922790527, 0.009281439781188965, 0.009326687812805176, 0.00930611228942871, 0.009275391578674316, 0.009387295722961425, 0.00932703971862793, 0.009314559936523437, 0.009373727798461915, 0.009341119766235351, 0.00936473560333252, 0.009318047523498536, 0.009324959754943848, 0.009329312324523926, 0.00930799961090088, 0.009324383735656739, 0.009426719665527343, 0.009369983673095703, 0.009389920234680176, 0.009334943771362304, 0.009520607948303223, 0.009496895790100098, 0.009322527885437012, 0.009900064468383788, 0.010423583984375, 0.00939625644683838, 0.009402336120605469, 0.009493375778198242, 0.009373727798461915, 0.009349087715148925, 0.00937923240661621, 0.00933523178100586, 0.009337247848510742, 0.009377535820007324, 0.009340928077697755, 0.009377504348754883, 0.009336288452148438, 0.009450016021728515, 0.009357600212097169, 0.009375840187072755, 0.009335871696472167, 0.00931436824798584, 0.009438272476196288, 0.009320159912109376, 0.00932198429107666, 0.009363967895507813, 0.009383008003234864, 0.009347999572753906, 0.00947116756439209, 0.009381728172302246, 0.009358304023742676, 0.009369376182556153, 0.009379199981689453, 0.009352031707763673, 0.009407584190368651, 0.009513504028320312, 0.009406847953796387, 0.009389344215393067, 0.009453280448913574, 0.009483263969421387, 0.009209152221679687, 0.010048543930053711, 0.009518048286437989, 0.009348992347717286, 0.009343168258666993, 
0.009411199569702149, 0.009365407943725586, 0.009390175819396973, 0.009496576309204101, 0.009372799873352052, 0.009358207702636718, 0.009406016349792481, 0.009380127906799316, 0.009351327896118164, 0.009365376472473144, 0.009367679595947266, 0.009435296058654785, 0.009348095893859864, 0.00931926441192627, 0.00941875171661377, 0.009361408233642577, 0.009355039596557617, 0.009385696411132812, 0.00944927978515625, 0.00937235164642334, 0.00940236759185791, 0.00939743995666504, 0.00934175968170166, 0.00933283233642578, 0.009445280075073241, 0.009361632347106933, 0.009336352348327637, 0.009380096435546876, 0.009352831840515137, 0.009394559860229492, 0.009381440162658692, 0.009484736442565918, 0.009534912109375, 0.009559807777404785, 0.009439711570739745, 0.00961571216583252, 0.009469951629638672, 0.009475711822509766, 0.009495039939880372, 0.009420672416687011, 0.009412384033203125, 0.009357536315917969, 0.00937929630279541, 0.009334527969360351, 0.009353280067443848, 0.009388768196105957, 0.009492511749267579, 0.010084159851074219, 0.010495200157165527, 0.009456064224243164, 0.009431455612182617, 0.00932156753540039, 0.009366016387939453, 0.009333375930786134, 0.00933078384399414, 0.009389856338500976, 0.009409919738769531, 0.009308639526367188, 0.009083904266357423, 0.009385472297668456, 0.009377535820007324, 0.009351936340332032, 0.009303903579711914, 0.009297408103942872, 0.009341312408447265, 0.009351424217224121, 0.009408479690551757, 0.009373023986816406, 0.009408767700195313, 0.009383775711059571, 0.009412960052490234, 0.00941801643371582, 0.009358176231384277, 0.009359264373779297, 0.009395999908447266, 0.009341407775878906, 0.009355263710021973, 0.009355263710021973, 0.00934620761871338, 0.00936451244354248, 0.009377599716186524, 0.009379584312438965, 0.009346559524536132, 0.00936627197265625, 0.00933683204650879, 0.009388031959533692, 0.009399680137634278, 0.009428799629211426, 0.00940550422668457, 0.009430784225463867, 0.009379776000976562, 0.00935654354095459, 0.009372480392456055, 0.009434399604797363, 0.009374431610107422, 0.009350591659545899, 0.009392383575439454, 0.00970137596130371, 0.009402175903320313, 0.009425215721130371, 0.009479647636413574, 0.00953007984161377, 0.009441503524780273, 0.009716575622558593, 0.00940659236907959, 0.009364447593688964, 0.009341119766235351, 0.009385631561279298, 0.00944108772277832, 0.009332927703857422, 0.009416704177856445, 0.009351296424865723, 0.00935318374633789, 0.009350624084472656, 0.009396672248840332, 0.009351167678833008, 0.009450943946838378, 0.009349696159362793, 0.009310208320617675, 0.009358752250671386, 0.00936406421661377, 0.009086976051330567, 0.00933683204650879, 0.009376928329467774, 0.009335007667541504, 0.009317024230957031, 0.009305952072143554, 0.00926956844329834, 0.009291584014892579, 0.009351167678833008, 0.009295712471008301, 0.009334943771362304, 0.00937936019897461, 0.009371583938598632, 0.009630240440368653, 0.009832159996032715, 0.009519392013549805, 0.009410400390625, 0.009368032455444335, 0.00950444793701172, 0.0113438720703125, 0.01075932788848877, 0.009622143745422364, 0.010006752014160156, 0.009635807991027832, 0.009352767944335937, 0.009534079551696778, 0.009362815856933594, 0.009377599716186524, 0.00930083179473877, 0.009414463996887208, 0.009347071647644043, 0.00930406379699707, 0.00932044792175293, 0.009332768440246582, 0.009301792144775391, 0.009324735641479492, 0.009340928077697755, 0.009311360359191894, 0.00930662441253662, 0.009340543746948243, 0.009279711723327637, 0.00937622356414795, 
0.00937171173095703, 0.00931545639038086, 0.009314944267272949, 0.009339136123657226, 0.009315775871276856, 0.00937987232208252, 0.009402175903320313, 0.009466527938842773, 0.009410271644592285, 0.009389727592468262, 0.009336511611938476, 0.009333760261535644, 0.009375743865966797, 0.009383359909057616, 0.009389792442321777, 0.009400192260742188, 0.009409503936767578, 0.009369600296020507, 0.00936473560333252, 0.009464320182800292, 0.009335040092468262, 0.009123744010925293, 0.00933078384399414, 0.009518688201904296, 0.009361536026000976, 0.009290016174316407, 0.00933683204650879, 0.009330559730529785, 0.009312640190124512, 0.00936524772644043, 0.009368927955627441, 0.0093602876663208, 0.009559391975402832, 0.00936406421661377, 0.009369728088378906, 0.009453248023986817, 0.009401856422424316, 0.009431551933288575, 0.009431039810180664, 0.009500672340393066, 0.009457663536071777, 0.009469951629638672, 0.009567359924316406, 0.009427840232849122, 0.009393695831298829, 0.009343168258666993, 0.009354880332946777, 0.009375935554504394, 0.009339136123657226, 0.009319744110107421, 0.009363840103149414, 0.009353983879089355, 0.009332511901855469, 0.00938368034362793, 0.009375935554504394, 0.00927948760986328, 0.009289919853210449, 0.009318367958068847, 0.009306015968322754, 0.009382080078125, 0.009333791732788086, 0.009353280067443848, 0.009345727920532227, 0.009290911674499512, 0.009307007789611816, 0.009385791778564453, 0.009337023735046386, 0.009400287628173827, 0.009545599937438965, 0.009359519958496093, 0.009342047691345215, 0.009325440406799317, 0.00934505558013916, 0.009355039596557617, 0.009345600128173828, 0.009330400466918945, 0.009354304313659668, 0.009308159828186035, 0.009556863784790039, 0.009557344436645509, 0.009355936050415039, 0.009360671997070313, 0.009388768196105957, 0.009324543952941895, 0.009103679656982422, 0.009329919815063476, 0.009345664024353027, 0.009330656051635742, 0.009348447799682618, 0.009343392372131347, 0.009340800285339356, 0.009353759765625, 0.009324543952941895, 0.009340703964233399, 0.009422112464904785, 0.009320544242858888, 0.00930288028717041, 0.009332032203674316, 0.009310879707336427, 0.009331808090209961, 0.009344063758850097, 0.009291647911071777, 0.009299712181091309, 0.009385696411132812, 0.009289664268493652, 0.009308287620544434, 0.009308128356933594, 0.009341055870056153, 0.009326720237731933, 0.009347328186035156, 0.009329888343811035, 0.009296671867370605, 0.00935321617126465, 0.009351167678833008, 0.00931430435180664, 0.009381888389587402, 0.009335007667541504, 0.00950044822692871, 0.009406463623046875, 0.009402303695678712, 0.009346752166748047, 0.009352704048156739, 0.009325311660766602, 0.009344639778137208, 0.009351679801940918, 0.009334783554077148, 0.009344287872314453, 0.009347488403320312, 0.00934943962097168, 0.00941055965423584, 0.009389727592468262, 0.009392064094543457, 0.009463839530944824, 0.009458047866821289, 0.010268351554870606, 0.00975113582611084, 0.009457375526428223, 0.009430463790893556, 0.009347647666931152, 0.009381888389587402, 0.009362912178039551, 0.009353119850158692, 0.009354111671447753, 0.009720576286315919, 0.009441535949707031, 0.009399295806884766, 0.009513024330139161, 0.009140416145324707, 0.009381888389587402, 0.009433792114257813, 0.009336671829223632, 0.009315360069274902, 0.009380831718444824, 0.009308159828186035, 0.009447423934936524, 0.009377087593078613, 0.009386336326599121, 0.009400544166564941, 0.009356863975524902, 0.009361984252929688, 0.009359359741210938, 0.009347071647644043, 
0.009367551803588867, 0.009392191886901855, 0.009332768440246582, 0.009381792068481446, 0.00940236759185791, 0.009324543952941895, 0.009359359741210938, 0.009361311912536622, 0.009320544242858888, 0.009357312202453612, 0.009358752250671386, 0.009402976036071778, 0.009478143692016602, 0.009328415870666504, 0.00931430435180664, 0.009342720031738281, 0.009369536399841309, 0.00937600040435791, 0.009316672325134277, 0.00938595199584961, 0.009531392097473144, 0.009345120429992675, 0.009352160453796386, 0.00937168025970459, 0.009380096435546876, 0.009396160125732422, 0.009330847740173339, 0.009411135673522949, 0.009363455772399902, 0.009367551803588867, 0.009349472045898437, 0.00933568000793457, 0.009379679679870606, 0.010228256225585937, 0.009622048377990723, 0.010313599586486817, 0.00944979190826416, 0.009418144226074218, 0.009402655601501464, 0.00943513584136963, 0.009426655769348145, 0.009367839813232422, 0.009368703842163087, 0.009528287887573242, 0.009414560317993164, 0.009357184410095214, 0.00936303997039795, 0.009339679718017578]",tokens/s,106.23313235756451,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,6678.642688,9395.109888,0.0,8992.587776,8404.320768,s,1,14.09198046875,14.09198046875,0.0,14.09198046875,14.09198046875,14.09198046875,14.09198046875,[14.09198046875],,kWh,0.00020472667180001735,2.2575581736337233e-05,9.222618489199141e-05,0.000319528438428346,,MB,1558.679552,9409.789952,0.0,8992.587776,7880.40704,s,10,3.524734954833984,0.3524734954833984,0.001253602328356267,0.3528471527099609,0.3536143096923828,0.35374500579833984,0.35384956268310547,"[0.34937255859375, 0.3533468017578125, 0.35196456909179685, 0.3525855102539063, 0.35179421997070315, 0.3518486328125, 0.3531087951660156, 0.35325289916992186, 0.3538757019042969, 0.3535852661132812]",tokens/s,726.2957450145572,kWh,1.0295724955027442e-05,1.135431234058456e-06,6.8050724938610636e-06,1.823622868294696e-05,tokens/kWh,14037990.225434626,MB,1566.806016,9409.789952,0.0,8992.587776,8123.606528,s,10,25.482337646484375,2.5482337646484376,0.0024533141148315144,2.548085205078125,2.5518118408203128,2.5519429077148437,2.552047761230469,"[2.544296142578125, 2.55006201171875, 2.548058837890625, 2.548111572265625, 2.546191650390625, 2.552073974609375, 2.5487646484375, 2.547816162109375, 2.55178271484375, 2.545179931640625]",tokens/s,24.72300652867759,kWh,7.43830385470593e-05,8.204494518905732e-06,4.954008369334031e-05,0.00013212761675930536,tokens/kWh,476811.74871083937,,s,630,25.47751351547242,0.04044049764360701,0.0004290279994536834,0.040389328002929686,0.04080860748291016,0.04090904216766358,0.04164722217559815,"[0.04072857666015625, 0.04015955352783203, 0.03994428634643555, 0.039835617065429686, 0.03985964965820313, 0.03989462280273438, 0.040020961761474606, 0.039948287963867186, 0.03995347213745117, 0.039965152740478516, 0.039930335998535155, 0.03993119812011719, 
0.04004227066040039, 0.04009667205810547, 0.039978431701660155, 0.040022590637207034, 0.04019561767578125, 0.04013644790649414, 0.04004905700683594, 0.04005100631713867, 0.040046592712402344, 0.04010598373413086, 0.040134208679199215, 0.04038051223754883, 0.04041340637207031, 0.04031423950195313, 0.040255809783935545, 0.04031856155395508, 0.04039491271972656, 0.040370880126953126, 0.04029849624633789, 0.04034953689575195, 0.04030268859863281, 0.04034566497802734, 0.04031404876708984, 0.040342334747314454, 0.040572830200195316, 0.04052294540405273, 0.04050831985473633, 0.040399105072021484, 0.04052761459350586, 0.0406874885559082, 0.04076486587524414, 0.04064940643310547, 0.0406497917175293, 0.040723392486572266, 0.040734752655029294, 0.04070191955566406, 0.040713630676269534, 0.04083363342285156, 0.040920608520507815, 0.040578784942626955, 0.04073686218261719, 0.04056054306030273, 0.04071609497070312, 0.040600513458251955, 0.040703201293945314, 0.040938270568847655, 0.040648128509521486, 0.04074758529663086, 0.040753150939941404, 0.04065075302124024, 0.04072627258300781, 0.0408298225402832, 0.04030652618408203, 0.04007465744018555, 0.039975231170654296, 0.04007161712646484, 0.04001753616333008, 0.04022220611572266, 0.040094593048095706, 0.039959999084472654, 0.039901023864746095, 0.040078048706054685, 0.04010377502441406, 0.04008752059936523, 0.03998041534423828, 0.040003616333007815, 0.04001200103759766, 0.03997734451293945, 0.04000582504272461, 0.044216064453125, 0.04025932693481445, 0.040143264770507815, 0.04009519958496094, 0.04021299362182617, 0.040287742614746096, 0.04038060760498047, 0.04028492736816406, 0.040177345275878906, 0.040218624114990234, 0.040138496398925784, 0.04015568161010742, 0.040285377502441405, 0.04016387176513672, 0.04026367950439453, 0.04022016143798828, 0.04024166488647461, 0.04047382354736328, 0.04047951889038086, 0.040521984100341794, 0.040261375427246095, 0.04040259170532227, 0.04037257766723633, 0.04037337493896485, 0.04036083221435547, 0.04048076629638672, 0.041060352325439455, 0.040710208892822265, 0.04061148834228516, 0.040636703491210936, 0.04087331390380859, 0.04069443130493164, 0.04059340667724609, 0.04058844757080078, 0.04062252807617187, 0.04056310272216797, 0.04136700820922851, 0.04111824035644531, 0.04076339340209961, 0.040683521270751956, 0.040613311767578125, 0.04226220703125, 0.04081126403808594, 0.04094700622558594, 0.040764225006103515, 0.04084787368774414, 0.04023862457275391, 0.03994262313842773, 0.03990118408203125, 0.039880702972412106, 0.03987865447998047, 0.03989039993286133, 0.040073089599609375, 0.039981983184814454, 0.039951488494873046, 0.03999808120727539, 0.0399725456237793, 0.03989846420288086, 0.040131393432617186, 0.04002595138549805, 0.03999151992797852, 0.04007126235961914, 0.04015446472167969, 0.04023772811889648, 0.04023091125488281, 0.04023062515258789, 0.040537696838378906, 0.04139049530029297, 0.040605567932128904, 0.04059961700439453, 0.04064012908935547, 0.04041516876220703, 0.040379169464111325, 0.04029872131347656, 0.04037519836425781, 0.04027072143554687, 0.04056268692016601, 0.040409088134765625, 0.040304256439208985, 0.04145395278930664, 0.04051148986816406, 0.04037353515625, 0.04042211151123047, 0.04050320053100586, 0.04060374450683594, 0.04047052764892578, 0.04047257614135742, 0.04049100875854492, 0.04077772903442383, 0.04052787017822266, 0.04064377593994141, 0.040752223968505856, 0.040873695373535156, 0.0407955207824707, 0.04070646286010742, 0.040669567108154295, 0.04061372756958008, 0.040623966217041015, 
0.04059552001953125, 0.04066511917114258, 0.040718017578125, 0.04075763320922852, 0.040882144927978516, 0.0405975341796875, 0.0407174072265625, 0.04077609634399414, 0.04064716720581055, 0.04076784133911133, 0.040764129638671875, 0.040264961242675784, 0.03998796844482422, 0.03993804931640625, 0.03992153549194336, 0.03998086547851563, 0.03998342514038086, 0.03999059295654297, 0.040066879272460935, 0.040040576934814456, 0.04033203125, 0.04016332626342774, 0.04156134414672852, 0.040134464263916016, 0.04007827377319336, 0.04001587295532227, 0.040048641204833986, 0.04023091125488281, 0.0401297607421875, 0.04027260971069336, 0.04116592025756836, 0.04037065505981445, 0.040540672302246096, 0.040406719207763675, 0.04055072021484375, 0.04043775939941406, 0.040325119018554685, 0.040419681549072266, 0.040261280059814455, 0.04034323120117188, 0.04024908828735352, 0.040360862731933594, 0.040410049438476564, 0.040301280975341795, 0.04032636642456055, 0.040411937713623045, 0.04041046524047852, 0.040374752044677734, 0.04032531356811524, 0.040333663940429684, 0.04032262420654297, 0.04040214538574219, 0.040618881225585934, 0.04059958267211914, 0.04055855941772461, 0.040574977874755856, 0.040629344940185545, 0.04072233581542969, 0.04086816024780274, 0.040923198699951174, 0.04074764633178711, 0.04075136184692383, 0.04060707092285156, 0.04061430358886719, 0.04059689712524414, 0.04070835113525391, 0.040717697143554686, 0.04080847930908203, 0.04076179122924805, 0.04077014541625976, 0.04084726333618164, 0.04083097457885742, 0.0406014404296875, 0.04067241668701172, 0.04038243103027344, 0.040185726165771485, 0.04009056091308594, 0.04006739044189453, 0.04004003143310547, 0.04012432098388672, 0.04020028686523437, 0.040161537170410155, 0.040169376373291016, 0.040103935241699216, 0.040030208587646485, 0.04006623840332031, 0.04023174285888672, 0.04011110305786133, 0.04000191879272461, 0.03999932861328125, 0.04012521743774414, 0.04002348709106445, 0.04011884689331055, 0.04026819229125977, 0.040088958740234375, 0.04009603118896484, 0.04029433441162109, 0.04038451385498047, 0.04032921600341797, 0.040371646881103514, 0.04057145690917969, 0.04080355072021485, 0.04030534362792969, 0.04030223846435547, 0.040403392791748045, 0.04026777648925781, 0.0401715202331543, 0.040275966644287106, 0.04037836837768555, 0.04025542449951172, 0.040377761840820314, 0.040376319885253906, 0.04091913604736328, 0.04040556716918945, 0.04058486557006836, 0.04052329635620117, 0.040557376861572264, 0.04049692916870117, 0.04060182571411133, 0.04060774230957031, 0.04065280151367188, 0.04060979080200195, 0.04097014236450195, 0.04079951858520508, 0.04078470230102539, 0.04106649780273437, 0.04083059310913086, 0.04081292724609375, 0.04077363204956055, 0.04074300765991211, 0.04064041519165039, 0.04059065628051758, 0.040638721466064454, 0.04073926544189453, 0.04061798477172852, 0.040681472778320314, 0.04071788787841797, 0.04033817672729492, 0.0399818229675293, 0.0399617919921875, 0.040042560577392576, 0.04002799987792969, 0.040007518768310546, 0.040030208587646485, 0.03996422576904297, 0.039981056213378906, 0.03996307373046875, 0.040074848175048826, 0.03999785614013672, 0.03998720169067383, 0.040079360961914064, 0.041125408172607424, 0.04013894271850586, 0.04015270233154297, 0.040583839416503904, 0.040048641204833986, 0.041011199951171876, 0.04583939361572266, 0.040139713287353516, 0.04032275390625, 0.04034716796875, 0.0405302734375, 0.040219104766845704, 0.040395038604736325, 0.04025929641723633, 0.04025958251953125, 0.040379871368408204, 0.04037276840209961, 
0.04039199829101563, 0.04044620895385742, 0.040376766204833985, 0.04034969711303711, 0.040271873474121096, 0.040262687683105466, 0.04030358505249024, 0.04034265518188476, 0.04036902236938476, 0.04050697708129883, 0.04040131378173828, 0.040371456146240235, 0.0403873291015625, 0.04044297790527344, 0.04048579025268555, 0.04050089645385742, 0.040586910247802734, 0.04070207977294922, 0.0408818244934082, 0.040702880859375, 0.04075478363037109, 0.04076755142211914, 0.04097171020507812, 0.041005985260009765, 0.040836769104003905, 0.0409090576171875, 0.04075939178466797, 0.040681472778320314, 0.04064051055908203, 0.04082454299926758, 0.04084288024902344, 0.04086163330078125, 0.040230976104736325, 0.03997849655151367, 0.039870975494384765, 0.03990108871459961, 0.03981923294067383, 0.04012246322631836, 0.04004780960083008, 0.03998358535766602, 0.0399343376159668, 0.04006911849975586, 0.0401646728515625, 0.04000019073486328, 0.04000153732299805, 0.040005374908447265, 0.04007961654663086, 0.04003366470336914, 0.040323711395263674, 0.04046131134033203, 0.04030748748779297, 0.040327392578125, 0.04029548645019531, 0.04040595245361328, 0.040557857513427734, 0.04034633636474609, 0.040286209106445314, 0.040290462493896485, 0.0403164176940918, 0.040319328308105466, 0.04033059310913086, 0.04037267303466797, 0.04050713729858398, 0.040404705047607424, 0.0404323844909668, 0.04055039978027344, 0.04050227355957031, 0.04060201644897461, 0.040491615295410156, 0.04057113647460937, 0.04052556610107422, 0.040549663543701174, 0.040618080139160156, 0.04049983978271485, 0.04070809555053711, 0.040519134521484375, 0.040634910583496095, 0.04064041519165039, 0.04082492828369141, 0.040789024353027344, 0.040661983489990235, 0.04067318344116211, 0.04062422561645508, 0.04232022476196289, 0.04075689697265625, 0.040611553192138675, 0.040634654998779295, 0.04072985458374023, 0.04085631942749023, 0.04065491104125977, 0.040787841796875, 0.04090902328491211, 0.04080214309692383, 0.04097859191894531, 0.040858272552490235, 0.04021475219726563, 0.03993945693969726, 0.039841888427734375, 0.039909919738769534, 0.03995785522460937, 0.039922401428222655, 0.03984000015258789, 0.03990297698974609, 0.04024524688720703, 0.04043161773681641, 0.03995379257202149, 0.040052734375, 0.04012505722045898, 0.040220672607421876, 0.04011142349243164, 0.04034835052490234, 0.0401921272277832, 0.04017462539672852, 0.04021744155883789, 0.040218624114990234, 0.04015068817138672, 0.04039084625244141, 0.04031094360351563, 0.04026777648925781, 0.040292350769042966, 0.04024524688720703, 0.040302593231201174, 0.04031078338623047, 0.040302593231201174, 0.04029849624633789, 0.04026367950439453, 0.0403590087890625, 0.04172483062744141, 0.040531967163085936, 0.040441631317138675, 0.040452159881591794, 0.040513694763183596, 0.040390464782714845, 0.04047065734863281, 0.040462398529052736, 0.040548351287841795, 0.0406162223815918, 0.04059337615966797, 0.04051724624633789, 0.04071027374267578, 0.040599681854248046, 0.04060348892211914, 0.04062771224975586, 0.04067177581787109, 0.04068966293334961, 0.04085760116577149, 0.040795455932617186, 0.04075004959106445, 0.040855262756347654, 0.04080393600463867, 0.040759712219238284, 0.04083203125, 0.04101424026489258, 0.040953857421875, 0.040699905395507815, 0.04080607986450195, 0.040724800109863284, 0.0408985595703125, 0.040277408599853515, 0.040194080352783206, 0.03994867324829102, 0.03995872116088867, 0.03998310470581055, 0.039929855346679685, 0.03993395233154297, 0.040081409454345705, 0.04016742324829101, 0.04009574508666992, 
0.04016147232055664, 0.040302497863769535, 0.040286113739013675, 0.04014057540893555, 0.04025360107421875, 0.04004665756225586, 0.04002816009521484, 0.040321025848388675, 0.04047785568237305, 0.04032803344726563, 0.04037974548339844, 0.04041523361206055, 0.04183321762084961, 0.04047657775878906, 0.040468223571777345, 0.040330814361572265, 0.040408992767333986, 0.04033206558227539, 0.040308734893798825, 0.04031024169921875, 0.040280609130859374, 0.04024934387207031, 0.04032921600341797, 0.040323070526123043, 0.040302593231201174, 0.04050716781616211, 0.04050556945800781, 0.04046847915649414, 0.04053811264038086, 0.04044319915771484, 0.040506046295166014, 0.04068307113647461, 0.04076787185668945, 0.040631839752197266, 0.04086223983764648, 0.0412808952331543, 0.04143987274169922, 0.04088230514526367, 0.04076531219482422, 0.04080640029907227, 0.040787967681884765, 0.040680992126464845, 0.04167728042602539, 0.04066409683227539, 0.04061692810058594, 0.040630271911621094, 0.04069686508178711, 0.04075171279907226, 0.04079561614990234, 0.0408766098022461, 0.040816001892089844, 0.04080976104736328, 0.04079350280761719, 0.040403553009033207, 0.04009363174438477, 0.04002822494506836, 0.0400711669921875, 0.0399747200012207, 0.03989728164672852, 0.039979007720947264, 0.04003571319580078, 0.040061569213867186, 0.039995391845703124, 0.03995647811889649, 0.039997440338134765, 0.04010371017456055, 0.040376544952392575, 0.04025753784179688, 0.04009574508666992, 0.040068096160888675, 0.040164352416992184, 0.0401797103881836, 0.04022476959228516, 0.04030012893676758, 0.040388191223144534, 0.040368961334228515, 0.04035583877563476, 0.04032495880126953, 0.041573631286621095, 0.04043395233154297, 0.040406753540039066, 0.04027484893798828, 0.04046438217163086, 0.04031462478637695, 0.04030278396606445, 0.04020787048339844, 0.04015324783325196, 0.04028457641601563, 0.040308734893798825, 0.040355262756347654, 0.040360511779785155, 0.04039286422729492, 0.04050022506713867, 0.040409439086914065, 0.04034406280517578, 0.040501087188720704, 0.040488990783691406, 0.04057535934448242, 0.04066656112670899, 0.0406690559387207, 0.04058892822265625, 0.04061881637573242, 0.04074911880493164, 0.04075299072265625, 0.04066928100585938, 0.04064460754394531, 0.040888320922851565, 0.04059743881225586, 0.04071772766113281, 0.04070883178710937, 0.04060291290283203, 0.04066371154785156, 0.04057292938232422, 0.04051968002319336, 0.04072351837158203]",tokens/s,24.72768779486283,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,2220.83072,2556.297216,0.0,2153.775104,2041.744384,s,1,9.668607421875,9.668607421875,0.0,9.668607421875,9.668607421875,9.668607421875,9.668607421875,[9.668607421875],,kWh,7.651731435836002e-05,8.432945840556308e-06,3.1165302709995624e-05,0.00011611556290891196,,MB,2291.134464,2570.97728,0.0,2153.775104,1917.691904,s,10,1.0124473571777344,0.10124473571777344,0.00041308148812317094,0.10131329345703124,0.10179029235839844,0.1018086669921875,0.10182336669921875,"[0.10029673767089843, 0.10099686431884766, 0.10178620910644531, 0.10132418823242187, 0.10096182250976563, 0.10124259185791015, 0.10134928131103516, 0.10130239868164062, 0.10136022186279296, 0.10182704162597657]",tokens/s,2528.52652718278,kWh,2.9728797262203796e-06,3.2785584465915536e-07,1.968983056666433e-06,5.269718627545967e-06,tokens/kWh,48579443.81732873,MB,2300.391424,2570.97728,0.0,2153.775104,2001.552384,s,10,15.402478515625,1.5402478515625,0.003200889105791735,1.540654296875,1.543710363769531,1.5443274108886718,1.5448210485839844,"[1.543399169921875, 1.5373753662109375, 1.534029541015625, 1.5411453857421875, 1.5435732421875, 1.5406497802734376, 1.5449444580078124, 1.5397939453125, 1.5406588134765624, 1.5369088134765625]",tokens/s,40.90250795421648,kWh,4.5077530274608744e-05,4.971722908998683e-06,2.3391565009533595e-05,7.344081819314103e-05,tokens/kWh,857833.5801531668,,s,630,15.399636583328254,0.024443867592584517,0.0003991592428558866,0.024339983940124512,0.024807225036621094,0.02496989049911499,0.02623726821899415,"[0.024680831909179687, 0.02485043144226074, 0.024745824813842774, 0.024445087432861327, 0.024284671783447266, 0.02417100715637207, 0.024391616821289062, 0.02497926330566406, 0.02459667205810547, 0.024746047973632813, 0.027312128067016602, 0.024895423889160155, 0.024670272827148437, 0.024657920837402345, 0.024584192276000977, 0.02470412826538086, 0.02460176086425781, 0.024519775390625, 0.02439436721801758, 0.024258623123168944, 0.024225568771362303, 0.02435702323913574, 0.0249234561920166, 0.024214208602905272, 0.02422368049621582, 0.02414803123474121, 0.024166208267211914, 0.024452991485595703, 0.024530527114868163, 0.02433008003234863, 0.02423616027832031, 0.02430175971984863, 0.024181312561035156, 0.024178367614746094, 0.024249792098999023, 0.024189823150634764, 0.024180736541748047, 0.024156160354614258, 0.024153120040893556, 0.024176704406738282, 0.024272991180419923, 0.024101696014404296, 0.024571903228759767, 0.024339775085449218, 0.025407583236694335, 0.02609449577331543, 0.024600255966186525, 0.024509439468383788, 0.024500415802001952, 0.024364896774291992, 0.02459663963317871, 0.024548160552978517, 0.024606719970703125, 0.02441539192199707, 0.024349536895751953, 0.02429248046875, 0.024241024017333985, 0.024298944473266602, 0.02425913619995117, 0.024309471130371095, 0.02415830421447754, 0.024146303176879883, 0.024092639923095703, 0.024729824066162108, 0.024506368637084962, 0.024426015853881836, 0.02442723274230957, 0.024167552947998047, 0.02429539108276367, 0.024226144790649416, 0.024422719955444337, 0.0242906551361084, 0.024412927627563478, 0.02417465591430664, 0.024141664505004882, 0.024128799438476563, 0.024322816848754883, 0.02418070411682129, 0.024139776229858398, 0.024468639373779296, 0.02422665596008301, 0.024213056564331054, 0.024342975616455077, 0.024449024200439453, 0.024431840896606445, 0.02428940773010254, 0.024179328918457033, 0.024163904190063475, 0.024117855072021483, 0.024536352157592773, 0.024317983627319337, 
0.02427097511291504, 0.024170816421508787, 0.02422547149658203, 0.024195295333862304, 0.02437126350402832, 0.024148128509521485, 0.024516159057617188, 0.024154272079467774, 0.02416854476928711, 0.02481171226501465, 0.024475135803222657, 0.02415238380432129, 0.024215328216552735, 0.02437468719482422, 0.024824832916259764, 0.024978815078735353, 0.02482854461669922, 0.024811519622802734, 0.024944639205932616, 0.02464067268371582, 0.02449404716491699, 0.02441206359863281, 0.024648672103881837, 0.02435068893432617, 0.024282976150512694, 0.024282720565795897, 0.02453708839416504, 0.02455187225341797, 0.024504352569580078, 0.02467238426208496, 0.02431590461730957, 0.024129535675048826, 0.024579904556274415, 0.02435625648498535, 0.02495568084716797, 0.02508799934387207, 0.02460211181640625, 0.024416767120361327, 0.0243507194519043, 0.024252096176147462, 0.024252864837646486, 0.024403615951538084, 0.024303071975708006, 0.024259328842163086, 0.02426470375061035, 0.02437513542175293, 0.02519465637207031, 0.024434688568115235, 0.024329824447631834, 0.024329984664916992, 0.02433683204650879, 0.024379615783691407, 0.024349760055541993, 0.024318815231323242, 0.024248159408569336, 0.024112607955932615, 0.024188928604125977, 0.024099615097045897, 0.02409996795654297, 0.02418307113647461, 0.02422025680541992, 0.024155328750610352, 0.02412348747253418, 0.024087263107299806, 0.024153696060180665, 0.02410537528991699, 0.024417343139648436, 0.024247232437133788, 0.02418454360961914, 0.0241625919342041, 0.024285184860229493, 0.024420223236083984, 0.024752256393432617, 0.02432361602783203, 0.024165119171142578, 0.024122783660888672, 0.024258687973022462, 0.024372896194458007, 0.024279584884643556, 0.02425014305114746, 0.02412335968017578, 0.024146175384521483, 0.024495264053344727, 0.0243702392578125, 0.024217567443847655, 0.024356672286987305, 0.024336095809936523, 0.025016128540039064, 0.024438592910766603, 0.024443744659423828, 0.02440719985961914, 0.024308160781860353, 0.02457811164855957, 0.02457638359069824, 0.024466655731201173, 0.0243654727935791, 0.02444304084777832, 0.024399871826171874, 0.024901376724243165, 0.02470515251159668, 0.02480748748779297, 0.024553184509277345, 0.024469280242919923, 0.024506847381591798, 0.02430348777770996, 0.024254432678222658, 0.024350976943969725, 0.02425372886657715, 0.024418399810791015, 0.02451215934753418, 0.02455449676513672, 0.02438960075378418, 0.02424998474121094, 0.02443712043762207, 0.024364831924438477, 0.0242728328704834, 0.02434883117675781, 0.02429759979248047, 0.024254335403442382, 0.024434175491333008, 0.024291776657104493, 0.024260799407958986, 0.02698240089416504, 0.02535219192504883, 0.025271680831909178, 0.024631296157836914, 0.0244847354888916, 0.024454912185668944, 0.024581823348999023, 0.0245230712890625, 0.024686527252197266, 0.024488000869750975, 0.024329984664916992, 0.024307104110717775, 0.02438025665283203, 0.024268800735473633, 0.024205312728881836, 0.024309024810791016, 0.024344608306884764, 0.024251327514648438, 0.024198720932006836, 0.024262847900390624, 0.024309247970581056, 0.024304256439208985, 0.024403839111328124, 0.02432204818725586, 0.02428713607788086, 0.024264799118041993, 0.024223167419433592, 0.02433430480957031, 0.024484447479248047, 0.02450383949279785, 0.024545408248901366, 0.024522911071777342, 0.02432633590698242, 0.02426470375061035, 0.02421459197998047, 0.02427494430541992, 0.02432076835632324, 0.024286752700805665, 0.024167423248291017, 0.02590105628967285, 0.02502000045776367, 0.024915456771850586, 0.024588352203369142, 
0.024963167190551756, 0.02494438362121582, 0.024499200820922853, 0.024223743438720705, 0.024120384216308594, 0.024355775833129884, 0.024231935501098634, 0.024262239456176758, 0.024207328796386717, 0.024298944473266602, 0.024271648406982423, 0.02439094352722168, 0.0243654727935791, 0.024252960205078125, 0.024242176055908202, 0.02425651168823242, 0.024252416610717774, 0.024228864669799805, 0.024216575622558592, 0.02501139259338379, 0.02427168083190918, 0.024276992797851563, 0.024258399963378908, 0.02429305648803711, 0.024334815979003905, 0.02450432014465332, 0.024815839767456056, 0.024401695251464843, 0.024329408645629883, 0.02425913619995117, 0.024311872482299806, 0.02445123291015625, 0.024481632232666015, 0.02450223922729492, 0.02432841682434082, 0.02433024024963379, 0.024315967559814453, 0.024982847213745118, 0.024627840042114258, 0.024831872940063476, 0.024813695907592772, 0.024751552581787108, 0.024631872177124023, 0.02472755241394043, 0.02468454360961914, 0.025085088729858398, 0.02490825653076172, 0.02462351989746094, 0.024487903594970703, 0.024465408325195313, 0.02443059158325195, 0.024348543167114257, 0.02428121566772461, 0.02427494430541992, 0.024258399963378908, 0.024354047775268554, 0.024591136932373046, 0.024401792526245115, 0.02425881576538086, 0.024764799118041993, 0.024729600906372072, 0.02456166458129883, 0.02452230453491211, 0.024807199478149414, 0.024588191986083984, 0.02596678352355957, 0.024431167602539064, 0.024276992797851563, 0.024360960006713867, 0.024395103454589843, 0.02433091163635254, 0.024379392623901368, 0.024426496505737305, 0.02431795120239258, 0.02522854423522949, 0.024531711578369142, 0.024584192276000977, 0.024665279388427733, 0.024916799545288085, 0.024829952239990235, 0.024636959075927733, 0.024442848205566407, 0.024326656341552736, 0.024369152069091796, 0.024271936416625978, 0.024255327224731445, 0.02436003112792969, 0.024233087539672852, 0.024268672943115233, 0.02495859146118164, 0.024303871154785155, 0.02419932746887207, 0.024153215408325195, 0.024286048889160156, 0.02436425590515137, 0.02432080078125, 0.024360960006713867, 0.024201215744018553, 0.02411516761779785, 0.024147520065307616, 0.024156255722045897, 0.024095104217529296, 0.024154111862182616, 0.024209407806396483, 0.02454528045654297, 0.0243240966796875, 0.024373023986816407, 0.02434889602661133, 0.024235328674316405, 0.02424083137512207, 0.024395296096801758, 0.02433273506164551, 0.024272928237915038, 0.02416774368286133, 0.024109664916992186, 0.02485036849975586, 0.024160415649414062, 0.0243507194519043, 0.02460211181640625, 0.024549888610839843, 0.02494054412841797, 0.024751359939575196, 0.024891456604003905, 0.02470159912109375, 0.024552608489990236, 0.024441696166992186, 0.02423097610473633, 0.024202047348022462, 0.024322336196899413, 0.024303455352783204, 0.02436841583251953, 0.024214303970336915, 0.02422172737121582, 0.024160160064697265, 0.02415001678466797, 0.024291135787963866, 0.02430335998535156, 0.02437164878845215, 0.027465728759765624, 0.02447769546508789, 0.028647424697875977, 0.02480544090270996, 0.024221408843994142, 0.024309215545654298, 0.02438425636291504, 0.024336448669433595, 0.024287168502807616, 0.024397792816162108, 0.02440809631347656, 0.024492191314697265, 0.024345792770385743, 0.024849151611328123, 0.02429532814025879, 0.02435215950012207, 0.02424892807006836, 0.024575935363769532, 0.0244583683013916, 0.024438751220703124, 0.024298463821411133, 0.024347679138183594, 0.024282079696655273, 0.024267839431762694, 0.02420832061767578, 0.024296512603759767, 
0.0244685115814209, 0.02447052764892578, 0.024371456146240235, 0.024363679885864256, 0.024319616317749024, 0.024246015548706056, 0.024284799575805663, 0.02436403274536133, 0.02459359931945801, 0.026417984008789062, 0.024754047393798828, 0.024420480728149414, 0.02432009506225586, 0.024258527755737305, 0.02421139144897461, 0.024198816299438475, 0.02419327926635742, 0.02454742431640625, 0.024317888259887694, 0.024184192657470703, 0.024156864166259766, 0.025033504486083984, 0.02514089584350586, 0.024816032409667968, 0.025236576080322266, 0.024496416091918945, 0.02480316734313965, 0.024768352508544922, 0.02490070343017578, 0.024706304550170897, 0.024880767822265625, 0.024636383056640624, 0.02450432014465332, 0.02451251220703125, 0.024600576400756836, 0.024489984512329102, 0.024424448013305664, 0.024387264251708986, 0.02431161689758301, 0.024365247726440428, 0.024199487686157227, 0.024315135955810547, 0.024395999908447267, 0.024532928466796874, 0.024332895278930664, 0.02433244705200195, 0.024495967864990233, 0.02423916816711426, 0.024259807586669922, 0.024196832656860352, 0.024245407104492186, 0.0242446403503418, 0.024160703659057616, 0.024211328506469728, 0.024475711822509766, 0.02425801658630371, 0.02415225601196289, 0.024207775115966796, 0.024193023681640623, 0.024374399185180664, 0.024435583114624022, 0.02453708839416504, 0.024397216796875, 0.024397792816162108, 0.02433286476135254, 0.02494655990600586, 0.024266239166259765, 0.024310464859008788, 0.024417856216430663, 0.02469305610656738, 0.024776832580566406, 0.02456166458129883, 0.024473600387573242, 0.024393728256225586, 0.02433603286743164, 0.024211296081542967, 0.02413804817199707, 0.02416419219970703, 0.024174943923950195, 0.02412550354003906, 0.024200672149658202, 0.024156639099121094, 0.024147968292236328, 0.024102272033691405, 0.024848384857177733, 0.027023584365844726, 0.026295583724975587, 0.02480745506286621, 0.0244466552734375, 0.024288032531738283, 0.024316095352172853, 0.024469120025634766, 0.024496543884277345, 0.02428019142150879, 0.024195743560791017, 0.024250015258789063, 0.024437055587768555, 0.02422934341430664, 0.02425654411315918, 0.02445280075073242, 0.0243536319732666, 0.024289247512817382, 0.024975391387939454, 0.02431942367553711, 0.02449260711669922, 0.024344736099243165, 0.024248159408569336, 0.024412479400634766, 0.024329727172851562, 0.02418707275390625, 0.024270431518554687, 0.024283199310302733, 0.02422166442871094, 0.02419696044921875, 0.024229888916015626, 0.024238624572753907, 0.024246271133422852, 0.024211456298828125, 0.024195327758789062, 0.024235904693603514, 0.024244064331054686, 0.024692768096923827, 0.024405727386474608, 0.02441049575805664, 0.02429737663269043, 0.02428121566772461, 0.02427891159057617, 0.02424118423461914, 0.024253087997436522, 0.02425449562072754, 0.024283424377441406, 0.024197120666503907, 0.0245731201171875, 0.024173311233520508, 0.024258367538452147, 0.024494335174560546, 0.024363008499145508, 0.02457753562927246, 0.024320320129394533, 0.024300064086914062, 0.02437228775024414, 0.024461856842041017, 0.02456787109375, 0.024997119903564454, 0.024478464126586913, 0.024499359130859374, 0.02460348892211914, 0.02499078369140625, 0.02475436782836914, 0.024645471572875978, 0.024521631240844728, 0.024470720291137695, 0.02434889602661133, 0.024351232528686522, 0.02436115264892578, 0.02430294418334961, 0.024212032318115233, 0.024253503799438476, 0.024605983734130858, 0.024179775238037108, 0.024163040161132812, 0.024228767395019533, 0.024288127899169922, 0.024250463485717775, 
0.024365055084228517, 0.024341663360595702, 0.024218624114990234, 0.02421449661254883, 0.024295583724975586, 0.024459711074829103, 0.024264991760253905, 0.024354719161987306, 0.024690784454345704, 0.024244224548339844, 0.02426448059082031, 0.02414204788208008, 0.024204320907592773, 0.025327680587768554, 0.025967519760131837, 0.024762176513671876, 0.02549942398071289, 0.024560031890869142, 0.024473312377929688, 0.02439811134338379, 0.02447488021850586, 0.02437401580810547, 0.024381280899047852, 0.024340192794799806, 0.024236736297607423, 0.024237375259399414, 0.024248767852783203, 0.02433433532714844, 0.024337408065795898, 0.024267168045043946, 0.024181631088256834, 0.024186208724975587, 0.02417286491394043, 0.024127552032470703, 0.024375167846679688, 0.02447577667236328, 0.024258560180664062, 0.02416383934020996, 0.02412928009033203, 0.024105056762695313, 0.024175039291381838, 0.024258432388305665, 0.024222047805786132, 0.024290336608886718, 0.024148448944091797, 0.024160703659057616]",tokens/s,40.910056324448746,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,922.640384,564.002816,0.0,178.25792,176.546816,s,1,7.99844482421875,7.99844482421875,0.0,7.99844482421875,7.99844482421875,7.99844482421875,7.99844482421875,[7.99844482421875],,kWh,1.5801135904174165e-05,1.7326500725907317e-06,4.792503834005757e-06,2.2326289810770654e-05,,MB,1281.908736,668.860416,0.0,253.755392,221.108736,s,16,0.22773865604400637,0.014233666002750396,0.0001227432837971364,0.014184832096099853,0.014434736251831054,0.014490248203277586,0.014493877124786376,"[0.014488736152648925, 0.014245023727416992, 0.014145279884338379, 0.014083392143249511, 0.014158143997192382, 0.014336159706115723, 0.014245439529418945, 0.014112192153930664, 0.014133919715881347, 0.014380736351013184, 0.014154848098754882, 0.014177344322204589, 0.014494784355163574, 0.014188320159912109, 0.014181344032287598, 0.014212991714477539]",tokens/s,17985.52810994249,kWh,4.265255704865992e-07,4.7031855793636e-08,2.8193534314748934e-07,7.554927694277245e-07,tokens/kWh,338851687.7453593,MB,1321.496576,685.637632,0.0,268.435456,221.111296,s,16,10.29339520263672,0.643337200164795,0.0035541057193582345,0.6429288330078125,0.6490890197753907,0.649431869506836,0.6495893035888671,"[0.6412854614257812, 0.6449509887695313, 0.6429357299804688, 0.6406949462890625, 0.6445343627929687, 0.64428564453125, 0.642240478515625, 0.6399418334960938, 0.6429219360351562, 0.649628662109375, 0.63609033203125, 0.6426907348632812, 0.648811767578125, 0.643429443359375, 0.6493662719726563, 0.6395866088867187]",tokens/s,97.92687253879014,kWh,1.856782815503516e-05,2.0477230073172067e-06,7.359876325851088e-06,2.797542748820346e-05,tokens/kWh,2251976.311231188,,s,1008,10.285248511314382,0.01020361955487539,0.00023204640451014431,0.010161808013916016,0.01036728982925415,0.010480872106552123,0.011102056245803831,"[0.009879551887512206, 
0.010170111656188965, 0.010084863662719726, 0.010161919593811035, 0.010188032150268554, 0.010201855659484864, 0.010772480010986327, 0.010194944381713868, 0.010115072250366211, 0.010151935577392577, 0.010086400032043457, 0.010073760032653808, 0.01021168041229248, 0.010122591972351075, 0.010027680397033691, 0.010070207595825196, 0.010168319702148437, 0.01014355182647705, 0.010071104049682617, 0.010250592231750488, 0.010160799980163573, 0.010108511924743652, 0.0100795841217041, 0.010312191963195801, 0.010214943885803223, 0.010322239875793457, 0.010217408180236816, 0.010158495903015137, 0.010189279556274414, 0.010247296333312988, 0.010117119789123535, 0.010128031730651856, 0.01016585636138916, 0.010102656364440918, 0.010002176284790039, 0.010077024459838867, 0.010080256462097169, 0.010067680358886718, 0.010034815788269043, 0.010102815628051758, 0.010142335891723633, 0.010244223594665527, 0.010209152221679688, 0.010188799858093261, 0.010135552406311036, 0.01022976016998291, 0.010341728210449219, 0.010128160476684571, 0.010078144073486329, 0.010059871673583985, 0.01013436794281006, 0.010148415565490723, 0.010365504264831543, 0.010169535636901856, 0.01031443214416504, 0.01033011245727539, 0.010305536270141602, 0.010233856201171876, 0.010177536010742188, 0.010198111534118653, 0.010159232139587403, 0.010260959625244141, 0.010189120292663573, 0.00996675205230713, 0.01019388771057129, 0.010163647651672363, 0.01018723201751709, 0.0102706880569458, 0.010495103836059571, 0.010524895668029785, 0.012611680030822754, 0.01063379192352295, 0.010340352058410645, 0.01031987190246582, 0.010233856201171876, 0.010223487854003906, 0.010459456443786621, 0.010174079895019532, 0.010204704284667968, 0.010515104293823243, 0.010244095802307129, 0.010093728065490722, 0.010226528167724609, 0.010354687690734863, 0.010049087524414062, 0.010084320068359375, 0.010076640129089355, 0.010278719902038574, 0.010049728393554688, 0.010171520233154296, 0.01002780818939209, 0.010149984359741212, 0.010219167709350586, 0.01013974380493164, 0.010129664421081543, 0.01009875202178955, 0.01013548755645752, 0.010252287864685058, 0.010162176132202149, 0.010115072250366211, 0.010118464469909667, 0.010370911598205567, 0.01035103988647461, 0.010099103927612305, 0.010173600196838379, 0.01009055995941162, 0.010082143783569336, 0.010105792045593262, 0.010063296318054199, 0.01017676830291748, 0.010017087936401367, 0.01009385585784912, 0.010105119705200196, 0.010201536178588867, 0.010123040199279785, 0.010180831909179688, 0.010146880149841308, 0.010140607833862305, 0.010026623725891113, 0.01019878387451172, 0.01011571216583252, 0.010184320449829101, 0.010180992126464844, 0.010200799942016602, 0.010114944458007813, 0.010196991920471191, 0.009853887557983398, 0.010180607795715332, 0.010147711753845216, 0.010080384254455567, 0.010160127639770507, 0.010165535926818847, 0.0102608642578125, 0.010264927864074708, 0.010166048049926758, 0.01012553596496582, 0.010288319587707519, 0.0103635835647583, 0.010366368293762206, 0.010361568450927734, 0.010291328430175781, 0.010231679916381835, 0.010334015846252442, 0.010106080055236817, 0.010109536170959473, 0.010111040115356445, 0.010119487762451172, 0.010110976219177246, 0.010057727813720703, 0.010120991706848145, 0.010148063659667968, 0.010131456375122071, 0.010173503875732421, 0.010064160346984864, 0.010096863746643066, 0.010057855606079101, 0.01011689567565918, 0.010253984451293946, 0.010199935913085937, 0.010135552406311036, 0.0101396484375, 0.010092543601989747, 0.010090496063232422, 0.010199104309082031, 
0.010159520149230957, 0.01013814353942871, 0.010265695571899413, 0.010322112083435059, 0.010181344032287598, 0.010285056114196778, 0.010340352058410645, 0.010252096176147462, 0.010419936180114745, 0.010308064460754395, 0.010178624153137207, 0.010102592468261718, 0.010146047592163086, 0.010180480003356933, 0.010225664138793946, 0.01023795223236084, 0.010197088241577149, 0.010336352348327636, 0.01017840003967285, 0.010147135734558105, 0.010328736305236817, 0.010250399589538575, 0.010444640159606933, 0.010324288368225097, 0.010202816009521484, 0.009903391838073731, 0.010261024475097656, 0.010379167556762695, 0.010105119705200196, 0.01016438388824463, 0.010092448234558106, 0.010233792304992675, 0.010093952178955078, 0.010153632164001464, 0.010345727920532226, 0.010172096252441407, 0.010167424201965331, 0.010255552291870118, 0.010221280097961426, 0.01018995189666748, 0.010177087783813477, 0.010170687675476074, 0.010112992286682129, 0.010071680068969727, 0.010149344444274903, 0.010123968124389648, 0.010117376327514648, 0.010403840065002442, 0.010034655570983887, 0.010193440437316894, 0.010004480361938477, 0.010098431587219238, 0.01009273624420166, 0.0100796480178833, 0.010129440307617187, 0.01006601619720459, 0.010220064163208007, 0.010049535751342773, 0.010022656440734863, 0.010006688117980956, 0.010080096244812011, 0.010129376411437989, 0.010055968284606934, 0.010057855606079101, 0.010130751609802245, 0.010158656120300293, 0.01072332763671875, 0.010175999641418456, 0.010166784286499024, 0.010112895965576171, 0.010170495986938477, 0.010403840065002442, 0.010474944114685058, 0.010232512474060058, 0.01010208034515381, 0.010117119789123535, 0.010068191528320312, 0.010076831817626954, 0.009987071990966797, 0.010076128005981445, 0.010259167671203613, 0.010270560264587402, 0.010098496437072753, 0.010275168418884277, 0.010236063957214355, 0.01023366355895996, 0.010171999931335449, 0.010076319694519043, 0.009799936294555663, 0.010178303718566895, 0.010123264312744141, 0.010091872215270995, 0.010167263984680175, 0.01006764793395996, 0.010020928382873535, 0.010014656066894532, 0.009977855682373048, 0.010013824462890624, 0.01003555202484131, 0.010046112060546874, 0.010003359794616699, 0.010133983612060547, 0.010207967758178711, 0.010151424407958985, 0.010257887840270995, 0.010097472190856934, 0.010040703773498536, 0.010011584281921386, 0.01028275203704834, 0.010298784255981446, 0.010148384094238282, 0.010096159934997559, 0.010217375755310058, 0.010094431877136231, 0.010074848175048829, 0.010184703826904297, 0.010231136322021484, 0.010220191955566406, 0.010139328002929687, 0.010129695892333984, 0.010301471710205078, 0.010494112014770508, 0.010530655860900878, 0.010673567771911622, 0.010383904457092285, 0.010273056030273437, 0.010231167793273925, 0.01024681568145752, 0.01027455997467041, 0.010076255798339843, 0.010082112312316895, 0.010219615936279297, 0.010141471862792968, 0.010154335975646973, 0.010128352165222168, 0.01012828826904297, 0.010078207969665527, 0.010080256462097169, 0.010072064399719239, 0.010051584243774414, 0.010061823844909668, 0.010144031524658204, 0.010116767883300782, 0.01010694408416748, 0.010201343536376953, 0.010079968452453613, 0.010096832275390625, 0.009999967575073243, 0.011952608108520509, 0.011907999992370605, 0.01117238426208496, 0.009968671798706055, 0.01035142421722412, 0.01009721565246582, 0.010223296165466309, 0.010072383880615234, 0.010205599784851074, 0.010164256095886231, 0.010168288230895996, 0.010247455596923829, 0.010141728401184082, 0.010133855819702149, 
0.010240351676940918, 0.010260479927062988, 0.010203328132629395, 0.010301504135131836, 0.010282527923583985, 0.01042454433441162, 0.010415776252746582, 0.012329312324523925, 0.010200063705444335, 0.010184831619262695, 0.010125215530395509, 0.01017136001586914, 0.01012502384185791, 0.010221856117248535, 0.010077792167663574, 0.010119903564453124, 0.01009436798095703, 0.010071392059326173, 0.01005615997314453, 0.010141792297363282, 0.010065247535705566, 0.010111104011535645, 0.010320416450500488, 0.010290431976318359, 0.010150655746459961, 0.01006719970703125, 0.010175519943237305, 0.010139519691467286, 0.010129247665405273, 0.010115072250366211, 0.010112288475036622, 0.010064096450805663, 0.010115584373474122, 0.010163968086242675, 0.010083680152893066, 0.01009552001953125, 0.0101212158203125, 0.010161696434020996, 0.010551775932312012, 0.010442336082458497, 0.010260928153991699, 0.010225631713867188, 0.010206463813781739, 0.010295231819152833, 0.010179327964782716, 0.010055135726928711, 0.010070207595825196, 0.010586591720581054, 0.010116127967834473, 0.01008892822265625, 0.010308032035827636, 0.010082240104675294, 0.009782272338867188, 0.010160544395446777, 0.010267264366149902, 0.010161184310913085, 0.010123744010925292, 0.01010326385498047, 0.010422271728515625, 0.010280351638793945, 0.010190624237060547, 0.010220352172851563, 0.010128416061401368, 0.010142623901367187, 0.010092512130737304, 0.010317919731140136, 0.010124608039855957, 0.010215392112731933, 0.01023459243774414, 0.010101951599121094, 0.010082559585571289, 0.010098560333251953, 0.010068672180175781, 0.010117119789123535, 0.010089504241943359, 0.010167263984680175, 0.010127519607543945, 0.01025119972229004, 0.01026921558380127, 0.01017046356201172, 0.0101942720413208, 0.010288064002990722, 0.010231167793273925, 0.010193535804748535, 0.010287103652954101, 0.01024614429473877, 0.01008131217956543, 0.01017750358581543, 0.010125439643859863, 0.010172287940979003, 0.010249728202819825, 0.010801888465881348, 0.010226816177368163, 0.010164896011352538, 0.010141951560974121, 0.010127103805541993, 0.010181792259216308, 0.010102720260620117, 0.010129535675048828, 0.010054176330566406, 0.010103327751159668, 0.010087936401367188, 0.01024835205078125, 0.010157343864440917, 0.010182720184326172, 0.010223456382751465, 0.010232768058776855, 0.010202560424804687, 0.010148351669311523, 0.010177696228027344, 0.010359135627746582, 0.010252799987792969, 0.01015772819519043, 0.010285632133483887, 0.010234848022460937, 0.009951231956481933, 0.010170368194580079, 0.010129055976867675, 0.01011350440979004, 0.010086175918579101, 0.01008240032196045, 0.01006339168548584, 0.010104607582092286, 0.010076864242553711, 0.010052800178527832, 0.010223487854003906, 0.010123807907104493, 0.01009059238433838, 0.01009391975402832, 0.010138591766357423, 0.01011616039276123, 0.010077119827270508, 0.010067968368530274, 0.010041279792785645, 0.010119520187377929, 0.010122431755065917, 0.010318112373352051, 0.010268768310546876, 0.01091766357421875, 0.010235520362854003, 0.010189567565917969, 0.010200063705444335, 0.010242783546447753, 0.01029148769378662, 0.010171999931335449, 0.010160896301269531, 0.010468000411987305, 0.010172608375549317, 0.010189536094665528, 0.010077599525451661, 0.010119872093200684, 0.010070015907287597, 0.010114144325256348, 0.010095359802246094, 0.010094655990600585, 0.010153471946716308, 0.010033599853515625, 0.010260128021240234, 0.010183168411254882, 0.010036288261413574, 0.010060128211975097, 0.010011232376098633, 
0.010100735664367675, 0.010142720222473145, 0.010077183723449706, 0.010058783531188964, 0.010035840034484864, 0.010127712249755859, 0.010060799598693848, 0.010126336097717285, 0.010121055603027344, 0.010204928398132324, 0.010152352333068848, 0.01010201644897461, 0.010220352172851563, 0.01020252799987793, 0.010131967544555665, 0.010383359909057617, 0.010149855613708497, 0.010320159912109375, 0.010217215538024902, 0.010233856201171876, 0.0106146879196167, 0.010196928024291993, 0.01019222354888916, 0.010115391731262207, 0.010168736457824706, 0.010145888328552247, 0.010130687713623047, 0.010134271621704102, 0.010158080101013184, 0.010284576416015625, 0.010244576454162598, 0.01028495979309082, 0.010470975875854492, 0.010248736381530761, 0.010358783721923828, 0.010091615676879882, 0.01010934352874756, 0.010068160057067871, 0.010210847854614258, 0.010058400154113769, 0.010143584251403808, 0.009998623847961425, 0.010010080337524414, 0.009984671592712402, 0.010100640296936036, 0.010049504280090332, 0.010167327880859375, 0.01015497589111328, 0.010208288192749023, 0.010072416305541992, 0.01012387180328369, 0.01020348834991455, 0.010147520065307617, 0.01006387233734131, 0.010155391693115235, 0.010047616004943848, 0.01012172794342041, 0.010171775817871093, 0.010203231811523437, 0.010213919639587402, 0.010114848136901855, 0.0102258882522583, 0.010143744468688964, 0.010136639595031739, 0.010193599700927734, 0.010237536430358886, 0.010161952018737793, 0.010279583930969238, 0.010197216033935547, 0.010065919876098632, 0.010188447952270507, 0.010145376205444336, 0.010111519813537598, 0.010241727828979492, 0.010398207664489746, 0.010397727966308595, 0.010557472229003906, 0.01060041618347168, 0.010436544418334961, 0.010236160278320312, 0.010573568344116211, 0.010485183715820313, 0.010378111839294433, 0.010217344284057617, 0.010177824020385742, 0.010094880104064941, 0.010274784088134765, 0.010156319618225098, 0.010127327919006347, 0.010102272033691406, 0.010201631546020508, 0.010102784156799317, 0.010112128257751464, 0.010242752075195313, 0.010156160354614257, 0.010255807876586915, 0.010213151931762695, 0.010156895637512207, 0.010080256462097169, 0.010153247833251952, 0.010060864448547363, 0.010100159645080566, 0.010100959777832031, 0.01026848030090332, 0.010290623664855957, 0.010245183944702148, 0.010262175559997559, 0.01026460838317871, 0.010385055541992188, 0.010209343910217284, 0.010207072257995605, 0.010262399673461915, 0.01042255973815918, 0.010330656051635741, 0.010254079818725587, 0.010300992012023925, 0.011450592041015626, 0.01294486427307129, 0.010828096389770508, 0.011104703903198242, 0.010770367622375489, 0.01041004753112793, 0.010147071838378907, 0.010201855659484864, 0.01016966438293457, 0.010158368110656738, 0.010158495903015137, 0.010188863754272462, 0.010134719848632812, 0.010233792304992675, 0.010269280433654784, 0.010291104316711425, 0.010276576042175294, 0.010304096221923829, 0.010166272163391114, 0.010116671562194824, 0.010080703735351563, 0.01001471996307373, 0.010036479949951172, 0.010017536163330077, 0.01015827178955078, 0.010063679695129394, 0.009784799575805664, 0.010070560455322265, 0.010048576354980469, 0.009991007804870606, 0.009984095573425293, 0.010015935897827148, 0.010021920204162598, 0.010053759574890136, 0.010052288055419922, 0.010060768127441406, 0.01004259204864502, 0.010263232231140137, 0.010083552360534667, 0.010072256088256836, 0.010077088356018067, 0.010067744255065918, 0.010153984069824219, 0.010024959564208985, 0.009994239807128906, 0.009983039855957032, 
0.009989055633544922, 0.010088800430297851, 0.010130399703979493, 0.010013248443603516, 0.010014847755432129, 0.010168319702148437, 0.010158464431762696, 0.010145407676696777, 0.01025443172454834, 0.010262751579284668, 0.0101659517288208, 0.010128416061401368, 0.010455552101135255, 0.01035689640045166, 0.010168352127075196, 0.010104640007019043, 0.010135168075561524, 0.01010159969329834, 0.010031071662902832, 0.010049568176269532, 0.009989536285400391, 0.010197312355041503, 0.010071328163146973, 0.010037247657775878, 0.01021609592437744, 0.010023008346557618, 0.010066111564636231, 0.010053695678710938, 0.01009225559234619, 0.01008233642578125, 0.010141983985900879, 0.009983360290527344, 0.009976127624511718, 0.009967904090881347, 0.010049216270446778, 0.010019136428833007, 0.010085472106933594, 0.010079071998596192, 0.010308927536010742, 0.010062175750732422, 0.010148544311523438, 0.010091679573059082, 0.010086527824401855, 0.009736127853393554, 0.010364480018615722, 0.01032431983947754, 0.010444352149963379, 0.010121888160705566, 0.010237695693969727, 0.010213088035583497, 0.010141183853149414, 0.010147904396057129, 0.010056608200073243, 0.010120991706848145, 0.01015443229675293, 0.010305312156677246, 0.010223615646362304, 0.010293248176574708, 0.010278911590576171, 0.010290847778320313, 0.010125663757324219, 0.010110976219177246, 0.010089664459228516, 0.010025792121887207, 0.010073184013366699, 0.010035807609558106, 0.010109248161315918, 0.01010643196105957, 0.01015238380432129, 0.010174464225769043, 0.010201087951660156, 0.010149951934814452, 0.0102194242477417, 0.010330143928527833, 0.010298784255981446, 0.010193504333496094, 0.010223615646362304, 0.010277152061462402, 0.010146719932556152, 0.010128000259399413, 0.01016598415374756, 0.010139519691467286, 0.010096575736999512, 0.010088671684265136, 0.010143967628479003, 0.010100959777832031, 0.010123264312744141, 0.010151935577392577, 0.010169792175292968, 0.010162752151489257, 0.010174464225769043, 0.010288384437561035, 0.010195743560791016, 0.010125408172607422, 0.01023737621307373, 0.010223296165466309, 0.010197759628295899, 0.010184415817260742, 0.010152223587036133, 0.01026483154296875, 0.010197888374328614, 0.010333024024963378, 0.010395327568054199, 0.01043398380279541, 0.010449760437011719, 0.010310815811157227, 0.010072992324829102, 0.010448927879333497, 0.010680031776428223, 0.010385215759277344, 0.010375616073608398, 0.010548928260803222, 0.010512448310852051, 0.010438655853271485, 0.010326016426086425, 0.010201024055480958, 0.010114784240722657, 0.010115519523620605, 0.01010905647277832, 0.010198271751403809, 0.010196895599365234, 0.01021571159362793, 0.010246591567993164, 0.010237407684326173, 0.010209728240966796, 0.010223615646362304, 0.010172415733337402, 0.010260479927062988, 0.010194496154785156, 0.01016438388824463, 0.010056127548217773, 0.010134528160095215, 0.01008521556854248, 0.010108927726745605, 0.010130847930908203, 0.010088543891906738, 0.010297856330871581, 0.010243071556091308, 0.010418975830078125, 0.010123488426208496, 0.010094176292419434, 0.010152416229248047, 0.010284223556518556, 0.010752511978149413, 0.01026483154296875, 0.01015129566192627, 0.010100831985473633, 0.010850848197937012, 0.010096192359924316, 0.010082271575927735, 0.010248191833496094, 0.010238431930541992, 0.010108448028564454, 0.010117055892944336, 0.01061228847503662, 0.01205123233795166, 0.011066880226135254, 0.010369440078735352, 0.010254624366760254, 0.010143936157226563, 0.010392671585083007, 0.010167008399963378, 
0.010195103645324707, 0.010268671989440918, 0.010241888046264648, 0.010073663711547852, 0.01028492832183838, 0.010127039909362793, 0.010191935539245606, 0.00996828842163086, 0.010268416404724121, 0.010212608337402344, 0.010183423995971679, 0.01010319995880127, 0.01024790382385254, 0.01019916820526123, 0.010128928184509276, 0.010032640457153321, 0.010083295822143555, 0.010213376045227051, 0.010261568069458007, 0.01024073600769043, 0.01012758445739746, 0.010168288230895996, 0.010283231735229492, 0.010252096176147462, 0.010143936157226563, 0.010104640007019043, 0.0101212158203125, 0.010067968368530274, 0.010049535751342773, 0.01006704044342041, 0.010243136405944825, 0.010034111976623536, 0.010017696380615235, 0.010044511795043945, 0.010118047714233398, 0.010166208267211914, 0.010243776321411133, 0.010340543746948242, 0.01031116771697998, 0.010348992347717285, 0.01046553611755371, 0.010650815963745118, 0.010435392379760742, 0.010350655555725098, 0.010379551887512207, 0.010266271591186524, 0.010333503723144531, 0.010566399574279786, 0.010423775672912597, 0.01040230369567871, 0.010210975646972657, 0.010215744018554688, 0.010248000144958497, 0.01010927963256836, 0.010067968368530274, 0.010167296409606934, 0.01021401596069336, 0.010186688423156738, 0.01008448028564453, 0.010058079719543457, 0.010016575813293457, 0.01021132755279541, 0.010235903739929199, 0.010094592094421387, 0.01018284797668457, 0.010047295570373534, 0.01033011245727539, 0.01019430446624756, 0.010145631790161133, 0.010218591690063476, 0.009899423599243165, 0.010463839530944824, 0.010199040412902831, 0.010156031608581542, 0.010211487770080567, 0.010288288116455078, 0.010175135612487793, 0.01008028793334961, 0.01008188819885254, 0.010078751564025878, 0.01012019157409668, 0.010138496398925782, 0.010095711708068847, 0.010138784408569336, 0.010255167961120606, 0.010294207572937012, 0.010217472076416016, 0.010334207534790038, 0.010356736183166505, 0.010612735748291016, 0.010487104415893555, 0.010422368049621582, 0.010424256324768066, 0.010349216461181641, 0.010356736183166505, 0.010307456016540527, 0.010229984283447265, 0.01032096004486084, 0.010373023986816407, 0.010271679878234864, 0.010406944274902344, 0.010575008392333985, 0.010399552345275878, 0.010620351791381835, 0.01052899169921875, 0.010484064102172851, 0.010515551567077636, 0.010514399528503419, 0.010766688346862794, 0.01045359992980957, 0.010453120231628418, 0.010405759811401367, 0.010264639854431152, 0.010323007583618165, 0.010236543655395508, 0.010185088157653809, 0.010137408256530762, 0.01021894359588623, 0.010181376457214356, 0.010220735549926759, 0.010250944137573242, 0.01024614429473877, 0.010496000289916992, 0.010293503761291505, 0.010260224342346191, 0.010194944381713868, 0.010354687690734863, 0.010176223754882812, 0.010203424453735352, 0.010166272163391114, 0.010142975807189942, 0.010178848266601563, 0.010254560470581054, 0.010085951805114747, 0.010551679611206054, 0.010258144378662109, 0.010166815757751465, 0.010231040000915527, 0.010064479827880859, 0.01002246379852295, 0.009986335754394532, 0.010090496063232422, 0.00996835231781006, 0.009997311592102051, 0.010014623641967773, 0.009969696044921875, 0.010060768127441406, 0.010299296379089355, 0.010139552116394043, 0.010434368133544921, 0.010111231803894043, 0.009997695922851562, 0.010080384254455567, 0.010030879974365234, 0.010119903564453124, 0.010070207595825196, 0.01006492805480957, 0.009994272232055665, 0.009982912063598633, 0.010028863906860351, 0.010075967788696289, 0.010057920455932618, 0.01005078411102295, 
0.010072863578796387, 0.010201312065124512, 0.010049311637878418, 0.009989952087402344, 0.010127167701721191, 0.01001689624786377, 0.010026399612426757, 0.010033791542053222, 0.010189023971557618, 0.010002431869506835, 0.010035039901733399, 0.010076319694519043, 0.010008864402770996, 0.010052831649780273, 0.010042048454284669, 0.010157440185546875, 0.010082752227783202, 0.010088447570800782, 0.010045472145080567, 0.010074399948120117, 0.010057184219360351, 0.010274527549743652, 0.010119359970092773, 0.010031423568725585, 0.011294303894042968, 0.01167369556427002, 0.010258111953735351, 0.010267264366149902, 0.010323488235473632, 0.010047712326049806, 0.010103327751159668, 0.01009340763092041, 0.01015657615661621]",tokens/s,98.0044379959453,,, 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in 
_call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,2221.89568,2493.382656,0.0,2107.63776,1984.899072,s,1,8.9287197265625,8.9287197265625,0.0,8.9287197265625,8.9287197265625,8.9287197265625,8.9287197265625,[8.9287197265625],,kWh,4.649125556253845e-05,5.121067476371531e-06,1.4479733805972117e-05,6.609205684488209e-05,,MB,2286.477312,2833.12128,0.0,2415.919104,2256.887808,s,10,1.7444339447021482,0.17444339447021487,0.000723478408962777,0.17449862670898436,0.175037744140625,0.1751746337890625,0.17528414550781252,"[0.17500732421875, 0.17434962463378906, 0.1746728057861328, 0.17423481750488282, 0.17441285705566406, 0.17445904541015625, 0.1745382080078125, 0.17495106506347657, 0.17249667358398438, 0.1753115234375]",tokens/s,1467.5247565404973,kWh,5.205867513962546e-06,5.739006790608426e-07,3.465202577227664e-06,9.244970770251052e-06,tokens/kWh,27690731.14041313,MB,2295.45984,2875.06432,0.0,2457.862144,2339.93984,s,10,19.640491455078127,1.9640491455078124,0.008018463683238542,1.9644817504882812,1.9692302978515626,1.975309912109375,1.980173603515625,"[1.96640283203125, 1.9485126953125, 1.95746484375, 1.9598209228515624, 1.9661416015625, 1.9628218994140625, 1.9813895263671875, 1.967441650390625, 1.9626162109375, 1.9678792724609375]",tokens/s,32.07659041734982,kWh,5.7464352391462104e-05,6.338365963050981e-06,3.255871513077251e-05,9.636143348528561e-05,tokens/kWh,653788.5305496218,,s,630,19.637536848068244,0.031170693409632123,0.0005484206547393003,0.031054671287536622,0.03152696399688721,0.0317732575416565,0.03291022888183594,"[0.03184016036987305, 0.03152649688720703, 0.03129913520812988, 0.0313816967010498, 0.03154300880432129, 0.031351776123046876, 0.0308940486907959, 0.031071935653686523, 0.031002784729003908, 0.031307231903076174, 0.030974592208862305, 0.03144028854370117, 0.031383871078491214, 0.03159679985046387, 0.03152089691162109, 0.03142860794067383, 0.03131596755981445, 0.031307775497436525, 0.03097395133972168, 0.031031295776367186, 0.0307957763671875, 0.03093270492553711, 0.03133263969421387, 0.03117660713195801, 0.030976095199584962, 0.031264448165893556, 0.031074783325195313, 0.031569215774536134, 0.031054111480712892, 0.03117695999145508, 0.030934240341186522, 0.03121980857849121, 0.030881536483764647, 0.03107935905456543, 0.03100249671936035, 0.031206592559814453, 0.031080415725708008, 0.031080415725708008, 0.030970687866210937, 0.03106425666809082, 0.030826496124267577, 0.030851072311401367, 0.030885120391845704, 0.031017728805541992, 0.031034784317016603, 0.031568384170532225, 0.03147171211242676, 0.03138764762878418, 0.031048864364624024, 0.03102790451049805, 0.030877824783325195, 0.030924896240234374, 0.030992256164550782, 0.0309303035736084, 0.030955551147460937, 0.030927520751953125, 0.03079702377319336, 0.03117977523803711, 0.030922592163085936, 0.03090143966674805, 0.031100736618041993, 0.034689983367919924, 0.031561088562011716, 
0.03136812782287598, 0.0312457275390625, 0.030962272644042967, 0.030979679107666015, 0.03082918357849121, 0.0308057918548584, 0.03094646453857422, 0.030986272811889648, 0.030755647659301756, 0.030818304061889647, 0.030745983123779297, 0.030763647079467774, 0.03077120018005371, 0.030889408111572265, 0.030801727294921876, 0.030919424057006838, 0.0308176326751709, 0.030875871658325196, 0.03077574348449707, 0.03080806350708008, 0.030870943069458007, 0.031048288345336916, 0.030853151321411133, 0.031038431167602538, 0.030978239059448243, 0.03133318328857422, 0.030838144302368163, 0.031047487258911134, 0.031140607833862306, 0.030965824127197266, 0.03080403137207031, 0.03094870376586914, 0.031002399444580078, 0.03164963150024414, 0.030854272842407226, 0.030859104156494142, 0.030774784088134766, 0.030800256729125976, 0.03059110450744629, 0.03099443244934082, 0.030988288879394532, 0.03144499206542969, 0.03129129600524903, 0.031088735580444334, 0.030905759811401368, 0.031009344100952147, 0.030810144424438475, 0.03092585563659668, 0.03076092720031738, 0.03057561683654785, 0.03074367904663086, 0.030759775161743164, 0.030689311981201173, 0.03095475196838379, 0.03081705665588379, 0.03084284782409668, 0.030731552124023436, 0.030919391632080077, 0.031196191787719728, 0.031122400283813478, 0.030885887145996094, 0.030758623123168946, 0.030773536682128906, 0.03317567825317383, 0.03146928024291992, 0.030957855224609376, 0.030914560317993164, 0.030851072311401367, 0.030740127563476563, 0.030708255767822264, 0.030844064712524415, 0.030755487442016602, 0.03085430335998535, 0.030825376510620117, 0.030947263717651368, 0.030916608810424805, 0.03078326416015625, 0.03072198486328125, 0.03088617515563965, 0.030926719665527343, 0.031133087158203124, 0.03109334373474121, 0.03095155143737793, 0.030902271270751954, 0.03100022315979004, 0.031048000335693358, 0.03144441604614258, 0.030899967193603516, 0.030955615997314452, 0.03098700714111328, 0.030947328567504883, 0.03138764762878418, 0.03134873580932617, 0.030971647262573242, 0.031071775436401366, 0.031013439178466797, 0.031010976791381838, 0.03086073684692383, 0.030975839614868165, 0.030873472213745118, 0.030886751174926758, 0.031129247665405275, 0.031090591430664064, 0.03097439956665039, 0.031038944244384765, 0.03096531105041504, 0.033476768493652345, 0.03128607940673828, 0.03131328010559082, 0.031048320770263673, 0.03096976089477539, 0.031000192642211915, 0.031017440795898438, 0.03092889595031738, 0.030952896118164062, 0.030881696701049805, 0.030919328689575195, 0.030943231582641603, 0.031162368774414063, 0.031137792587280274, 0.031116416931152344, 0.030929792404174806, 0.03089536094665527, 0.03094384002685547, 0.031062047958374025, 0.030975936889648437, 0.031684511184692385, 0.03155590438842774, 0.0316208324432373, 0.03142867279052734, 0.03117670440673828, 0.031350496292114255, 0.031160608291625976, 0.031113216400146484, 0.03116646385192871, 0.03097929573059082, 0.03084294319152832, 0.03088252830505371, 0.03089967918395996, 0.030882335662841796, 0.030988288879394532, 0.031143936157226562, 0.03083263969421387, 0.03085308837890625, 0.03093507194519043, 0.031306880950927735, 0.030952320098876954, 0.03131391906738281, 0.031111135482788085, 0.030986047744750975, 0.03098441505432129, 0.031092384338378905, 0.03098863983154297, 0.03116646385192871, 0.03094745635986328, 0.030857088088989258, 0.030876991271972656, 0.030947872161865234, 0.031263999938964844, 0.03220681762695313, 0.031125759124755858, 0.03091321563720703, 0.031130880355834962, 0.031183135986328124, 
0.03220323181152344, 0.031030399322509766, 0.03098918342590332, 0.030839839935302735, 0.030799999237060546, 0.030694080352783204, 0.03086867141723633, 0.03079471969604492, 0.030842336654663086, 0.030890464782714844, 0.030761024475097657, 0.03078348731994629, 0.031614048004150394, 0.03115305519104004, 0.030883615493774413, 0.031143360137939453, 0.031048000335693358, 0.03222371292114258, 0.030967487335205077, 0.03097996711730957, 0.03103276824951172, 0.031091712951660157, 0.03122585678100586, 0.030978271484375, 0.030875423431396484, 0.031461664199829105, 0.03138291168212891, 0.031246719360351564, 0.031231264114379882, 0.030962303161621095, 0.030967424392700196, 0.030796512603759766, 0.030947263717651368, 0.030973152160644533, 0.031036256790161133, 0.03130745506286621, 0.030902591705322266, 0.0308874568939209, 0.031082975387573243, 0.03144704055786133, 0.031768575668334964, 0.03151651191711426, 0.031502496719360354, 0.03151872062683105, 0.031732736587524416, 0.031483232498168943, 0.0315695686340332, 0.031377344131469725, 0.031350847244262695, 0.03098111915588379, 0.030862335205078126, 0.030822463989257812, 0.031172576904296874, 0.03181167984008789, 0.03174182319641113, 0.03139785575866699, 0.0313732795715332, 0.031086656570434572, 0.031041536331176758, 0.030909664154052736, 0.031062816619873045, 0.030959327697753905, 0.030982431411743165, 0.03072115135192871, 0.03096460723876953, 0.03091996765136719, 0.03184304046630859, 0.03213523101806641, 0.031411487579345705, 0.03143337631225586, 0.0315447998046875, 0.03141279983520508, 0.03126883125305176, 0.031133056640625, 0.031824480056762694, 0.030852191925048827, 0.030913471221923828, 0.031120512008666994, 0.03092678451538086, 0.030811071395874023, 0.030930816650390627, 0.03089164733886719, 0.030740991592407226, 0.030950656890869142, 0.030943584442138673, 0.03101123237609863, 0.03137126350402832, 0.031112768173217772, 0.031448863983154295, 0.031203424453735352, 0.030929023742675782, 0.031225919723510742, 0.03108448028564453, 0.03128096008300781, 0.03139366340637207, 0.03138591957092285, 0.031264768600463864, 0.03134854316711426, 0.03150559997558594, 0.03133132743835449, 0.031131168365478516, 0.03097648048400879, 0.030737503051757813, 0.030924959182739256, 0.0313812484741211, 0.031120384216308594, 0.030899871826171876, 0.031197216033935545, 0.03088934326171875, 0.03094419288635254, 0.0310248966217041, 0.03158585548400879, 0.031619775772094724, 0.0315185604095459, 0.03130179214477539, 0.0310284481048584, 0.030976703643798828, 0.030866943359375, 0.030756927490234374, 0.030865951538085936, 0.03085436820983887, 0.03081248092651367, 0.031271392822265626, 0.0307871036529541, 0.030744384765625, 0.030857887268066406, 0.030765056610107422, 0.031653343200683595, 0.03201276779174805, 0.031306880950927735, 0.031551904678344726, 0.031168895721435545, 0.03090447998046875, 0.0309737606048584, 0.030814399719238283, 0.030891807556152343, 0.03144931221008301, 0.03092585563659668, 0.031167200088500976, 0.03120684814453125, 0.03095225524902344, 0.030892032623291016, 0.030792831420898437, 0.03103740882873535, 0.03115715217590332, 0.03139788818359375, 0.031442943572998046, 0.03135408020019531, 0.03122444725036621, 0.031441055297851565, 0.03154060745239258, 0.031350751876831055, 0.031241472244262696, 0.03116540718078613, 0.03134649658203125, 0.030975584030151368, 0.031014495849609375, 0.031238943099975585, 0.031321760177612304, 0.031111040115356446, 0.031107551574707033, 0.03145113563537598, 0.03223507308959961, 0.030972543716430663, 0.03107206344604492, 
0.03140812873840332, 0.03144601631164551, 0.031023487091064453, 0.031072511672973632, 0.031331872940063475, 0.031793855667114256, 0.032463008880615235, 0.031246175765991212, 0.031057504653930663, 0.03096976089477539, 0.030900703430175782, 0.031138336181640625, 0.03103094482421875, 0.031194400787353516, 0.031099327087402345, 0.031054111480712892, 0.030951583862304687, 0.031113056182861327, 0.030879743576049806, 0.03103539276123047, 0.03561584091186523, 0.031267744064331054, 0.031072383880615236, 0.030977920532226564, 0.031096832275390625, 0.0309899845123291, 0.031017311096191408, 0.031054079055786134, 0.03103104019165039, 0.030842880249023437, 0.030887487411499024, 0.030894527435302733, 0.040339263916015625, 0.03290745544433594, 0.03123200035095215, 0.03234815979003906, 0.031778303146362305, 0.03145984077453613, 0.031268863677978515, 0.030977184295654298, 0.030916704177856445, 0.03087027168273926, 0.0312106876373291, 0.03131065559387207, 0.031073503494262696, 0.031071008682250975, 0.031137792587280274, 0.03119308853149414, 0.03143036842346191, 0.031656543731689454, 0.031710527420043946, 0.03126483154296875, 0.031207296371459962, 0.030968576431274413, 0.03102720069885254, 0.031029247283935548, 0.030999584197998045, 0.03103424072265625, 0.03126409530639648, 0.031087360382080077, 0.031286367416381834, 0.031201824188232422, 0.03135468864440918, 0.031824447631835937, 0.031856000900268557, 0.0313525447845459, 0.03142748832702637, 0.031269983291625975, 0.031277856826782226, 0.031223936080932616, 0.032911361694335936, 0.03310287857055664, 0.03139590454101562, 0.031306655883789065, 0.031045536041259765, 0.031100383758544924, 0.03092691230773926, 0.030842592239379883, 0.031032159805297853, 0.030910463333129884, 0.03099238395690918, 0.03082761573791504, 0.03096419143676758, 0.03090176010131836, 0.03099331283569336, 0.031, 0.031040096282958986, 0.031077695846557618, 0.031005376815795897, 0.030926847457885744, 0.030996831893920898, 0.031043231964111326, 0.031025152206420898, 0.03139993667602539, 0.03146956825256347, 0.031252479553222655, 0.03134873580932617, 0.031487424850463866, 0.031533632278442386, 0.031281152725219724, 0.031194816589355467, 0.03098480033874512, 0.031055231094360352, 0.031067968368530274, 0.03117283248901367, 0.031144063949584962, 0.03107766342163086, 0.03117251205444336, 0.031011264801025392, 0.031041088104248046, 0.030888608932495117, 0.030893472671508788, 0.03153116798400879, 0.03136684799194336, 0.031119327545166015, 0.031136159896850587, 0.03099452781677246, 0.031053632736206056, 0.031084192276000976, 0.03113814353942871, 0.031072479248046875, 0.031545343399047854, 0.0312073917388916, 0.03126307106018066, 0.031651199340820314, 0.030961984634399413, 0.030806207656860353, 0.03091436767578125, 0.0310230712890625, 0.030989919662475586, 0.03085971260070801, 0.030889984130859374, 0.030905696868896486, 0.031102783203125, 0.031005535125732422, 0.031231231689453125, 0.030990528106689452, 0.03092691230773926, 0.03076665687561035, 0.030884319305419922, 0.03080633544921875, 0.03111952018737793, 0.03094131278991699, 0.03101478385925293, 0.03215679931640625, 0.03096665573120117, 0.030906368255615234, 0.03108038330078125, 0.030951488494873048, 0.03116963195800781, 0.031005407333374025, 0.03126291275024414, 0.03113484764099121, 0.03126681518554687, 0.03166912078857422, 0.03185990333557129, 0.031462207794189456, 0.031492095947265625, 0.03139788818359375, 0.03146956825256347, 0.031440895080566404, 0.03137740707397461, 0.03122790336608887, 0.03122585678100586, 0.03120742416381836, 
0.03103980827331543, 0.031016639709472656, 0.031066112518310547, 0.0311757755279541, 0.030818464279174805, 0.03111596870422363, 0.031116928100585937, 0.03101740837097168, 0.031084159851074218, 0.03085500717163086, 0.03166425514221191, 0.03155564880371094, 0.031238079071044922, 0.03153305625915527, 0.031780927658081055, 0.03187830352783203, 0.03192092704772949, 0.031777088165283206, 0.03144262313842774, 0.031676416397094724, 0.0313463363647461, 0.03132655906677246, 0.031313215255737305, 0.031206079483032226, 0.031139423370361328, 0.031202751159667967, 0.03081500816345215, 0.031084735870361327, 0.031975263595581054, 0.032798080444335936, 0.03118160057067871, 0.031109119415283205, 0.03094281578063965, 0.030955615997314452, 0.031117631912231446, 0.031093952178955078, 0.03101478385925293, 0.031138336181640625, 0.030992063522338867, 0.031130304336547853, 0.031092607498168945, 0.03126083183288574, 0.031024703979492186, 0.031135551452636717, 0.031140480041503906, 0.03130352020263672, 0.0311297607421875, 0.030947328567504883, 0.030908416748046875, 0.03120492744445801, 0.03133263969421387, 0.031344671249389645, 0.031225183486938476, 0.031249183654785156, 0.03132012748718262, 0.03139577674865723, 0.03127267265319824, 0.0313767032623291, 0.031308000564575195, 0.031357152938842774, 0.03103308868408203, 0.031058719635009766, 0.030971359252929688, 0.030962560653686525, 0.03091744041442871, 0.03086627197265625, 0.031055295944213867, 0.030718528747558593, 0.030695423126220703, 0.030828256607055664, 0.03099235153198242, 0.03090380859375, 0.03092911911010742]",tokens/s,32.081416568390736,,, 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context 
return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1015, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 840, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1051.582464,972.947456,0.0,570.425344,536.326656,s,1,9.2602236328125,9.2602236328125,0.0,9.2602236328125,9.2602236328125,9.2602236328125,9.2602236328125,[9.2602236328125],,kWh,3.138963327501187e-05,3.452039131413579e-06,1.1390564668034564e-05,4.623223707446001e-05,,MB,1425.42848,1042.153472,0.0,624.951296,594.377728,s,10,0.2707039680480957,0.02707039680480957,0.0001303903956057068,0.027032320022583007,0.027273387908935545,0.027319813728332518,0.027356954383850097,"[0.02695881652832031, 0.027027008056640624, 0.02703763198852539, 0.02696041679382324, 0.02695564842224121, 0.027008991241455078, 0.02708336067199707, 0.027263071060180662, 0.02736623954772949, 0.027042783737182618]",tokens/s,9456.824805557218,kWh,7.977514700365241e-07,8.793662234398146e-08,5.256743731804064e-07,1.4113624655609118e-06,tokens/kWh,181385013.59270525,MB,1466.765312,1056.833536,0.0,639.63136,607.71072,s,10,14.476691772460939,1.447669177246094,0.006819174967382823,1.4474523315429688,1.4553603393554688,1.4558354431152343,1.4562155261230467,"[1.4360029296875, 1.4552547607421875, 1.4437486572265625, 1.4437213134765625, 1.438738037109375, 1.4458489990234376, 1.4544759521484374, 1.4490556640625, 1.456310546875, 1.453534912109375]",tokens/s,43.51822984851078,kWh,4.251283650913542e-05,4.68870732792238e-06,1.6312172430421936e-05,6.351371626747974e-05,tokens/kWh,991911.7271406968,,s,630,14.47117685127259,0.022970121986146955,0.0004391296317252733,0.022879631996154785,0.02328590793609619,0.023497274684906005,0.024922882995605477,"[0.0223287353515625, 0.022706687927246092, 0.02266963195800781, 0.02286582374572754, 0.02264396858215332, 0.02272857666015625, 0.0226942081451416, 0.022737567901611327, 0.0228701114654541, 0.0227838077545166, 0.02277987289428711, 0.0226279354095459, 0.022606367111206054, 0.022697280883789063, 0.022675647735595703, 0.022690303802490236, 0.022648767471313478, 0.022926464080810546, 0.022706592559814453, 0.022799007415771483, 0.02266304016113281, 0.022609920501708985, 0.022699615478515626, 0.0229005126953125, 0.023196479797363282, 0.022779199600219728, 0.022866432189941405, 0.022898687362670898, 0.023020927429199218, 0.022925952911376953, 0.022998592376708985, 0.023578752517700197, 0.02307491111755371, 0.02265519905090332, 0.02288025665283203, 0.022753055572509766, 0.02284707260131836, 0.022692672729492186, 0.02267526435852051, 0.022693439483642577, 0.02264313507080078, 0.022806528091430665, 0.022657024383544923, 0.022600927352905274, 0.02264963150024414, 0.022980607986450196, 0.0227508487701416, 0.02263078308105469, 0.022822912216186524, 0.02274508857727051, 0.02288342475891113, 0.02283203125, 0.02315430450439453, 0.022878591537475585, 0.022786048889160155, 0.022761472702026365, 0.022767328262329103, 0.022920831680297852, 0.022751903533935545, 0.022681600570678712, 0.022744447708129882, 
0.02275129508972168, 0.02267193603515625, 0.022245376586914063, 0.022912416458129883, 0.02402364730834961, 0.023578624725341796, 0.022976320266723634, 0.022959360122680662, 0.023046464920043946, 0.023013696670532227, 0.02312396812438965, 0.023017791748046874, 0.023580671310424805, 0.023175167083740233, 0.022975744247436522, 0.022902912139892578, 0.023050880432128905, 0.022947839736938477, 0.022890495300292968, 0.022777856826782225, 0.02285158348083496, 0.02282700729370117, 0.02284275245666504, 0.022893184661865233, 0.022945791244506835, 0.022852863311767578, 0.02297088050842285, 0.0230894718170166, 0.023309791564941406, 0.023461919784545898, 0.02344799995422363, 0.02323843193054199, 0.023370208740234374, 0.02352511978149414, 0.02347007942199707, 0.023342655181884765, 0.023302751541137694, 0.02304310417175293, 0.023204671859741212, 0.023125728607177733, 0.02295631980895996, 0.022943744659423827, 0.02592563247680664, 0.02319152069091797, 0.0229847354888916, 0.0228342399597168, 0.02284569549560547, 0.02281260871887207, 0.0229117431640625, 0.0229171199798584, 0.023128063201904296, 0.023113727569580078, 0.022974464416503908, 0.023070432662963866, 0.02280793571472168, 0.022928287506103515, 0.02271129608154297, 0.02299785614013672, 0.022907039642333985, 0.022794240951538085, 0.023106943130493163, 0.023018016815185546, 0.022982751846313477, 0.022742528915405274, 0.02278656005859375, 0.022337472915649415, 0.02265068817138672, 0.022687999725341797, 0.02264473533630371, 0.024987648010253907, 0.025405439376831054, 0.024425600051879885, 0.023251743316650392, 0.023595136642456056, 0.023100736618041993, 0.023195903778076173, 0.02279363250732422, 0.02286489677429199, 0.022970367431640625, 0.022765087127685546, 0.022766048431396485, 0.022806528091430665, 0.022724863052368163, 0.022784000396728517, 0.02270515251159668, 0.02267420768737793, 0.02270204734802246, 0.022734783172607423, 0.022755392074584962, 0.022799392700195313, 0.022764511108398437, 0.022777023315429686, 0.022663999557495117, 0.022855680465698244, 0.02280243110656738, 0.023080799102783205, 0.022790304183959963, 0.02278508758544922, 0.022684288024902344, 0.022819135665893556, 0.022767616271972657, 0.022779680252075194, 0.02298214340209961, 0.022758111953735352, 0.02270207977294922, 0.022710464477539063, 0.022759199142456055, 0.022859487533569336, 0.022849472045898437, 0.02280486488342285, 0.022670560836791993, 0.02271107292175293, 0.022638240814208985, 0.02272287940979004, 0.022695968627929688, 0.022671199798583983, 0.022722719192504882, 0.022820512771606447, 0.022701728820800782, 0.022796991348266602, 0.02280243110656738, 0.022833248138427735, 0.02278790473937988, 0.023299423217773438, 0.022780672073364257, 0.022697727203369142, 0.022884607315063477, 0.02285772705078125, 0.022362592697143555, 0.022727167129516602, 0.023010656356811522, 0.02283500862121582, 0.022878175735473634, 0.02291801643371582, 0.02288435173034668, 0.022885824203491213, 0.022831680297851563, 0.023006431579589842, 0.02339062309265137, 0.023259456634521485, 0.02342508888244629, 0.023218175888061524, 0.023197599411010742, 0.023105728149414063, 0.023387359619140624, 0.022936256408691406, 0.022965471267700197, 0.022897439956665037, 0.02277577590942383, 0.022822944641113282, 0.022826143264770508, 0.02280646324157715, 0.02265622329711914, 0.022748863220214844, 0.022702432632446288, 0.022689119338989257, 0.022812799453735353, 0.022667455673217773, 0.02261142349243164, 0.022706720352172853, 0.02392188835144043, 0.024764320373535157, 0.023036672592163087, 0.02294700813293457, 
0.022762208938598632, 0.022718687057495118, 0.02269593620300293, 0.022700191497802735, 0.022896255493164062, 0.022732479095458984, 0.02292790412902832, 0.022834463119506834, 0.023270111083984375, 0.022826112747192383, 0.02273164749145508, 0.022855167388916017, 0.022833663940429686, 0.022724607467651366, 0.022687744140625, 0.022632448196411133, 0.022751583099365234, 0.02262406349182129, 0.022806367874145507, 0.02262937545776367, 0.022952959060668944, 0.023058048248291017, 0.022862207412719725, 0.0228286075592041, 0.02264716720581055, 0.022691904067993166, 0.02284553527832031, 0.022491296768188476, 0.022835744857788085, 0.0227061767578125, 0.022681600570678712, 0.022722560882568358, 0.022799455642700195, 0.02271891212463379, 0.022904672622680665, 0.02283075141906738, 0.022922336578369142, 0.022900064468383788, 0.022815263748168946, 0.022853471755981444, 0.023166175842285155, 0.022913984298706055, 0.022722560882568358, 0.022648832321166993, 0.022781375885009766, 0.022785856246948243, 0.022860544204711914, 0.022773759841918945, 0.022779903411865234, 0.02266726493835449, 0.02350284767150879, 0.022867136001586914, 0.022897439956665037, 0.023026079177856446, 0.022861408233642577, 0.02270380783081055, 0.02279203224182129, 0.022742847442626953, 0.022913728713989258, 0.022726655960083008, 0.023047712326049803, 0.022851839065551757, 0.022971744537353515, 0.022738815307617188, 0.02282703971862793, 0.022916191101074217, 0.022644607543945313, 0.022831104278564454, 0.02267740821838379, 0.022627647399902345, 0.02265567970275879, 0.022802528381347657, 0.022914751052856445, 0.022671648025512695, 0.02266729545593262, 0.022779903411865234, 0.02279020881652832, 0.022987871170043944, 0.022897056579589844, 0.022888479232788087, 0.022845632553100587, 0.02283318328857422, 0.022943744659423827, 0.02282700729370117, 0.02313644790649414, 0.0228756160736084, 0.022821344375610352, 0.022896928787231444, 0.02277120018005371, 0.0227488956451416, 0.022349855422973634, 0.022626047134399415, 0.022616416931152343, 0.02265292739868164, 0.022687807083129882, 0.022779584884643555, 0.025112831115722656, 0.028612607955932616, 0.023100927352905275, 0.02286342430114746, 0.022823871612548827, 0.022862016677856447, 0.023087167739868165, 0.023084800720214845, 0.022996992111206056, 0.023015424728393553, 0.02301907157897949, 0.02284998321533203, 0.02294169616699219, 0.02330975914001465, 0.022950464248657227, 0.022861568450927735, 0.022802047729492188, 0.02274777603149414, 0.02290483283996582, 0.022700031280517577, 0.02278716850280762, 0.022778783798217773, 0.022755327224731444, 0.022828575134277343, 0.0226964168548584, 0.022816768646240236, 0.022675455093383787, 0.022607168197631835, 0.022602176666259764, 0.022599903106689453, 0.022937631607055663, 0.022675455093383787, 0.02257872009277344, 0.022667104721069337, 0.023056127548217772, 0.02275174331665039, 0.022778079986572265, 0.02263875198364258, 0.022747135162353514, 0.022556671142578123, 0.02271615982055664, 0.02265318489074707, 0.022678911209106447, 0.02272310447692871, 0.022904928207397462, 0.02280243110656738, 0.022853055953979493, 0.022746719360351563, 0.022807519912719728, 0.02272425651550293, 0.022894943237304687, 0.022867040634155275, 0.023124895095825194, 0.022748287200927735, 0.023198591232299805, 0.023017471313476562, 0.022945791244506835, 0.022640703201293945, 0.022795391082763673, 0.022825855255126953, 0.022795679092407227, 0.02331532859802246, 0.02296188735961914, 0.02302566337585449, 0.022824384689331054, 0.022757919311523437, 0.023201824188232422, 0.02300707244873047, 
0.023070144653320312, 0.022890527725219725, 0.023374464035034178, 0.023129600524902344, 0.02307302474975586, 0.023374240875244142, 0.02293328094482422, 0.022808319091796876, 0.022875648498535156, 0.022915199279785158, 0.022901504516601563, 0.022867584228515626, 0.02303014373779297, 0.023131359100341798, 0.02285628890991211, 0.0228570556640625, 0.02293356704711914, 0.022950559616088866, 0.0229737606048584, 0.023097375869750976, 0.022954431533813477, 0.02364246368408203, 0.023375551223754884, 0.02327897644042969, 0.02322323226928711, 0.023076351165771485, 0.023338495254516603, 0.023368095397949217, 0.023244863510131837, 0.02329439926147461, 0.02324924850463867, 0.023133951187133787, 0.023643808364868166, 0.023292415618896483, 0.023666112899780274, 0.023507328033447267, 0.023324703216552733, 0.023375871658325196, 0.0231507511138916, 0.023207296371459962, 0.022970272064208985, 0.02312015914916992, 0.022984256744384767, 0.02306105613708496, 0.022872095108032228, 0.022789823532104493, 0.02291958427429199, 0.022963359832763673, 0.023147327423095703, 0.02297657585144043, 0.02283103942871094, 0.022763584136962892, 0.022520704269409178, 0.02295903968811035, 0.023077600479125975, 0.023268831253051757, 0.023673696517944334, 0.022952863693237305, 0.022920032501220704, 0.022818336486816405, 0.02309343910217285, 0.023011552810668946, 0.02292665672302246, 0.02287027168273926, 0.02302729606628418, 0.022903711318969726, 0.02300649642944336, 0.02359574317932129, 0.02452479934692383, 0.023285184860229492, 0.02325503921508789, 0.02307472038269043, 0.022952608108520508, 0.023078367233276366, 0.022733343124389647, 0.022790143966674805, 0.02285158348083496, 0.02292937660217285, 0.022834623336791992, 0.02300579261779785, 0.02285308837890625, 0.022753471374511718, 0.02271676826477051, 0.022863807678222655, 0.02277791976928711, 0.022794624328613282, 0.022822528839111327, 0.022980096817016602, 0.022997600555419922, 0.02292108726501465, 0.02298240089416504, 0.02313471984863281, 0.022780927658081054, 0.022729503631591798, 0.022824960708618162, 0.02284284782409668, 0.02287811279296875, 0.02276620864868164, 0.02287526321411133, 0.022845727920532227, 0.022849695205688476, 0.022905279159545898, 0.02273689651489258, 0.02283910369873047, 0.02376959991455078, 0.022906688690185546, 0.022838752746582033, 0.022803167343139648, 0.022920896530151367, 0.02297007942199707, 0.023093599319458008, 0.02304198455810547, 0.023454944610595704, 0.023157215118408202, 0.023120191574096678, 0.0228623046875, 0.02289859199523926, 0.022970464706420897, 0.022895839691162108, 0.022944480895996093, 0.023193248748779295, 0.023294048309326174, 0.023103584289550783, 0.02288070487976074, 0.02289232063293457, 0.022933855056762695, 0.022972063064575197, 0.022899871826171876, 0.026249183654785156, 0.022999231338500976, 0.022954847335815428, 0.02299273681640625, 0.022979743957519533, 0.02296953582763672, 0.02277993583679199, 0.022845056533813475, 0.02274508857727051, 0.022738943099975584, 0.022765567779541016, 0.022980607986450196, 0.022814367294311525, 0.02284579277038574, 0.02400160026550293, 0.023960479736328123, 0.022978656768798827, 0.02298873519897461, 0.022988800048828126, 0.022902591705322266, 0.02289900779724121, 0.022845344543457033, 0.02295599937438965, 0.02295587158203125, 0.02332659149169922, 0.023025951385498046, 0.023175167083740233, 0.0229354248046875, 0.02304956817626953, 0.02287900733947754, 0.02290995216369629, 0.022789344787597657, 0.022931232452392578, 0.022832160949707032, 0.022776832580566408, 0.022965471267700197, 
0.02301932716369629, 0.022960607528686523, 0.023114208221435548, 0.023066368103027344, 0.022972671508789063, 0.02297667121887207, 0.023480159759521484, 0.023537792205810548, 0.02593984031677246, 0.02314854431152344, 0.02298374366760254, 0.023073759078979492, 0.023215551376342774, 0.022807071685791016, 0.022419456481933595, 0.022740127563476563, 0.02284649658203125, 0.02305001640319824, 0.023105567932128906, 0.023524768829345705, 0.023502944946289062, 0.023470048904418946, 0.023269920349121093, 0.023144447326660156, 0.02327552032470703, 0.023405567169189453, 0.023439519882202147, 0.023199743270874023, 0.02320083236694336, 0.0232589111328125, 0.023320512771606447, 0.023121023178100587, 0.023259424209594728, 0.02325775909423828, 0.023204160690307618, 0.022986207962036133, 0.022989023208618165, 0.022853631973266602, 0.022896255493164062, 0.02298918342590332, 0.02299203109741211, 0.023011711120605467, 0.02310406494140625, 0.02307472038269043, 0.023041791915893554, 0.02304435157775879, 0.022988800048828126, 0.022814720153808594, 0.02291663932800293, 0.02283568000793457, 0.02290483283996582, 0.02287615966796875, 0.02299679946899414, 0.022874528884887696, 0.023025279998779298, 0.022867488861083984, 0.02292390441894531, 0.022757375717163086, 0.022806528091430665, 0.022752735137939455, 0.022850431442260743, 0.0229454402923584, 0.022837247848510742, 0.022810176849365236, 0.022839744567871093, 0.022815935134887694, 0.022745920181274415, 0.022827295303344725, 0.023002111434936523, 0.023079647064208984, 0.023066879272460938, 0.02309503936767578, 0.023130207061767577, 0.02306220817565918, 0.023631103515625, 0.024343648910522462, 0.023490463256835938]",tokens/s,43.534814512656475,,, 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7422.939136,9464.315904,0.0,9061.793792,8463.626752,s,1,14.282189453125,14.282189453125,0.0,14.282189453125,14.282189453125,14.282189453125,14.282189453125,[14.282189453125],,kWh,0.00020822446782082504,2.2961068328723633e-05,9.519507615599054e-05,0.0003263806123055392,,MB,1755.471872,9478.995968,0.0,9061.793792,7981.246464,s,10,59.8304462890625,5.983044628906251,0.007914517241628114,5.9821706542968744,5.993629638671875,5.995847778320313,5.997622290039063,"[5.97485009765625, 5.97241748046875, 5.97657568359375, 5.981466796875, 5.97670849609375, 5.98287451171875, 5.9874775390625, 5.986873046875, 5.99313671875, 5.99806591796875]",tokens/s,42.787579882518585,kWh,0.00017525100245333606,1.9330775169855323e-05,0.00011649206541580192,0.0003110738430389933,tokens/kWh,822955.7249141975,MB,1764.958208,9478.995968,0.0,9061.793792,8267.75296,s,10,27.80796362304687,2.780796362304687,0.0020167303837971035,2.780684326171875,2.7839230712890624,2.784172351074219,2.7843717749023438,"[2.78054833984375, 2.778540771484375, 2.77809521484375, 2.780953857421875, 2.780478515625, 2.77860107421875, 2.7808203125, 2.78163623046875, 2.784421630859375, 2.78386767578125]",tokens/s,22.655380614705074,kWh,8.14487461112473e-05,8.98440797232119e-06,5.425023784459693e-05,0.00014468339192816537,tokens/kWh,435433.52944945614,,s,630,27.803847866058362,0.044133091850886264,0.00047712125641852547,0.04412859344482422,0.04480672264099121,0.04499372062683105,0.045189268493652346,"[0.04505523300170899, 0.043496192932128905, 0.043253726959228515, 0.04328860855102539, 0.04341961669921875, 0.04377958297729492, 0.04358607864379883, 0.043393024444580076, 0.04374931335449219, 0.043830783843994144, 0.04387078475952148, 0.04378822326660156, 0.044121566772460936, 0.04382575988769531, 0.0434442253112793, 0.04348937606811523, 0.043405216217041014, 0.04390812683105469, 0.04436383819580078, 0.04430940628051758, 0.04392755126953125, 0.043911167144775394, 0.04391731262207031, 0.04363468933105469, 0.043681793212890625, 0.04386195373535156, 0.04376697540283203, 0.04404022216796875, 0.04402467346191406, 0.04377324676513672, 0.044085952758789064, 0.04412211227416992, 0.04412623977661133, 0.04461939239501953, 0.044494880676269534, 0.044464447021484374, 0.04426518249511719, 0.04392534255981445, 0.0439189453125, 0.044108638763427736, 0.044488704681396485, 0.044197887420654294, 0.04415596771240234, 0.04483168029785156, 0.04483820724487304, 0.044784225463867185, 0.04473455810546875, 0.044955585479736326, 0.044130367279052736, 0.04402579116821289, 0.04434511947631836, 0.04432515335083008, 0.04451462554931641, 0.044595329284667966, 0.0443721923828125, 0.044250846862792965, 0.044859519958496095, 0.044515872955322264, 0.04413232040405273, 0.044068382263183596, 0.044703392028808596, 0.0451530876159668, 0.04486953735351563, 0.04493356704711914, 
0.04423404693603516, 0.04374393463134765, 0.04355276870727539, 0.04353843307495117, 0.04333123016357422, 0.043368801116943356, 0.04348262405395508, 0.04339948654174805, 0.04367958450317383, 0.04348144149780273, 0.04375267028808594, 0.04360476684570312, 0.043476993560791016, 0.04361334228515625, 0.04385676956176758, 0.04426041412353516, 0.044196033477783205, 0.043719390869140624, 0.04356310272216797, 0.04407865524291992, 0.044579166412353516, 0.04398080062866211, 0.04375884628295899, 0.0438823356628418, 0.044111873626708986, 0.043907520294189456, 0.04388671875, 0.04404054260253906, 0.04373708724975586, 0.043870208740234375, 0.043781631469726565, 0.04419174575805664, 0.04444390487670898, 0.04406502532958984, 0.04395849609375, 0.04405430221557617, 0.04417536163330078, 0.044520896911621095, 0.04414726257324219, 0.04409872055053711, 0.04411625671386719, 0.044149471282958985, 0.044242782592773436, 0.044267520904541016, 0.04446144104003906, 0.04427836990356445, 0.04415596771240234, 0.044501983642578125, 0.04469145584106445, 0.044355583190917966, 0.044303585052490234, 0.04429904174804687, 0.044539745330810544, 0.04495935821533203, 0.04437001419067383, 0.04436422348022461, 0.04429523086547851, 0.04489471817016601, 0.04532064056396484, 0.0446382064819336, 0.0442081298828125, 0.04439244842529297, 0.04519232177734375, 0.043795295715332035, 0.04328579330444336, 0.04303494262695313, 0.04319504165649414, 0.043388702392578124, 0.04332444763183594, 0.04373193740844727, 0.04413446426391601, 0.043691967010498045, 0.04349542236328125, 0.043730560302734374, 0.043734752655029296, 0.04386268615722656, 0.043718814849853516, 0.04366873550415039, 0.04390972900390625, 0.043886592864990234, 0.04379852676391602, 0.04396569442749024, 0.04369420623779297, 0.044407455444335935, 0.04439241409301758, 0.04400243377685547, 0.04384159851074219, 0.04369622421264648, 0.04371734237670898, 0.04374323272705078, 0.0437391357421875, 0.043730175018310544, 0.044155647277832034, 0.04399638366699219, 0.044413089752197266, 0.043991680145263674, 0.0438579216003418, 0.04421587371826172, 0.04454252624511719, 0.04411520004272461, 0.04410569763183594, 0.04409001541137696, 0.044365825653076174, 0.044537857055664064, 0.04414499282836914, 0.04432860946655273, 0.04437315368652344, 0.044526432037353514, 0.04463411331176758, 0.044811649322509764, 0.04462860870361328, 0.04433084869384766, 0.044304542541503907, 0.04473855972290039, 0.04461948776245117, 0.04432515335083008, 0.04430601501464844, 0.04395254516601563, 0.044458080291748046, 0.04485676956176758, 0.044716449737548826, 0.04433212661743164, 0.04450739288330078, 0.04460358428955078, 0.04437075042724609, 0.04484899139404297, 0.043519840240478516, 0.04330732727050781, 0.043053054809570314, 0.04336163330078125, 0.04368649673461914, 0.043500831604003906, 0.04337859344482422, 0.04344268798828125, 0.043356704711914065, 0.04325360107421875, 0.04390003204345703, 0.04404038238525391, 0.04368211364746094, 0.043806209564208984, 0.04374924850463867, 0.04367462539672851, 0.043905025482177736, 0.044240062713623046, 0.04411385726928711, 0.04354547119140625, 0.043640830993652346, 0.043711841583251955, 0.04425772857666015, 0.04440700912475586, 0.04444569778442383, 0.04391526412963867, 0.043660736083984374, 0.043899585723876956, 0.043913089752197265, 0.04447366333007813, 0.04450559997558594, 0.04509715270996094, 0.043829280853271486, 0.044077022552490235, 0.044458080291748046, 0.0445684814453125, 0.04411379241943359, 0.043810462951660155, 0.043977184295654295, 0.04436787033081055, 0.04425014495849609, 
0.04425212860107422, 0.04426675033569336, 0.044458751678466794, 0.044144641876220705, 0.04452511978149414, 0.044793281555175785, 0.04487475204467774, 0.04454515075683594, 0.04476198577880859, 0.04446003341674805, 0.04448435211181641, 0.043974910736083984, 0.04443360137939453, 0.044410686492919925, 0.044160064697265626, 0.044391361236572266, 0.04483891296386719, 0.044943294525146484, 0.04482835388183594, 0.0455786247253418, 0.04475503921508789, 0.04473244857788086, 0.043732959747314455, 0.04333903884887695, 0.043301601409912106, 0.04335123062133789, 0.043582271575927735, 0.04353228759765625, 0.04352000045776367, 0.04376166534423828, 0.0434417610168457, 0.04382921600341797, 0.04411027145385742, 0.04365676879882813, 0.04333932876586914, 0.04332649612426758, 0.04387401580810547, 0.043554977416992186, 0.04364432144165039, 0.043872737884521486, 0.04408943939208984, 0.04391020965576172, 0.04407593536376953, 0.04407913589477539, 0.04415596771240234, 0.04444255828857422, 0.04381801605224609, 0.043778465270996096, 0.04411225509643555, 0.04416348648071289, 0.04442095947265625, 0.044179393768310544, 0.04416307067871094, 0.0450819206237793, 0.04494611358642578, 0.04414851379394531, 0.04376959991455078, 0.04398060989379883, 0.044327232360839845, 0.0441429443359375, 0.04399679946899414, 0.04407315063476563, 0.04387772750854492, 0.04438108825683594, 0.04404780960083008, 0.04425369644165039, 0.04431603240966797, 0.04486835098266601, 0.044709758758544924, 0.04411391830444336, 0.04432691192626953, 0.04436387252807617, 0.04420393753051758, 0.04425049591064453, 0.04465727996826172, 0.04548604965209961, 0.04497001647949219, 0.044058624267578124, 0.04427980804443359, 0.04460543823242188, 0.044349441528320314, 0.044803585052490234, 0.04483107376098633, 0.04501327896118164, 0.044806175231933594, 0.043503585815429686, 0.0432592658996582, 0.043260257720947264, 0.043135265350341796, 0.04360121536254883, 0.043367103576660154, 0.04362035369873047, 0.04341350555419922, 0.04346575927734375, 0.043627487182617185, 0.043587520599365236, 0.04376121520996094, 0.043811294555664064, 0.04360809707641602, 0.04400332641601563, 0.04404851150512695, 0.04356630325317383, 0.043651615142822266, 0.043556991577148436, 0.044236640930175784, 0.044574878692626954, 0.04417740631103516, 0.04404864120483398, 0.04393139266967774, 0.043835391998291014, 0.04347052764892578, 0.04368124771118164, 0.04381782531738281, 0.04411603164672852, 0.04412633514404297, 0.04385488128662109, 0.04457961654663086, 0.04458313751220703, 0.044025630950927735, 0.04409958267211914, 0.04444569778442383, 0.044529441833496095, 0.04445820617675781, 0.04429209518432617, 0.044142593383789064, 0.04390457534790039, 0.04429471969604492, 0.04445990371704102, 0.04417919921875, 0.044447486877441406, 0.044298240661621094, 0.04446236801147461, 0.04431689453125, 0.044478462219238284, 0.04428995132446289, 0.04428790283203125, 0.04403833770751953, 0.04433651351928711, 0.044335742950439454, 0.044455936431884766, 0.04442512130737305, 0.04431068801879883, 0.04466476821899414, 0.04517232131958008, 0.04530217742919922, 0.045004737854003905, 0.045112415313720705, 0.044980255126953125, 0.04363167953491211, 0.04333865737915039, 0.043450366973876955, 0.04329676818847656, 0.04331110382080078, 0.04338278579711914, 0.04316291046142578, 0.04338351821899414, 0.043720703125, 0.043759456634521486, 0.04370060729980469, 0.043881343841552733, 0.04365609741210937, 0.04414620971679688, 0.04416150283813477, 0.04397875213623047, 0.04370636749267578, 0.04386816024780273, 0.04359299087524414, 
0.04427446365356445, 0.04461500930786133, 0.04408380889892578, 0.04373289489746094, 0.04373235321044922, 0.04382147216796875, 0.04405894470214844, 0.04401465606689453, 0.044209087371826175, 0.04417740631103516, 0.043850814819335934, 0.043795425415039065, 0.04444105529785156, 0.043983360290527344, 0.04401766586303711, 0.044146942138671874, 0.04420991897583008, 0.04399289703369141, 0.04437996673583984, 0.04453209686279297, 0.04403206253051758, 0.043841472625732424, 0.04469158554077148, 0.04477324676513672, 0.04483001708984375, 0.04414739227294922, 0.043815967559814456, 0.044604385375976566, 0.04510105514526367, 0.044916481018066404, 0.044361984252929684, 0.043937793731689455, 0.04450614547729492, 0.044401630401611325, 0.04453376007080078, 0.04435968017578125, 0.044338977813720704, 0.044341217041015624, 0.044418880462646484, 0.045041793823242186, 0.04512704086303711, 0.04495219039916992, 0.045160606384277345, 0.04510665512084961, 0.04391891098022461, 0.04328739166259766, 0.04330643081665039, 0.043690559387207034, 0.04352614212036133, 0.043507713317871094, 0.04356300735473633, 0.04372038269042969, 0.04374560165405274, 0.04364831924438477, 0.04377260971069336, 0.04355276870727539, 0.04351363372802734, 0.04418515014648437, 0.0439384651184082, 0.04382515335083008, 0.04391948699951172, 0.04371852874755859, 0.0437770881652832, 0.04382815933227539, 0.044300289154052735, 0.04399728012084961, 0.04369776153564453, 0.04366294479370117, 0.04398767852783203, 0.043905025482177736, 0.043935134887695314, 0.04411180877685547, 0.043945953369140624, 0.043829952239990234, 0.04423481750488281, 0.04451331329345703, 0.04405238342285156, 0.04442726516723633, 0.04435148620605469, 0.04439363098144531, 0.04449776077270508, 0.0441869125366211, 0.044372703552246096, 0.04440057754516601, 0.04436588668823242, 0.04552207946777344, 0.04518179321289063, 0.04457174301147461, 0.04411689758300781, 0.04418560028076172, 0.04409958267211914, 0.0443983039855957, 0.04431081771850586, 0.04419548797607422, 0.04417561721801758, 0.044143905639648436, 0.04409222412109375, 0.044659870147705075, 0.04434211349487305, 0.04432691192626953, 0.044536865234375, 0.045079521179199215, 0.044902400970458986, 0.045094367980957034, 0.04457017517089844, 0.0445590705871582, 0.04506623840332031, 0.04421440124511719, 0.043456382751464843, 0.04302643203735351, 0.04309980773925781, 0.043364639282226565, 0.04340332794189453, 0.04372480010986328, 0.044165119171142575, 0.04383500671386719, 0.04365327835083008, 0.04385744094848633, 0.04391347122192383, 0.0436638069152832, 0.044267520904541016, 0.043920448303222656, 0.04356560134887695, 0.043569568634033204, 0.04394569778442383, 0.043640224456787106, 0.04406576156616211, 0.04414249420166016, 0.044034046173095705, 0.04436377716064453, 0.04436332702636719, 0.04391161727905273, 0.04361625671386719, 0.04364400100708008, 0.043810718536376955, 0.04445884704589844, 0.04428201675415039, 0.04384128189086914, 0.04403212738037109, 0.044241024017333985, 0.044242080688476564, 0.044090206146240235, 0.04401139068603516, 0.04430444717407227, 0.044310558319091795, 0.04412793731689453, 0.04507683181762695, 0.04494073486328125, 0.04451699066162109, 0.04435039901733399, 0.04492915344238281, 0.045015998840332035, 0.04422342300415039, 0.04375142288208008, 0.044594718933105466, 0.04496432113647461, 0.044644351959228515, 0.04432598495483398, 0.044405567169189454, 0.04471567916870117, 0.04495600128173828, 0.044499038696289066, 0.044273662567138675, 0.04442931365966797, 0.044556289672851565, 0.04483651351928711, 0.045076320648193356, 
0.04480575942993164, 0.04492524719238281, 0.04505430221557617, 0.04365484619140625, 0.043288894653320316, 0.043105438232421876, 0.043350879669189456, 0.04351795196533203, 0.043687934875488284, 0.04372995376586914, 0.04359267044067383, 0.04394150543212891, 0.04364531326293945, 0.04356201553344727, 0.04384451293945312, 0.04367776107788086, 0.043650177001953124, 0.043839969635009766, 0.04372316741943359, 0.0436607666015625, 0.044261920928955076, 0.04426342391967773, 0.04409148788452148, 0.04410563278198242, 0.044349441528320314, 0.04444364929199219, 0.044410911560058594, 0.04412924957275391, 0.04366636657714844, 0.04368320083618164, 0.04387846374511719, 0.04411891174316406, 0.04428160095214844, 0.04390092849731445, 0.044216320037841796, 0.04421343994140625, 0.04408979034423828, 0.04429452896118164, 0.0441077766418457, 0.04414230346679687, 0.04416236877441406, 0.04470476913452148, 0.04452742385864258, 0.044157089233398436, 0.04460489654541016, 0.04478339385986328, 0.04511756896972656, 0.04519990539550781, 0.044931198120117186, 0.04450300979614258, 0.04416921615600586, 0.0441343994140625, 0.044365825653076174, 0.04453769683837891, 0.04429852676391602, 0.04409027099609375, 0.04459142303466797, 0.0444156494140625, 0.044572673797607425, 0.04435865783691406, 0.04413859176635742, 0.04468624114990234, 0.04506419372558594, 0.045146110534667966, 0.04509491348266602]",tokens/s,22.658734252717405,,, 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7365.615616,9680.32256,0.0,9277.800448,8679.633408,s,1,14.6065517578125,14.6065517578125,0.0,14.6065517578125,14.6065517578125,14.6065517578125,14.6065517578125,[14.6065517578125],,kWh,0.0002202249773500474,2.4275743730692758e-05,9.910480150601697e-05,0.00034360552258675715,,MB,1655.689216,9695.002624,0.0,9277.800448,8206.444544,s,10,3.7120443420410156,0.37120443420410154,0.0006681567095566875,0.3711235809326172,0.3720616821289063,0.3721268035888672,0.37217890075683596,"[0.3702177734375, 0.37020516967773437, 0.3708755798339844, 0.3708663330078125, 0.3710728759765625, 0.3720472106933594, 0.3718988037109375, 0.37219192504882814, 0.37117428588867185, 0.371494384765625]",tokens/s,689.646934172241,kWh,1.0856331238579672e-05,1.1968106982419532e-06,7.166312317407341e-06,1.9219454254228966e-05,tokens/kWh,13319837.10950954,MB,1662.369792,9695.002624,0.0,9277.800448,8483.104256,s,10,24.172592041015626,2.417259204101563,0.0015979727995271006,2.4176365966796873,2.4189665039062502,2.419009130859375,2.419043232421875,"[2.414333984375, 2.416475341796875, 2.41579931640625, 2.415375, 2.417224853515625, 2.41804833984375, 2.41895703125, 2.41856298828125, 2.4190517578125, 2.418763427734375]",tokens/s,26.062575289030946,kWh,7.055182806766771e-05,7.782042443606228e-06,4.702684214819368e-05,0.00012536071265946763,tokens/kWh,502549.79142575926,,s,630,24.168888195037855,0.03836331459529816,0.0003634643988102865,0.03837334442138672,0.038835912704467776,0.038894379234313964,0.038991026992797856,"[0.038518463134765625, 0.03791251373291016, 0.037675457000732424, 0.037698848724365235, 0.037688289642333984, 0.03775692749023438, 0.03774188613891601, 0.03784956741333008, 0.03786886215209961, 0.037911136627197264, 0.03787545776367188, 0.03793155288696289, 0.03794128036499023, 0.03791462326049805, 0.03784003067016602, 0.037868385314941404, 0.03787939071655273, 0.03791299057006836, 0.038043647766113284, 0.038174720764160154, 0.03830704116821289, 0.03842127990722656, 0.0381168327331543, 0.038279361724853515, 0.03819139099121094, 0.038230079650878906, 0.038246078491210936, 0.038346046447753905, 0.03820851135253906, 0.03822358322143555, 0.038158016204833986, 0.03813846588134766, 0.038276737213134765, 0.03836147308349609, 0.03831542587280273, 0.03830998229980469, 0.0384189453125, 0.03849420928955078, 0.038458751678466796, 0.03840252685546875, 0.038396064758300784, 0.038383617401123046, 0.038508544921875, 0.03857718276977539, 0.038530017852783205, 0.03852288055419922, 0.03862527847290039, 0.03864166259765625, 0.03866537475585938, 0.038689216613769534, 0.03869913482666015, 0.03880121612548828, 0.03876617431640625, 0.038761024475097654, 0.03869318389892578, 0.03887638473510742, 0.038793697357177734, 0.038869087219238284, 0.03875609588623047, 0.03874828720092773, 0.038879295349121094, 0.03897987365722656, 
0.038932479858398435, 0.03863513565063476, 0.03789561462402344, 0.03779270553588867, 0.037787647247314454, 0.037789695739746096, 0.03780777740478516, 0.03781872177124023, 0.037908287048339845, 0.03787747192382813, 0.03788217544555664, 0.0378779182434082, 0.03792281723022461, 0.037924095153808596, 0.03798912048339844, 0.03800678253173828, 0.037930240631103514, 0.03788019180297852, 0.03787782287597656, 0.03795999908447266, 0.03806412887573242, 0.038160385131835936, 0.038243358612060546, 0.03827811050415039, 0.0380948486328125, 0.03820537567138672, 0.03837958526611328, 0.038317214965820315, 0.03824316787719727, 0.03827257537841797, 0.03842297744750976, 0.038299648284912106, 0.03856793594360351, 0.03841024017333984, 0.038408191680908206, 0.038623233795166016, 0.0384532470703125, 0.03838771057128906, 0.038432769775390625, 0.03842435073852539, 0.03842284774780273, 0.038497440338134764, 0.03851545715332031, 0.03846758270263672, 0.03849609756469727, 0.03852508926391601, 0.03866387176513672, 0.03863580703735352, 0.03855916976928711, 0.03860131072998047, 0.03873996734619141, 0.0387665901184082, 0.0387523193359375, 0.03879315185546875, 0.0387949104309082, 0.03886428833007813, 0.03884128189086914, 0.03883567810058594, 0.03884236907958984, 0.0387889289855957, 0.038849246978759765, 0.03884233474731445, 0.03883590316772461, 0.03887129592895508, 0.03857702255249024, 0.03791987228393555, 0.037802879333496096, 0.037738048553466796, 0.037742046356201174, 0.037809120178222654, 0.037873504638671875, 0.03795167922973633, 0.037898208618164064, 0.03783865737915039, 0.037770431518554685, 0.0378603515625, 0.037832447052001957, 0.03799577713012695, 0.0379504623413086, 0.03796355056762695, 0.03794761657714844, 0.037943294525146484, 0.037986305236816405, 0.03807743835449219, 0.038265857696533206, 0.038289310455322266, 0.03811337661743164, 0.03807353591918945, 0.038126049041748045, 0.038266433715820315, 0.03832073593139648, 0.038320159912109374, 0.03823427200317383, 0.03826204681396484, 0.03828937530517578, 0.03842329788208008, 0.0384450569152832, 0.038332416534423826, 0.03830579376220703, 0.03851468658447266, 0.03839907073974609, 0.03846620941162109, 0.03837475204467773, 0.03849030303955078, 0.03862713623046875, 0.038607776641845705, 0.038526817321777346, 0.03856300735473633, 0.03854025650024414, 0.03867647933959961, 0.03867136001586914, 0.03868560028076172, 0.03866223907470703, 0.03864780807495117, 0.03875020980834961, 0.03875020980834961, 0.03872467041015625, 0.03879964828491211, 0.038746784210205075, 0.03881500625610351, 0.03877324676513672, 0.0388403205871582, 0.038887649536132815, 0.038811649322509766, 0.038817440032958984, 0.038903743743896484, 0.03883446502685547, 0.03846464157104492, 0.03809779357910156, 0.037795520782470705, 0.037765087127685545, 0.03770544052124023, 0.037788127899169924, 0.037787071228027345, 0.03781827163696289, 0.03791331100463867, 0.03786966323852539, 0.037923999786376957, 0.03791548919677734, 0.037935104370117184, 0.03799647903442383, 0.03796384048461914, 0.03785903930664063, 0.03783657455444336, 0.03786092758178711, 0.038076766967773436, 0.038099552154541014, 0.03815423965454102, 0.038209537506103515, 0.03818051147460937, 0.03819145584106445, 0.038093921661376956, 0.03815283203125, 0.038273151397705076, 0.038309600830078124, 0.03826265716552734, 0.038330238342285154, 0.038256767272949216, 0.0382583999633789, 0.0382690544128418, 0.038305919647216795, 0.03837724685668945, 0.03841312026977539, 0.038342144012451174, 0.038317790985107424, 0.0383719367980957, 0.03842886352539063, 
0.038424575805664066, 0.038706207275390626, 0.03867132949829102, 0.038550689697265626, 0.03852124786376953, 0.03860934448242188, 0.038670337677001954, 0.03866553497314453, 0.03870995330810547, 0.038732990264892575, 0.038644542694091795, 0.03868239974975586, 0.03867628860473633, 0.03880089569091797, 0.038873374938964846, 0.038808193206787106, 0.038811649322509766, 0.03883599853515625, 0.03878115081787109, 0.038786911010742185, 0.0389521598815918, 0.038960063934326175, 0.039120670318603515, 0.038537025451660156, 0.03790428924560547, 0.03776899337768555, 0.03770329666137695, 0.037731201171875, 0.037860542297363284, 0.037851905822753905, 0.03784483337402344, 0.037824737548828126, 0.03784499359130859, 0.037983585357666015, 0.037931678771972656, 0.037975841522216794, 0.037937374114990235, 0.03795072174072266, 0.03795225524902344, 0.03793072128295898, 0.03797804641723633, 0.038100639343261716, 0.03830598449707031, 0.03832617568969727, 0.03826054382324219, 0.03816115188598633, 0.0380816650390625, 0.038077152252197266, 0.038231967926025394, 0.03839814376831055, 0.03832432174682617, 0.03830579376220703, 0.03834470367431641, 0.03827507019042969, 0.03832012939453125, 0.038348800659179685, 0.03843686294555664, 0.03836431884765625, 0.03847420883178711, 0.03850809478759765, 0.03857081604003906, 0.038432769775390625, 0.038424575805664066, 0.0384716796875, 0.03860684967041016, 0.03867647933959961, 0.038652992248535155, 0.038617374420166016, 0.038684574127197266, 0.03874278259277344, 0.03877443313598633, 0.038728031158447265, 0.038778881072998046, 0.03886211013793945, 0.03875299072265625, 0.03874166488647461, 0.03878294372558594, 0.03883660888671875, 0.038768638610839845, 0.03874185562133789, 0.03878710556030274, 0.03887260818481445, 0.03883478546142578, 0.03879935836791992, 0.03890995025634766, 0.03890585708618164, 0.03853279876708984, 0.037841312408447264, 0.037741920471191404, 0.037806751251220704, 0.03780729675292969, 0.037988704681396486, 0.03785504150390625, 0.03782457733154297, 0.03781897735595703, 0.03784499359130859, 0.03802316665649414, 0.0380579833984375, 0.03792486572265625, 0.03789823913574219, 0.03790140914916992, 0.038017822265625, 0.038077983856201175, 0.038000320434570314, 0.03811420822143555, 0.03826483154296875, 0.03830915069580078, 0.03829219055175781, 0.03822332763671875, 0.038212127685546875, 0.038155582427978514, 0.038259391784667966, 0.038324222564697266, 0.03837542343139649, 0.03830988693237305, 0.03845302581787109, 0.03844252777099609, 0.0384150390625, 0.03832777786254883, 0.03825923156738281, 0.03832556915283203, 0.03833721542358398, 0.03841228866577148, 0.038529022216796875, 0.03847577667236328, 0.03837542343139649, 0.03844617462158203, 0.03848694229125976, 0.038634559631347654, 0.03869177627563476, 0.03862732696533203, 0.038698112487792966, 0.03874643325805664, 0.03868729782104492, 0.03879020690917969, 0.03879008102416992, 0.03877027130126953, 0.038794654846191406, 0.03889984130859375, 0.03891043090820313, 0.03883788681030274, 0.038754142761230466, 0.03879417419433594, 0.03886489486694336, 0.03881315231323242, 0.03878249740600586, 0.0388078727722168, 0.0388656005859375, 0.03899558258056641, 0.038640640258789063, 0.037966846466064456, 0.03781631851196289, 0.037763072967529294, 0.037806079864501956, 0.03782451248168945, 0.037768672943115235, 0.03775542449951172, 0.03784089660644531, 0.03802019119262695, 0.03792784118652344, 0.037932064056396486, 0.037922878265380856, 0.03799542236328125, 0.0380313606262207, 0.037967872619628903, 0.037926910400390625, 0.03799039840698242, 
0.038152191162109376, 0.038266880035400394, 0.038223167419433594, 0.0381569938659668, 0.03818035125732422, 0.03819318389892578, 0.03836975860595703, 0.038397953033447264, 0.03840204620361328, 0.038308929443359375, 0.03828012847900391, 0.03832627105712891, 0.038316032409667966, 0.03853107070922852, 0.038547454833984376, 0.038449153900146485, 0.038338558197021484, 0.03836723327636719, 0.038438911437988284, 0.03852492904663086, 0.03848172760009766, 0.038411487579345704, 0.03840304183959961, 0.03851878356933594, 0.03853238296508789, 0.03861977767944336, 0.03889091110229492, 0.03879596710205078, 0.03865564727783203, 0.038607200622558596, 0.03862732696533203, 0.038833343505859375, 0.03882681655883789, 0.0387883186340332, 0.03896809768676758, 0.03893977737426758, 0.038863742828369144, 0.038877185821533204, 0.03890073776245117, 0.039010494232177735, 0.03891487884521484, 0.03887308883666992, 0.038803455352783206, 0.0389222412109375, 0.038888927459716796, 0.03879935836791992, 0.038074047088623046, 0.037789886474609374, 0.037671039581298825, 0.03775283050537109, 0.03781347274780274, 0.037817119598388675, 0.037924095153808596, 0.03790105438232422, 0.03793008041381836, 0.037870494842529294, 0.03801283264160156, 0.037924800872802734, 0.03800867080688477, 0.03795999908447266, 0.037879585266113285, 0.03792832183837891, 0.03795439910888672, 0.03812351989746094, 0.03813721466064453, 0.03813235092163086, 0.03821894454956055, 0.03817350387573242, 0.03826278305053711, 0.03825161743164063, 0.03838447952270508, 0.03837734222412109, 0.03836307144165039, 0.03823436737060547, 0.0382457275390625, 0.03817539215087891, 0.038309856414794924, 0.03849628829956055, 0.038553119659423825, 0.038382049560546874, 0.038405887603759764, 0.038451454162597654, 0.038534271240234376, 0.03850944137573242, 0.038434814453125, 0.03851059341430664, 0.03859251022338867, 0.038446399688720705, 0.0385420150756836, 0.03862732696533203, 0.038725631713867184, 0.03874816131591797, 0.038717025756835936, 0.03868867111206055, 0.038806015014648435, 0.038940414428710934, 0.038832382202148436, 0.038796672821044924, 0.038801280975341794, 0.03882060623168945, 0.03883212661743164, 0.03878092956542969, 0.038897216796875, 0.03895750427246094, 0.038964767456054684, 0.03887971115112305, 0.03904716873168945, 0.03900617599487305, 0.038645759582519534, 0.037961025238037106, 0.03783340835571289, 0.03790848159790039, 0.0378787841796875, 0.03783929443359375, 0.037798465728759764, 0.037748737335205076, 0.03790198516845703, 0.03791024017333984, 0.037982719421386715, 0.037910655975341795, 0.03802828979492188, 0.03804876708984375, 0.03800678253173828, 0.03802521514892578, 0.0380497932434082, 0.03805305480957031, 0.03824313735961914, 0.03822959899902344, 0.038114974975585934, 0.03813443374633789, 0.03815945434570313, 0.03815523147583008, 0.038289310455322266, 0.038287487030029294, 0.03821363067626953, 0.03835084915161133, 0.038358432769775394, 0.03835667037963867, 0.038245281219482424, 0.03830764770507813, 0.03832598495483398, 0.03842095947265625, 0.03852694320678711, 0.03860063934326172, 0.03851683044433594, 0.038518367767333986, 0.03847004699707031, 0.03849814224243164, 0.03845097732543945, 0.038541152954101564, 0.03867081451416016, 0.038700736999511716, 0.038709632873535155, 0.03865190505981445, 0.038569664001464846, 0.038656318664550784, 0.038653953552246094, 0.03871539306640625, 0.03879270553588867, 0.03893612670898437, 0.03881260681152344, 0.03874326324462891, 0.038771488189697265, 0.038870975494384764, 0.03883404922485351, 0.03885276794433594, 
0.03888336181640625, 0.038946273803710935, 0.03890639877319336, 0.039050846099853515, 0.03908240127563477, 0.03853107070922852, 0.03782451248168945, 0.0377375373840332, 0.037854145050048825, 0.0377446403503418, 0.03783065414428711, 0.03793414306640625, 0.03794425582885742, 0.03789798355102539, 0.038023422241210934, 0.037994495391845705, 0.037943294525146484, 0.037910209655761716, 0.038069793701171875, 0.03798054504394531, 0.037914241790771484, 0.03798505783081055, 0.03805184173583984, 0.03808051300048828, 0.038219135284423826, 0.038314624786376955, 0.03835408020019531, 0.03827775955200195, 0.03828348922729492, 0.038250495910644534, 0.03822556686401367, 0.03820169448852539, 0.03819830322265625, 0.03827344131469727, 0.03826540756225586, 0.03831398391723633, 0.03841743850708008, 0.03836163330078125, 0.038537662506103514, 0.03854131317138672, 0.03855155181884766, 0.038422527313232424, 0.03850774383544922, 0.03850640106201172, 0.03865689468383789, 0.0386110725402832, 0.03864345550537109, 0.03855168151855469, 0.038561790466308594, 0.03864524841308594, 0.038652030944824216, 0.038766975402832034, 0.03874198532104492, 0.03871132659912109, 0.03885232162475586, 0.038851871490478515, 0.03881180953979492, 0.038720352172851566, 0.038771839141845704, 0.038756416320800784, 0.038803905487060544, 0.03873830413818359, 0.038785022735595705, 0.038903167724609375, 0.03892841720581055, 0.038916702270507815, 0.03890969467163086, 0.03885696029663086]",tokens/s,26.066569339724385,,, 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1015, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 840, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,6756.962304,9179.103232,0.0,8776.58112,8188.314112,s,1,14.1703427734375,14.1703427734375,0.0,14.1703427734375,14.1703427734375,14.1703427734375,14.1703427734375,[14.1703427734375],,kWh,0.00019324395340833384,2.130692485338129e-05,8.742868105399149e-05,0.0003019795593157066,,MB,1677.561856,9193.783296,0.0,8776.58112,7663.089664,s,10,54.29687890625,5.429687890625001,0.0037331231340487696,5.430859130859375,5.43243642578125,5.43279853515625,5.43308822265625,"[5.41960205078125, 5.42863818359375, 5.4275537109375, 5.43092138671875, 5.4316552734375, 5.430796875, 5.43316064453125, 5.43235595703125, 5.43214794921875, 5.430046875]",tokens/s,47.14819804689222,kWh,0.0001579229014475004,1.741932692939311e-05,0.00010502005623819755,0.0002803622846150911,tokens/kWh,913104.2727500312,MB,1688.039424,9193.783296,0.0,8776.58112,7906.289152,s,10,27.879911132812502,2.78799111328125,0.0014530938717368998,2.7884769287109377,2.7896196533203126,2.7896219604492187,2.7896238061523437,"[2.785320068359375, 2.78697119140625, 2.789624267578125, 2.788497314453125, 2.789508056640625, 2.78597216796875, 2.78735009765625, 2.78845654296875, 
2.78859228515625, 2.789619140625]",tokens/s,22.596915642910307,kWh,8.145568311416655e-05,8.985223876190704e-06,5.4101404392201125e-05,0.00014454231138255852,tokens/kWh,435858.53441390325,,s,630,27.876565055847166,0.04424851596166217,0.0004372625950080227,0.044235198974609376,0.04467927398681641,0.04480108528137207,0.04601438064575196,"[0.046448768615722655, 0.04435424041748047, 0.04375699234008789, 0.04352057647705078, 0.04364262390136719, 0.04370832061767578, 0.04347110366821289, 0.04332515335083008, 0.04344473648071289, 0.04356620788574219, 0.04381577682495117, 0.0437410888671875, 0.044217376708984374, 0.04376470565795899, 0.04384486389160156, 0.04385065460205078, 0.04383119964599609, 0.043850719451904295, 0.043915679931640625, 0.04388083267211914, 0.0441776008605957, 0.04441292953491211, 0.04438544082641602, 0.04433804702758789, 0.04409955215454102, 0.0441995849609375, 0.04395043182373047, 0.04423884963989258, 0.04410988616943359, 0.04395206451416016, 0.043929214477539065, 0.04400892639160156, 0.04423158264160156, 0.0442531852722168, 0.04418073654174805, 0.04428057479858399, 0.044305950164794924, 0.04440111923217773, 0.04443260955810547, 0.04430108642578125, 0.044027904510498046, 0.044222015380859375, 0.04465913772583008, 0.044779582977294924, 0.04458444976806641, 0.044507423400878904, 0.04447817611694336, 0.04427798461914063, 0.04498179244995117, 0.044694206237792966, 0.04451062393188476, 0.04413872146606445, 0.04427999877929688, 0.04432915115356445, 0.04426102447509766, 0.044363422393798826, 0.04451532745361328, 0.04443001556396484, 0.04458905410766602, 0.04437923049926758, 0.04469033432006836, 0.04445724868774414, 0.044720222473144534, 0.04619673538208008, 0.04444569778442383, 0.04384678268432617, 0.04370460891723633, 0.0437479362487793, 0.04370841598510742, 0.04363695907592773, 0.04358335876464844, 0.043415454864501955, 0.043658496856689454, 0.04376448059082031, 0.04400035095214844, 0.04400406265258789, 0.04399532699584961, 0.04406233596801758, 0.04400809478759766, 0.044289344787597655, 0.0440219841003418, 0.04388678359985351, 0.04378543853759766, 0.04394425582885742, 0.044612064361572265, 0.044505088806152344, 0.044322113037109374, 0.04424569702148438, 0.04413644790649414, 0.04400492858886719, 0.043940128326416014, 0.04403779220581055, 0.04399715042114258, 0.044060447692871096, 0.044475135803222654, 0.04408303833007812, 0.044031806945800785, 0.04405078506469726, 0.04431039810180664, 0.04436966323852539, 0.04416304016113281, 0.04423881530761719, 0.044498912811279295, 0.04498684692382812, 0.04414668655395508, 0.04449087905883789, 0.044619647979736325, 0.04453555297851562, 0.04432204818725586, 0.04439756774902344, 0.04447177505493164, 0.04460307312011719, 0.04442607879638672, 0.04419379043579102, 0.044174785614013674, 0.044374847412109376, 0.04436943817138672, 0.0445577278137207, 0.04436563110351562, 0.044329376220703126, 0.044560543060302736, 0.044681663513183596, 0.044447742462158206, 0.04455948638916016, 0.04456265640258789, 0.04465116882324219, 0.04598166275024414, 0.04439859390258789, 0.04376108932495117, 0.043690208435058595, 0.043534175872802734, 0.04338083267211914, 0.04345657730102539, 0.04372719955444336, 0.043732864379882816, 0.043665504455566405, 0.043699710845947266, 0.043902496337890624, 0.043762401580810545, 0.04376604843139648, 0.044072158813476564, 0.043916065216064455, 0.044323871612548825, 0.044264415740966796, 0.04414838409423828, 0.04413481521606445, 0.04424515151977539, 0.04451110458374023, 0.04457257461547852, 0.04456243133544922, 0.04422860717773437, 
0.044066368103027345, 0.043940288543701175, 0.0440645751953125, 0.045442752838134766, 0.04401408004760742, 0.04416460800170898, 0.04413455963134766, 0.04433545684814453, 0.04415078353881836, 0.04410688018798828, 0.044194686889648435, 0.04434675216674805, 0.04412604904174805, 0.044480510711669925, 0.04438505554199219, 0.04445782470703125, 0.04432400131225586, 0.04429721450805664, 0.0447628173828125, 0.04464262390136719, 0.04459724807739258, 0.044455936431884766, 0.044506401062011716, 0.04446281433105469, 0.04435353469848633, 0.04430758285522461, 0.04418649673461914, 0.0441396484375, 0.04452646255493164, 0.04442227172851562, 0.04473535919189453, 0.0446135368347168, 0.04478534317016602, 0.04474921417236328, 0.044485729217529295, 0.044374942779541016, 0.04490848159790039, 0.044779327392578124, 0.04622300720214844, 0.04428630447387695, 0.04385955047607422, 0.04363100814819336, 0.04364265441894531, 0.043649246215820316, 0.04348271942138672, 0.043712287902832034, 0.04456307220458984, 0.04364265441894531, 0.04357712173461914, 0.04382297515869141, 0.04387699127197266, 0.044007137298583986, 0.04389068984985352, 0.04396464157104492, 0.04411391830444336, 0.04394566345214844, 0.043990814208984375, 0.044060543060302736, 0.04422927856445313, 0.045239326477050784, 0.04443849563598633, 0.04457062530517578, 0.044482048034667966, 0.044157440185546876, 0.044072032928466794, 0.04379676818847656, 0.0440241584777832, 0.04417721557617187, 0.04418608093261719, 0.043982433319091796, 0.04389110565185547, 0.044058624267578124, 0.04407295989990234, 0.04415900802612305, 0.04435760116577148, 0.044148735046386715, 0.04396607971191406, 0.04417574310302735, 0.04432486343383789, 0.04444140625, 0.04470502471923828, 0.04459206390380859, 0.044703678131103514, 0.044741985321044925, 0.04466352081298828, 0.044539390563964845, 0.04446572875976563, 0.044305343627929684, 0.044516960144042966, 0.04433148956298828, 0.044279232025146484, 0.04428236770629883, 0.0445164794921875, 0.04432166290283203, 0.044553409576416014, 0.044715007781982424, 0.04467638397216797, 0.04456502532958984, 0.04445167922973633, 0.04465679931640625, 0.04467900848388672, 0.045867103576660156, 0.04424758529663086, 0.04368601608276367, 0.04358348846435547, 0.04351715087890625, 0.04350217437744141, 0.044050689697265624, 0.04418297576904297, 0.04379062271118164, 0.04395849609375, 0.043892032623291014, 0.04369382476806641, 0.043770591735839845, 0.043950145721435546, 0.04407699203491211, 0.043796703338623046, 0.04420105743408203, 0.044525985717773435, 0.043944286346435546, 0.04413251113891602, 0.04410515213012695, 0.044620094299316404, 0.04441628646850586, 0.04405302429199219, 0.04400969696044922, 0.044191967010498046, 0.0446032943725586, 0.04399222564697266, 0.043899646759033205, 0.044095680236816405, 0.04414284896850586, 0.044007007598876956, 0.04407040023803711, 0.04434422302246094, 0.044355136871337894, 0.044286144256591796, 0.044130016326904296, 0.04416675186157227, 0.0444730224609375, 0.04468339157104492, 0.044351551055908205, 0.0445335693359375, 0.044615264892578124, 0.04456195068359375, 0.04441907119750976, 0.044399486541748044, 0.04432915115356445, 0.0447053108215332, 0.04425116729736328, 0.04498195266723633, 0.044225086212158204, 0.04441292953491211, 0.044926559448242184, 0.04425878524780273, 0.044219329833984376, 0.044300193786621096, 0.04440435028076172, 0.044437984466552734, 0.044668926239013675, 0.04466479873657227, 0.044707038879394534, 0.045067073822021485, 0.044728321075439455, 0.04584272003173828, 0.04438447952270508, 0.04391145706176758, 
0.043697952270507816, 0.04344892883300781, 0.04351705551147461, 0.043477088928222656, 0.043765953063964844, 0.04368815994262695, 0.04356300735473633, 0.04354374313354492, 0.04357759857177734, 0.04367603302001953, 0.04384390258789062, 0.04412540817260742, 0.04402204895019531, 0.043991424560546874, 0.04400537490844727, 0.044066814422607424, 0.04405609512329101, 0.044009952545166015, 0.0445849609375, 0.04465663909912109, 0.04466483306884766, 0.04431248092651367, 0.04397884750366211, 0.043918689727783206, 0.04395484924316406, 0.04398899078369141, 0.043972190856933595, 0.04410192108154297, 0.04399113464355469, 0.04396854400634766, 0.04415484619140625, 0.044156574249267576, 0.044231040954589844, 0.04428121566772461, 0.04407571029663086, 0.04438739013671875, 0.04435647964477539, 0.04451087951660156, 0.044406398773193356, 0.04443939208984375, 0.044526206970214845, 0.04455654525756836, 0.044668670654296874, 0.044703392028808596, 0.044757598876953124, 0.04457235336303711, 0.04420579147338867, 0.04422684860229492, 0.04447673416137695, 0.044386302947998044, 0.04424508666992188, 0.044407936096191404, 0.044395393371582034, 0.04439849472045899, 0.04463411331176758, 0.044430400848388674, 0.044462337493896484, 0.04449292755126953, 0.04491683197021484, 0.04487830352783203, 0.04620521545410156, 0.044270465850830075, 0.043829727172851565, 0.043585952758789064, 0.043757568359375, 0.04344128036499023, 0.04356556701660156, 0.04385747146606445, 0.043699008941650394, 0.04373503875732422, 0.0436121597290039, 0.04396851348876953, 0.044036094665527346, 0.04390639877319336, 0.043803230285644534, 0.044036449432373045, 0.045136672973632816, 0.04375225448608398, 0.04404217529296875, 0.0440786247253418, 0.04411248016357422, 0.04446828842163086, 0.04614342498779297, 0.04416704177856445, 0.04412639999389648, 0.04408086395263672, 0.04381254577636719, 0.043969120025634766, 0.04397078323364258, 0.043902751922607425, 0.04395955276489258, 0.044088062286376954, 0.043937793731689455, 0.04405619049072266, 0.044130687713623044, 0.043991039276123044, 0.044308448791503904, 0.04407708740234375, 0.044101375579833985, 0.04422457504272461, 0.04443155288696289, 0.044539905548095705, 0.04469686508178711, 0.04461846542358398, 0.04469145584106445, 0.04484096145629883, 0.04482457733154297, 0.04445798492431641, 0.04436358261108399, 0.04425747299194336, 0.04437184143066406, 0.04409971237182617, 0.044385887145996096, 0.04434748840332031, 0.04431494522094727, 0.044228416442871094, 0.04446438217163086, 0.044517311096191406, 0.04451449584960938, 0.04452860641479492, 0.04437737655639649, 0.044362239837646485, 0.04480364990234375, 0.046027744293212894, 0.04425932693481445, 0.04366899108886719, 0.04390969467163086, 0.04361401748657227, 0.04350707244873047, 0.043680446624755856, 0.04357440185546875, 0.04382595062255859, 0.043912384033203126, 0.04402447891235352, 0.0438930549621582, 0.043857376098632814, 0.0437665901184082, 0.04409724807739258, 0.04393369674682617, 0.0441932487487793, 0.04407292938232422, 0.04399548721313477, 0.043929695129394535, 0.04416320037841797, 0.04438127899169922, 0.04457155227661133, 0.04435078430175781, 0.044257984161376954, 0.04396646499633789, 0.04376780700683594, 0.04379612731933594, 0.04401414489746094, 0.04405779266357422, 0.044020320892333986, 0.04417049789428711, 0.04400835037231445, 0.04416304016113281, 0.044104736328125, 0.044286144256591796, 0.04428867340087891, 0.04475699234008789, 0.044607425689697264, 0.04446022415161133, 0.044453758239746094, 0.04429619216918945, 0.04444364929199219, 0.044808414459228514, 
0.044896190643310546, 0.04474249649047852, 0.04460543823242188, 0.044555423736572265, 0.04433958435058594, 0.04441683197021484, 0.04435609436035156, 0.04408342361450195, 0.04421200180053711, 0.044342880249023435, 0.04435184097290039, 0.044429534912109374, 0.045146110534667966, 0.04454358291625977, 0.04474921417236328, 0.04467302322387695, 0.044423168182373046, 0.04460950469970703, 0.044715969085693356, 0.046066558837890625, 0.04443715286254883, 0.0437691535949707, 0.04352422332763672, 0.04347289657592773, 0.04344496154785156, 0.043568992614746095, 0.04359564971923828, 0.04374739074707031, 0.04372092819213867, 0.043675617218017576, 0.04396809768676758, 0.044028350830078125, 0.04398652648925781, 0.04404060745239258, 0.04405228805541992, 0.043839679718017575, 0.0441794548034668, 0.0442531852722168, 0.04413644790649414, 0.04406643295288086, 0.04417504119873047, 0.04420211029052734, 0.044497470855712894, 0.044432769775390624, 0.044311168670654294, 0.044113246917724606, 0.04408729553222656, 0.04392975997924805, 0.04408089447021484, 0.043988929748535154, 0.04401587295532226, 0.04422019195556641, 0.04421027374267578, 0.044175712585449216, 0.04431475067138672, 0.04431494522094727, 0.044126113891601565, 0.044548095703125, 0.04440803146362305, 0.04424489593505859, 0.044327232360839845, 0.04449337768554688, 0.04524832153320312, 0.04454419326782227, 0.044695552825927735, 0.0447979507446289, 0.04451686477661133, 0.04449871826171875, 0.04459539031982422, 0.04446284866333008, 0.0444681282043457, 0.04450268936157226, 0.04432825469970703, 0.044665760040283206, 0.044420799255371096, 0.04457708740234375, 0.044644351959228515, 0.04451859283447265, 0.04451206588745117, 0.044423263549804685, 0.04446403121948242, 0.04457241439819336, 0.0458623046875, 0.044292671203613285, 0.04358364868164062, 0.043468639373779296, 0.04377804946899414, 0.04399087905883789, 0.043802783966064456, 0.043671070098876955, 0.04354838562011719, 0.04368265533447266, 0.04379430389404297, 0.04387401580810547, 0.04380105590820312, 0.04378620910644531, 0.043945022583007816, 0.04381526565551758, 0.04400511932373047, 0.04411260986328125, 0.043966049194335936, 0.043794849395751956, 0.04389388656616211, 0.044337982177734374, 0.044507457733154294, 0.04437990570068359, 0.04437395095825195, 0.04403792190551758, 0.04415216064453125, 0.04562992095947266, 0.04408163070678711, 0.04384972763061523, 0.043835647583007814, 0.044119808197021486, 0.04406476974487305, 0.04399657440185547, 0.04418620681762695, 0.04478358459472656, 0.04592233657836914, 0.044197887420654294, 0.04434534454345703, 0.04427529525756836, 0.044137153625488285, 0.04414787292480469, 0.044696128845214844, 0.044648193359375, 0.044576416015625, 0.04465631866455078, 0.044534271240234374, 0.044673439025878905, 0.04464863967895508, 0.04452851104736328, 0.04482320022583008, 0.04444803237915039, 0.04427280044555664, 0.044302398681640626, 0.04463455963134766, 0.044402145385742185, 0.044453887939453124, 0.04432128143310547, 0.04450035095214844, 0.04451456069946289, 0.04470694351196289, 0.044617664337158205, 0.044503742218017575]",tokens/s,22.59962799354493,,, 
4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1068.531712,972.947456,0.0,570.425344,525.840896,s,1,7.98743310546875,7.98743310546875,0.0,7.98743310546875,7.98743310546875,7.98743310546875,7.98743310546875,[7.98743310546875],,kWh,2.6730272479153423e-05,2.941267131023003e-06,9.09584061004054e-06,3.876738022021697e-05,,MB,1340.68224,1014.890496,0.0,597.68832,584.940544,s,10,1.5586878662109374,0.15586878662109377,0.0007503300508319614,0.15568619537353515,0.15641563720703125,0.1571077896118164,0.15766151153564453,"[0.15779994201660155, 0.15600051879882812, 0.1553948211669922, 0.15523558044433594, 0.15592515563964843, 0.15544723510742187, 0.15626182556152343, 0.1553744659423828, 0.15509584045410157, 0.15615248107910157]",tokens/s,1642.4070883564282,kWh,4.773362480175584e-06,5.264121357180841e-07,3.1564317366129845e-06,8.456206352506653e-06,tokens/kWh,30273622.63033169,MB,1353.863168,1029.57056,0.0,612.368384,597.290496,s,10,11.253393920898437,1.1253393920898438,0.004400241990184523,1.125660888671875,1.1275730224609375,1.132034130859375,1.135603017578125,"[1.1259849853515624, 1.119443115234375, 1.12368115234375, 1.1211156005859375, 1.1364952392578125, 1.1260740966796876, 1.1264990234375, 1.1253367919921875, 1.1221822509765624, 1.1265816650390625]",tokens/s,55.983110911103935,kWh,3.235351238940947e-05,3.56822173740068e-06,1.3582470543387206e-05,4.950420467019735e-05,tokens/kWh,1272619.1728503301,,s,630,11.247792383193959,0.017853638703482493,0.0003503114338392176,0.017770463943481446,0.018118466758728028,0.01830409116744995,0.01920559673309326,"[0.018544639587402344, 0.018227039337158205, 0.017838239669799805, 0.01793177604675293, 0.017750528335571288, 0.01777663993835449, 0.0177860164642334, 0.01775846481323242, 0.018028255462646484, 0.01776313591003418, 0.01783113670349121, 0.01781235122680664, 0.01778495979309082, 0.017686176300048827, 0.017825983047485353, 0.017752063751220702, 0.01773139190673828, 0.017825983047485353, 0.017800416946411133, 0.017756959915161134, 0.0179749755859375, 0.01784864044189453, 0.01782579231262207, 0.01779052734375, 0.017895872116088868, 0.017696767807006835, 0.01783216094970703, 0.017815168380737306, 0.01782595252990723, 0.017891231536865233, 0.01790166473388672, 0.017741216659545898, 0.017828447341918945, 0.017737728118896484, 0.01782374382019043, 0.018141088485717775, 0.017930335998535156, 0.017829887390136717, 0.018094079971313477, 0.018011808395385742, 0.017864032745361327, 0.01787596893310547, 0.01770086479187012, 0.01825161552429199, 0.017846431732177735, 0.01807151985168457, 0.01775823974609375, 0.01770086479187012, 0.01780735969543457, 0.019125856399536133, 0.017746559143066408, 0.017708831787109375, 0.017597536087036132, 0.017685407638549804, 0.017729120254516603, 0.017661376953125, 0.017597408294677735, 0.01780735969543457, 0.017655807495117186, 0.017696767807006835, 0.01790278434753418, 
0.018058048248291016, 0.01790140724182129, 0.01830678367614746, 0.018187904357910158, 0.017869600296020506, 0.017836032867431642, 0.017773855209350587, 0.01776304054260254, 0.017688575744628905, 0.017707008361816406, 0.01768966484069824, 0.0176231689453125, 0.01765679931640625, 0.01768022346496582, 0.017496063232421876, 0.017602527618408203, 0.01753670310974121, 0.017869279861450194, 0.01791168022155762, 0.01778860855102539, 0.017641279220581056, 0.017568256378173826, 0.01778892707824707, 0.017589792251586914, 0.017781471252441405, 0.018058368682861328, 0.01792848014831543, 0.018282079696655275, 0.01801315116882324, 0.01788809585571289, 0.018010208129882813, 0.017957632064819335, 0.017936479568481444, 0.01790764808654785, 0.01792620849609375, 0.01788047981262207, 0.017922464370727538, 0.017860864639282225, 0.01774995231628418, 0.017679424285888673, 0.01777145576477051, 0.017757312774658203, 0.01763759994506836, 0.017616607666015624, 0.017700960159301758, 0.017636192321777343, 0.017632255554199217, 0.017640447616577147, 0.017581632614135742, 0.017532384872436524, 0.017695775985717775, 0.01773103904724121, 0.017652191162109376, 0.017701087951660158, 0.017645023345947267, 0.01767865562438965, 0.017583583831787108, 0.01765635108947754, 0.017715200424194336, 0.017725439071655275, 0.017683744430541992, 0.01767430305480957, 0.017566368103027342, 0.017618495941162108, 0.01771958351135254, 0.018417215347290038, 0.0184652156829834, 0.018509695053100586, 0.01847923278808594, 0.01813055992126465, 0.017979904174804686, 0.01774959945678711, 0.017998111724853515, 0.01762726402282715, 0.017966175079345705, 0.017825599670410155, 0.01809916877746582, 0.018135040283203126, 0.0181060791015625, 0.017909503936767577, 0.018020896911621093, 0.017711103439331053, 0.017694719314575197, 0.01763737678527832, 0.017689823150634765, 0.017695520401000978, 0.017680383682250975, 0.01769603157043457, 0.01772572708129883, 0.017713727951049803, 0.017700128555297852, 0.01768448066711426, 0.017842559814453124, 0.017754335403442383, 0.017827840805053712, 0.01777459144592285, 0.01779478454589844, 0.01767452812194824, 0.017789087295532226, 0.01773551940917969, 0.017727615356445313, 0.01781670379638672, 0.017662559509277344, 0.017844383239746093, 0.017707008361816406, 0.017629024505615234, 0.017625247955322266, 0.017588224411010742, 0.01773129653930664, 0.01758582305908203, 0.017584543228149414, 0.01763145637512207, 0.01765497589111328, 0.017635744094848634, 0.017555871963500978, 0.01764575958251953, 0.017635135650634765, 0.01780735969543457, 0.017870336532592773, 0.017731359481811523, 0.017717983245849608, 0.01763532829284668, 0.0176167049407959, 0.0176560001373291, 0.017620288848876953, 0.017894079208374023, 0.019171327590942384, 0.01792153549194336, 0.018481088638305665, 0.01804911994934082, 0.0178319034576416, 0.01775766372680664, 0.01776508712768555, 0.017639232635498048, 0.01768448066711426, 0.01762303924560547, 0.01776639938354492, 0.0176693115234375, 0.01771299171447754, 0.017662656784057616, 0.01791619110107422, 0.01780121612548828, 0.017750015258789064, 0.017588224411010742, 0.017743871688842772, 0.01767747116088867, 0.017766559600830078, 0.017661823272705077, 0.017796159744262696, 0.01840006446838379, 0.017619775772094726, 0.017612735748291017, 0.017561792373657226, 0.017666048049926757, 0.017635168075561522, 0.021006271362304686, 0.018379199981689454, 0.017605663299560547, 0.017674495697021484, 0.01757027244567871, 0.017661951065063478, 0.017655839920043947, 0.017590272903442384, 0.01759587287902832, 0.0177260799407959, 
0.017569696426391602, 0.017622783660888673, 0.017591552734375, 0.01757900810241699, 0.017512447357177736, 0.01765900802612305, 0.01758297538757324, 0.017655807495117186, 0.017653440475463866, 0.01778236770629883, 0.017834047317504882, 0.017704704284667968, 0.017720224380493164, 0.017801088333129884, 0.017693887710571288, 0.018123680114746094, 0.017831968307495116, 0.017889215469360353, 0.017680448532104494, 0.017633407592773438, 0.01762656021118164, 0.017774080276489256, 0.017647872924804686, 0.017723295211791994, 0.017711872100830077, 0.01766771125793457, 0.01806937599182129, 0.017964927673339844, 0.017762720108032228, 0.01761996841430664, 0.019569440841674803, 0.019345504760742187, 0.01867366409301758, 0.017838048934936523, 0.01837059211730957, 0.021952287673950195, 0.018235263824462892, 0.017758560180664063, 0.01774595260620117, 0.01771660804748535, 0.017574495315551757, 0.017727487564086913, 0.017640544891357423, 0.017701375961303712, 0.017754400253295898, 0.01782089614868164, 0.018041664123535157, 0.01802249526977539, 0.018004127502441406, 0.018066368103027343, 0.018035200119018553, 0.017917823791503907, 0.017886816024780275, 0.01811123275756836, 0.01807910346984863, 0.01796998405456543, 0.01816969680786133, 0.017895423889160156, 0.019204256057739257, 0.01818966484069824, 0.018115232467651367, 0.017902591705322265, 0.018078975677490235, 0.01805695915222168, 0.018114431381225586, 0.018096063613891603, 0.017967296600341798, 0.017966815948486328, 0.017825632095336913, 0.017818048477172853, 0.017830144882202147, 0.01786854362487793, 0.01769267272949219, 0.017698816299438477, 0.017682432174682617, 0.017713151931762695, 0.017700672149658203, 0.017619136810302735, 0.017715200424194336, 0.017881248474121095, 0.01766387176513672, 0.017612384796142577, 0.017684640884399413, 0.01763475227355957, 0.01762179183959961, 0.017764352798461915, 0.018135040283203126, 0.01845631980895996, 0.017614368438720704, 0.018167936325073242, 0.018077695846557617, 0.01784182357788086, 0.017934080123901366, 0.017861215591430665, 0.017750015258789064, 0.017713151931762695, 0.017659711837768554, 0.01764575958251953, 0.017649663925170898, 0.01778198432922363, 0.017796960830688477, 0.017632192611694335, 0.017823488235473632, 0.017608959197998045, 0.017661951065063478, 0.01783296012878418, 0.0178187198638916, 0.018149280548095705, 0.01815769577026367, 0.018300800323486327, 0.018117887496948242, 0.018438943862915037, 0.01800921630859375, 0.017822559356689454, 0.018044288635253907, 0.017858943939208984, 0.017813760757446288, 0.01774131202697754, 0.017737472534179687, 0.01775187110900879, 0.01767430305480957, 0.017748863220214843, 0.017633279800415038, 0.017683456420898438, 0.01773465538024902, 0.017786880493164063, 0.017696767807006835, 0.017721343994140625, 0.017718271255493166, 0.017660383224487305, 0.017639263153076172, 0.01779318428039551, 0.017574560165405272, 0.018157440185546873, 0.018382463455200195, 0.01780499267578125, 0.01763372802734375, 0.018104576110839845, 0.01967942428588867, 0.018033599853515624, 0.017892223358154297, 0.017770496368408203, 0.017844032287597657, 0.01769286346435547, 0.01780735969543457, 0.01791971206665039, 0.017795360565185547, 0.017750015258789064, 0.01780531120300293, 0.017817184448242186, 0.017682912826538087, 0.017686464309692382, 0.018249727249145507, 0.018241535186767577, 0.01807155227661133, 0.018255872726440428, 0.017999872207641602, 0.018135040283203126, 0.01920614433288574, 0.017911808013916015, 0.01780944061279297, 0.017887264251708983, 0.017948480606079103, 
0.017700096130371094, 0.017734272003173828, 0.017701183319091797, 0.017778335571289064, 0.017817216873168944, 0.0176912956237793, 0.01778233528137207, 0.017738176345825196, 0.01772287940979004, 0.017644031524658203, 0.017887231826782226, 0.017832128524780274, 0.018059072494506837, 0.01771308708190918, 0.017655328750610353, 0.017715744018554688, 0.01759347152709961, 0.017574560165405272, 0.018516191482543944, 0.01759231948852539, 0.017682432174682617, 0.01761894416809082, 0.017795072555541993, 0.017727487564086913, 0.01778278350830078, 0.017827680587768555, 0.017774560928344726, 0.017766592025756835, 0.01785036849975586, 0.017919296264648436, 0.017832576751708986, 0.01772480010986328, 0.01775686454772949, 0.01781760025024414, 0.01803856086730957, 0.017739999771118165, 0.017640768051147462, 0.017652095794677733, 0.017639392852783202, 0.01772313690185547, 0.01769327926635742, 0.017823392868041993, 0.01772480010986328, 0.017695615768432617, 0.017728960037231446, 0.017684127807617186, 0.01780838394165039, 0.017821695327758787, 0.018059680938720703, 0.01839084815979004, 0.018286367416381837, 0.01862563133239746, 0.018601984024047852, 0.018312192916870116, 0.018015071868896483, 0.017819807052612303, 0.017879360198974608, 0.01778166389465332, 0.017777376174926758, 0.01769068717956543, 0.017936384201049805, 0.017704959869384765, 0.017770496368408203, 0.01762704086303711, 0.017644672393798827, 0.017683456420898438, 0.01832908821105957, 0.017791007995605467, 0.017785280227661134, 0.017737728118896484, 0.01789651107788086, 0.017893312454223632, 0.017828575134277345, 0.017760255813598632, 0.017787328720092775, 0.017628416061401368, 0.017669952392578125, 0.017707807540893555, 0.017709056854248048, 0.017716991424560548, 0.0180001277923584, 0.01762918472290039, 0.017612255096435547, 0.01764816093444824, 0.017680192947387697, 0.01762323188781738, 0.017661951065063478, 0.017971200942993162, 0.020929887771606447, 0.018835552215576173, 0.01794927978515625, 0.017749984741210936, 0.01768448066711426, 0.017580032348632812, 0.018013408660888672, 0.017789888381958007, 0.017704160690307617, 0.017838720321655274, 0.017697792053222656, 0.017929216384887696, 0.017875104904174804, 0.017868640899658204, 0.017757440567016603, 0.017708799362182618, 0.0176812801361084, 0.017928319931030272, 0.017673952102661133, 0.017735328674316406, 0.017746559143066408, 0.01761408042907715, 0.017622079849243164, 0.017650623321533204, 0.01775276756286621, 0.017534496307373047, 0.017615392684936525, 0.018347936630249022, 0.01799635124206543, 0.017776575088500977, 0.017886432647705078, 0.017806112289428713, 0.01759791946411133, 0.017631200790405272, 0.017633792877197265, 0.017674367904663087, 0.017731584548950196, 0.017922048568725587, 0.017670143127441407, 0.017702911376953127, 0.017709056854248048, 0.017704959869384765, 0.017745920181274414, 0.0177271671295166, 0.017920320510864257, 0.017707231521606446, 0.017664960861206055, 0.017673055648803712, 0.017762304306030274, 0.017819648742675782, 0.017826847076416016, 0.01783475112915039, 0.01772172737121582, 0.01777836799621582, 0.018016416549682616, 0.017946624755859376, 0.01798102378845215, 0.017865119934082033, 0.01822127914428711, 0.017763744354248046, 0.017819583892822264, 0.0177127685546875, 0.01789961624145508, 0.017859519958496092, 0.017735456466674803, 0.01807155227661133, 0.017876096725463867, 0.01778495979309082, 0.01769139289855957, 0.0176844482421875, 0.017862495422363282, 0.01818844795227051, 0.017802656173706053, 0.017856319427490233, 0.017705440521240234, 0.017709407806396484, 
0.017721343994140625, 0.01776639938354492, 0.017729536056518554, 0.017686527252197267, 0.017753440856933592, 0.017814176559448242, 0.017942655563354493, 0.017717119216918945, 0.017811456680297853, 0.017735679626464843, 0.01766383934020996, 0.01779871940612793, 0.01779158401489258, 0.017696767807006835, 0.018225311279296875, 0.018081439971923827, 0.018143775939941407, 0.017915231704711914, 0.01784079933166504, 0.017947839736938476, 0.017942975997924805, 0.017838239669799805, 0.017745567321777345, 0.017755775451660155, 0.017961919784545897, 0.017747488021850586, 0.01818876838684082, 0.01809779167175293, 0.018080127716064452, 0.018190336227416993, 0.018169183731079102, 0.01806377601623535, 0.018039295196533203, 0.01806719970703125, 0.017958240509033205, 0.018113183975219726, 0.018190336227416993, 0.017806976318359376, 0.017760608673095705, 0.017870880126953124, 0.017842111587524415, 0.01770086479187012, 0.017737791061401366, 0.01765996742248535, 0.017653696060180663, 0.017774560928344726, 0.017716960906982424, 0.01770639991760254, 0.01770569610595703, 0.01771120071411133, 0.017803359985351562, 0.01761052894592285, 0.017629312515258788, 0.018243679046630858, 0.017741472244262695, 0.017664447784423828, 0.01783363151550293, 0.017807199478149415, 0.01786617660522461, 0.017820831298828124, 0.017737823486328123, 0.017743583679199218, 0.017719295501708983, 0.017769792556762695, 0.017836383819580078, 0.0178056640625, 0.017735904693603515, 0.017854240417480467, 0.017878879547119142, 0.01789529609680176, 0.017919551849365233, 0.018023199081420898, 0.017782720565795898, 0.0179036808013916, 0.01777043151855469, 0.017732767105102538, 0.01792086410522461]",tokens/s,56.010991182707315,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7357.227008,9680.32256,0.0,9277.800448,8679.633408,s,1,14.63746875,14.63746875,0.0,14.63746875,14.63746875,14.63746875,14.63746875,[14.63746875],,kWh,0.00021881269433338276,2.4125730179328053e-05,9.808285624401947e-05,0.0003410212807567303,,MB,1774.20288,9695.002624,0.0,9277.800448,8206.575616,s,10,3.9579817199707032,0.39579817199707035,0.002183568118028987,0.39579788208007816,0.3973909362792969,0.3990055419921875,0.4002972265625,"[0.39171075439453124, 0.3934425048828125, 0.40062014770507814, 0.3954133911132813, 0.39585723876953127, 0.39571035766601564, 0.3959869384765625, 0.39703213500976564, 0.395738525390625, 0.3964697265625]",tokens/s,646.7942959622736,kWh,1.147750153958476e-05,1.2649393206617255e-06,7.602644970999401e-06,2.0345085831245888e-05,tokens/kWh,12582891.127784597,MB,1785.1392,9695.002624,0.0,9277.800448,8480.92416,s,10,27.59565698242188,2.759565698242188,0.002787377926266119,2.759844482421875,2.762532592773437,2.762802307128906,2.763018078613281,"[2.753501953125, 2.763072021484375, 2.757341796875, 2.7610771484375, 2.762303466796875, 2.759253662109375, 2.757710693359375, 2.75848828125, 
2.76247265625, 2.760435302734375]",tokens/s,22.829679336908082,kWh,8.055827870708947e-05,8.886574458631513e-06,5.358865398200117e-05,0.00014303350714772213,tokens/kWh,440456.23474040156,,s,630,27.59219088745116,0.04379712839277963,0.000472317259624237,0.04376972770690918,0.04416977844238281,0.044275498771667476,0.045491162147521975,"[0.04407603073120117, 0.0434411506652832, 0.043077278137207034, 0.04318207931518555, 0.04386851119995117, 0.04324966430664062, 0.04317184066772461, 0.04345206451416016, 0.043194721221923825, 0.043358207702636715, 0.04327135848999023, 0.0433322868347168, 0.04339110565185547, 0.0433493766784668, 0.043266689300537106, 0.04330672073364258, 0.043482879638671874, 0.043340320587158206, 0.043450366973876955, 0.04365039825439453, 0.043605663299560546, 0.043590431213378904, 0.043608287811279296, 0.04362179183959961, 0.043631103515625, 0.04364883041381836, 0.04356134414672851, 0.043561153411865235, 0.04351919937133789, 0.04356585693359375, 0.043585247039794925, 0.04364643096923828, 0.04362499237060547, 0.043943935394287106, 0.04373289489746094, 0.043722846984863284, 0.04365478515625, 0.0438337287902832, 0.04377324676513672, 0.04383718490600586, 0.043850143432617186, 0.04392911911010742, 0.04400019073486328, 0.044143680572509766, 0.04429081726074219, 0.04406464004516602, 0.04407948684692383, 0.044034046173095705, 0.04387807846069336, 0.04380656051635742, 0.043923072814941407, 0.043873184204101565, 0.04385955047607422, 0.043905376434326175, 0.04398899078369141, 0.044187198638916014, 0.04396012878417969, 0.044079456329345706, 0.04402819061279297, 0.0440074234008789, 0.04398899078369141, 0.044023231506347654, 0.04411859130859375, 0.043883102416992184, 0.04344636917114258, 0.04322652816772461, 0.04327459335327148, 0.04311843109130859, 0.043200897216796874, 0.04323219299316406, 0.04317875289916992, 0.043130977630615235, 0.04325983810424805, 0.043255870819091796, 0.04333567810058594, 0.04347283172607422, 0.04333539199829101, 0.04328444671630859, 0.04342595291137695, 0.043540702819824216, 0.04348825454711914, 0.04343910217285156, 0.04377958297729492, 0.043604480743408204, 0.0437391357421875, 0.043671550750732424, 0.0436121597290039, 0.04364287948608398, 0.043816638946533204, 0.04376521682739258, 0.043705184936523436, 0.04374118423461914, 0.043843006134033205, 0.043677375793457034, 0.04362646484375, 0.043834239959716796, 0.04361161422729492, 0.04362678527832031, 0.04382339096069336, 0.04366745758056641, 0.04382080078125, 0.04413459014892578, 0.044202175140380856, 0.04395945739746094, 0.04402380752563476, 0.04398710250854492, 0.04412681579589844, 0.04564169692993164, 0.04903705596923828, 0.04401587295532226, 0.04418764877319336, 0.04427775955200195, 0.044123489379882815, 0.04394659042358398, 0.04401142501831055, 0.04408540725708008, 0.043976703643798826, 0.043960319519042966, 0.044027904510498046, 0.04397055816650391, 0.04413747024536133, 0.04412313461303711, 0.044251136779785157, 0.04411391830444336, 0.044119327545166016, 0.04415116882324219, 0.04380672073364258, 0.0433023681640625, 0.043116352081298825, 0.04315004730224609, 0.04319990539550781, 0.04323091125488281, 0.0431932487487793, 0.043245567321777346, 0.0433070068359375, 0.04328444671630859, 0.043257278442382814, 0.043509857177734375, 0.04341302490234375, 0.043526336669921874, 0.0434409294128418, 0.043583137512207035, 0.0436363525390625, 0.0438138542175293, 0.04371996688842773, 0.04364064025878906, 0.04366403198242187, 0.04372447967529297, 0.04369388961791992, 0.04435148620605469, 0.04374784088134766, 0.04379676818847656, 
0.04376956939697266, 0.04366131210327148, 0.04360976028442383, 0.043657569885253905, 0.043708385467529295, 0.043660640716552734, 0.0437275505065918, 0.04371865463256836, 0.043846817016601564, 0.04383420944213867, 0.04371046447753906, 0.04380166244506836, 0.0437011833190918, 0.044087615966796875, 0.043851455688476565, 0.044130016326904296, 0.04407734298706055, 0.044058624267578124, 0.04395212936401367, 0.044007007598876956, 0.04402422332763672, 0.044160255432128905, 0.04394630432128906, 0.04399763107299805, 0.04421836853027344, 0.04399135971069336, 0.04404601669311523, 0.044009471893310545, 0.04405657577514648, 0.04400940704345703, 0.04393715286254883, 0.044143295288085936, 0.044077056884765625, 0.044050430297851564, 0.04402156829833984, 0.04411126327514649, 0.0441126708984375, 0.04400707244873047, 0.043461185455322265, 0.04320479965209961, 0.04324246215820313, 0.043539295196533205, 0.04492892837524414, 0.04343817520141602, 0.04335174560546875, 0.0434252815246582, 0.04348396682739258, 0.04338675308227539, 0.043505790710449216, 0.04335411071777344, 0.04341350555419922, 0.04335580825805664, 0.043532638549804686, 0.0437841911315918, 0.0437391357421875, 0.043515903472900394, 0.04357273483276367, 0.043678207397460936, 0.04358758544921875, 0.04383129501342774, 0.0436341438293457, 0.043458335876464846, 0.04355763244628906, 0.04356915283203125, 0.043700031280517575, 0.0435918083190918, 0.04369619369506836, 0.04370431900024414, 0.04384153747558594, 0.04379033660888672, 0.04386611175537109, 0.04373299026489258, 0.04376959991455078, 0.04372326278686523, 0.043801822662353516, 0.043743392944335935, 0.043999614715576174, 0.04441088104248047, 0.044049854278564456, 0.044017215728759766, 0.04406988906860351, 0.04406486511230469, 0.044058528900146485, 0.04404633712768555, 0.044055999755859374, 0.0440305290222168, 0.04420608139038086, 0.04397875213623047, 0.04403734588623047, 0.044366622924804686, 0.04433651351928711, 0.044180095672607424, 0.04421222305297851, 0.04415871810913086, 0.04411008071899414, 0.04415283203125, 0.04416307067871094, 0.04414048004150391, 0.04417715072631836, 0.04422588729858398, 0.04396928024291992, 0.04345446395874023, 0.04319440078735352, 0.04318819046020508, 0.04328201675415039, 0.04378870391845703, 0.04452499389648437, 0.04327686309814453, 0.04330672073364258, 0.04330115127563477, 0.04314316940307617, 0.043241374969482424, 0.043484798431396486, 0.04346345520019531, 0.04345206451416016, 0.04340943908691406, 0.043548671722412106, 0.04344595336914062, 0.043501888275146484, 0.04345840072631836, 0.043561119079589844, 0.04362371063232422, 0.043580127716064454, 0.04375142288208008, 0.04354048156738281, 0.04359711837768555, 0.043739711761474606, 0.04367494583129883, 0.04380140686035156, 0.0437507209777832, 0.04361286544799805, 0.04372275161743164, 0.04394710540771484, 0.0436940803527832, 0.04367657470703125, 0.043732383728027346, 0.04374723052978516, 0.04395692825317383, 0.04417536163330078, 0.044058399200439455, 0.04392777633666992, 0.043872001647949216, 0.04398105621337891, 0.043937633514404294, 0.04397030258178711, 0.0439463996887207, 0.04392051315307617, 0.04390796661376953, 0.04396646499633789, 0.04408662414550781, 0.04410559844970703, 0.04398076629638672, 0.043959102630615234, 0.043939422607421875, 0.04393571090698242, 0.04402182388305664, 0.04401190567016602, 0.04502732849121094, 0.045039615631103515, 0.0458076171875, 0.04555980682373047, 0.0442429428100586, 0.04440646362304688, 0.044169055938720704, 0.0435079345703125, 0.04327916717529297, 0.04314265441894531, 0.04316793441772461, 
0.043268096923828124, 0.04318361663818359, 0.04331980895996094, 0.04320665740966797, 0.043211071014404294, 0.04329859161376953, 0.04334172821044922, 0.043319297790527345, 0.04346220779418945, 0.043485633850097655, 0.04355481719970703, 0.043509471893310545, 0.04348092651367187, 0.04347459030151367, 0.04364777755737305, 0.04362879943847656, 0.043859649658203125, 0.0437820816040039, 0.043996673583984375, 0.044781822204589844, 0.04381734466552734, 0.043730945587158204, 0.043769855499267575, 0.043730945587158204, 0.04360396957397461, 0.043568897247314456, 0.04370867156982422, 0.04400259017944336, 0.04360470581054687, 0.04362854385375976, 0.04492902374267578, 0.044394142150878904, 0.044003681182861326, 0.044034046173095705, 0.04396028900146484, 0.04386345672607422, 0.04394867324829101, 0.04395334243774414, 0.04394188690185547, 0.043926334381103514, 0.04411139297485352, 0.04410825729370117, 0.04424499130249023, 0.04389888000488281, 0.043925407409667966, 0.04389043045043945, 0.044042591094970704, 0.04394982528686524, 0.043909374237060546, 0.04400444793701172, 0.04398979187011719, 0.04401270294189453, 0.043971134185791017, 0.04401126480102539, 0.04410585784912109, 0.04405916976928711, 0.044272735595703126, 0.04422505569458008, 0.04403823852539063, 0.04360367965698242, 0.04322544097900391, 0.043106239318847654, 0.04311164855957031, 0.04314511871337891, 0.04324441528320312, 0.043294654846191404, 0.04330281448364258, 0.043253982543945316, 0.04332883071899414, 0.043270240783691405, 0.04333622360229492, 0.04343807983398437, 0.04337868881225586, 0.04335001754760742, 0.043669185638427734, 0.045859134674072266, 0.04345651245117187, 0.043649024963378906, 0.043687934875488284, 0.04395600128173828, 0.043745502471923825, 0.043804672241210936, 0.04362444686889649, 0.043579391479492184, 0.04365311813354492, 0.04352384185791015, 0.043598079681396486, 0.04368588638305664, 0.04349542236328125, 0.04358892822265625, 0.043815616607666016, 0.043614334106445315, 0.04360179138183594, 0.04372480010986328, 0.043763713836669924, 0.04388988876342773, 0.04392425537109375, 0.043939201354980466, 0.04394172668457031, 0.04399593734741211, 0.04407855987548828, 0.04418825531005859, 0.04418899154663086, 0.044122303009033206, 0.044039871215820314, 0.04393072128295898, 0.04395792007446289, 0.043977886199951174, 0.04382332611083985, 0.04397068786621094, 0.043958782196044925, 0.04402175903320313, 0.04400243377685547, 0.044055423736572265, 0.04393983840942383, 0.04410163116455078, 0.04415225601196289, 0.04420998382568359, 0.04424969482421875, 0.04415663909912109, 0.0440544319152832, 0.04399158477783203, 0.0433616943359375, 0.04323142242431641, 0.04314595031738281, 0.043072990417480465, 0.04317033767700195, 0.0431566390991211, 0.043125598907470704, 0.043184127807617184, 0.043244609832763674, 0.04322604751586914, 0.0433438720703125, 0.04323328018188476, 0.04324969482421875, 0.04336841583251953, 0.04328179168701172, 0.04344812774658203, 0.043934654235839844, 0.04377529525756836, 0.0442803840637207, 0.043548671722412106, 0.043646976470947264, 0.04360752105712891, 0.0437026252746582, 0.0434312973022461, 0.04365190505981445, 0.04360192108154297, 0.04354851150512695, 0.04353612899780274, 0.043558879852294924, 0.04365561676025391, 0.0436363525390625, 0.04390719985961914, 0.04362470245361328, 0.04370636749267578, 0.04378582382202149, 0.043721183776855466, 0.04381923294067383, 0.04383084869384766, 0.04401168060302734, 0.0440437126159668, 0.044053150177001954, 0.04408681488037109, 0.0441748161315918, 0.044093345642089846, 0.0441190071105957, 
0.04406480026245117, 0.04397596740722656, 0.04385456085205078, 0.0450334701538086, 0.045408416748046875, 0.044049888610839846, 0.04403443145751953, 0.04408729553222656, 0.044109825134277345, 0.04401116943359375, 0.04402406311035156, 0.04417526245117188, 0.04405654525756836, 0.04437001419067383, 0.044222240447998044, 0.044232128143310546, 0.04447308731079101, 0.04539209747314453, 0.04359942245483398, 0.04323974227905274, 0.043200511932373044, 0.0432492790222168, 0.04331161499023437, 0.04325724792480469, 0.04325027084350586, 0.04335126495361328, 0.04330547332763672, 0.04334211349487305, 0.04348720169067383, 0.0434299201965332, 0.043576446533203125, 0.043594623565673826, 0.04366118240356445, 0.04364505767822266, 0.04352201461791992, 0.04359686279296875, 0.04350870513916016, 0.043638046264648435, 0.04358015823364258, 0.043654945373535155, 0.04364502334594727, 0.04356835174560547, 0.04358028793334961, 0.043587039947509766, 0.043616798400878905, 0.04366131210327148, 0.043651359558105465, 0.04359958267211914, 0.04366262435913086, 0.043878368377685543, 0.043633121490478516, 0.04382953643798828, 0.04386643218994141, 0.04406969451904297, 0.04399808120727539, 0.043905025482177736, 0.043997024536132814, 0.04394329452514648, 0.04407366561889649, 0.0441319694519043, 0.04422089767456055, 0.044328289031982424, 0.0443152961730957, 0.04416902542114258, 0.044320705413818356, 0.04552495956420898, 0.044063007354736325, 0.04424499130249023, 0.04418124771118164, 0.044058879852294924, 0.04412729644775391, 0.0440432014465332, 0.04421222305297851, 0.044074657440185544, 0.044136383056640624, 0.04421257781982422, 0.044154945373535155, 0.04410563278198242, 0.04420780944824219, 0.044171680450439454, 0.04399375915527344, 0.04351795196533203, 0.04320479965209961, 0.043237377166748046, 0.04335615921020508, 0.04323676681518555, 0.04327420806884766, 0.043228927612304686, 0.043200576782226566, 0.04366950225830078, 0.04355958557128906, 0.04350912094116211, 0.04336284637451172, 0.04337036895751953, 0.043430526733398436, 0.043615169525146484, 0.043531070709228514, 0.043556640625, 0.04349359893798828, 0.04367948913574219, 0.04365132904052734, 0.043546302795410156, 0.04358979034423828, 0.043626911163330076, 0.04355456161499023, 0.04353222274780273, 0.04346681594848633, 0.04351795196533203, 0.04357475280761719, 0.04799964904785156, 0.04359148788452148, 0.0436798095703125, 0.0436429443359375, 0.04381187057495117, 0.04368809509277344, 0.04446214294433594, 0.043702945709228516, 0.04388780975341797, 0.043894783020019534, 0.04414089584350586, 0.044023361206054684, 0.043895744323730466, 0.043991039276123044, 0.04400537490844727, 0.043888641357421876, 0.04390480041503906, 0.0439007682800293, 0.044044864654541015, 0.043883777618408205, 0.04388079833984375, 0.04394195175170899, 0.04394614410400391, 0.04395827102661133, 0.04406272125244141, 0.04395622253417969, 0.044181503295898435, 0.04429971313476563, 0.044157310485839846, 0.0441695671081543, 0.044076190948486325, 0.04404624176025391, 0.04413702392578125, 0.04415273666381836]",tokens/s,22.832547171399927,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) 
Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1496.080384,1830.682624,0.0,1428.160512,1322.516992,s,1,8.5152880859375,8.5152880859375,0.0,8.5152880859375,8.5152880859375,8.5152880859375,8.5152880859375,[8.5152880859375],,kWh,4.183571504583293e-05,4.607358800771167e-06,1.583779044803113e-05,6.228086429463523e-05,,MB,1461.583872,1851.654144,0.0,1434.451968,1322.072064,s,10,5.818452941894531,0.5818452941894531,0.002179254416891313,0.5811725158691406,0.5824312133789062,0.5853568725585937,0.5876973999023438,"[0.5882825317382813, 0.5803014526367187, 0.5811823120117188, 0.5811836547851562, 0.5808369140625, 0.5809910278320313, 0.5811627197265625, 0.5816063842773438, 0.5811248779296875, 0.5817810668945312]",tokens/s,439.9794972246429,kWh,1.733241638333436e-05,1.9114516805034928e-06,1.1507754304233924e-05,3.0751622368071776e-05,tokens/kWh,8324764.038003892,MB,1461.583872,1851.654144,0.0,1434.451968,1374.923264,s,10,11.087667236328125,1.1087667236328127,0.006146035786839412,1.1079638671875,1.1140128784179686,1.1178418884277344,1.1209050964355467,"[1.10992333984375, 1.1122777099609376, 1.1124521484375, 1.1131619873046874, 1.1216708984375, 1.1053837890625, 1.1043323974609376, 1.1037122802734376, 1.10600439453125, 1.098748291015625]",tokens/s,56.81988704854344,kWh,3.200213116916252e-05,3.5297630649601783e-06,1.728262983916726e-05,5.281452407328998e-05,tokens/kWh,1192853.6913932194,,s,630,11.082291168212901,0.017590938362242684,0.0004795568966315623,0.017467967987060545,0.017855353927612307,0.018274387931823725,0.01982349622726441,"[0.018621503829956056, 0.01792300796508789, 0.017632736206054687, 0.01755913543701172, 0.017441055297851563, 0.01742505645751953, 0.01774723243713379, 0.018086624145507813, 0.017432031631469728, 0.01739967918395996, 0.01748441505432129, 0.017493759155273438, 0.017424671173095704, 0.017485376358032226, 0.0174370231628418, 0.017541215896606444, 0.01746124839782715, 0.01746124839782715, 0.017505760192871093, 0.01742812728881836, 0.017852928161621092, 0.017627519607543947, 0.017803264617919923, 0.01750009536743164, 0.017366111755371092, 0.017449951171875, 0.01737113571166992, 0.017360895156860352, 0.018754560470581053, 0.01758515167236328, 0.01742438316345215, 0.01747491264343262, 0.017661663055419923, 0.017633567810058592, 0.01747545623779297, 0.01745590400695801, 0.017442815780639647, 0.01751379203796387, 0.017474239349365234, 0.01768448066711426, 0.020885055541992187, 0.01906528091430664, 0.017540544509887696, 0.017480255126953125, 0.017497535705566405, 0.017490495681762697, 0.017471456527709962, 0.017383455276489258, 0.01738662338256836, 0.017438911437988282, 0.017458976745605467, 0.01733705520629883, 0.017280351638793944, 0.017398624420166015, 0.017388927459716798, 0.01742880058288574, 0.0174083194732666, 
0.017368831634521485, 0.017356767654418945, 0.017275167465209962, 0.01744278335571289, 0.017475616455078124, 0.017364736557006835, 0.0186856632232666, 0.017908000946044923, 0.01832467269897461, 0.021572320938110352, 0.017707103729248046, 0.01746246337890625, 0.017449184417724608, 0.017501983642578125, 0.017402175903320313, 0.017350175857543945, 0.01743561553955078, 0.017514495849609374, 0.01747747230529785, 0.017451072692871095, 0.01746361541748047, 0.017378271102905272, 0.01737558364868164, 0.01738390350341797, 0.01738137626647949, 0.017393600463867186, 0.017417856216430664, 0.01749168014526367, 0.017433311462402342, 0.017429920196533204, 0.017395999908447264, 0.017353023529052734, 0.01740300750732422, 0.017343360900878905, 0.017469440460205078, 0.017513824462890626, 0.01742095947265625, 0.01739491271972656, 0.017306400299072267, 0.017356800079345702, 0.017491968154907226, 0.017573888778686524, 0.01737932777404785, 0.017781919479370117, 0.01756822395324707, 0.017600799560546877, 0.01749932861328125, 0.017417119979858398, 0.017375232696533204, 0.017549312591552735, 0.01774367904663086, 0.017365184783935547, 0.017434623718261717, 0.01741414451599121, 0.017394943237304686, 0.017386240005493166, 0.01741414451599121, 0.017380704879760744, 0.01749603271484375, 0.01798214340209961, 0.017530879974365234, 0.017479679107666016, 0.01743440055847168, 0.017456575393676756, 0.017507104873657225, 0.017719295501708983, 0.01765376091003418, 0.019868736267089845, 0.019471296310424803, 0.018609119415283204, 0.017967103958129883, 0.01776972770690918, 0.017709184646606445, 0.017824256896972656, 0.017895551681518556, 0.01756979179382324, 0.017491872787475587, 0.017432319641113282, 0.017571680068969725, 0.017475711822509767, 0.01755513572692871, 0.0177488956451416, 0.01765932846069336, 0.017832288742065428, 0.017786880493164063, 0.017756128311157228, 0.017706464767456055, 0.017674816131591796, 0.01788313674926758, 0.017991680145263672, 0.017944095611572265, 0.01780374336242676, 0.019341312408447265, 0.020658176422119142, 0.017580032348632812, 0.01739491271972656, 0.017328927993774414, 0.01741801643371582, 0.017494239807128907, 0.01743667221069336, 0.017307647705078123, 0.01738751983642578, 0.017476800918579102, 0.017513280868530275, 0.017373184204101562, 0.017442399978637696, 0.017420703887939454, 0.01744233512878418, 0.01734217643737793, 0.017449344635009766, 0.01733628845214844, 0.017379520416259765, 0.017584224700927735, 0.01744435119628906, 0.017412607192993163, 0.01755353546142578, 0.017438432693481446, 0.017408287048339844, 0.01741823959350586, 0.01744076728820801, 0.017473535537719728, 0.017562911987304686, 0.01749679946899414, 0.017510400772094727, 0.017593727111816407, 0.017527135848999023, 0.017510528564453124, 0.017559711456298827, 0.01741619110107422, 0.01742438316345215, 0.01747865676879883, 0.01739263916015625, 0.018625247955322267, 0.01790771293640137, 0.017557504653930665, 0.017543167114257813, 0.01740355110168457, 0.017501760482788085, 0.017347232818603515, 0.01739583969116211, 0.01741414451599121, 0.0174202880859375, 0.01740185546875, 0.017469440460205078, 0.01742131233215332, 0.017544191360473634, 0.017483776092529296, 0.01761075210571289, 0.017583999633789062, 0.01761497688293457, 0.017655008316040038, 0.01758902359008789, 0.017607872009277343, 0.017452991485595704, 0.01749286460876465, 0.017471136093139647, 0.017617088317871094, 0.0174531192779541, 0.017469535827636717, 0.017569183349609375, 0.01752675247192383, 0.017381824493408204, 0.01747974395751953, 0.017542591094970705, 
0.017465984344482422, 0.017449024200439454, 0.017559551239013673, 0.017504255294799806, 0.017489919662475584, 0.017584127426147463, 0.017731584548950196, 0.01760223960876465, 0.017528703689575195, 0.017672351837158203, 0.017499584197998047, 0.017468255996704103, 0.017661951065063478, 0.02295100784301758, 0.020261791229248045, 0.017832128524780274, 0.01772115135192871, 0.01748956871032715, 0.017520992279052735, 0.017403200149536134, 0.017498783111572266, 0.017374656677246095, 0.01734003257751465, 0.01747043228149414, 0.017430303573608398, 0.01738979148864746, 0.017442815780639647, 0.017520639419555666, 0.017481727600097655, 0.017340415954589843, 0.017296415328979492, 0.018863712310791016, 0.018557695388793944, 0.018392927169799806, 0.017897472381591797, 0.017571231842041016, 0.01741081619262695, 0.01778278350830078, 0.02003763198852539, 0.019656511306762697, 0.017559904098510742, 0.017459039688110353, 0.017440095901489257, 0.017536800384521486, 0.01762803268432617, 0.01759231948852539, 0.017731136322021484, 0.017602943420410157, 0.01763692855834961, 0.017592832565307616, 0.0176680965423584, 0.017776159286499022, 0.017471744537353517, 0.017626623153686523, 0.01766169548034668, 0.01772431945800781, 0.017772607803344727, 0.017565696716308594, 0.01754662322998047, 0.017645471572875975, 0.01764339256286621, 0.017738592147827147, 0.017557504653930665, 0.0178606071472168, 0.017682432174682617, 0.017815359115600587, 0.01784441566467285, 0.01782374382019043, 0.01783945655822754, 0.01773174476623535, 0.017719520568847656, 0.017584415435791017, 0.017506303787231444, 0.01760416030883789, 0.01761734390258789, 0.01784204864501953, 0.017637504577636718, 0.01760380744934082, 0.01781635284423828, 0.017690624237060547, 0.017706432342529298, 0.017703296661376954, 0.01769068717956543, 0.017620639801025392, 0.018212928771972656, 0.017753599166870117, 0.018058143615722656, 0.017732704162597656, 0.017689504623413087, 0.017672191619873046, 0.017770496368408203, 0.017663999557495116, 0.017630720138549806, 0.01796499252319336, 0.018851455688476564, 0.018063871383666993, 0.017744319915771484, 0.01756585693359375, 0.01755529594421387, 0.01751030349731445, 0.01746544075012207, 0.01744857597351074, 0.017490304946899415, 0.01746518325805664, 0.01749772834777832, 0.01747609519958496, 0.017678367614746095, 0.017800607681274415, 0.017693056106567382, 0.018397407531738283, 0.01738751983642578, 0.017362943649291994, 0.017313791275024415, 0.017297407150268555, 0.017348608016967772, 0.018562240600585936, 0.019712736129760742, 0.017601728439331055, 0.01744895935058594, 0.01741241645812988, 0.017930656433105468, 0.017379167556762696, 0.017510751724243163, 0.017487871170043946, 0.017426431655883787, 0.017446495056152343, 0.017316255569458008, 0.017489824295043945, 0.017428319931030275, 0.01732633590698242, 0.01740287971496582, 0.017401952743530274, 0.017408927917480468, 0.017340415954589843, 0.017309696197509765, 0.01726163291931152, 0.017421247482299805, 0.01730953598022461, 0.01735036849975586, 0.017383615493774415, 0.017313600540161133, 0.017254751205444337, 0.017367136001586913, 0.01735647964477539, 0.017292991638183593, 0.01731612777709961, 0.017410400390625, 0.01735593605041504, 0.01742848014831543, 0.017412384033203124, 0.01743440055847168, 0.017476383209228515, 0.017601568222045897, 0.017222623825073242, 0.017366943359375, 0.017428575515747072, 0.017301504135131835, 0.018573183059692383, 0.017854560852050783, 0.017487295150756837, 0.017424671173095704, 0.017409727096557616, 0.017631872177124024, 0.01734822463989258, 
0.0173875846862793, 0.017416288375854492, 0.017658079147338867, 0.01745305633544922, 0.017938432693481447, 0.01882521629333496, 0.017667680740356444, 0.017588191986083985, 0.017661600112915038, 0.01736579132080078, 0.017325983047485352, 0.017584224700927735, 0.017282175064086913, 0.01721843147277832, 0.017471071243286132, 0.01729782485961914, 0.017262399673461912, 0.017346111297607422, 0.017255039215087892, 0.017268735885620116, 0.01729292869567871, 0.01736947250366211, 0.01721318435668945, 0.01723417663574219, 0.017808992385864256, 0.01751081657409668, 0.01745510482788086, 0.017697792053222656, 0.017308671951293944, 0.017387136459350586, 0.017371519088745117, 0.01740348815917969, 0.017349023818969727, 0.01737932777404785, 0.017340415954589843, 0.017409887313842774, 0.017412256240844727, 0.01742438316345215, 0.017385471343994142, 0.017350656509399414, 0.017338367462158204, 0.017307647705078123, 0.01741209602355957, 0.01745884895324707, 0.0173919677734375, 0.017287168502807617, 0.017383424758911133, 0.017353887557983397, 0.017425247192382812, 0.017862272262573243, 0.017318271636962892, 0.01746732711791992, 0.01884281539916992, 0.018386943817138672, 0.017855104446411134, 0.017495391845703125, 0.018628864288330077, 0.01785759925842285, 0.017600223541259764, 0.017503040313720703, 0.01735055923461914, 0.017369344711303712, 0.0172891845703125, 0.01731996726989746, 0.017403743743896485, 0.017395776748657228, 0.017354528427124025, 0.01741632080078125, 0.01746553611755371, 0.017597984313964844, 0.017543647766113283, 0.01742576026916504, 0.018117279052734376, 0.01743052864074707, 0.017442815780639647, 0.01747148895263672, 0.01738252830505371, 0.017412992477416993, 0.017493600845336913, 0.017384960174560548, 0.017385631561279296, 0.017375072479248046, 0.017478431701660156, 0.01747088050842285, 0.017367424011230467, 0.017727039337158204, 0.01761676788330078, 0.0178100471496582, 0.01751068878173828, 0.017313791275024415, 0.01744892883300781, 0.01777872085571289, 0.017336320877075196, 0.017493759155273438, 0.017418495178222658, 0.01733603286743164, 0.01741209602355957, 0.01746767997741699, 0.01740924835205078, 0.017465375900268556, 0.018021120071411132, 0.017489919662475584, 0.017336320877075196, 0.017284223556518555, 0.01735148811340332, 0.017420352935791014, 0.017922048568725587, 0.017718944549560547, 0.017415807723999022, 0.017330911636352538, 0.017448064804077148, 0.017298303604125976, 0.017362560272216797, 0.017437055587768556, 0.017362943649291994, 0.0172728328704834, 0.017395200729370116, 0.018397695541381837, 0.017940256118774416, 0.01855718421936035, 0.017928319931030272, 0.017606143951416017, 0.01747430419921875, 0.01746512031555176, 0.017361120223999025, 0.017367040634155274, 0.0174202880859375, 0.01752876853942871, 0.01737939262390137, 0.017409088134765625, 0.017676959991455077, 0.017649951934814452, 0.01738265609741211, 0.01748044776916504, 0.01737264060974121, 0.017355264663696288, 0.017385311126708984, 0.017340608596801758, 0.017326080322265625, 0.017338367462158204, 0.017340415954589843, 0.017346559524536134, 0.01739776039123535, 0.017323551177978517, 0.017281408309936522, 0.017368959426879882, 0.01735702323913574, 0.01741619110107422, 0.017508352279663086, 0.017764352798461915, 0.017625087738037108, 0.01757561683654785, 0.017639328002929687, 0.01761075210571289, 0.018114816665649413, 0.01780512046813965, 0.017620384216308595, 0.01762726402282715, 0.017726240158081056, 0.017583904266357423, 0.017428735733032226, 0.017464767456054686, 0.017273408889770508, 0.01735203170776367, 
0.017423007965087892, 0.01742848014831543, 0.01803059196472168, 0.017729536056518554, 0.01801625633239746, 0.017582080841064454, 0.01741414451599121, 0.01769267272949219, 0.017409759521484373, 0.018018592834472658, 0.017510400772094727, 0.017541120529174805, 0.017481727600097655, 0.017607839584350585, 0.017419103622436524, 0.017463232040405275, 0.017450271606445314, 0.0175644474029541, 0.018612192153930663, 0.01782905578613281, 0.01761305618286133, 0.017326656341552733, 0.017385215759277345, 0.017336576461791993, 0.017317888259887695, 0.01728102493286133, 0.017397119522094728, 0.017633279800415038, 0.017559488296508788, 0.017455808639526366, 0.017625087738037108, 0.017569984436035156, 0.01737504005432129, 0.017733440399169922, 0.01742233657836914, 0.017367231369018556, 0.01740185546875, 0.01787071990966797, 0.017453088760375976, 0.017428575515747072, 0.017319103240966797, 0.01734124755859375, 0.01748944091796875, 0.017422815322875977, 0.017260543823242186, 0.017337503433227538, 0.017273696899414062, 0.017301504135131835, 0.017342464447021484, 0.017942432403564454, 0.01744659233093262, 0.017562015533447266, 0.017473535537719728, 0.01756265640258789, 0.017398752212524415, 0.017438720703125, 0.017616416931152342, 0.01737673568725586, 0.01737993621826172, 0.01734288024902344, 0.017317888259887695, 0.017338367462158204, 0.017231359481811523, 0.017227296829223634, 0.01731273651123047, 0.017250303268432618, 0.017270784378051757, 0.017278976440429687, 0.01725644874572754, 0.017276191711425783, 0.01732067108154297, 0.017285120010375975, 0.017228864669799806, 0.017288127899169923, 0.01727663993835449, 0.01749600028991699, 0.017344863891601562, 0.017659488677978515, 0.01731216049194336, 0.017220991134643555, 0.01727110481262207]",tokens/s,56.847450625283706,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1494.347776,1553.85856,0.0,1168.113664,1154.613248,s,1,8.2734580078125,8.2734580078125,0.0,8.2734580078125,8.2734580078125,8.2734580078125,8.2734580078125,[8.2734580078125],,kWh,3.4190758029141453e-05,3.759357973458888e-06,1.1608064841989174e-05,4.9558180844589516e-05,,MB,1509.548032,1803.419648,0.0,1388.314624,1334.065152,s,10,0.8246467819213867,0.08246467819213868,0.001153647590139349,0.08291596984863281,0.08355518264770508,0.08358621559143066,0.08361104194641113,"[0.08354828643798828, 0.08155168151855469, 0.08351254272460938, 0.0831138916015625, 0.08169152069091797, 0.07980300903320313, 0.08361724853515624, 0.08271804809570313, 0.08195625305175781, 0.08313430023193359]",tokens/s,3104.359413172419,kWh,2.5251219812139946e-06,2.784765565586025e-07,1.6823552730860704e-06,4.485953810858667e-06,tokens/kWh,57067016.46823208,MB,1514.397696,1805.5168,0.0,1388.314624,1372.847616,s,10,14.951676879882813,1.4951676879882814,0.006515765711879546,1.49517822265625,1.5043569335937501,1.504917236328125,1.505365478515625,"[1.4939146728515624, 1.4964417724609376, 
1.496705810546875, 1.4902347412109376, 1.48637939453125, 1.49099169921875, 1.4863380126953125, 1.5009608154296874, 1.5054775390625, 1.504232421875]",tokens/s,42.1357420349053,kWh,4.368317610169994e-05,4.817901871554678e-06,2.137244094851382e-05,6.987351892176846e-05,tokens/kWh,901629.1289198679,,s,630,14.948524448394778,0.02372781658475361,0.0003943927964533428,0.023650959968566893,0.024032016372680664,0.02420075693130493,0.02525305318832398,"[0.02388595199584961, 0.02366582489013672, 0.023630752563476562, 0.02386534309387207, 0.023702592849731446, 0.02363692855834961, 0.02353753662109375, 0.0239105281829834, 0.023783424377441405, 0.02367283248901367, 0.023797760009765623, 0.024031232833862305, 0.02373823928833008, 0.023634048461914064, 0.02450841522216797, 0.023631744384765625, 0.02347760009765625, 0.023543935775756836, 0.023661216735839843, 0.02351206398010254, 0.023444480895996093, 0.023463935852050782, 0.02347417640686035, 0.023472127914428712, 0.02342092704772949, 0.024025087356567384, 0.023595008850097656, 0.023568384170532225, 0.023664480209350587, 0.023633888244628906, 0.023504127502441408, 0.023528352737426757, 0.023599136352539064, 0.023355392456054686, 0.02353561592102051, 0.023334911346435547, 0.02345065689086914, 0.023466976165771484, 0.023445407867431642, 0.02348192024230957, 0.023452192306518554, 0.02353971290588379, 0.023746559143066406, 0.02360259246826172, 0.023779647827148438, 0.023653728485107422, 0.02378233528137207, 0.023649311065673827, 0.023817184448242188, 0.0236474552154541, 0.023808704376220704, 0.02410655975341797, 0.02414236831665039, 0.02435660743713379, 0.024254720687866212, 0.02429952049255371, 0.02411929512023926, 0.024006656646728516, 0.023894111633300782, 0.023692384719848632, 0.02361631965637207, 0.02372540855407715, 0.0236876163482666, 0.023787519454956055, 0.024368768692016603, 0.023787744522094728, 0.023726240158081054, 0.023598335266113282, 0.023636224746704102, 0.025165536880493163, 0.024541984558105467, 0.023808000564575195, 0.02388787269592285, 0.023624895095825195, 0.023554880142211913, 0.023404544830322265, 0.023400224685668946, 0.023463872909545897, 0.023481760025024414, 0.023466560363769533, 0.02355232048034668, 0.023533567428588868, 0.02352463912963867, 0.023507680892944336, 0.023558143615722657, 0.023602880477905274, 0.023514495849609376, 0.02355513572692871, 0.02353753662109375, 0.023599103927612306, 0.023599103927612306, 0.023621152877807618, 0.023554527282714843, 0.023427072525024413, 0.023572576522827147, 0.02387548828125, 0.02365235137939453, 0.023477535247802734, 0.02342745590209961, 0.02352070426940918, 0.023550880432128905, 0.023514688491821287, 0.02354537582397461, 0.023597888946533203, 0.023543903350830078, 0.02348806381225586, 0.023650751113891602, 0.02391244888305664, 0.02360281562805176, 0.023515520095825197, 0.023576576232910155, 0.023500799179077148, 0.023535104751586915, 0.02360371208190918, 0.02350796890258789, 0.02374143981933594, 0.02404047966003418, 0.023581184387207032, 0.023554527282714843, 0.023619583129882812, 0.023654048919677734, 0.024117599487304686, 0.024976512908935548, 0.023851903915405273, 0.027242496490478517, 0.02412057685852051, 0.023982431411743162, 0.02392064094543457, 0.024155391693115234, 0.023758623123168947, 0.02396028709411621, 0.023703008651733398, 0.023720735549926757, 0.023570432662963867, 0.02366464042663574, 0.023883775711059572, 0.023666688919067383, 0.023524864196777344, 0.023590816497802734, 0.02368511962890625, 0.023641696929931642, 0.023608320236206053, 0.023508991241455078, 
0.023495935440063478, 0.023497024536132814, 0.02348431968688965, 0.024010528564453126, 0.02370832061767578, 0.023650400161743163, 0.023651647567749023, 0.02363667106628418, 0.023834623336791993, 0.02389811134338379, 0.023990272521972656, 0.02371174430847168, 0.023926111221313478, 0.023706272125244142, 0.023888927459716797, 0.023700447082519532, 0.02373747253417969, 0.023588064193725587, 0.023631519317626953, 0.023559999465942384, 0.02351865577697754, 0.02528879928588867, 0.024285728454589844, 0.023702880859375, 0.023786272048950195, 0.023584768295288085, 0.023524480819702147, 0.023444351196289064, 0.023461408615112304, 0.02346441650390625, 0.023564287185668945, 0.02332182312011719, 0.02329884719848633, 0.02338400077819824, 0.023474239349365236, 0.023660512924194337, 0.023869472503662108, 0.02392448043823242, 0.02385945510864258, 0.023945215225219727, 0.023957504272460937, 0.023954687118530275, 0.023905023574829102, 0.024195072174072265, 0.024067520141601562, 0.024054208755493165, 0.02391551971435547, 0.023887456893920897, 0.023580415725708008, 0.023631744384765625, 0.023536415100097657, 0.0235435848236084, 0.02351456069946289, 0.02401273536682129, 0.023516000747680663, 0.023645824432373046, 0.02342745590209961, 0.0235949764251709, 0.023528703689575194, 0.02361404800415039, 0.02352345657348633, 0.023480384826660157, 0.02352332878112793, 0.02348236846923828, 0.023611391067504883, 0.02349875259399414, 0.023801855087280274, 0.023648128509521485, 0.02449625587463379, 0.02351513671875, 0.023529472351074218, 0.02350284767150879, 0.02369331169128418, 0.023543231964111327, 0.02349113655090332, 0.023633920669555664, 0.02390630340576172, 0.023768831253051757, 0.023689472198486328, 0.0234967041015625, 0.02352332878112793, 0.023785472869873047, 0.02372812843322754, 0.02371993637084961, 0.02369276809692383, 0.023937568664550782, 0.023826303482055663, 0.023677055358886718, 0.023633440017700194, 0.023781824111938476, 0.023576608657836916, 0.023777280807495117, 0.023694400787353517, 0.023706592559814454, 0.023682880401611327, 0.02362700843811035, 0.023788448333740234, 0.023770368576049805, 0.023874303817749024, 0.02353705596923828, 0.023410335540771484, 0.023481279373168944, 0.02345475196838379, 0.023415103912353515, 0.02334992027282715, 0.023601152420043944, 0.023752704620361328, 0.023633920669555664, 0.02373222351074219, 0.023994367599487306, 0.02389401626586914, 0.023820192337036132, 0.02370159912109375, 0.023586816787719726, 0.023473535537719727, 0.02354240036010742, 0.023889408111572266, 0.023617887496948244, 0.02345132827758789, 0.02350332832336426, 0.023630176544189453, 0.02354755210876465, 0.023529472351074218, 0.02371776008605957, 0.023740543365478515, 0.023746559143066406, 0.023508991241455078, 0.02352742385864258, 0.023416704177856445, 0.023345279693603515, 0.023335935592651368, 0.023403520584106444, 0.023422496795654297, 0.023355199813842775, 0.023267328262329103, 0.02345769691467285, 0.023493375778198242, 0.023472095489501955, 0.02342300796508789, 0.023475616455078126, 0.02345225524902344, 0.02335436820983887, 0.023341632843017577, 0.023386560440063476, 0.023379520416259767, 0.023443775177001955, 0.023445568084716796, 0.023451103210449218, 0.023720544815063478, 0.023857152938842774, 0.02357151985168457, 0.023434175491333007, 0.02345916748046875, 0.024062623977661134, 0.023762367248535157, 0.023550304412841797, 0.023555295944213867, 0.02472243118286133, 0.023864831924438477, 0.023670944213867187, 0.023839071273803712, 0.023764991760253908, 0.023619583129882812, 0.02369740867614746, 
0.023750656127929686, 0.023451648712158202, 0.023427072525024413, 0.023662591934204103, 0.023422431945800782, 0.023558143615722657, 0.023519775390625, 0.023569791793823243, 0.024154111862182616, 0.023891904830932616, 0.0240435848236084, 0.024038496017456053, 0.024136608123779296, 0.023792800903320314, 0.023741056442260742, 0.023703359603881837, 0.023848480224609375, 0.023689407348632813, 0.023560895919799804, 0.023487808227539063, 0.02340630340576172, 0.023634912490844727, 0.023508991241455078, 0.023484031677246095, 0.023998847961425783, 0.023649824142456054, 0.023417055130004884, 0.02347007942199707, 0.023533599853515625, 0.023967424392700196, 0.023452192306518554, 0.023645248413085938, 0.023529535293579103, 0.02354470443725586, 0.023489919662475586, 0.023423616409301757, 0.023670463562011718, 0.023875904083251954, 0.02371379280090332, 0.023525375366210938, 0.023445375442504884, 0.02375276756286621, 0.023697471618652342, 0.02365644836425781, 0.02364825630187988, 0.02364825630187988, 0.023598655700683594, 0.023696895599365234, 0.02338025665283203, 0.023444128036499024, 0.023472127914428712, 0.02349465560913086, 0.023410688400268553, 0.023605024337768555, 0.023469919204711913, 0.023454080581665038, 0.023450752258300782, 0.023450496673583985, 0.023558143615722657, 0.023541759490966797, 0.023598112106323243, 0.02335228729248047, 0.023364831924438476, 0.02385968017578125, 0.02406982421875, 0.024138368606567383, 0.02411110305786133, 0.023977983474731446, 0.023629280090332033, 0.023700000762939454, 0.023990272521972656, 0.02375881576538086, 0.023633504867553713, 0.023553983688354492, 0.023448543548583986, 0.02361891174316406, 0.02362169647216797, 0.02384547233581543, 0.023568384170532225, 0.025044992446899415, 0.02470265579223633, 0.023713247299194336, 0.023685983657836914, 0.023418848037719726, 0.023379199981689452, 0.02346883201599121, 0.023762975692749023, 0.023907360076904298, 0.023458751678466797, 0.023596576690673828, 0.023357343673706055, 0.02348908805847168, 0.023330816268920897, 0.023414783477783203, 0.023425216674804687, 0.02383033561706543, 0.023594591140747072, 0.023433088302612304, 0.023650848388671875, 0.023428447723388673, 0.023458463668823242, 0.023576576232910155, 0.023480319976806642, 0.023488512039184572, 0.023392255783081056, 0.023513088226318358, 0.023500640869140624, 0.023496864318847656, 0.02353971290588379, 0.02368511962890625, 0.02344313621520996, 0.023338367462158203, 0.023495616912841796, 0.023459840774536132, 0.02347417640686035, 0.02353286361694336, 0.023611263275146486, 0.023583616256713867, 0.023541696548461916, 0.023623680114746092, 0.023658496856689453, 0.02369945526123047, 0.023638015747070314, 0.023510591506958008, 0.02362335968017578, 0.023651071548461914, 0.023521280288696288, 0.023468032836914062, 0.023474016189575196, 0.023530656814575196, 0.02346905517578125, 0.02345369529724121, 0.02350694465637207, 0.023443456649780273, 0.0236167049407959, 0.023665472030639647, 0.023549247741699218, 0.023372480392456055, 0.023441408157348635, 0.02365804862976074, 0.0238983039855957, 0.02382259178161621, 0.024149120330810545, 0.024034175872802734, 0.02415167999267578, 0.024258560180664062, 0.02403775978088379, 0.023633920669555664, 0.023576000213623046, 0.023691839218139648, 0.02372371292114258, 0.023740575790405272, 0.023555648803710937, 0.023446111679077147, 0.02354380798339844, 0.023525312423706056, 0.023551263809204102, 0.02350713539123535, 0.02371753692626953, 0.02357548713684082, 0.023727968215942384, 0.023848447799682617, 0.023706272125244142, 0.024440128326416014, 
0.02829792022705078, 0.02391859245300293, 0.023793664932250977, 0.02375814437866211, 0.023712448120117188, 0.023613439559936524, 0.02340640068054199, 0.02356003189086914, 0.02352367973327637, 0.023494335174560548, 0.023486431121826173, 0.02384111976623535, 0.023500768661499024, 0.02345372772216797, 0.02339558410644531, 0.023440128326416017, 0.023476224899291992, 0.023736320495605468, 0.023842559814453126, 0.024141952514648436, 0.02415577507019043, 0.024280576705932616, 0.024187135696411132, 0.024062719345092774, 0.02426470375061035, 0.02404761505126953, 0.02412441635131836, 0.023730367660522462, 0.02364908790588379, 0.023690528869628906, 0.02363670349121094, 0.02368921661376953, 0.023629823684692384, 0.023824224472045897, 0.023568639755249022, 0.02354902458190918, 0.02360563278198242, 0.02374300765991211, 0.02364348793029785, 0.023677600860595702, 0.023453407287597657, 0.023621856689453127, 0.02358822441101074, 0.023574239730834962, 0.023474624633789062, 0.023601696014404296, 0.02353318405151367, 0.0235064640045166, 0.023417055130004884, 0.02380188751220703, 0.023478879928588867, 0.0237476806640625, 0.025550815582275392, 0.02382863998413086, 0.0234749755859375, 0.02353152084350586, 0.023601152420043944, 0.02354380798339844, 0.023418655395507814, 0.023634143829345703, 0.02367897605895996, 0.023983360290527344, 0.023846879959106445, 0.02389891242980957, 0.023801279067993165, 0.023861824035644533, 0.023810047149658203, 0.02384681510925293, 0.02375801658630371, 0.023688095092773438, 0.02370355224609375, 0.023738367080688477, 0.02370137596130371, 0.023793792724609374, 0.023738367080688477, 0.023908351898193358, 0.023799072265625, 0.0238799991607666, 0.023925119400024415, 0.02567795181274414, 0.026193824768066407, 0.02435264015197754, 0.02408255958557129, 0.02395955276489258, 0.024145919799804686, 0.02387763214111328, 0.02370476722717285, 0.02367161560058594, 0.02375606346130371, 0.0237076473236084, 0.023793920516967774, 0.02384124755859375, 0.02389116859436035, 0.023773151397705077, 0.024869695663452148, 0.026327039718627928, 0.02400604820251465, 0.02389462471008301, 0.023854591369628905, 0.02378108787536621, 0.023748672485351563, 0.023734912872314454, 0.023786848068237304, 0.02370025634765625, 0.023766143798828125, 0.023775264739990233, 0.023836576461791992, 0.023861503601074217, 0.0237861442565918, 0.02379964828491211, 0.023721664428710938, 0.023785951614379883, 0.023803520202636718, 0.024883520126342772, 0.02377244758605957, 0.023812896728515626, 0.023766496658325195, 0.024031776428222656, 0.023754432678222658, 0.023808256149291992, 0.024064064025878906, 0.023842752456665037, 0.02376095962524414, 0.02395136070251465, 0.0237076473236084, 0.023657983779907226, 0.02377779197692871, 0.02389308738708496, 0.023869888305664062, 0.023849344253540038, 0.023873632431030273, 0.023694559097290038, 0.023745311737060546, 0.023808000564575195, 0.023738367080688477, 0.023814144134521483, 0.023775232315063476, 0.02427494430541992, 0.023920352935791016, 0.024358335494995116, 0.02503766441345215, 0.024229248046875, 0.02401862335205078, 0.024021472930908203, 0.023927200317382814, 0.02371139144897461, 0.02373468780517578, 0.023750656127929686, 0.023791616439819335, 0.02375641632080078, 0.023746944427490233, 0.02377052879333496, 0.023745119094848634, 0.0236810245513916, 0.0236911678314209, 0.024205408096313476, 0.023822336196899413, 0.023764991760253908, 0.023738367080688477]",tokens/s,42.14462786443458,,, 
4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1548.951552,1553.85856,0.0,1168.113664,1154.613248,s,1,8.22476953125,8.22476953125,0.0,8.22476953125,8.22476953125,8.22476953125,8.22476953125,[8.22476953125],,kWh,3.4250087841686154e-05,3.7709582642445e-06,1.1621120408006336e-05,4.9642166513936994e-05,,MB,1508.298752,1784.54528,0.0,1369.440256,1323.44832,s,10,0.782551612854004,0.0782551612854004,0.0016933718778332977,0.07850286483764649,0.07971990356445312,0.08020591812133789,0.0805947297668457,"[0.08069193267822265, 0.07398214721679687, 0.07866022491455078, 0.07716764831542969, 0.07837091064453125, 0.07839292907714844, 0.07861280059814453, 0.07923023986816406, 0.07783087921142579, 0.07961190032958984]",tokens/s,3271.349720516906,kWh,2.538181190796593e-06,2.798110749326183e-07,1.6943346888000149e-06,4.512326954529226e-06,tokens/kWh,56733477.55597392,MB,1516.101632,1805.5168,0.0,1390.411776,1377.26208,s,10,14.446285156250001,1.4446285156249998,0.006289740836671263,1.4420291137695314,1.4505562622070312,1.4549727111816406,1.4585058703613283,"[1.4495748291015624, 1.4387657470703126, 1.4427535400390625, 1.4413046875, 1.446776123046875, 1.4490166015625, 1.438257080078125, 1.45938916015625, 1.44095263671875, 1.4394947509765625]",tokens/s,43.60982724527202,kWh,4.1991764193362675e-05,4.631432288680785e-06,2.0756349938400996e-05,6.737954642044446e-05,tokens/kWh,935001.8417589761,,s,630,14.440870342254629,0.022922016416277204,0.0003535439001535959,0.02282087993621826,0.023241058731079102,0.023408967781066894,0.02451752796173096,"[0.023375328063964845, 0.02325775909423828, 0.022816768646240236, 0.022700031280517577, 0.02289004707336426, 0.022839744567871093, 0.022730752944946288, 0.02274627113342285, 0.022737760543823243, 0.022725887298583984, 0.02270899200439453, 0.022769664764404295, 0.022621631622314453, 0.02266499137878418, 0.022735647201538086, 0.022849536895751952, 0.023362720489501953, 0.02319443130493164, 0.023113855361938475, 0.022966175079345702, 0.02287820816040039, 0.022892160415649412, 0.022983232498168946, 0.022742847442626953, 0.022668607711791994, 0.022639520645141603, 0.022766687393188476, 0.022809280395507812, 0.02271232032775879, 0.022704063415527345, 0.02279449653625488, 0.022978368759155272, 0.02294745635986328, 0.023136608123779295, 0.023156768798828126, 0.02304400062561035, 0.023044448852539062, 0.023029760360717775, 0.02333465576171875, 0.023187231063842774, 0.023214303970336914, 0.022942911148071288, 0.022940479278564452, 0.023014720916748048, 0.02525254440307617, 0.023366880416870118, 0.023112607955932618, 0.022924800872802735, 0.02276118469238281, 0.022875968933105468, 0.02301628875732422, 0.022917280197143553, 0.023041791915893554, 0.02416239929199219, 0.02394316864013672, 0.023259424209594728, 0.023146400451660155, 0.02293564796447754, 0.022816064834594727, 0.0228353271484375, 0.022909215927124024, 0.022940895080566407, 
0.02275971221923828, 0.02533865547180176, 0.02293987274169922, 0.02287615966796875, 0.022883903503417968, 0.02268204879760742, 0.022675424575805663, 0.02256879997253418, 0.02277395248413086, 0.02277347183227539, 0.02265673637390137, 0.02274470329284668, 0.022668224334716797, 0.022740991592407226, 0.022757471084594725, 0.022540191650390624, 0.02271232032775879, 0.02292230415344238, 0.02279110336303711, 0.022804447174072266, 0.02273094367980957, 0.022586463928222656, 0.02275632095336914, 0.022791839599609374, 0.022702112197875976, 0.022720287322998047, 0.023216480255126952, 0.02278396797180176, 0.02280243110656738, 0.02296611213684082, 0.02263894462585449, 0.02283027267456055, 0.022604415893554688, 0.02277280044555664, 0.022719423294067384, 0.022775808334350587, 0.02277897644042969, 0.02269398307800293, 0.022977119445800782, 0.022606048583984375, 0.02385011291503906, 0.022692832946777344, 0.02278335952758789, 0.022637184143066407, 0.0228002872467041, 0.022693632125854492, 0.02281497573852539, 0.02303603172302246, 0.023060352325439452, 0.022634496688842775, 0.022734848022460938, 0.022738943099975584, 0.022700031280517577, 0.022740608215332032, 0.0230098876953125, 0.022865184783935545, 0.022753791809082033, 0.02288435173034668, 0.022828800201416016, 0.022761728286743162, 0.02280243110656738, 0.02288025665283203, 0.0227508487701416, 0.022780288696289064, 0.02300214385986328, 0.022879199981689455, 0.0229550724029541, 0.022956768035888673, 0.022794431686401367, 0.022638784408569337, 0.022693119049072265, 0.022782079696655272, 0.022765247344970704, 0.02284214401245117, 0.0228822078704834, 0.02297360038757324, 0.023104448318481446, 0.02310348892211914, 0.023093151092529296, 0.023158464431762695, 0.023257247924804686, 0.02321392059326172, 0.023468448638916017, 0.023301984786987303, 0.02318956756591797, 0.02306844711303711, 0.023240415573120118, 0.02311759948730469, 0.023181343078613283, 0.022871936798095703, 0.02271753692626953, 0.022755104064941405, 0.022798368453979492, 0.022765663146972655, 0.02265897560119629, 0.02273286437988281, 0.022833087921142577, 0.022814720153808594, 0.022977855682373045, 0.022778144836425783, 0.022687711715698243, 0.022685152053833008, 0.022798303604125977, 0.022650016784667968, 0.022697280883789063, 0.022749727249145507, 0.023127967834472657, 0.022987936019897463, 0.022868032455444335, 0.022833919525146483, 0.022799583435058595, 0.02274393653869629, 0.022702112197875976, 0.022695167541503906, 0.02271513557434082, 0.022942943572998045, 0.022770463943481447, 0.022814720153808594, 0.0227893123626709, 0.022856096267700195, 0.022829471588134767, 0.022674751281738282, 0.023066751480102538, 0.022800960540771485, 0.02302988815307617, 0.022761695861816405, 0.022830591201782227, 0.023333248138427735, 0.022923040390014648, 0.022763263702392577, 0.024100448608398436, 0.022811519622802735, 0.022595584869384764, 0.022793312072753907, 0.02271039962768555, 0.02290377616882324, 0.02279609680175781, 0.022603776931762694, 0.022920831680297852, 0.02270889663696289, 0.02266819190979004, 0.022731168746948242, 0.02260963249206543, 0.022706335067749023, 0.02280089569091797, 0.022715776443481446, 0.022639263153076173, 0.02310550308227539, 0.02323459243774414, 0.02291878318786621, 0.02266761589050293, 0.022636575698852537, 0.022634048461914063, 0.022671808242797853, 0.022781919479370118, 0.022761440277099608, 0.02294144058227539, 0.022807872772216797, 0.022862464904785156, 0.022925024032592774, 0.022985248565673827, 0.02321766471862793, 0.023210176467895506, 0.023150848388671874, 
0.02339859199523926, 0.023226367950439454, 0.02324684715270996, 0.023127328872680663, 0.022973152160644533, 0.02286726379394531, 0.022759328842163085, 0.02281068801879883, 0.023036256790161132, 0.022750688552856446, 0.022789024353027345, 0.02262838363647461, 0.022636512756347656, 0.02273859214782715, 0.022753631591796875, 0.02285977554321289, 0.022878143310546876, 0.022747039794921875, 0.02280668830871582, 0.022902912139892578, 0.022676959991455078, 0.02270044708251953, 0.022810367584228514, 0.022696191787719727, 0.02289411163330078, 0.022749664306640625, 0.023429088592529297, 0.023056543350219727, 0.02323263931274414, 0.022849279403686522, 0.02285158348083496, 0.02271219253540039, 0.022679679870605467, 0.022961599349975586, 0.02278223991394043, 0.02265452766418457, 0.022850271224975585, 0.02285670471191406, 0.022725151062011718, 0.02347238349914551, 0.02309552001953125, 0.02301152038574219, 0.02292950439453125, 0.022908447265625, 0.02266111946105957, 0.02274127960205078, 0.02285763168334961, 0.022763519287109374, 0.022779199600219728, 0.022654687881469727, 0.022649824142456056, 0.022732799530029296, 0.022589439392089843, 0.0227554874420166, 0.02276131248474121, 0.022794240951538085, 0.022698207855224608, 0.024530399322509767, 0.02410323143005371, 0.023795711517333985, 0.022985824584960936, 0.023095775604248046, 0.02277014350891113, 0.02298876762390137, 0.022820127487182616, 0.02273459243774414, 0.02276767921447754, 0.022794336318969727, 0.022749984741210937, 0.022995040893554686, 0.022896575927734374, 0.02315673637390137, 0.02290278434753418, 0.022923263549804687, 0.022972415924072266, 0.023001087188720702, 0.02314854431152344, 0.02283657646179199, 0.022837919235229494, 0.02278175926208496, 0.024006847381591798, 0.02291302490234375, 0.022732799530029296, 0.02282700729370117, 0.02288038444519043, 0.02276543998718262, 0.02291836738586426, 0.022991775512695312, 0.022988031387329102, 0.022906879425048828, 0.0230645751953125, 0.02289606475830078, 0.02286867141723633, 0.022845312118530272, 0.022730752944946288, 0.023111391067504882, 0.022935840606689455, 0.022839296340942384, 0.022699167251586914, 0.022797151565551756, 0.022828895568847655, 0.02273084831237793, 0.022939327239990235, 0.022905216217041016, 0.02289254379272461, 0.02269388771057129, 0.022718463897705078, 0.022700031280517577, 0.022837472915649415, 0.022619935989379884, 0.022707359313964844, 0.02280451202392578, 0.02337260818481445, 0.02305843162536621, 0.022847648620605468, 0.022725759506225587, 0.022836992263793945, 0.023129056930541993, 0.02292665672302246, 0.022936256408691406, 0.0229171199798584, 0.023314111709594725, 0.023224639892578124, 0.023938528060913088, 0.023937568664550782, 0.02340447998046875, 0.023154752731323242, 0.02331648063659668, 0.022978143692016603, 0.022994688034057617, 0.02291164779663086, 0.02299660873413086, 0.022913408279418946, 0.022763519287109374, 0.02277577590942383, 0.022709503173828124, 0.022778656005859373, 0.022778079986572265, 0.022769439697265626, 0.023031360626220704, 0.022731039047241212, 0.022666879653930664, 0.022726816177368166, 0.022698623657226562, 0.02395840072631836, 0.02484841537475586, 0.023160736083984376, 0.02312224006652832, 0.022954496383666992, 0.02292937660217285, 0.022931072235107423, 0.023097888946533203, 0.023132671356201173, 0.02306656074523926, 0.023050048828125, 0.022923456192016602, 0.02289571189880371, 0.022764448165893555, 0.02264441680908203, 0.022724927902221678, 0.02275884819030762, 0.02285763168334961, 0.023057056427001954, 0.023256448745727538, 0.02327315139770508, 
0.02313043212890625, 0.02311759948730469, 0.023412639617919923, 0.02290889549255371, 0.022860767364501954, 0.022824064254760742, 0.022844287872314455, 0.022828224182128907, 0.022862655639648437, 0.022768896102905275, 0.022670080184936523, 0.022743040084838868, 0.022650400161743165, 0.02266979217529297, 0.022761472702026365, 0.022781280517578124, 0.022739360809326172, 0.022663423538208008, 0.022788320541381836, 0.022682687759399415, 0.022785856246948243, 0.022775903701782226, 0.022841215133666992, 0.02282579231262207, 0.022666656494140625, 0.022673343658447264, 0.022800447463989258, 0.022671680450439453, 0.02260799980163574, 0.022675424575805663, 0.02257155227661133, 0.02286115264892578, 0.022743392944335937, 0.022743135452270507, 0.022853631973266602, 0.02290892791748047, 0.022804607391357423, 0.022600736618041992, 0.022629215240478517, 0.02277337646484375, 0.022794624328613282, 0.02311769676208496, 0.022894208908081054, 0.02298316764831543, 0.022607872009277344, 0.02266726493835449, 0.022737184524536134, 0.022681407928466797, 0.022599008560180663, 0.022602304458618164, 0.022933088302612304, 0.022788415908813475, 0.022722591400146486, 0.022726720809936523, 0.02270627212524414, 0.02276736068725586, 0.02261984062194824, 0.022632736206054688, 0.022707679748535155, 0.02448601531982422, 0.025106687545776368, 0.023208288192749022, 0.023232511520385742, 0.023146495819091797, 0.023250944137573244, 0.023218175888061524, 0.023140159606933594, 0.025964960098266602, 0.02326483154296875, 0.02329827117919922, 0.023278656005859374, 0.02302047920227051, 0.023041887283325194, 0.022898847579956055, 0.023879711151123046, 0.024909791946411134, 0.023480319976806642, 0.023121919631958008, 0.022951839447021484, 0.02282454490661621, 0.022858240127563476, 0.022901792526245118, 0.02288924789428711, 0.022752927780151366, 0.022720287322998047, 0.022988992691040037, 0.022779584884643555, 0.022819711685180665, 0.022863872528076173, 0.02291302490234375, 0.022978559494018554, 0.022939264297485353, 0.022827295303344725, 0.022751327514648437, 0.022773759841918945, 0.022942752838134767, 0.022836191177368164, 0.02287615966796875, 0.023021568298339845, 0.023154272079467773, 0.02327596855163574, 0.023452959060668944, 0.02344211196899414, 0.02328985595703125, 0.02338604736328125, 0.02339455986022949, 0.0236312313079834, 0.02352908706665039, 0.02319443130493164, 0.0229171199798584, 0.022777856826782225, 0.02278540802001953, 0.022733440399169923, 0.023048095703125, 0.022983903884887694, 0.02283139228820801, 0.022932064056396483, 0.02300044822692871, 0.022806207656860353, 0.022913984298706055, 0.023123071670532225, 0.023058816909790038, 0.022852096557617187, 0.023154687881469727, 0.023257087707519532, 0.02324025535583496, 0.02326803207397461, 0.022903615951538087, 0.022918079376220705, 0.02311315155029297, 0.022966848373413087, 0.022787328720092773, 0.02275779151916504, 0.022657119750976562, 0.022645248413085937, 0.022650623321533205, 0.022720096588134765, 0.0226144962310791, 0.022675392150878906, 0.022763519287109374, 0.022635679244995117, 0.022722816467285155, 0.022723167419433594, 0.022529312133789062, 0.0226964168548584, 0.0226461124420166, 0.022760000228881836, 0.02263859176635742, 0.022702432632446288, 0.022634496688842775, 0.02280243110656738, 0.022637887954711913, 0.02264339256286621, 0.022894784927368163, 0.02280838394165039, 0.02266111946105957, 0.022724607467651366, 0.022697439193725587, 0.022907167434692382, 0.02287027168273926, 0.022730207443237303, 0.022813215255737304, 0.02290835189819336, 0.023216224670410155, 
0.023290336608886717, 0.023035520553588866, 0.022993280410766603, 0.023011327743530274, 0.022964160919189455, 0.023285215377807617, 0.023059040069580077, 0.022779903411865234, 0.022744543075561525, 0.022724288940429688, 0.02298966407775879, 0.02275062370300293, 0.02330403137207031, 0.023005855560302733, 0.023025568008422852, 0.022795936584472657, 0.02277596855163574, 0.022695999145507812, 0.022808448791503906, 0.022710336685180663, 0.02277609634399414, 0.022581247329711913, 0.022867647171020508, 0.022740320205688478, 0.022697088241577148, 0.022681312561035158, 0.022723968505859376, 0.022771839141845704, 0.022717056274414064, 0.02267750358581543, 0.022591487884521484, 0.022702016830444337, 0.022654016494750975, 0.022762496948242186, 0.02277337646484375, 0.022876575469970704, 0.022798303604125977, 0.022761472702026365, 0.022796512603759766, 0.022795040130615233, 0.022842464447021486, 0.023592575073242188, 0.02294131278991699, 0.024097440719604492, 0.02352332878112793, 0.02289254379272461, 0.022972415924072266, 0.022746240615844727, 0.022637439727783204, 0.02266111946105957, 0.023310335159301757, 0.02281376075744629, 0.02278291130065918, 0.023844703674316407, 0.022814559936523437, 0.022775936126708984, 0.02273689651489258, 0.022599103927612305, 0.022830976486206054, 0.022633344650268554, 0.022640640258789063, 0.022678783416748047, 0.02265372848510742, 0.022809823989868163, 0.022645183563232422, 0.022679872512817383, 0.022725824356079102, 0.02276639938354492, 0.022571008682250978, 0.02280166435241699, 0.022821632385253907, 0.0227042236328125, 0.022759328842163085, 0.022773759841918945, 0.022871936798095703]",tokens/s,43.6261793831492,,, 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1577.525248,1553.85856,0.0,1168.113664,1154.613248,s,1,8.2009658203125,8.2009658203125,0.0,8.2009658203125,8.2009658203125,8.2009658203125,8.2009658203125,[8.2009658203125],,kWh,3.3661625841690085e-05,3.705841141436111e-06,1.1688620461997035e-05,4.905608744512323e-05,,MB,1545.68704,1784.54528,0.0,1369.440256,1323.44832,s,10,0.7627115859985352,0.07627115859985352,0.0004998856671174652,0.07607849884033203,0.07686925811767578,0.07713139877319336,0.07734111129760743,"[0.07739353942871094, 0.075989501953125, 0.07656441497802734, 0.0759548797607422, 0.07601411437988281, 0.07635391998291016, 0.07614288330078126, 0.07587865447998046, 0.07681100463867188, 0.07560867309570313]",tokens/s,3356.4456696281477,kWh,2.359526385416341e-06,2.602102816221486e-07,1.5644815383224792e-06,4.1842182053609686e-06,tokens/kWh,61182277.65272942,MB,1554.403328,1805.5168,0.0,1390.411776,1377.233408,s,10,11.667509765624999,1.1667509765625002,0.00522265728113798,1.1662162475585938,1.171786315917969,1.174808380126953,1.1772260314941407,"[1.1681614990234375, 1.169867431640625, 1.1613896484375, 1.17111474609375, 1.1695963134765626, 1.1778304443359375, 1.161491455078125, 1.16273681640625, 1.1610504150390626, 1.16427099609375]",tokens/s,53.996097938234925,kWh,3.3664398667088296e-05,3.7127514286836295e-06,1.806686750007899e-05,5.54440175958509e-05,tokens/kWh,1136281.2929471862,,s,630,11.6639503993988,0.018514206983172705,0.0003214227811257374,0.01843180847167969,0.018795789527893066,0.0190296781539917,0.019995790729522723,"[0.018980928421020508, 0.02014851188659668, 0.018728448867797853, 0.01852387237548828, 0.018532928466796876, 0.01845884895324707, 0.018724159240722658, 0.01839379119873047, 0.01855398368835449, 0.018406272888183594, 0.01861222457885742, 0.01834172821044922, 0.01835753631591797, 0.01845542335510254, 0.018401311874389648, 0.018372831344604493, 0.018556640625, 0.018470943450927733, 0.018447359085083007, 0.018412128448486328, 0.018456991195678712, 0.018560480117797852, 0.018666015625, 0.018515968322753908, 0.01838489532470703, 0.018251775741577148, 0.01833580780029297, 0.018425792694091798, 0.018548736572265623, 0.01846188735961914, 0.018443071365356445, 0.01844166374206543, 0.018287168502807618, 0.018461856842041015, 0.01843667221069336, 0.018437759399414062, 0.018410144805908205, 0.018577407836914063, 0.018565120697021483, 0.018739391326904296, 0.018775808334350587, 0.018946111679077147, 0.01914259147644043, 0.018779424667358397, 0.018508575439453126, 0.018441791534423827, 0.018460992813110352, 0.018716800689697267, 0.01833145523071289, 0.018333631515502928, 0.018379072189331054, 0.018323392868041993, 0.018503679275512695, 0.018726911544799805, 0.018421760559082033, 0.018451744079589844, 0.01862112045288086, 0.018415872573852538, 0.018606016159057617, 0.01851759910583496, 0.01840563201904297, 0.018391040802001952, 
0.01845382308959961, 0.020314752578735353, 0.021073919296264648, 0.018755584716796874, 0.018618368148803712, 0.018464160919189454, 0.018440256118774413, 0.018367008209228517, 0.018380096435546875, 0.018467519760131838, 0.018487199783325196, 0.018458719253540038, 0.01830108833312988, 0.018351648330688478, 0.01823161506652832, 0.018488767623901368, 0.018330175399780272, 0.01834185600280762, 0.018278432846069337, 0.018239488601684572, 0.018513919830322266, 0.018268352508544923, 0.018501440048217774, 0.018292736053466797, 0.018221023559570313, 0.018452512741088868, 0.01864249610900879, 0.01850156784057617, 0.018858495712280272, 0.018448671340942382, 0.01869923210144043, 0.018524864196777343, 0.018483264923095703, 0.01843612861633301, 0.018421728134155272, 0.018366559982299805, 0.01838275146484375, 0.018333471298217774, 0.018352352142333984, 0.018493440628051756, 0.018497055053710937, 0.018618207931518554, 0.01848588752746582, 0.01851919937133789, 0.018403392791748047, 0.018344736099243163, 0.018448383331298827, 0.01852739143371582, 0.01889980888366699, 0.018669567108154296, 0.019340959548950196, 0.018853696823120117, 0.01921718406677246, 0.018675455093383787, 0.018500703811645508, 0.018565248489379883, 0.01844099235534668, 0.018420896530151366, 0.018303840637207032, 0.018480575561523438, 0.018436479568481445, 0.01847929573059082, 0.018414655685424806, 0.018502592086791992, 0.018704416275024414, 0.01859612846374512, 0.018581056594848634, 0.018384416580200194, 0.018456287384033203, 0.018563104629516602, 0.0185699520111084, 0.018392576217651366, 0.018420352935791015, 0.018354175567626953, 0.018578784942626953, 0.018721439361572265, 0.01877507209777832, 0.018706655502319335, 0.01952204895019531, 0.01863091278076172, 0.018505727767944336, 0.018502880096435546, 0.018380767822265626, 0.0183438720703125, 0.01850374412536621, 0.018475807189941407, 0.018424863815307616, 0.018457151412963866, 0.018330047607421875, 0.01831491279602051, 0.018358591079711915, 0.018386751174926757, 0.018303199768066405, 0.018323328018188475, 0.018335872650146485, 0.018353408813476562, 0.018344512939453127, 0.018526399612426758, 0.01845167922973633, 0.018535200119018554, 0.01857676887512207, 0.018327199935913085, 0.018619680404663087, 0.018301631927490233, 0.018297279357910156, 0.018280031204223633, 0.01828118324279785, 0.01834828758239746, 0.01830012893676758, 0.018340768814086913, 0.018336992263793945, 0.018264095306396486, 0.018260383605957033, 0.01835977554321289, 0.018350496292114257, 0.018385248184204103, 0.018348031997680665, 0.018230815887451172, 0.018338272094726563, 0.01827599906921387, 0.018309471130371093, 0.018327552795410155, 0.01827020835876465, 0.01834185600280762, 0.018319551467895507, 0.01833558464050293, 0.018321407318115233, 0.019329471588134764, 0.019365856170654297, 0.019474271774291993, 0.019128639221191405, 0.01902617645263672, 0.01909350395202637, 0.018966527938842775, 0.018881599426269532, 0.018778911590576174, 0.018874528884887696, 0.019161088943481445, 0.01884156799316406, 0.01881283187866211, 0.01895846366882324, 0.01872649574279785, 0.01869455909729004, 0.018630847930908204, 0.018507583618164063, 0.018460512161254883, 0.018374784469604492, 0.018444063186645508, 0.018436256408691405, 0.018527488708496093, 0.018345855712890626, 0.01854080009460449, 0.01843414306640625, 0.018435935974121093, 0.019051456451416017, 0.018818912506103517, 0.019072351455688478, 0.01862118339538574, 0.01844009590148926, 0.01847091293334961, 0.01830019187927246, 0.01836476707458496, 0.01829635238647461, 
0.018365280151367187, 0.01834204864501953, 0.01836835289001465, 0.018274400711059572, 0.01824300765991211, 0.01828054428100586, 0.01850815963745117, 0.01848031997680664, 0.018305856704711913, 0.018280479431152345, 0.01831648063659668, 0.018288927078247072, 0.018373119354248048, 0.018308767318725584, 0.018366592407226563, 0.01831158447265625, 0.018319168090820313, 0.01828659248352051, 0.01838489532470703, 0.018675199508666994, 0.018428415298461915, 0.01841152000427246, 0.01843110466003418, 0.018536415100097656, 0.018318239212036132, 0.018292736053466797, 0.01834409523010254, 0.018924896240234374, 0.018739904403686523, 0.018653152465820312, 0.018651456832885743, 0.018683584213256835, 0.01866080093383789, 0.01846124839782715, 0.018472959518432617, 0.01840947151184082, 0.018798208236694335, 0.018407520294189454, 0.01847939109802246, 0.018548351287841797, 0.018735071182250977, 0.018827680587768555, 0.018720544815063477, 0.01857967948913574, 0.018517311096191407, 0.018558944702148437, 0.018703071594238282, 0.018695871353149415, 0.01859529685974121, 0.01907094383239746, 0.018729856491088867, 0.018562911987304687, 0.018674911499023436, 0.01874835205078125, 0.018388992309570314, 0.018315263748168945, 0.018345951080322265, 0.018442272186279297, 0.018559200286865234, 0.018527135848999024, 0.018516128540039062, 0.01851055908203125, 0.018534400939941405, 0.018337791442871093, 0.018538272857666016, 0.018354591369628907, 0.01829599952697754, 0.01848179244995117, 0.0188351993560791, 0.018636640548706056, 0.018900928497314454, 0.01869443130493164, 0.018374847412109374, 0.01841971206665039, 0.018329599380493163, 0.01842585563659668, 0.018372224807739257, 0.018563232421875, 0.01847318458557129, 0.018380191802978514, 0.018432607650756837, 0.018474815368652343, 0.018413631439208985, 0.01841779136657715, 0.018489343643188477, 0.018550783157348632, 0.01857257652282715, 0.018606815338134765, 0.018652223587036134, 0.01856780815124512, 0.01962188720703125, 0.01921379280090332, 0.019159584045410155, 0.019076704025268554, 0.018895263671875, 0.018757280349731446, 0.018696544647216796, 0.01843529510498047, 0.018359071731567384, 0.01847430419921875, 0.018692800521850586, 0.018577407836914063, 0.0184703369140625, 0.018698816299438478, 0.01877337646484375, 0.019072959899902344, 0.01882796859741211, 0.018909183502197266, 0.01864908790588379, 0.019403968811035156, 0.020738880157470704, 0.01901308822631836, 0.019032543182373046, 0.021429376602172853, 0.0205055046081543, 0.018771968841552734, 0.018464672088623048, 0.018460351943969725, 0.018366912841796874, 0.018374528884887695, 0.018471168518066405, 0.018444000244140626, 0.018313119888305664, 0.01847228813171387, 0.01836739158630371, 0.018294752120971678, 0.018335935592651367, 0.018289888381958008, 0.018449024200439455, 0.018249727249145507, 0.018440191268920898, 0.018543840408325196, 0.018481952667236328, 0.018366464614868162, 0.01835212707519531, 0.01850953674316406, 0.01837494468688965, 0.01847478485107422, 0.018426080703735352, 0.01848428726196289, 0.01838345527648926, 0.018362720489501952, 0.018386144638061524, 0.01843280029296875, 0.01835955238342285, 0.018424575805664062, 0.018354175567626953, 0.01840140724182129, 0.018546783447265625, 0.01849091148376465, 0.018343904495239257, 0.01834649658203125, 0.01838489532470703, 0.018761760711669923, 0.018500959396362305, 0.018637792587280273, 0.018380800247192384, 0.018374303817749023, 0.018556896209716796, 0.01862486457824707, 0.01862883186340332, 0.018450239181518554, 0.018444192886352538, 0.01866352081298828, 
0.018404767990112304, 0.01854524803161621, 0.018374656677246092, 0.018726911544799805, 0.018513919830322266, 0.018687999725341797, 0.01864499282836914, 0.01846681594848633, 0.018349248886108397, 0.01842265510559082, 0.018359968185424805, 0.018464895248413087, 0.0184421443939209, 0.018417919158935547, 0.01838489532470703, 0.018390975952148437, 0.018458688735961914, 0.01850268745422363, 0.0184385929107666, 0.01842838478088379, 0.018429695129394533, 0.018415903091430662, 0.018384927749633788, 0.018347295761108398, 0.018358272552490236, 0.01840729522705078, 0.018392127990722658, 0.018378496170043945, 0.01837059211730957, 0.018345504760742187, 0.018471391677856445, 0.018425664901733398, 0.018319391250610353, 0.018398880004882812, 0.01840140724182129, 0.018452863693237304, 0.018415615081787108, 0.018319360733032225, 0.018317312240600587, 0.018364416122436524, 0.018370559692382812, 0.01836857604980469, 0.018333631515502928, 0.018329471588134767, 0.018284671783447264, 0.01832499122619629, 0.01830143928527832, 0.018272256851196288, 0.018274303436279296, 0.0183306884765625, 0.018455488204956055, 0.01830297660827637, 0.01947238349914551, 0.01923628807067871, 0.018942432403564455, 0.018849279403686522, 0.018729568481445313, 0.018722816467285155, 0.018558719635009765, 0.01848281669616699, 0.018412160873413085, 0.01835212707519531, 0.018224191665649415, 0.018357471466064455, 0.01829654312133789, 0.01828659248352051, 0.01833113670349121, 0.01827686309814453, 0.018317312240600587, 0.01824278450012207, 0.018254623413085938, 0.018288799285888672, 0.01825798416137695, 0.01826793670654297, 0.018296768188476562, 0.01832147216796875, 0.01832294464111328, 0.018411359786987304, 0.018346656799316408, 0.018453727722167967, 0.018331743240356444, 0.01837945556640625, 0.01846067237854004, 0.018450016021728514, 0.018346399307250977, 0.018415807723999023, 0.018339168548583983, 0.018821855545043946, 0.018540288925170897, 0.018376415252685546, 0.01845599937438965, 0.01833456039428711, 0.0184586238861084, 0.018319360733032225, 0.018382848739624022, 0.018363744735717773, 0.01842243194580078, 0.018398591995239258, 0.01843014335632324, 0.01839164733886719, 0.018442047119140624, 0.018397216796875, 0.01843814468383789, 0.018384639739990234, 0.018295040130615236, 0.018398399353027343, 0.01838902473449707, 0.018424287796020507, 0.018837087631225585, 0.01856572723388672, 0.018489471435546877, 0.018423456192016602, 0.018482847213745116, 0.01839788818359375, 0.018413408279418945, 0.018892799377441406, 0.0187064323425293, 0.018743295669555664, 0.019494335174560548, 0.01842848014831543, 0.018421760559082033, 0.018295072555541993, 0.018355583190917967, 0.01824188804626465, 0.018297056198120117, 0.018558752059936522, 0.018241535186767577, 0.018583168029785158, 0.018597440719604494, 0.018510656356811525, 0.018531423568725586, 0.01840220832824707, 0.018358272552490236, 0.018333568572998046, 0.018266239166259766, 0.01823472023010254, 0.01828112030029297, 0.01830656051635742, 0.018432512283325195, 0.018495231628417968, 0.01836595153808594, 0.018444671630859374, 0.018321760177612306, 0.018247007369995117, 0.018174112319946287, 0.018317855834960938, 0.018175199508666993, 0.018182304382324217, 0.01832819175720215, 0.018286016464233397, 0.018252351760864257, 0.018300928115844727, 0.018163711547851562, 0.018317312240600587, 0.018272256851196288, 0.018229248046875, 0.018305023193359374, 0.01820057678222656, 0.018261663436889647, 0.018293088912963867, 0.018628543853759765, 0.018355648040771486, 0.01824211120605469, 0.018411167144775392, 
0.01856051254272461, 0.018946975708007813, 0.01842585563659668, 0.018479103088378905, 0.018378751754760742, 0.01850912094116211, 0.01849737548828125, 0.018322015762329103, 0.018559104919433595, 0.018267520904541014, 0.018281215667724608, 0.019136512756347656, 0.018406463623046876, 0.018940448760986328, 0.018795520782470702, 0.01862553596496582, 0.01889795112609863, 0.018791391372680665, 0.018633056640625, 0.018609600067138674, 0.018421823501586915, 0.018261152267456053, 0.018238655090332033, 0.018197568893432617, 0.018320127487182616, 0.018241376876831056, 0.018285791397094728, 0.01842681694030762, 0.018563039779663087, 0.01836649513244629, 0.018332672119140626, 0.018553375244140625, 0.01838947105407715, 0.0183767032623291, 0.018314559936523436, 0.01839094352722168, 0.018371360778808594, 0.018296831130981444, 0.018339839935302735, 0.018236671447753906, 0.01825663948059082, 0.018335744857788085, 0.0186429443359375, 0.018558879852294922, 0.018782304763793944, 0.018456607818603515, 0.01862777519226074, 0.018573759078979492, 0.018529855728149414, 0.018518815994262694, 0.018356224060058594, 0.018327232360839843, 0.018352096557617187, 0.01829862403869629, 0.01834009552001953, 0.018315519332885742, 0.01848534393310547, 0.01838243293762207, 0.018608800888061522, 0.01829680061340332, 0.018279232025146485, 0.018379295349121094, 0.018297088623046874, 0.01833184051513672, 0.018689184188842772, 0.01835094451904297, 0.018676799774169923, 0.01916431999206543, 0.02024015998840332, 0.018856096267700195, 0.018381856918334962, 0.01839548873901367, 0.018289119720458984, 0.018310848236083983, 0.018233663558959962, 0.018222368240356446, 0.018202976226806642]",tokens/s,54.01257536490143,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,2221.604864,2971.533312,0.0,2569.0112,2295.745536,s,1,9.6190166015625,9.6190166015625,0.0,9.6190166015625,9.6190166015625,9.6190166015625,9.6190166015625,[9.6190166015625],,kWh,7.298496555004022e-05,8.043595000497683e-06,3.0054190710004924e-05,0.00011108275126054282,,MB,2234.322944,2988.310528,0.0,2571.108352,2282.381824,s,10,0.8596202621459961,0.08596202621459961,0.0003158673009239867,0.08581840133666992,0.08627934341430664,0.08652176704406739,0.08671570594787598,"[0.08676419067382812, 0.0858189468383789, 0.08584233856201172, 0.0861646728515625, 0.08572726440429687, 0.08579167938232422, 0.08573197174072265, 0.08622547149658204, 0.08573587036132813, 0.08581785583496093]",tokens/s,2978.0591648794975,kWh,2.574765603728939e-06,2.839352537000697e-07,1.7147430871927813e-06,4.57344394462179e-06,tokens/kWh,55975322.55775148,MB,2240.917504,2988.310528,0.0,2571.108352,2391.673344,s,10,15.148233642578125,1.5148233642578126,0.004446785780560509,1.51541357421875,1.5190765869140623,1.5211724487304688,1.5228491381835938,"[1.51861083984375, 1.516307861328125, 1.514519287109375, 1.5168350830078126, 1.5169815673828124, 1.5074818115234374, 
1.523268310546875, 1.5109942626953126, 1.5140625, 1.509172119140625]",tokens/s,41.58900733014957,kWh,4.378559632293431e-05,4.82920464193052e-06,2.367099944361072e-05,7.228580040847555e-05,tokens/kWh,871540.4636041523,,s,630,15.145406845092786,0.024040328325544084,0.0006891557871525547,0.02386364841461182,0.024372579765319823,0.024832561779022214,0.028227753238677985,"[0.0242521915435791, 0.02408243179321289, 0.023797760009765623, 0.02383635139465332, 0.028692447662353515, 0.025798976898193358, 0.023951391220092773, 0.024078336715698243, 0.023893312454223634, 0.02393891143798828, 0.023654975891113282, 0.02377961540222168, 0.023684480667114257, 0.023904224395751954, 0.02368374443054199, 0.023758848190307616, 0.023760896682739258, 0.023736320495605468, 0.023588863372802735, 0.023700511932373047, 0.023920991897583007, 0.023882112503051757, 0.0240676155090332, 0.023714527130126953, 0.02366281509399414, 0.023752479553222655, 0.025255456924438476, 0.023854976654052736, 0.02372643280029297, 0.023705856323242187, 0.023768415451049806, 0.02402899169921875, 0.028398431777954102, 0.024197120666503907, 0.024018943786621092, 0.023995935440063478, 0.02443107223510742, 0.024016895294189454, 0.023885120391845704, 0.02381279945373535, 0.02410086441040039, 0.023766271591186522, 0.023736799240112304, 0.0238123836517334, 0.023637792587280274, 0.023736543655395508, 0.02369331169128418, 0.023758848190307616, 0.02358003234863281, 0.023624319076538086, 0.023736320495605468, 0.024333791732788088, 0.025889312744140625, 0.025667104721069336, 0.023783903121948242, 0.023769088745117187, 0.02368819236755371, 0.023786495208740235, 0.02363750457763672, 0.023783935546875, 0.023705440521240233, 0.023732255935668946, 0.02372220802307129, 0.024090560913085937, 0.023993791580200194, 0.023855743408203126, 0.02388787269592285, 0.023659967422485353, 0.02383110427856445, 0.023771135330200196, 0.02395110321044922, 0.028063488006591798, 0.024084991455078125, 0.02406505584716797, 0.024257055282592775, 0.023683040618896485, 0.023876064300537108, 0.02371583938598633, 0.023734272003173826, 0.023648384094238282, 0.023785343170166017, 0.023660640716552734, 0.02370345687866211, 0.0235532169342041, 0.023704320907592773, 0.023695423126220704, 0.02372403144836426, 0.023625856399536134, 0.024038560867309572, 0.02377801513671875, 0.024774656295776368, 0.02482585525512695, 0.02407753562927246, 0.023894815444946288, 0.023858591079711913, 0.02400704002380371, 0.02374790382385254, 0.023764896392822265, 0.02405068778991699, 0.02390608024597168, 0.023788896560668946, 0.023804319381713866, 0.02404400062561035, 0.0237260799407959, 0.023760799407958985, 0.02372825622558594, 0.023774496078491213, 0.023690975189208985, 0.023795808792114258, 0.023871904373168946, 0.02383286476135254, 0.02378892707824707, 0.024023168563842772, 0.024183488845825194, 0.02902835273742676, 0.024424448013305664, 0.024840192794799806, 0.024331872940063476, 0.024159807205200196, 0.02396656036376953, 0.024020992279052734, 0.023758304595947265, 0.023925024032592772, 0.0238656005859375, 0.02388172721862793, 0.02368716812133789, 0.024372352600097656, 0.024131872177124022, 0.023912736892700195, 0.02393120002746582, 0.02383568000793457, 0.02389295959472656, 0.023631872177124022, 0.023947231292724608, 0.023650335311889648, 0.023645280838012695, 0.023654687881469728, 0.023839359283447267, 0.023631872177124022, 0.023746559143066406, 0.0238787841796875, 0.023812992095947266, 0.023729440689086913, 0.02384764862060547, 0.02371174430847168, 0.02388172721862793, 0.023875583648681642, 
0.024027135848999022, 0.023783424377441405, 0.023805696487426756, 0.02376905632019043, 0.024087839126586914, 0.02413670349121094, 0.026333343505859374, 0.027835840225219725, 0.024447391510009766, 0.024024864196777344, 0.023955680847167968, 0.023801855087280274, 0.02390630340576172, 0.023738367080688477, 0.023842815399169923, 0.02370969581604004, 0.023824384689331055, 0.023740415573120118, 0.023859199523925782, 0.023862911224365235, 0.02390179252624512, 0.024156959533691406, 0.023971071243286134, 0.02371865653991699, 0.02389401626586914, 0.023814144134521483, 0.02467020797729492, 0.025046815872192384, 0.024139007568359374, 0.023926816940307617, 0.02444326400756836, 0.02379155158996582, 0.024072832107543945, 0.024252416610717774, 0.02412544059753418, 0.024202367782592774, 0.023999359130859373, 0.02374224090576172, 0.023860960006713866, 0.02373379135131836, 0.02385193634033203, 0.02387945556640625, 0.02457811164855957, 0.024374624252319337, 0.024208192825317384, 0.024231935501098634, 0.028694528579711914, 0.024213504791259766, 0.023973119735717772, 0.023843584060668947, 0.02370355224609375, 0.02389187240600586, 0.02372003173828125, 0.023818239212036133, 0.023629823684692384, 0.023738367080688477, 0.02371116828918457, 0.023796287536621094, 0.02375881576538086, 0.02405379295349121, 0.023742464065551756, 0.023801855087280274, 0.023773183822631837, 0.02380998420715332, 0.02375276756286621, 0.02377231979370117, 0.023628639221191405, 0.023762943267822266, 0.023623071670532226, 0.024437088012695313, 0.023635391235351563, 0.023847743988037108, 0.023791616439819335, 0.024057855606079103, 0.02393907165527344, 0.02431407928466797, 0.02376495933532715, 0.023829696655273437, 0.02372879981994629, 0.023814111709594726, 0.023678783416748048, 0.023737791061401368, 0.02362972831726074, 0.023913312911987304, 0.024057855606079103, 0.024467456817626954, 0.025198591232299804, 0.02409622383117676, 0.024128032684326173, 0.02829484748840332, 0.02407046318054199, 0.024002111434936524, 0.023749055862426757, 0.02447926330566406, 0.023816576004028322, 0.02389311981201172, 0.02369638442993164, 0.023816160202026367, 0.023775232315063476, 0.023873247146606446, 0.023962112426757814, 0.023926559448242186, 0.023724063873291016, 0.023967552185058593, 0.02383273506164551, 0.024611007690429686, 0.024094720840454102, 0.02388172721862793, 0.023810047149658203, 0.023818239212036133, 0.02370969581604004, 0.024073856353759766, 0.023870880126953126, 0.023622079849243163, 0.02369331169128418, 0.023751199722290037, 0.024807424545288087, 0.03099014472961426, 0.024094911575317384, 0.023857152938842774, 0.023949312210083007, 0.02388956832885742, 0.023935327529907225, 0.023812095642089845, 0.024010112762451172, 0.02384320068359375, 0.023812351226806642, 0.023910400390625, 0.02836070442199707, 0.024211328506469728, 0.023976064682006835, 0.023797760009765623, 0.023889503479003905, 0.023646623611450195, 0.023733631134033203, 0.023724672317504882, 0.02379968070983887, 0.023716127395629883, 0.024043359756469727, 0.023635967254638672, 0.024352767944335937, 0.023750080108642577, 0.023710336685180664, 0.023658464431762696, 0.023860223770141603, 0.023982847213745117, 0.023787744522094728, 0.023619359970092773, 0.02373244857788086, 0.023709888458251952, 0.0237425594329834, 0.023695072174072265, 0.023980031967163085, 0.023714080810546875, 0.023852767944335936, 0.02382784080505371, 0.024418848037719727, 0.023886112213134764, 0.023916351318359376, 0.023831680297851564, 0.023909248352050782, 0.023746559143066406, 0.02395657539367676, 0.023790496826171875, 
0.02409881591796875, 0.02384486389160156, 0.024193023681640623, 0.023764032363891602, 0.027324735641479494, 0.0242890567779541, 0.024000736236572267, 0.02393199920654297, 0.023789472579956054, 0.02386227226257324, 0.02391449546813965, 0.023807680130004883, 0.023839040756225584, 0.02366464042663574, 0.02376028823852539, 0.023744224548339844, 0.023573375701904296, 0.023703359603881837, 0.023588319778442383, 0.023704288482666015, 0.023690591812133788, 0.023810720443725585, 0.0237871036529541, 0.02373593521118164, 0.023759647369384764, 0.02381564712524414, 0.02376166343688965, 0.023864160537719725, 0.023655263900756836, 0.023791711807250978, 0.023662591934204103, 0.023814144134521483, 0.023856767654418944, 0.023852928161621094, 0.023687679290771483, 0.023871488571166992, 0.023788639068603516, 0.023966304779052733, 0.023934528350830077, 0.024080320358276366, 0.024034400939941407, 0.024190847396850585, 0.02419081687927246, 0.02415932846069336, 0.023935712814331055, 0.024127904891967773, 0.024157119750976563, 0.024158111572265627, 0.024275295257568358, 0.02428988838195801, 0.0239554557800293, 0.023895231246948243, 0.023743295669555665, 0.023824384689331055, 0.02385305595397949, 0.024432640075683593, 0.02366409683227539, 0.023746112823486328, 0.023610336303710937, 0.023764991760253908, 0.023590015411376952, 0.023764928817749022, 0.023580991744995117, 0.023802495956420897, 0.02371379280090332, 0.02392038345336914, 0.02411724853515625, 0.024182783126831055, 0.023987327575683594, 0.023844831466674803, 0.024916479110717774, 0.023862815856933593, 0.023876287460327147, 0.02367068862915039, 0.02394140815734863, 0.023660383224487304, 0.023658655166625978, 0.024184831619262694, 0.025061216354370117, 0.023748767852783202, 0.023738367080688477, 0.023628992080688478, 0.023976032257080077, 0.023648767471313475, 0.023786848068237304, 0.023802751541137694, 0.02392678451538086, 0.024270208358764648, 0.024065919876098633, 0.02644598388671875, 0.026135135650634765, 0.023981504440307617, 0.02409324836730957, 0.023786527633666992, 0.02375369644165039, 0.023855104446411132, 0.023859199523925782, 0.025048255920410156, 0.0240034236907959, 0.024037120819091796, 0.023776479721069336, 0.023673856735229492, 0.023782560348510742, 0.02411929512023926, 0.023865503311157228, 0.023750879287719726, 0.024279520034790038, 0.023787519454956055, 0.023795711517333985, 0.02377494430541992, 0.02539289665222168, 0.023857696533203125, 0.023992319107055664, 0.02652774429321289, 0.024435808181762695, 0.02421443176269531, 0.02411520004272461, 0.023968767166137696, 0.023985151290893555, 0.02380124855041504, 0.0242325439453125, 0.023855232238769532, 0.024850303649902344, 0.024319999694824217, 0.024465408325195313, 0.02436297607421875, 0.024466495513916015, 0.024273120880126953, 0.0243637752532959, 0.024395711898803712, 0.024838048934936522, 0.024488319396972658, 0.024202144622802735, 0.024265216827392577, 0.02375459289550781, 0.023806432723999023, 0.023816192626953125, 0.02390553665161133, 0.023700223922729493, 0.023827871322631835, 0.023685184478759766, 0.023744863510131838, 0.023719232559204103, 0.02384761619567871, 0.023854848861694335, 0.023826879501342775, 0.023842208862304686, 0.02381065559387207, 0.02366873550415039, 0.023820287704467775, 0.02373222351074219, 0.023824256896972658, 0.023787647247314452, 0.023803903579711915, 0.023742464065551756, 0.023809951782226564, 0.024311904907226563, 0.02424831962585449, 0.02408857536315918, 0.024253887176513673, 0.024002559661865236, 0.023968320846557617, 0.024420352935791017, 
0.023752704620361328, 0.02371129608154297, 0.023834943771362305, 0.0237260799407959, 0.023926816940307617, 0.02374665641784668, 0.023965696334838867, 0.023807136535644532, 0.023847135543823242, 0.024048288345336913, 0.0243855037689209, 0.02519785690307617, 0.025156320571899413, 0.02395462417602539, 0.024001344680786133, 0.023932928085327147, 0.023836671829223634, 0.023729856491088868, 0.023976255416870117, 0.024014848709106446, 0.024518655776977538, 0.023857152938842774, 0.02385487937927246, 0.024109216690063478, 0.023905664443969726, 0.023715744018554686, 0.023911199569702148, 0.023851007461547852, 0.023969791412353517, 0.024034719467163086, 0.024123071670532226, 0.023947488784790038, 0.02386511993408203, 0.023993759155273436, 0.02443324851989746, 0.023870559692382814, 0.02406083106994629, 0.024045055389404296, 0.024078144073486327, 0.023773887634277343, 0.02371583938598633, 0.023740352630615233, 0.023709760665893555, 0.02371708869934082, 0.023732160568237303, 0.023885759353637695, 0.0241591682434082, 0.02433430480957031, 0.02399135971069336, 0.0237923526763916, 0.023959775924682618, 0.023846624374389648, 0.02382464027404785, 0.023977472305297853, 0.023881887435913084, 0.02383500862121582, 0.023797760009765623, 0.024568992614746092, 0.023904447555541993, 0.024064191818237303, 0.023790048599243163, 0.024225791931152343, 0.023748735427856445, 0.023854976654052736, 0.023627775192260742, 0.023745632171630858, 0.023759775161743164, 0.023833728790283202, 0.023778175354003905, 0.02423756790161133, 0.024197248458862303, 0.02418076705932617, 0.02409503936767578, 0.024638784408569335, 0.0252010555267334, 0.024396095275878906, 0.02427827262878418, 0.0241975040435791, 0.024125823974609373, 0.02408019256591797, 0.024051904678344727, 0.02492736053466797, 0.02410540771484375, 0.02412928009033203, 0.02402707290649414, 0.024056575775146485, 0.023939039230346678, 0.024084512710571288, 0.023980031967163085, 0.02419014358520508, 0.0239453125, 0.023859935760498045, 0.023863136291503908, 0.024959360122680664, 0.024375200271606445, 0.02396579170227051, 0.024065439224243163, 0.0240677433013916, 0.023954368591308593, 0.02392268753051758, 0.02373017692565918, 0.023738367080688477, 0.023833728790283202, 0.02361248016357422, 0.02361529541015625, 0.0235863037109375, 0.023763456344604493, 0.023832576751708984, 0.023747776031494142, 0.023855936050415038, 0.024037376403808593, 0.024032255172729493, 0.02412031936645508, 0.0241213436126709, 0.024260608673095704, 0.023932287216186524, 0.024133407592773437, 0.023861087799072266, 0.02389913558959961, 0.023771135330200196, 0.023774208068847655, 0.023765119552612304, 0.023830591201782228, 0.02367571258544922, 0.02379792022705078, 0.023668512344360352, 0.023719167709350585, 0.02389504051208496, 0.0240883846282959, 0.024143871307373048, 0.024131584167480468, 0.023922431945800782, 0.023933183670043944, 0.023791616439819335, 0.023905664443969726, 0.02389788818359375, 0.023833440780639648, 0.023752704620361328, 0.024321088790893553, 0.02386582374572754, 0.023790048599243163, 0.023985248565673828, 0.02397792053222656, 0.023938016891479494, 0.02452889633178711, 0.023822336196899413, 0.023859199523925782, 0.023801855087280274, 0.02386944007873535, 0.023875583648681642, 0.023977216720581056, 0.023847679138183593, 0.02406809616088867, 0.02467840003967285, 0.024358911514282225, 0.024000511169433594]",tokens/s,41.596769663808985,,, 
4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7423.451136,7815.954432,0.0,7430.209536,7414.23104,s,1,11.595765625,11.595765625,0.0,11.595765625,11.595765625,11.595765625,11.595765625,[11.595765625],,kWh,0.00013157114591250924,1.4505656099530295e-05,4.413281308396355e-05,0.0001902096150960031,,MB,1759.203328,8581.414912,0.0,8166.309888,8044.111872,s,10,7.11821044921875,0.7118210449218749,0.0014261692148965948,0.7117133178710937,0.7135462158203125,0.7138508666992187,0.7140945874023438,"[0.7097128295898437, 0.7110859375, 0.7101524047851563, 0.7105596313476562, 0.7115587768554688, 0.713478515625, 0.7122205810546876, 0.714155517578125, 0.7134183959960938, 0.7118678588867188]",tokens/s,359.6409544594133,kWh,2.085015352291369e-05,2.299410959780034e-06,1.3943780996285887e-05,3.709334547897961e-05,tokens/kWh,6901507.445454667,MB,1762.254848,8749.187072,0.0,8334.082048,8265.686528,s,10,31.90846728515625,3.190846728515625,0.0077679896907146394,3.192784545898437,3.1986811279296874,3.1987084838867186,3.198730368652344,"[3.1766376953125, 3.181797607421875, 3.1851884765625, 3.185459716796875, 3.18967529296875, 3.1980703125, 3.195893798828125, 3.19833349609375, 3.19873583984375, 3.198675048828125]",tokens/s,19.74397561530869,kWh,9.370738393624983e-05,1.0335458117044888e-05,6.207627981971244e-05,0.00016611912187300716,tokens/kWh,379245.9247898115,,s,630,31.904615425109878,0.05064224670652359,0.0006904843426848194,0.05065523147583008,0.051468381118774414,0.051708851623535154,0.05287393089294434,"[0.052416126251220704, 0.049266464233398435, 0.04945366287231445, 0.050136096954345705, 0.04966499328613281, 0.049086463928222655, 0.04979097747802735, 0.049810623168945314, 0.05013708877563477, 0.04979180908203125, 0.05096054458618164, 0.050340801239013674, 0.04978985595703125, 0.04948992156982422, 0.050083839416503906, 0.049326080322265625, 0.049620990753173826, 0.04990563201904297, 0.0503337287902832, 0.050628608703613284, 0.05097286224365234, 0.05078611373901367, 0.049563648223876954, 0.049934112548828125, 0.04934473419189453, 0.051140609741210936, 0.05002239990234375, 0.04980326461791992, 0.04955136108398438, 0.050421760559082034, 0.0504727668762207, 0.04969504165649414, 0.049909313201904296, 0.049781055450439454, 0.0503087043762207, 0.05100892639160156, 0.050801856994628906, 0.05064025497436524, 0.05089849472045899, 0.051803009033203125, 0.05082495880126953, 0.05071488189697266, 0.05047216033935547, 0.050784385681152344, 0.05055769729614258, 0.05051356887817383, 0.05080207824707031, 0.050342750549316403, 0.049821697235107425, 0.05084316635131836, 0.05090963363647461, 0.0510852165222168, 0.050796737670898436, 0.05101766586303711, 0.05075763320922851, 0.05127372741699219, 0.05111532974243164, 0.051130718231201175, 0.05109590530395508, 0.051041534423828125, 0.05134390258789062, 0.05107673645019531, 0.05100956726074219, 0.05293875122070312, 0.04938489532470703, 
0.04976083374023438, 0.05036022567749023, 0.049895519256591796, 0.04896739196777344, 0.05004832077026367, 0.05009916687011719, 0.05007974243164062, 0.050810142517089846, 0.049193695068359376, 0.0494837760925293, 0.04976435089111328, 0.050108417510986325, 0.04994435119628906, 0.050200801849365234, 0.04970832061767578, 0.04984809494018555, 0.051018177032470705, 0.05147903823852539, 0.050721920013427735, 0.04989593505859375, 0.04981184005737305, 0.05101772689819336, 0.04971110534667969, 0.050353504180908205, 0.05095481491088867, 0.05019247817993164, 0.050142974853515626, 0.050510078430175784, 0.0500030403137207, 0.05058832168579101, 0.05070604705810547, 0.050649246215820315, 0.050106529235839845, 0.049955135345458986, 0.05012275314331055, 0.0514334716796875, 0.05078015899658203, 0.05082534408569336, 0.05082060623168945, 0.05137820816040039, 0.05051337432861328, 0.05015203094482422, 0.049700225830078125, 0.051469215393066405, 0.05039888000488281, 0.050522464752197266, 0.05069363021850586, 0.05086054229736328, 0.050859905242919924, 0.05043827056884766, 0.051636062622070315, 0.05068947219848633, 0.050987743377685545, 0.05096857452392578, 0.05192294311523438, 0.05066656112670898, 0.05037337493896484, 0.05066976165771484, 0.051699710845947267, 0.05125939178466797, 0.051138561248779295, 0.053491649627685545, 0.04989427185058594, 0.04995296096801758, 0.050270015716552735, 0.04976435089111328, 0.049194305419921876, 0.05007024002075195, 0.050796512603759766, 0.04989052963256836, 0.04989174270629883, 0.05072422409057617, 0.049740798950195314, 0.04967833709716797, 0.050320384979248046, 0.050299903869628904, 0.050206718444824217, 0.04973158264160156, 0.049860607147216796, 0.05014681625366211, 0.0514785270690918, 0.04989388656616211, 0.050587646484375, 0.05036646270751953, 0.050587646484375, 0.05006687927246094, 0.05104288101196289, 0.05022515106201172, 0.04986880111694336, 0.04997663879394531, 0.049811935424804686, 0.04965760040283203, 0.05057542419433594, 0.050995616912841796, 0.050589664459228516, 0.05038854217529297, 0.050694625854492185, 0.05104844665527344, 0.05088665771484375, 0.05038809585571289, 0.05134201431274414, 0.050811073303222654, 0.050726913452148435, 0.050223102569580076, 0.050391040802001956, 0.05088460922241211, 0.050797569274902345, 0.05112131118774414, 0.05136931228637695, 0.05001062393188477, 0.05093580627441406, 0.05083273696899414, 0.05019705581665039, 0.05087855911254883, 0.05090291213989258, 0.0510011215209961, 0.05107664108276367, 0.05099126434326172, 0.050939998626708984, 0.05170988845825195, 0.05118207931518555, 0.05105433654785156, 0.05108732986450195, 0.05133148956298828, 0.05403033447265625, 0.04986880111694336, 0.04928076934814453, 0.05035238265991211, 0.04956124877929687, 0.04893731307983398, 0.05005516815185547, 0.04998486328125, 0.04974448013305664, 0.049581985473632816, 0.05033385467529297, 0.05055692672729492, 0.0501363525390625, 0.04969750213623047, 0.04989270401000977, 0.04994678497314453, 0.05027667236328125, 0.05067164611816406, 0.05106809616088867, 0.05096537780761719, 0.04992009735107422, 0.05038675308227539, 0.05150726318359375, 0.05073113632202148, 0.050407424926757816, 0.05037615966796875, 0.049409664154052735, 0.049806240081787106, 0.04985638427734375, 0.05017817687988281, 0.05001420974731445, 0.050105663299560545, 0.05004115295410156, 0.050125247955322264, 0.050081504821777346, 0.05088278579711914, 0.05137958526611328, 0.05137388610839844, 0.050182975769042966, 0.05072028732299805, 0.05140256118774414, 0.05105116653442383, 0.05137408065795898, 
0.05171974563598633, 0.05056556701660156, 0.050802623748779294, 0.05048735809326172, 0.05038249588012695, 0.050823455810546876, 0.05052614212036133, 0.05010665512084961, 0.05048303985595703, 0.050966529846191405, 0.050431358337402345, 0.051423423767089846, 0.051130718231201175, 0.051250656127929686, 0.05047356796264649, 0.051257183074951175, 0.05145401763916015, 0.051311809539794924, 0.051837886810302734, 0.051330303192138674, 0.0526599349975586, 0.05060172653198242, 0.04949440002441406, 0.049324287414550784, 0.04944390487670899, 0.049802078247070315, 0.0502927360534668, 0.050132991790771485, 0.04955955123901367, 0.04984380722045898, 0.05012307357788086, 0.049614273071289065, 0.05018281555175781, 0.04995481491088867, 0.05028044891357422, 0.04980640029907227, 0.05097478485107422, 0.05045443344116211, 0.050962944030761716, 0.05094652938842773, 0.04981145477294922, 0.05077142333984375, 0.051124542236328126, 0.050753761291503906, 0.050348033905029295, 0.049802303314208984, 0.050439102172851566, 0.04984783935546875, 0.049809886932373044, 0.0496104621887207, 0.05048665618896484, 0.049955745697021485, 0.05072895812988281, 0.05073923110961914, 0.050706401824951175, 0.05101907348632812, 0.05093868637084961, 0.0508023681640625, 0.050475200653076174, 0.05083900833129883, 0.05138848114013672, 0.05148041534423828, 0.051746463775634764, 0.05163692855834961, 0.050974273681640626, 0.05080665588378906, 0.050807392120361325, 0.05123487854003906, 0.05091347122192383, 0.050522113800048826, 0.05158092880249023, 0.05101107025146484, 0.050606399536132815, 0.05019257736206055, 0.05036236953735351, 0.05022719955444336, 0.05138431930541992, 0.05177753448486328, 0.05066044616699219, 0.05129487991333008, 0.05179212951660156, 0.05131468963623047, 0.05209292984008789, 0.05311072158813476, 0.05016787338256836, 0.05039923095703125, 0.05076582336425781, 0.050116607666015625, 0.05037395095825195, 0.050315967559814455, 0.05024563217163086, 0.050374462127685544, 0.04981779098510742, 0.05010227203369141, 0.04961215972900391, 0.0508197135925293, 0.050910240173339845, 0.05051631927490234, 0.050420448303222655, 0.05056060791015625, 0.050293056488037106, 0.0507611198425293, 0.051014240264892576, 0.05062656021118164, 0.05031107330322265, 0.05039932632446289, 0.05069004821777344, 0.05063033676147461, 0.04989984130859375, 0.050495487213134765, 0.050618366241455076, 0.050444286346435545, 0.04999116897583008, 0.04953139114379883, 0.05057484817504883, 0.05111155319213867, 0.05055385589599609, 0.05049711990356445, 0.051085601806640624, 0.05134880065917969, 0.05075571060180664, 0.05142995071411133, 0.0508636474609375, 0.05075174331665039, 0.0512042236328125, 0.05156249618530274, 0.05032508850097656, 0.05078476715087891, 0.05052643203735351, 0.05082089614868164, 0.05034963226318359, 0.05101612854003906, 0.05151948928833008, 0.05109958267211914, 0.05084947204589844, 0.05032352066040039, 0.051607902526855466, 0.0511200942993164, 0.051691520690917966, 0.051120128631591794, 0.05108707046508789, 0.05033827209472656, 0.05179974365234375, 0.051781246185302734, 0.051455711364746096, 0.05201795196533203, 0.05295308685302735, 0.05017116928100586, 0.04970550537109375, 0.05019855880737305, 0.04955152130126953, 0.05004288101196289, 0.04953497695922852, 0.05006335830688476, 0.04985036849975586, 0.05079999923706055, 0.05023398590087891, 0.051058399200439454, 0.05033603286743164, 0.05086617660522461, 0.050329601287841794, 0.049909278869628905, 0.04974435043334961, 0.05090697479248047, 0.050985118865966794, 0.050313343048095704, 0.051315967559814456, 
0.050538368225097656, 0.05009280014038086, 0.05047296142578125, 0.05019443130493164, 0.04977158355712891, 0.04957443237304687, 0.04992233657836914, 0.05019865417480469, 0.05087583923339844, 0.05063328170776367, 0.05062604904174805, 0.05056358337402344, 0.05134748840332031, 0.05084975814819336, 0.050874366760253906, 0.05082521438598633, 0.050800640106201174, 0.05053155136108398, 0.05094278335571289, 0.05078326416015625, 0.051450817108154294, 0.05113241577148438, 0.05175638580322266, 0.050827934265136716, 0.05058969497680664, 0.05086822509765625, 0.050553886413574216, 0.050856929779052734, 0.05057263946533203, 0.05204560089111328, 0.05156256103515625, 0.051190593719482425, 0.05067270278930664, 0.051608512878417966, 0.05158092880249023, 0.05119161605834961, 0.05204102325439453, 0.05059577560424805, 0.0517457275390625, 0.05122662353515625, 0.051073024749755856, 0.0510832633972168, 0.05313417434692383, 0.049986591339111326, 0.04937628936767578, 0.05032134246826172, 0.050028545379638675, 0.04959641647338867, 0.050966529846191405, 0.05027568054199219, 0.0495827522277832, 0.05016371154785156, 0.04960870361328125, 0.0504785270690918, 0.05032182312011719, 0.05041372680664063, 0.05116473770141602, 0.050868671417236326, 0.05010860824584961, 0.05108307266235351, 0.051100833892822266, 0.051276287078857424, 0.05078252792358398, 0.05060927963256836, 0.04981647872924805, 0.050214336395263674, 0.05099782562255859, 0.05057478332519531, 0.04982361602783203, 0.04988998413085938, 0.05059379196166992, 0.05033964920043945, 0.05024787139892578, 0.05123481750488281, 0.05087955093383789, 0.05168019104003906, 0.05039308929443359, 0.05083545684814453, 0.05079439926147461, 0.05132297515869141, 0.05123612976074219, 0.051768096923828125, 0.051406784057617186, 0.050482593536376956, 0.05071459197998047, 0.05043264007568359, 0.051468288421630856, 0.050933631896972656, 0.050824993133544924, 0.05028489685058594, 0.05043225479125977, 0.05035187149047852, 0.0516541748046875, 0.051083583831787106, 0.051607200622558594, 0.05130035018920898, 0.05120687866210937, 0.0516907844543457, 0.05118822479248047, 0.05118463897705078, 0.051227649688720706, 0.051740673065185545, 0.051181312561035155, 0.05108947372436524, 0.050579071044921875, 0.052715232849121094, 0.04994688034057617, 0.049764415740966794, 0.0499969596862793, 0.04942665481567383, 0.05055136108398438, 0.050515201568603514, 0.05043049621582031, 0.050200801849365234, 0.05008588790893555, 0.05000806427001953, 0.05088256072998047, 0.05024563217163086, 0.05035964965820312, 0.05035279846191406, 0.051332542419433594, 0.050962944030761716, 0.05072803115844727, 0.051120384216308594, 0.05120428848266602, 0.05099164962768555, 0.05067571258544922, 0.05046262359619141, 0.050976863861083986, 0.04981568145751953, 0.0499997444152832, 0.05048323059082031, 0.05005289459228516, 0.04934048080444336, 0.049909664154052735, 0.050697662353515624, 0.05113731384277344, 0.05033548736572266, 0.050753631591796876, 0.050767040252685545, 0.05083852767944336, 0.05075145721435547, 0.05082265472412109, 0.05108992004394531, 0.051477920532226565, 0.05111254501342773, 0.051722240447998044, 0.051500926971435546, 0.050712192535400394, 0.05154009628295898, 0.050968673706054686, 0.050730686187744144, 0.050606689453125, 0.05087641525268555, 0.050569217681884764, 0.051146751403808595, 0.050321056365966794, 0.05088662338256836, 0.05097107315063477, 0.051205760955810545, 0.051661121368408204, 0.051148414611816406, 0.05166691207885742, 0.051224990844726564, 0.051587265014648435, 0.05163529586791992, 0.05141916656494141, 
0.050931713104248044, 0.05331071853637695, 0.05083417510986328, 0.05016336059570312, 0.05000636672973633, 0.04925417709350586, 0.04956796646118164, 0.049565696716308595, 0.05072860717773438, 0.05072867202758789, 0.04981961441040039, 0.050143489837646486, 0.05023171234130859, 0.05098630523681641, 0.05065539169311523, 0.050121246337890626, 0.050192001342773435, 0.05012928009033203, 0.050900993347167967, 0.05065523147583008, 0.05119913482666016, 0.05071084976196289, 0.05145145416259766, 0.05074358367919922, 0.05100204849243164, 0.049838081359863284, 0.05000147247314453, 0.05105452728271485, 0.050627166748046876, 0.049890625, 0.049856353759765625, 0.04971971130371094, 0.05144198226928711, 0.05094803237915039, 0.05051356887817383, 0.04999622344970703, 0.050561023712158204, 0.05065523147583008, 0.051320831298828126, 0.050991104125976565, 0.051100704193115236, 0.05144409561157227, 0.05156105422973633, 0.05170758438110352, 0.05097299194335937, 0.05147625732421875, 0.050982303619384765, 0.050832000732421875, 0.050385089874267576, 0.050237342834472655, 0.05024505615234375, 0.05177411270141601, 0.05045248031616211, 0.051104000091552734, 0.051205886840820315, 0.050796222686767575, 0.050656768798828126, 0.05223040008544922, 0.05178835296630859, 0.051310592651367185, 0.05125939178466797, 0.05158707046508789, 0.051468288421630856, 0.05120198440551758]",tokens/s,19.746359315278617,,, 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,11154.857984,14593.949696,0.0,14191.427584,13325.783552,s,1,18.1119609375,18.1119609375,0.0,18.1119609375,18.1119609375,18.1119609375,18.1119609375,[18.1119609375],,kWh,0.00032110022627083854,3.5412235573635996e-05,0.0001468240063479953,0.0005033364681924699,,MB,2111.676416,14608.62976,0.0,14191.427584,12582.029312,s,10,96.06249511718748,9.60624951171875,0.005399481449131139,9.6070751953125,9.61213798828125,9.612965478515624,9.613627470703124,"[9.59622265625, 9.5989599609375, 9.60462890625, 9.6081552734375, 9.61379296875, 9.6119541015625, 9.61165234375, 9.606400390625, 9.60775, 9.602978515625]",tokens/s,26.649318205580993,kWh,0.00027970535306291516,3.085226873755438e-05,0.0001858857875974007,0.0004964434093978703,tokens/kWh,515668.0402112681,MB,2115.977216,14608.62976,0.0,14191.427584,12935.916032,s,10,42.378484375,4.2378484375,0.0018905054505437013,4.237484619140625,4.239791162109375,4.240912426757812,4.2418094384765626,"[4.23737646484375, 4.23530908203125, 4.23721826171875, 4.23930078125, 4.24203369140625, 4.2395419921875, 4.23655810546875, 4.2378134765625, 4.2375927734375, 4.23573974609375]",tokens/s,14.866034245709146,kWh,0.00012398269334208407,1.3676884598462097e-05,8.266748280059966e-05,0.00022032706074114582,tokens/kWh,285938.54875600775,,s,630,42.346482192993165,0.06721663840157645,0.0005523383560159218,0.06723809432983399,0.06791511993408203,0.06805313377380372,0.06827436981201171,"[0.0673807373046875, 0.0668905258178711, 0.06658252716064453, 0.0664985580444336, 0.06614838409423827, 0.0663111343383789, 0.06669414520263672, 0.06668915557861328, 0.06610316467285156, 0.06660304260253906, 0.06651286315917969, 0.06697971343994141, 0.06677110290527344, 0.06670899200439454, 0.06684674835205077, 0.06689126586914063, 0.06691526031494141, 0.06647606658935547, 0.06671756744384766, 0.0670946273803711, 0.06698111724853516, 0.0666611557006836, 0.0666562271118164, 0.06667855834960937, 0.06658480072021485, 0.06705359649658203, 0.06668656158447266, 0.06715020751953125, 0.06712319946289062, 0.06746428680419922, 0.06729414367675782, 0.06731158447265626, 0.06734848022460938, 0.06713734436035156, 0.06801334381103516, 0.06754396820068359, 0.06687718200683594, 0.0670692138671875, 0.06735977935791015, 0.06715570831298828, 0.0678051528930664, 0.06730572509765625, 0.06752252960205078, 0.06783554840087891, 0.06767356872558594, 0.0675271987915039, 0.0676655044555664, 0.06763750457763672, 0.06774425506591797, 0.06865449523925782, 0.06747366333007812, 0.0678416976928711, 0.06736457824707032, 0.06715206146240234, 0.06786876678466797, 0.06767203521728515, 0.06749593353271484, 0.06795545959472657, 0.06788835144042969, 0.06816639709472656, 0.06806121826171875, 0.06820041656494141, 0.06792806243896485, 0.06737638092041015, 0.06696339416503906, 0.0667914276123047, 0.0665136947631836, 0.06610944366455078, 
0.06596150207519531, 0.06639993286132813, 0.06643382263183593, 0.06678323364257813, 0.06668902587890625, 0.06651427459716797, 0.0667174072265625, 0.0666490249633789, 0.06654771423339843, 0.06687490844726562, 0.06705379486083984, 0.06716239929199219, 0.06692041778564453, 0.06714886474609374, 0.0664332504272461, 0.06583782196044922, 0.06647395324707031, 0.06695734405517578, 0.06659478759765625, 0.06713139343261719, 0.0670125732421875, 0.06699155426025391, 0.0672786865234375, 0.0672468490600586, 0.06726041412353516, 0.0672726058959961, 0.06725843048095703, 0.06718204498291015, 0.0674897918701172, 0.06740560150146484, 0.06684297943115235, 0.0674287338256836, 0.06716995239257813, 0.06722752380371094, 0.06761257934570312, 0.06753753662109375, 0.06777584075927734, 0.06762060546875, 0.06786083221435547, 0.06722525024414062, 0.06771366119384765, 0.06721564483642578, 0.06751026916503906, 0.06790694427490235, 0.06779353332519532, 0.06756483459472656, 0.06769331359863281, 0.06775939178466797, 0.06767072296142577, 0.06785187530517578, 0.06748201751708985, 0.06801168060302734, 0.06783420562744141, 0.06754508972167969, 0.06809305572509766, 0.067644287109375, 0.06780313873291016, 0.06777801513671874, 0.06782441711425781, 0.06673407745361327, 0.06614838409423827, 0.06614777374267578, 0.06650527954101562, 0.06659478759765625, 0.06674195098876953, 0.06627155303955078, 0.06665744018554688, 0.06620041656494141, 0.06608195495605469, 0.06674928283691406, 0.06681705474853515, 0.06682463836669922, 0.06715650939941406, 0.0671662368774414, 0.06698185729980469, 0.06695891571044922, 0.06655023956298828, 0.06713484954833984, 0.06654544067382813, 0.06652531433105469, 0.06628832244873047, 0.06675772857666015, 0.06695532989501952, 0.06666326141357422, 0.0668569564819336, 0.06725363159179687, 0.06751910400390625, 0.06728089904785156, 0.06786252593994141, 0.06739961242675781, 0.06710047912597657, 0.06737741088867187, 0.06750003051757812, 0.06742835235595702, 0.06671564483642578, 0.06687744140625, 0.06728704071044922, 0.06768959808349609, 0.0674497299194336, 0.06723788452148438, 0.06764134216308594, 0.0672109146118164, 0.0674287338256836, 0.06776006317138672, 0.06777449798583984, 0.06735052490234375, 0.06796272277832031, 0.06792742156982422, 0.06734518432617187, 0.06762870025634765, 0.06744512176513671, 0.0676388168334961, 0.06783634948730469, 0.06759833526611328, 0.06799155426025391, 0.06783340454101562, 0.06806559753417969, 0.0678687973022461, 0.06841548919677734, 0.0682720947265625, 0.06817558288574219, 0.06706790161132813, 0.06656614685058594, 0.06616233825683594, 0.06623868560791016, 0.06663593292236328, 0.06641654205322266, 0.06607059478759765, 0.0661053466796875, 0.06698802947998046, 0.06708342742919922, 0.06671651458740234, 0.06658866882324219, 0.0674303970336914, 0.0668315200805664, 0.0666878433227539, 0.06735871887207032, 0.06636115264892578, 0.066478271484375, 0.06693478393554687, 0.06679270172119141, 0.0673880615234375, 0.06703030395507813, 0.06678406524658204, 0.06701670074462891, 0.06719020843505859, 0.06722531127929687, 0.06737395477294922, 0.06734835052490235, 0.06700028991699218, 0.06704927825927734, 0.06724015808105469, 0.06715961456298829, 0.06747337341308594, 0.0669988784790039, 0.06719404602050781, 0.0668058853149414, 0.06744134521484375, 0.06704329681396484, 0.06748892974853515, 0.06780809783935547, 0.06763929748535157, 0.06787484741210938, 0.06766524505615235, 0.0674044189453125, 0.06747135925292969, 0.06750962829589843, 0.06748188781738282, 0.06783769226074218, 0.06759689331054687, 
0.06734028625488281, 0.06790281677246093, 0.067818115234375, 0.067261474609375, 0.06789427185058594, 0.06799565124511718, 0.06789929962158203, 0.06793536376953126, 0.06794278717041016, 0.06788960266113281, 0.06792822265625, 0.06764134216308594, 0.06793011474609376, 0.06860985565185547, 0.06710675048828126, 0.06651014709472657, 0.06639234924316406, 0.06680009460449218, 0.06629718780517578, 0.06698214721679688, 0.0668996124267578, 0.06640889739990234, 0.0668482894897461, 0.06683628845214844, 0.06669001770019531, 0.06702822113037109, 0.06704364776611328, 0.06662131500244141, 0.06682681274414062, 0.06668434906005859, 0.06721183776855469, 0.067019775390625, 0.06698278045654296, 0.06680384063720703, 0.06688358306884766, 0.06655795288085938, 0.06683238220214843, 0.06714749145507813, 0.06662992095947265, 0.06697574615478516, 0.06672383880615235, 0.06715392303466797, 0.06677299499511719, 0.06705996704101562, 0.06735590362548828, 0.06785865783691407, 0.06739523315429688, 0.06714227294921875, 0.06787398529052735, 0.06750701141357422, 0.06738944244384766, 0.06785779571533203, 0.06791436767578125, 0.06757376098632813, 0.06757891082763672, 0.0673799057006836, 0.06795648193359374, 0.06711721801757813, 0.06778304290771485, 0.0677816925048828, 0.06754605102539063, 0.06780518341064454, 0.06809190368652343, 0.06745705413818359, 0.06723974609375, 0.06788726043701172, 0.06752668762207031, 0.06792301177978516, 0.06801296234130859, 0.06799769592285156, 0.06800592041015625, 0.06734025573730469, 0.06804061126708984, 0.06799369812011719, 0.06748745727539063, 0.06804508972167969, 0.06816973114013672, 0.06706988525390625, 0.06596607971191407, 0.06613606262207031, 0.06612786865234375, 0.06654156494140626, 0.06641049957275391, 0.06668428802490234, 0.0662305908203125, 0.06623673248291016, 0.06673817443847656, 0.06701235198974609, 0.06677529907226562, 0.06697366333007812, 0.06715805053710938, 0.06686723327636719, 0.06668633270263671, 0.0671995849609375, 0.0665128936767578, 0.06701801300048828, 0.06664224243164063, 0.0668082275390625, 0.06682579040527344, 0.06715641784667968, 0.0666844482421875, 0.06669974517822265, 0.06715744018554687, 0.0674402847290039, 0.06734121704101563, 0.06733004760742188, 0.06731980895996094, 0.06731366729736328, 0.06742221069335938, 0.06723174285888672, 0.06681807708740234, 0.06723538970947265, 0.06721142578125, 0.06754329681396484, 0.06766387176513672, 0.0677396469116211, 0.06707984161376954, 0.06753475189208985, 0.06748777770996094, 0.06771756744384766, 0.06764259338378906, 0.06746943664550781, 0.06767478179931641, 0.06778995513916015, 0.06718489837646484, 0.06728950500488282, 0.06738758087158203, 0.06767616271972657, 0.06777855682373046, 0.06766329956054687, 0.06744496154785157, 0.06825154876708985, 0.06807917022705078, 0.06800828552246094, 0.06809961700439453, 0.06818294525146484, 0.06827529907226562, 0.06842185974121094, 0.06842797088623047, 0.06777507019042969, 0.06698188781738282, 0.06613983917236328, 0.06657158660888672, 0.06612889862060548, 0.06637363433837891, 0.0668564453125, 0.06614419555664063, 0.06605471801757813, 0.06673760223388672, 0.06680585479736328, 0.06670793914794922, 0.06645350646972656, 0.06685065460205078, 0.06698188781738282, 0.06718876647949219, 0.06705168151855469, 0.0664103012084961, 0.06613113403320313, 0.06624559783935546, 0.06705753326416015, 0.06700214385986328, 0.06683478546142578, 0.06717411041259766, 0.06711698913574218, 0.0669085464477539, 0.06712902069091797, 0.06703052520751954, 0.0673957748413086, 0.06720162963867188, 0.06742633819580078, 
0.06731977844238281, 0.06687888336181641, 0.06680818939208985, 0.06673388671875, 0.06723830413818359, 0.06775193786621093, 0.06758755493164062, 0.06728144073486328, 0.06775193786621093, 0.06740991973876953, 0.0678256607055664, 0.06733731079101563, 0.0676176986694336, 0.06738070678710938, 0.06748623657226563, 0.06765773010253906, 0.06753075408935547, 0.06735791778564452, 0.06775888061523437, 0.06777241516113282, 0.06712899017333984, 0.0677359390258789, 0.06792189025878906, 0.06770066833496094, 0.06803462219238281, 0.06751026916503906, 0.06823935699462891, 0.06802582550048829, 0.06750262451171875, 0.0680953598022461, 0.06787750244140625, 0.067706787109375, 0.06818211364746093, 0.06694947052001952, 0.06632412719726563, 0.06697811126708984, 0.06651087951660156, 0.06665971374511719, 0.06700505828857421, 0.06685282897949218, 0.06645558166503907, 0.06695935821533203, 0.0661971206665039, 0.06593110656738281, 0.06613657379150391, 0.06619955444335937, 0.06746889495849609, 0.0670212173461914, 0.06642278289794921, 0.06623625946044921, 0.06702505493164063, 0.06709609222412109, 0.06720355224609376, 0.06714163208007813, 0.06710886383056641, 0.06683238220214843, 0.06744882965087891, 0.06720441436767578, 0.06708636474609375, 0.06673270416259766, 0.066766845703125, 0.06698598480224609, 0.06661491394042969, 0.06728294372558594, 0.06736287689208985, 0.0671962890625, 0.06748185729980469, 0.06736966705322266, 0.06717440032958985, 0.06775193786621093, 0.06757376098632813, 0.06773506927490235, 0.06753740692138673, 0.06700847625732421, 0.06753260803222656, 0.06742425537109376, 0.06726265716552735, 0.06776617431640625, 0.06771414184570312, 0.06734265899658202, 0.06757353973388672, 0.06757584381103515, 0.06776716613769532, 0.0681164779663086, 0.06801206207275391, 0.06767407989501953, 0.06816902160644531, 0.0673921890258789, 0.06786252593994141, 0.06742755126953125, 0.067271484375, 0.06797513580322266, 0.06788025665283202, 0.06808799743652344, 0.06788960266113281, 0.06777865600585938, 0.06739100646972657, 0.06689459228515625, 0.06645891571044922, 0.06600978851318359, 0.06599884796142579, 0.06603981018066406, 0.06620326232910156, 0.0661176986694336, 0.06635346984863282, 0.06675369262695313, 0.06661411285400391, 0.06722953796386719, 0.06723190307617187, 0.06695120239257812, 0.06679110717773437, 0.06676825714111329, 0.0670646743774414, 0.06719084930419922, 0.06669516754150391, 0.06654345703125, 0.06652329254150391, 0.06642825317382813, 0.06706851196289063, 0.06660307312011719, 0.06647602844238282, 0.06745689392089843, 0.06718889617919922, 0.06744879913330078, 0.0673034210205078, 0.06699830627441407, 0.0671615982055664, 0.06789097595214844, 0.06757180786132813, 0.06733251190185546, 0.06712134552001953, 0.06736077117919922, 0.06675039672851563, 0.06700656127929687, 0.06750819396972656, 0.06712035369873047, 0.0680597152709961, 0.06762902069091797, 0.06720127868652344, 0.06772108459472656, 0.06750220489501953, 0.06773350524902344, 0.06786595153808593, 0.06777513885498047, 0.06763014221191406, 0.06760543823242188, 0.06763724517822266, 0.06789500427246094, 0.06766329956054687, 0.06717922973632813, 0.0679395523071289, 0.06811090850830079, 0.06741974639892578, 0.06784281921386719, 0.06808131408691406, 0.06764169311523438, 0.06839836883544922, 0.06812854766845704, 0.067943359375, 0.06730339050292969, 0.06624256134033203, 0.06617088317871093, 0.06668000030517578, 0.06699874877929687, 0.06666454315185547, 0.06615634918212891, 0.06612387084960937, 0.06607686614990234, 0.0666605453491211, 0.06631830596923828, 0.06612710571289063, 
0.0663088607788086, 0.06683443450927734, 0.06663782501220702, 0.06689180755615234, 0.06683340454101562, 0.06701267242431641, 0.06658108520507812, 0.06731398773193359, 0.06707405090332032, 0.06717235565185548, 0.06724518585205078, 0.06717120361328124, 0.06689997100830078, 0.06738534545898438, 0.06722089385986328, 0.0667305908203125, 0.06696678161621093, 0.06698473358154297, 0.06706787109375, 0.06738435363769531, 0.06724502563476563, 0.06735667419433594, 0.067687744140625, 0.06724678039550781, 0.06699619293212891, 0.0674775390625, 0.06727839660644531, 0.0670680923461914, 0.06754124450683593, 0.06726246643066407, 0.0672807388305664, 0.06797328186035156, 0.06783507537841797, 0.06731244659423828, 0.06740582275390625, 0.06796697235107421, 0.06739491271972656, 0.06735651397705078, 0.067887939453125, 0.06743551635742187, 0.06768495941162109, 0.06782403564453125, 0.06779065704345703, 0.06775385284423828, 0.06741024017333984, 0.06773136138916015, 0.06784627532958984, 0.06815731048583984, 0.06771014404296875, 0.06819254302978515, 0.06803929901123047]",tokens/s,14.877268839682806,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1588.969472,1868.43136,0.0,1465.909248,1358.169088,s,1,8.747935546875,8.747935546875,0.0,8.747935546875,8.747935546875,8.747935546875,8.747935546875,[8.747935546875],,kWh,4.938278266245106e-05,5.4269298269293e-06,1.8828348396005e-05,7.363806088538536e-05,,MB,1531.711488,1889.40288,0.0,1472.200704,1356.544512,s,10,0.513274845123291,0.0513274845123291,0.00014811893687181554,0.05129239845275879,0.051506348800659185,0.05156083927154541,0.051604431648254394,"[0.05161532974243164, 0.051454975128173826, 0.05122675323486328, 0.05128444671630859, 0.05149423980712891, 0.05130035018920898, 0.051100318908691406, 0.051251712799072265, 0.05118243026733398, 0.051364288330078126]",tokens/s,4987.58126240353,kWh,1.5485494663579753e-06,1.7077634974568452e-07,1.0233503189734734e-06,2.742676135077133e-06,tokens/kWh,93339493.032341,MB,1539.19488,1889.40288,0.0,1472.200704,1409.728,s,10,14.389342407226563,1.4389342407226562,0.0027623745723931318,1.4386029663085937,1.4424807861328124,1.4433451904296875,1.4440367138671875,"[1.4422886962890624, 1.437983154296875, 1.438698974609375, 1.4442095947265625, 1.438259033203125, 1.43611376953125, 1.438665283203125, 1.4406256103515624, 1.4385406494140625, 1.4339576416015625]",tokens/s,43.782403821567534,kWh,4.193070545822721e-05,4.624567920868282e-06,1.8903748956028775e-05,6.545902233512427e-05,tokens/kWh,962434.1725952604,,s,630,14.38641005325318,0.022835571513100276,0.0003331348147773755,0.02276803207397461,0.023094371604919433,0.023237818241119385,0.02404900068283082,"[0.022805343627929686, 0.022867103576660157, 0.02353219223022461, 0.022857023239135743, 0.022661216735839845, 0.022816640853881836, 0.022776607513427735, 0.022775808334350587, 0.022760543823242187, 0.022795167922973633, 0.022758399963378906, 
0.02285875129699707, 0.022956031799316406, 0.02325859260559082, 0.0227108154296875, 0.022816768646240236, 0.022966272354125978, 0.022960031509399414, 0.0227410888671875, 0.022857343673706055, 0.02309382438659668, 0.02279609680175781, 0.022741024017333984, 0.022777824401855468, 0.02279155158996582, 0.022687519073486328, 0.02266582489013672, 0.022642400741577147, 0.022845983505249023, 0.022894176483154297, 0.022972831726074217, 0.02313113594055176, 0.023145471572875977, 0.022820959091186522, 0.022744991302490233, 0.022808576583862306, 0.02288435173034668, 0.022904544830322265, 0.022884223937988283, 0.022835615158081055, 0.02285103988647461, 0.022952480316162108, 0.022982463836669922, 0.023111679077148437, 0.023481760025024414, 0.023352096557617188, 0.02316035270690918, 0.022994495391845702, 0.02287504005432129, 0.022794240951538085, 0.02282694435119629, 0.022812416076660156, 0.022860095977783202, 0.022692863464355468, 0.02274995231628418, 0.022782207489013672, 0.022834335327148438, 0.022631488800048827, 0.022896352767944335, 0.022703231811523436, 0.023114208221435548, 0.02282524871826172, 0.023151039123535156, 0.023114816665649414, 0.023262079238891602, 0.022767967224121093, 0.022845087051391603, 0.022759424209594727, 0.022786048889160155, 0.02274287986755371, 0.022799968719482422, 0.022842111587524413, 0.02276911926269531, 0.022771295547485353, 0.022976703643798828, 0.022892192840576173, 0.022909856796264647, 0.022807872772216797, 0.02285843276977539, 0.022936576843261718, 0.022823295593261718, 0.02275760078430176, 0.02283513641357422, 0.023250783920288086, 0.022989023208618165, 0.02281705665588379, 0.0228185920715332, 0.022997440338134764, 0.022970272064208985, 0.022812671661376953, 0.022816640853881836, 0.022761440277099608, 0.022816640853881836, 0.022719871520996093, 0.022786048889160155, 0.022737823486328124, 0.022873600006103514, 0.022823423385620118, 0.022865856170654297, 0.022759136199951173, 0.022741151809692384, 0.022702272415161134, 0.022920671463012694, 0.022788639068603515, 0.02288025665283203, 0.02295737648010254, 0.022722719192504882, 0.022777984619140625, 0.022704416275024415, 0.022663295745849608, 0.02272585678100586, 0.022724992752075197, 0.02278793525695801, 0.022710655212402345, 0.022800512313842773, 0.02279430389404297, 0.022734848022460938, 0.022744447708129882, 0.022612480163574217, 0.022796512603759766, 0.02290678405761719, 0.022760799407958984, 0.022709024429321288, 0.022738208770751955, 0.022718751907348633, 0.022671648025512695, 0.02333513641357422, 0.02347660827636719, 0.023187456130981447, 0.023211456298828124, 0.023071296691894533, 0.02307276725769043, 0.023056192398071287, 0.022810815811157226, 0.022800384521484376, 0.022679359436035156, 0.02279859161376953, 0.02258883285522461, 0.02276406478881836, 0.02270742416381836, 0.02265782356262207, 0.02268182373046875, 0.02259881591796875, 0.022577056884765623, 0.022694623947143555, 0.022605728149414063, 0.022622304916381834, 0.022687103271484373, 0.022686336517333986, 0.023468032836914062, 0.024285472869873048, 0.022803455352783202, 0.02277244758605957, 0.022582944869995118, 0.022641183853149414, 0.022629823684692383, 0.022722560882568358, 0.02264512062072754, 0.022597631454467772, 0.02263654327392578, 0.02266316795349121, 0.022722560882568358, 0.022828575134277343, 0.022904767990112304, 0.022678207397460938, 0.02258518409729004, 0.022545600891113283, 0.02257593536376953, 0.022971391677856445, 0.022743711471557616, 0.023126367568969727, 0.02291107177734375, 0.022923168182373048, 0.023111679077148437, 
0.023226367950439454, 0.02290380859375, 0.022830080032348633, 0.022804479598999023, 0.0228351993560791, 0.022606048583984375, 0.022674591064453124, 0.022797088623046875, 0.02271011161804199, 0.02268694305419922, 0.022960927963256834, 0.022829055786132812, 0.022697984695434572, 0.022808576583862306, 0.02265292739868164, 0.022724607467651366, 0.022730752944946288, 0.022822912216186524, 0.022673408508300782, 0.02267238426208496, 0.022793216705322264, 0.02263039970397949, 0.022603391647338867, 0.02302195167541504, 0.02267750358581543, 0.022611967086791994, 0.022667327880859376, 0.022626239776611327, 0.022568607330322267, 0.022655616760253905, 0.022705856323242186, 0.022658336639404298, 0.022980863571166993, 0.02279475212097168, 0.02262015914916992, 0.02276543998718262, 0.022816703796386718, 0.022585599899291993, 0.023035327911376954, 0.022839616775512696, 0.023048383712768555, 0.022779199600219728, 0.02277564811706543, 0.022616640090942382, 0.022459007263183593, 0.022625951766967772, 0.022687744140625, 0.02264841651916504, 0.023283168792724608, 0.02259040069580078, 0.022648832321166993, 0.022753248214721678, 0.022732831954956054, 0.022661184310913084, 0.022963359832763673, 0.027568384170532225, 0.0227225284576416, 0.022522432327270508, 0.022665119171142577, 0.022852895736694336, 0.025969184875488282, 0.02292355155944824, 0.02271027183532715, 0.022670783996582032, 0.022681983947753906, 0.022675647735595703, 0.02263039970397949, 0.02265727996826172, 0.022779136657714843, 0.022722591400146486, 0.02272627258300781, 0.02258211135864258, 0.022544384002685547, 0.0226060791015625, 0.022605024337768554, 0.025526464462280272, 0.023789920806884766, 0.02321788787841797, 0.022826143264770508, 0.022936256408691406, 0.02282307243347168, 0.02270412826538086, 0.02281782341003418, 0.022685983657836913, 0.022688287734985352, 0.02264694404602051, 0.0226693115234375, 0.022571104049682617, 0.022648735046386717, 0.02272591972351074, 0.02271027183532715, 0.022751136779785155, 0.022647712707519533, 0.02262620735168457, 0.02272870445251465, 0.02282700729370117, 0.0228351993560791, 0.022689792633056642, 0.02276140785217285, 0.02281443214416504, 0.02287449645996094, 0.022806495666503907, 0.022889984130859374, 0.022833663940429686, 0.022964223861694336, 0.022803903579711914, 0.022890111923217774, 0.022723520278930664, 0.022798336029052735, 0.022659231185913085, 0.022673088073730467, 0.022621536254882814, 0.022659456253051758, 0.022657087326049805, 0.0226615047454834, 0.02277324867248535, 0.02270044708251953, 0.022708255767822264, 0.02269164848327637, 0.02269398307800293, 0.02294566345214844, 0.02311743927001953, 0.022927167892456055, 0.022960992813110353, 0.023239744186401366, 0.024123615264892578, 0.023059167861938477, 0.02310758399963379, 0.022816768646240236, 0.022994943618774414, 0.023028928756713866, 0.02273539161682129, 0.022829343795776367, 0.022716415405273437, 0.022697984695434572, 0.023244800567626952, 0.023055839538574218, 0.02279648017883301, 0.022722463607788086, 0.02273459243774414, 0.022857887268066406, 0.022767295837402345, 0.022634687423706053, 0.02268614387512207, 0.022608991622924804, 0.022784608840942383, 0.02265088081359863, 0.02271574401855469, 0.022689855575561524, 0.02285833549499512, 0.022789663314819335, 0.022796768188476563, 0.022666784286499025, 0.022653152465820312, 0.022622432708740235, 0.022679712295532225, 0.02278598403930664, 0.022650144577026368, 0.02272528076171875, 0.022949567794799806, 0.02269830322265625, 0.022664960861206056, 0.02270675277709961, 0.023398080825805665, 
0.023084352493286133, 0.02293174362182617, 0.02295235252380371, 0.023240575790405272, 0.023116256713867186, 0.023443103790283203, 0.023248767852783202, 0.023172895431518556, 0.02303420829772949, 0.023007104873657227, 0.02310883140563965, 0.02310758399963379, 0.02323891258239746, 0.022917280197143553, 0.022691423416137696, 0.022852415084838866, 0.02263804817199707, 0.022533824920654297, 0.022561727523803712, 0.022675455093383787, 0.022644960403442382, 0.022740768432617187, 0.022614015579223632, 0.022607839584350586, 0.02258940887451172, 0.0226366081237793, 0.022613407135009766, 0.022663776397705077, 0.02270207977294922, 0.02269593620300293, 0.02267136001586914, 0.022726655960083008, 0.02262015914916992, 0.022702112197875976, 0.022567071914672852, 0.022640447616577148, 0.022683935165405275, 0.022654815673828124, 0.022615936279296874, 0.022747072219848632, 0.02277599906921387, 0.022698368072509766, 0.023174335479736328, 0.023385087966918947, 0.02297260856628418, 0.022998655319213867, 0.023119840621948242, 0.022859807968139648, 0.02281612777709961, 0.02277155113220215, 0.022788896560668945, 0.022783456802368166, 0.022771583557128907, 0.02298054313659668, 0.023227104187011717, 0.02279216003417969, 0.02265500831604004, 0.02281881523132324, 0.022759231567382812, 0.022726112365722657, 0.02274550437927246, 0.022697887420654296, 0.02258473587036133, 0.02279849624633789, 0.022827808380126952, 0.022730815887451173, 0.02278009605407715, 0.022939456939697265, 0.0227392635345459, 0.022814111709594728, 0.02272879981994629, 0.022929344177246094, 0.022915327072143554, 0.02277494430541992, 0.022789024353027345, 0.022686687469482422, 0.022649248123168944, 0.02294592094421387, 0.022829343795776367, 0.022892704010009767, 0.022818143844604493, 0.022762399673461914, 0.0226562557220459, 0.022723072052001952, 0.022834848403930665, 0.02286012840270996, 0.022882303237915038, 0.023166976928710937, 0.02345801544189453, 0.02299395179748535, 0.022890335083007814, 0.022893472671508788, 0.022829055786132812, 0.022773088455200194, 0.022659008026123046, 0.02272329521179199, 0.022615392684936522, 0.02264950370788574, 0.022644704818725585, 0.022677440643310547, 0.02270572853088379, 0.022693759918212892, 0.02272528076171875, 0.022705888748168944, 0.022735136032104492, 0.022615392684936522, 0.022720544815063477, 0.022855775833129883, 0.02268217658996582, 0.022843072891235352, 0.022728992462158204, 0.022723840713500976, 0.022697824478149414, 0.02298255920410156, 0.02268841552734375, 0.022763263702392577, 0.022939264297485353, 0.0226942081451416, 0.02268022346496582, 0.022685184478759765, 0.022774272918701172, 0.022691295623779296, 0.022753952026367187, 0.022744512557983397, 0.02271001625061035, 0.02276652717590332, 0.022973983764648438, 0.02284316825866699, 0.022960575103759765, 0.023903615951538087, 0.02296486473083496, 0.023067968368530273, 0.022725311279296875, 0.02277356719970703, 0.02279846382141113, 0.022685728073120116, 0.02266729545593262, 0.02284067153930664, 0.02263248062133789, 0.02272934341430664, 0.02267136001586914, 0.022690143585205078, 0.02285532760620117, 0.02281603240966797, 0.022784671783447265, 0.023089088439941407, 0.02410838317871094, 0.023266080856323243, 0.0236810245513916, 0.02305638313293457, 0.02283263969421387, 0.02273535919189453, 0.022812671661376953, 0.02314854431152344, 0.022795648574829103, 0.023656192779541015, 0.022768096923828127, 0.022862207412719725, 0.022786176681518555, 0.022728607177734374, 0.022781951904296875, 0.022756864547729492, 0.0226200008392334, 0.022704992294311523, 
0.022757343292236328, 0.022759359359741212, 0.023025184631347655, 0.023167455673217773, 0.022957311630249024, 0.022814815521240234, 0.022879135131835936, 0.022910720825195314, 0.02291001510620117, 0.022938560485839844, 0.023078752517700196, 0.02275859260559082, 0.022836511611938476, 0.022748064041137696, 0.022713119506835938, 0.02265091133117676, 0.022705984115600587, 0.02267747116088867, 0.02276780891418457, 0.022736383438110352, 0.022823423385620118, 0.02268956756591797, 0.022797983169555665, 0.022738752365112306, 0.02272559928894043, 0.0226977596282959, 0.022683135986328123, 0.02260223960876465, 0.02270345687866211, 0.02270102310180664, 0.02265056037902832, 0.022716192245483397, 0.022830495834350584, 0.02270675277709961, 0.022711679458618163, 0.022747936248779296, 0.02265507125854492, 0.022681184768676758, 0.022677568435668944, 0.022691200256347657, 0.02282963180541992, 0.022621599197387696, 0.022653087615966797, 0.02264473533630371, 0.022629215240478517, 0.02253824043273926, 0.022665216445922853, 0.022980607986450196, 0.022998016357421876, 0.023219295501708984, 0.023236480712890625, 0.023236352920532225, 0.02317955207824707, 0.022970687866210936, 0.022943424224853515, 0.023099296569824217, 0.023258432388305664, 0.023001888275146484, 0.023121919631958008, 0.022956031799316406, 0.022866016387939454, 0.022837152481079103, 0.022739999771118163, 0.022813312530517577, 0.02275724792480469, 0.02275449562072754, 0.02267616081237793, 0.022693471908569338, 0.022577215194702148, 0.023091712951660157, 0.024159839630126953, 0.023042303085327148, 0.02288579177856445, 0.022874303817749023, 0.022897151947021483, 0.0228002872467041, 0.022716415405273437, 0.022811775207519532, 0.022774496078491212, 0.02265100860595703, 0.022676992416381835, 0.02265331268310547, 0.02273276710510254, 0.02273286437988281, 0.022779264450073243, 0.022741151809692384, 0.022753887176513672, 0.02271027183532715, 0.022747007369995118, 0.022780031204223634, 0.022607872009277344, 0.022607648849487304, 0.022524127960205077, 0.02254800033569336, 0.022534624099731445, 0.022595008850097655, 0.02272480010986328, 0.02300499153137207, 0.02302115249633789, 0.023051231384277344, 0.022808576583862306, 0.022769695281982423, 0.02267747116088867, 0.022657024383544923, 0.02266726493835449, 0.022611488342285158, 0.02260639953613281, 0.02262825584411621, 0.022642688751220705, 0.022586719512939453, 0.02253398323059082, 0.02259596824645996, 0.02263264083862305, 0.022687999725341797, 0.022580896377563477, 0.022681631088256837, 0.02262236785888672, 0.022665376663208007, 0.02268943977355957, 0.022800031661987304, 0.02302012825012207, 0.022900224685668946, 0.022755935668945314, 0.02273689651489258, 0.022799423217773437, 0.02284796714782715, 0.022652864456176758, 0.022825408935546875]",tokens/s,43.79132790376284,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1553.063936,1576.927232,0.0,1174.40512,1147.036672,s,1,8.8884296875,8.8884296875,0.0,8.8884296875,8.8884296875,8.8884296875,8.8884296875,[8.8884296875],,kWh,5.1388671687504936e-05,5.661038658535728e-06,1.859945932400353e-05,7.564916967004419e-05,,MB,1629.45024,1648.2304,0.0,1231.028224,1064.7808,s,10,0.5563568649291992,0.055635686492919924,0.00023691395648061002,0.05571343994140625,0.055830359268188474,0.05583262004852295,0.05583442867279053,"[0.05503081512451172, 0.05555580902099609, 0.05542380905151367, 0.055769790649414064, 0.05582198333740234, 0.05582985687255859, 0.05573823928833008, 0.05568864059448242, 0.05566304016113281, 0.055834880828857424]",tokens/s,4601.3631921766255,kWh,1.6425787363763265e-06,1.811456139113412e-07,1.096235271494368e-06,2.9199596217820354e-06,tokens/kWh,87672445.2250352,MB,1633.66912,1650.327552,0.0,1233.125376,1119.937024,s,10,11.882388427734375,1.1882388427734374,0.0050516534965831025,1.1869660034179688,1.1949722167968748,1.19713125,1.1988584765624999,"[1.194492431640625, 1.199290283203125, 1.1831859130859375, 1.187224365234375, 1.18858544921875, 1.1867076416015625, 1.184647216796875, 1.191041259765625, 1.1828126220703126, 1.1844012451171875]",tokens/s,53.019643637430114,kWh,3.4638644834873604e-05,3.820204145570738e-06,1.6188195222704997e-05,5.464704420314934e-05,tokens/kWh,1152852.8380382059,,s,630,11.879840843200688,0.018856890227302674,0.0004390847251764888,0.018770223617553712,0.019106063842773437,0.019222457885742186,0.020360034942626955,"[0.02024665641784668, 0.01879859161376953, 0.018876415252685547, 0.02313216018676758, 0.018749439239501953, 0.018718719482421875, 0.018542591094970702, 0.018501632690429686, 0.01895542335510254, 0.018527072906494142, 0.019994623184204103, 0.019126272201538085, 0.019378175735473634, 0.018720415115356444, 0.01869443130493164, 0.018644672393798828, 0.01872863960266113, 0.01869795227050781, 0.01860668754577637, 0.018700063705444334, 0.01860259246826172, 0.01920614433288574, 0.018806560516357422, 0.018815200805664064, 0.018705663681030275, 0.018881183624267578, 0.018620735168457032, 0.01860585594177246, 0.018754911422729493, 0.01872732734680176, 0.01862272071838379, 0.018683904647827147, 0.018634752273559572, 0.01857459259033203, 0.018979583740234375, 0.01869593620300293, 0.018804224014282226, 0.018916095733642578, 0.018898527145385743, 0.019005855560302733, 0.01903164863586426, 0.018743520736694337, 0.01875958442687988, 0.0187108154296875, 0.01864031982421875, 0.018700864791870116, 0.01881907272338867, 0.01859174346923828, 0.018679807662963867, 0.018778112411499022, 0.018669183731079102, 0.018618751525878906, 0.01864499282836914, 0.0186060791015625, 0.018677759170532226, 0.018763776779174804, 0.01867366409301758, 
0.02004991912841797, 0.02203628730773926, 0.018993087768554687, 0.018992576599121094, 0.01921830368041992, 0.01893881607055664, 0.018981855392456056, 0.01896611213684082, 0.01893008041381836, 0.018996639251708983, 0.018844255447387694, 0.018907039642333985, 0.018886751174926757, 0.018761728286743166, 0.01901145553588867, 0.018953407287597656, 0.018828224182128907, 0.01898700714111328, 0.019138559341430664, 0.01886617660522461, 0.01884060859680176, 0.01872287940979004, 0.018887584686279296, 0.01877017593383789, 0.018738943099975584, 0.018769920349121092, 0.01881292724609375, 0.019051647186279295, 0.018703231811523436, 0.018695327758789064, 0.018902912139892578, 0.018854879379272462, 0.018993471145629885, 0.019092767715454102, 0.01936630439758301, 0.01922585678100586, 0.019403520584106444, 0.019189504623413085, 0.018854143142700196, 0.019013631820678712, 0.018937183380126954, 0.019026592254638673, 0.0190830078125, 0.01898255920410156, 0.01886672019958496, 0.018782112121582033, 0.01870454406738281, 0.018696191787719727, 0.018672992706298828, 0.01882931137084961, 0.018885280609130858, 0.019066207885742186, 0.023742368698120117, 0.01924764823913574, 0.01917359924316406, 0.0188538875579834, 0.018782207489013672, 0.01882111930847168, 0.01882111930847168, 0.018926784515380858, 0.020099903106689455, 0.02010316848754883, 0.01918579292297363, 0.018927488327026367, 0.01873744010925293, 0.018681568145751955, 0.01864089584350586, 0.018771968841552734, 0.01903753662109375, 0.019064672470092775, 0.018933984756469728, 0.018892480850219728, 0.018852575302124024, 0.018657119750976562, 0.01872502326965332, 0.01876521682739258, 0.018887264251708984, 0.018647039413452148, 0.01869932746887207, 0.018660287857055664, 0.018617631912231446, 0.018800512313842773, 0.018940479278564452, 0.018687744140625, 0.018752031326293946, 0.018554880142211915, 0.018626560211181642, 0.0187347526550293, 0.018590047836303712, 0.018579391479492186, 0.018625696182250975, 0.018487712860107423, 0.01847318458557129, 0.018566463470458986, 0.018557920455932617, 0.018659328460693358, 0.018718719482421875, 0.018669567108154296, 0.018718719482421875, 0.018739200592041014, 0.01869004821777344, 0.018890687942504883, 0.018860095977783202, 0.018964351654052733, 0.023310239791870118, 0.019104991912841797, 0.01878291130065918, 0.018812639236450195, 0.018702816009521485, 0.018673088073730467, 0.018727615356445314, 0.018896896362304686, 0.01881497573852539, 0.018757471084594725, 0.018661535263061524, 0.018528255462646484, 0.018544639587402344, 0.018499584197998048, 0.01886732864379883, 0.018776960372924804, 0.018593791961669923, 0.018728832244873046, 0.01847923278808594, 0.018514944076538087, 0.018449119567871094, 0.018438432693481447, 0.01844428825378418, 0.018431999206542968, 0.018497535705566406, 0.018710527420043945, 0.018876415252685547, 0.018997247695922852, 0.019074527740478516, 0.019091039657592773, 0.019128671646118166, 0.018903648376464844, 0.019269632339477538, 0.018894399642944336, 0.018764223098754883, 0.01867366409301758, 0.018760704040527345, 0.018747711181640626, 0.01873151969909668, 0.018688192367553712, 0.01864201545715332, 0.018739999771118163, 0.018670944213867186, 0.01886288070678711, 0.018657440185546874, 0.018691936492919923, 0.01865449523925781, 0.018585952758789062, 0.018769952774047853, 0.018524511337280274, 0.018667520523071288, 0.018868223190307617, 0.018940000534057616, 0.018992639541625975, 0.018825632095336914, 0.01862041664123535, 0.018534496307373048, 0.01861555290222168, 0.018806976318359377, 0.01882979202270508, 
0.018697471618652345, 0.018723583221435545, 0.018692096710205077, 0.018683584213256835, 0.01873139190673828, 0.01902783966064453, 0.019003456115722656, 0.018824800491333008, 0.018745344161987306, 0.018613887786865235, 0.018628543853759765, 0.018563936233520508, 0.018642623901367186, 0.0186507511138916, 0.01881772804260254, 0.020332544326782227, 0.01977244758605957, 0.018916288375854493, 0.018882080078125, 0.01873094367980957, 0.01870319938659668, 0.018889568328857423, 0.019034463882446288, 0.01905686378479004, 0.01917318344116211, 0.019005247116088867, 0.018995904922485353, 0.01884364891052246, 0.01879449653625488, 0.01879449653625488, 0.01877027130126953, 0.019009536743164062, 0.01890643119812012, 0.018799360275268556, 0.018915327072143554, 0.018992448806762697, 0.018835872650146485, 0.01888643264770508, 0.018784608840942383, 0.01869158363342285, 0.01880284881591797, 0.01871843147277832, 0.018698911666870117, 0.018815103530883788, 0.018972671508789063, 0.01903206443786621, 0.018730592727661134, 0.018648576736450196, 0.01886892890930176, 0.018827295303344725, 0.01871286392211914, 0.018822111129760743, 0.019002111434936523, 0.018804927825927735, 0.018756959915161135, 0.018778783798217773, 0.018796480178833008, 0.0187761287689209, 0.01879859161376953, 0.018703392028808594, 0.01859244728088379, 0.018675296783447266, 0.018655935287475587, 0.018741247177124023, 0.018786239624023437, 0.01872287940979004, 0.018687999725341797, 0.018818080902099608, 0.01871766471862793, 0.018948095321655273, 0.019095552444458007, 0.018986976623535157, 0.01870159912109375, 0.019055007934570312, 0.019018079757690428, 0.019216384887695313, 0.018830368041992188, 0.018795488357543945, 0.018765823364257812, 0.019425216674804686, 0.018810943603515626, 0.018714624404907225, 0.018739168167114257, 0.018685983657836913, 0.018694143295288086, 0.018876575469970704, 0.019361631393432617, 0.021215232849121093, 0.018898944854736328, 0.01865920066833496, 0.01876799964904785, 0.01860758399963379, 0.0184550724029541, 0.018716672897338867, 0.019016895294189453, 0.019008768081665038, 0.018960960388183595, 0.01907302474975586, 0.018888191223144533, 0.018784767150878907, 0.018781824111938475, 0.018674047470092773, 0.018661376953125, 0.018515968322753908, 0.018579456329345705, 0.01855695915222168, 0.01863471984863281, 0.01890620803833008, 0.019428255081176758, 0.01919795227050781, 0.0191461124420166, 0.01914944076538086, 0.019058687210083008, 0.019037311553955077, 0.018989120483398438, 0.01903276824951172, 0.01901353645324707, 0.01906710433959961, 0.019023839950561523, 0.019056671142578124, 0.019169279098510742, 0.019177024841308593, 0.019132448196411134, 0.018895263671875, 0.019017375946044923, 0.018923871994018553, 0.01885747146606445, 0.018988704681396483, 0.019173919677734377, 0.018852256774902345, 0.018605888366699217, 0.01863075256347656, 0.01862771224975586, 0.018557823181152344, 0.01858086395263672, 0.018569631576538084, 0.01854070472717285, 0.01849350357055664, 0.018491167068481446, 0.01847318458557129, 0.018579231262207032, 0.01866160011291504, 0.018588031768798828, 0.018646495819091797, 0.018567264556884764, 0.018558176040649414, 0.018616159439086913, 0.018611200332641603, 0.018630239486694337, 0.01854739189147949, 0.01856073570251465, 0.018765823364257812, 0.02037126350402832, 0.018822463989257812, 0.018640928268432617, 0.018625375747680663, 0.018666719436645506, 0.019087360382080077, 0.019326335906982423, 0.018938016891479493, 0.018866655349731445, 0.01876924705505371, 0.018731679916381836, 0.018953887939453126, 
0.018680160522460937, 0.01865727996826172, 0.018778112411499022, 0.018624128341674803, 0.018583871841430663, 0.01869398307800293, 0.018726783752441405, 0.018908607482910157, 0.018912160873413086, 0.019200000762939453, 0.019076576232910158, 0.019243551254272462, 0.0188538875579834, 0.018856063842773437, 0.01871859169006348, 0.018694143295288086, 0.01883545684814453, 0.01881088066101074, 0.01873446464538574, 0.01869830322265625, 0.01875200080871582, 0.018630720138549803, 0.018593791961669923, 0.018657119750976562, 0.018595552444458006, 0.018577856063842775, 0.018883583068847656, 0.018670400619506835, 0.01866476821899414, 0.018739328384399415, 0.018649856567382814, 0.01866476821899414, 0.018703039169311524, 0.018747392654418944, 0.018737152099609376, 0.018804224014282226, 0.01871308708190918, 0.018737152099609376, 0.019115711212158205, 0.01903379249572754, 0.018972415924072266, 0.018876415252685547, 0.018713472366333007, 0.01865705680847168, 0.0188438720703125, 0.018944000244140623, 0.018792127609252928, 0.019299968719482422, 0.018931455612182617, 0.018701248168945313, 0.0187410888671875, 0.018652671813964843, 0.018545312881469725, 0.018696191787719727, 0.018589696884155273, 0.018800640106201173, 0.018833503723144532, 0.018890335083007814, 0.01882915115356445, 0.018905632019042967, 0.018959711074829102, 0.019449535369873046, 0.019172319412231444, 0.01892905616760254, 0.01878611183166504, 0.01865590476989746, 0.018787872314453124, 0.01873967933654785, 0.018757312774658204, 0.018781824111938475, 0.018792863845825195, 0.01868569564819336, 0.018613983154296875, 0.018564031600952147, 0.018761152267456054, 0.01868448066711426, 0.01913987159729004, 0.019942272186279298, 0.018724767684936524, 0.018689664840698242, 0.01876121520996094, 0.01877484893798828, 0.01880035209655762, 0.018917823791503908, 0.019056480407714845, 0.019140384674072267, 0.020979936599731446, 0.019277856826782225, 0.01921628761291504, 0.01918124771118164, 0.01915328025817871, 0.01921433639526367, 0.01918976020812988, 0.019200000762939453, 0.01911939239501953, 0.01901225662231445, 0.018759647369384767, 0.018606176376342775, 0.018741247177124023, 0.01873699188232422, 0.018735263824462892, 0.018774015426635742, 0.0187225284576416, 0.0188538875579834, 0.018694303512573243, 0.018855648040771486, 0.018655647277832033, 0.018546144485473634, 0.01865545654296875, 0.018585920333862305, 0.018597536087036133, 0.018538015365600586, 0.019006528854370118, 0.018900192260742188, 0.0189652156829834, 0.018771039962768556, 0.018762144088745117, 0.018689823150634766, 0.018582048416137694, 0.01907040023803711, 0.01894688034057617, 0.018977888107299806, 0.01867350387573242, 0.01863324737548828, 0.01862841606140137, 0.0187412166595459, 0.01871308708190918, 0.018759679794311524, 0.01886617660522461, 0.01884979248046875, 0.01869824028015137, 0.019156959533691405, 0.01936489677429199, 0.01963315200805664, 0.018739200592041014, 0.018603872299194336, 0.018584896087646484, 0.018555616378784178, 0.018468992233276367, 0.01841971206665039, 0.018532352447509767, 0.018490720748901367, 0.01846940803527832, 0.01882486343383789, 0.018629087448120116, 0.01849897575378418, 0.01853615951538086, 0.01859369659423828, 0.018723808288574218, 0.01878835105895996, 0.01872275161743164, 0.01864687919616699, 0.018744543075561525, 0.018734079360961914, 0.018666719436645506, 0.0188055362701416, 0.018702175140380858, 0.018731008529663085, 0.018894752502441405, 0.018841856002807616, 0.018997247695922852, 0.01903411293029785, 0.018892799377441406, 0.018974624633789062, 
0.019089216232299804, 0.01889308738708496, 0.019079168319702147, 0.018933759689331055, 0.018677759170532226, 0.018689504623413088, 0.018794431686401367, 0.01871219253540039, 0.01877872085571289, 0.01884921646118164, 0.01871558380126953, 0.018716672897338867, 0.01866268730163574, 0.018606815338134765, 0.018619871139526366, 0.018704095840454103, 0.018774656295776366, 0.018704063415527345, 0.019055776596069336, 0.019133247375488282, 0.018878496170043946, 0.018743295669555664, 0.018653152465820312, 0.018597951889038088, 0.018704479217529296, 0.018696063995361327, 0.018771360397338867, 0.018685728073120116, 0.018794687271118164, 0.0187840633392334, 0.018682016372680663, 0.018870943069458006, 0.01903379249572754, 0.018694463729858397, 0.018792448043823243, 0.018734975814819334, 0.018737279891967773, 0.01871366310119629, 0.018823423385620118, 0.01868828773498535, 0.018786720275878906, 0.018857664108276367, 0.01882758331298828, 0.01878214454650879, 0.018753536224365236, 0.018787616729736327, 0.018633216857910157, 0.018800928115844728, 0.018798175811767577, 0.01908367919921875, 0.018941951751708985, 0.01903615951538086, 0.01901372718811035, 0.01899305534362793, 0.018924575805664062, 0.018674560546875, 0.018826623916625977, 0.018629215240478517, 0.01869340705871582, 0.018627424240112305, 0.018704256057739257, 0.018714879989624022, 0.018695711135864258, 0.018534847259521484, 0.01862339210510254, 0.018662527084350587, 0.018689792633056642, 0.01861235237121582, 0.01858355140686035, 0.018902271270751954, 0.01881078338623047, 0.018772832870483397, 0.018752511978149415, 0.0186296329498291, 0.01863910484313965, 0.018752384185791015, 0.019060640335083007, 0.019645408630371095, 0.019066879272460938, 0.01876617622375488, 0.018781856536865236]",tokens/s,53.031013488751796,,, 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through 
torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4278.554624,4733.140992,0.0,4347.396096,4328.833024,s,1,9.945201171875,9.945201171875,0.0,9.945201171875,9.945201171875,9.945201171875,9.945201171875,[9.945201171875],,kWh,8.320812820835879e-05,9.171267385607297e-06,2.741696637803548e-05,0.00011979636197200156,,MB,1413.586944,5219.680256,0.0,4804.575232,4748.27776,s,10,3.394944519042969,0.3394944519042969,0.0020998930967299724,0.34023983764648436,0.34102067260742186,0.34122933044433595,0.3413962567138672,"[0.333991943359375, 0.33939376831054685, 0.34097430419921876, 0.34068563842773436, 0.3395765380859375, 0.33760150146484375, 0.3401780700683594, 0.3403016052246094, 0.34080316162109375, 0.34143798828125]",tokens/s,754.0623964958523,kWh,9.861887761249918e-06,1.087584543565022e-06,6.581107116732099e-06,1.7530579421547038e-05,tokens/kWh,14603054.117272783,MB,1433.00608,5328.73216,0.0,4913.627136,4878.043648,s,10,18.278779296875,1.8278779296875,0.005375331724542857,1.8263477783203124,1.8360775634765625,1.8361283203125,1.83616892578125,"[1.822375, 1.8241177978515626, 1.832373779296875, 1.82473388671875, 1.81974609375, 1.8304918212890624, 1.8257061767578124, 1.8361790771484374, 1.8360662841796875, 1.8269893798828125]",tokens/s,34.46619655327349,kWh,5.35171497612484e-05,5.902882081177866e-06,3.5475065417067686e-05,9.489509725949398e-05,tokens/kWh,663890.9893071112,,s,630,18.27564096641538,0.02900895391494509,0.0008214540632536039,0.029015392303466794,0.029618316078186036,0.02979974431991577,0.033174618072509765,"[0.032325984954833985, 0.02968934440612793, 0.028594688415527345, 0.028507999420166016, 0.028810720443725586, 0.028596927642822265, 0.027994016647338867, 0.027788799285888673, 0.028570207595825195, 0.0287825927734375, 0.028129056930541994, 0.0304333438873291, 0.029542560577392577, 0.02769468879699707, 0.02759721565246582, 0.02873139190673828, 0.02896447944641113, 0.02837651252746582, 0.0278089599609375, 0.027154176712036134, 0.02781923294067383, 0.028581663131713866, 0.02913382339477539, 0.029052671432495118, 0.028007904052734376, 0.027389856338500978, 0.027885568618774413, 0.0277574405670166, 0.029519872665405275, 0.029156831741333007, 0.028537376403808594, 0.029148319244384765, 0.030038463592529298, 0.02963599967956543, 0.02898841667175293, 0.029075456619262696, 0.02859212875366211, 0.02983526420593262, 0.029236831665039063, 0.02869494438171387, 0.02895452880859375, 0.028721248626708985, 0.02898944091796875, 
0.02904473686218262, 0.02884752082824707, 0.02869923210144043, 0.02895871925354004, 0.029343744277954102, 0.02939904022216797, 0.029675519943237305, 0.029658912658691406, 0.02944393539428711, 0.029249887466430664, 0.029228992462158203, 0.02920662307739258, 0.029718624114990235, 0.02937436866760254, 0.02898486328125, 0.02948748779296875, 0.02927395248413086, 0.029576576232910157, 0.02923776054382324, 0.028742015838623045, 0.03350675201416015, 0.03012380790710449, 0.02850281524658203, 0.028907520294189453, 0.02908758354187012, 0.028670047760009764, 0.02836400032043457, 0.029235712051391603, 0.028799039840698242, 0.028323360443115234, 0.028084896087646485, 0.027391136169433592, 0.02745030403137207, 0.02744438362121582, 0.027499551773071288, 0.02893600082397461, 0.029349760055541994, 0.028753440856933595, 0.02872547149658203, 0.028526336669921874, 0.028940223693847657, 0.028677919387817382, 0.02869481658935547, 0.028529279708862303, 0.028887039184570314, 0.029015264511108398, 0.029309343338012696, 0.029292928695678712, 0.029249536514282228, 0.029347328186035155, 0.029114656448364258, 0.02936444854736328, 0.030054239273071288, 0.029780128479003905, 0.02938857650756836, 0.028802688598632813, 0.02981670379638672, 0.02927065658569336, 0.028662912368774413, 0.02937286376953125, 0.029198879241943358, 0.028880895614624022, 0.02934169578552246, 0.029454463958740233, 0.029134111404418947, 0.028992095947265626, 0.028420064926147463, 0.02925721549987793, 0.029116960525512697, 0.02840166473388672, 0.028073984146118162, 0.028088319778442384, 0.027807552337646483, 0.027950368881225585, 0.02944198417663574, 0.02963555145263672, 0.028874080657958986, 0.0292010555267334, 0.028876800537109375, 0.02860380744934082, 0.029020767211914062, 0.02942959976196289, 0.029366111755371092, 0.033033279418945315, 0.029788480758666993, 0.028653663635253908, 0.027825952529907227, 0.02798784065246582, 0.029303680419921874, 0.029511680603027345, 0.028651519775390624, 0.027899168014526368, 0.027269855499267578, 0.0277872314453125, 0.02932534408569336, 0.029061119079589845, 0.028704608917236328, 0.02887494468688965, 0.0288558406829834, 0.02886697578430176, 0.028757984161376954, 0.028577728271484373, 0.02875823974609375, 0.028904767990112306, 0.0291615047454834, 0.02887718391418457, 0.028913856506347656, 0.029300224304199218, 0.028719615936279298, 0.028909568786621095, 0.02891916847229004, 0.02927187156677246, 0.029403167724609373, 0.029272863388061524, 0.029652992248535157, 0.02930886459350586, 0.029136959075927733, 0.029429759979248047, 0.02933145523071289, 0.029105855941772462, 0.0298089599609375, 0.029534208297729493, 0.02951372718811035, 0.029199392318725585, 0.02921494483947754, 0.029266687393188478, 0.02897020721435547, 0.02841206359863281, 0.02925632095336914, 0.02918400001525879, 0.028927648544311523, 0.02876451110839844, 0.028561376571655275, 0.02882499122619629, 0.029127103805541992, 0.02913667106628418, 0.02948547172546387, 0.029034271240234374, 0.029523775100708007, 0.02943212890625, 0.028829792022705077, 0.029433664321899415, 0.029490720748901366, 0.02925606346130371, 0.029229312896728515, 0.029533952713012696, 0.03351062393188477, 0.02987411117553711, 0.028773088455200196, 0.027918336868286132, 0.02694153594970703, 0.027314079284667968, 0.027661855697631837, 0.02850864028930664, 0.028625919342041017, 0.028697599411010744, 0.02857766342163086, 0.028722496032714845, 0.02868409538269043, 0.02874176025390625, 0.02801958465576172, 0.027444351196289064, 0.027544448852539063, 0.029064544677734373, 0.029203104019165038, 
0.02916761589050293, 0.028444671630859376, 0.02819868850708008, 0.029308704376220702, 0.028960351943969728, 0.028943199157714844, 0.028635072708129882, 0.028802495956420898, 0.028794879913330077, 0.028219743728637694, 0.027613471984863282, 0.028464223861694334, 0.029905216217041015, 0.029581247329711916, 0.029502111434936522, 0.029467967987060546, 0.028885696411132814, 0.02915081596374512, 0.029340000152587892, 0.029310720443725586, 0.029186368942260742, 0.029015520095825195, 0.029096351623535157, 0.02933148765563965, 0.029304927825927734, 0.029466623306274413, 0.029343744277954102, 0.029529247283935547, 0.02878895950317383, 0.028945024490356446, 0.029009920120239258, 0.029267967224121092, 0.033269630432128904, 0.028670080184936525, 0.02831564712524414, 0.027576320648193358, 0.028339359283447267, 0.029432672500610352, 0.029109823226928712, 0.029016128540039064, 0.029534496307373048, 0.02971820831298828, 0.02939276885986328, 0.02925212860107422, 0.033417152404785155, 0.030022239685058592, 0.028637504577636717, 0.028151552200317384, 0.028851455688476562, 0.028771520614624024, 0.028180288314819335, 0.02747315216064453, 0.02785750389099121, 0.027561792373657225, 0.02793302345275879, 0.02702454376220703, 0.027526111602783204, 0.02792428779602051, 0.029054399490356445, 0.029028959274291992, 0.0292554874420166, 0.02899990463256836, 0.02817228889465332, 0.02715443229675293, 0.02828009605407715, 0.02851900863647461, 0.02872947120666504, 0.028991487503051756, 0.028420095443725587, 0.029455455780029297, 0.0281278076171875, 0.027644256591796874, 0.02778432083129883, 0.027599519729614257, 0.028924127578735352, 0.02954854393005371, 0.02977555274963379, 0.029130592346191406, 0.02908236885070801, 0.030086816787719725, 0.029720096588134765, 0.029130271911621094, 0.028803808212280273, 0.028682336807250977, 0.02941152000427246, 0.029718528747558592, 0.02891766357421875, 0.028708959579467775, 0.028722719192504884, 0.028852703094482422, 0.02926905632019043, 0.02930496025085449, 0.02937120056152344, 0.029380607604980468, 0.028991487503051756, 0.028874080657958986, 0.02894095993041992, 0.028825599670410155, 0.028553216934204102, 0.02902134323120117, 0.03026211166381836, 0.029880256652832032, 0.029765024185180664, 0.029241504669189452, 0.029236928939819336, 0.029260608673095705, 0.029468608856201173, 0.03374089431762695, 0.030424320220947265, 0.028594816207885742, 0.028600608825683594, 0.028505407333374023, 0.028848127365112306, 0.028739679336547853, 0.028668352127075195, 0.028646720886230468, 0.028938175201416016, 0.028852479934692383, 0.028836223602294923, 0.02849184036254883, 0.02888710403442383, 0.028754207611083986, 0.028648351669311522, 0.02867296028137207, 0.02853670310974121, 0.028056768417358397, 0.0283492488861084, 0.028649120330810546, 0.028993888854980467, 0.028618560791015626, 0.02865580749511719, 0.028489728927612305, 0.029165567398071288, 0.029136415481567382, 0.028784543991088866, 0.028090944290161134, 0.02753740882873535, 0.028041215896606447, 0.027893632888793946, 0.029585535049438477, 0.030035839080810547, 0.029661312103271484, 0.029262880325317382, 0.029291488647460937, 0.029394559860229492, 0.02951795196533203, 0.029174016952514648, 0.028747871398925783, 0.02874928092956543, 0.029479360580444335, 0.029577215194702147, 0.029296703338623047, 0.029132320404052735, 0.029452224731445313, 0.029125087738037108, 0.029535711288452147, 0.029280799865722656, 0.02895462417602539, 0.02942300796508789, 0.029468799591064455, 0.02939952087402344, 0.029359552383422853, 0.029059167861938476, 
0.029245920181274414, 0.029437952041625977, 0.029689088821411132, 0.028983552932739257, 0.028755903244018555, 0.02870284843444824, 0.02952406311035156, 0.032779998779296875, 0.029964576721191405, 0.028625471115112305, 0.02811471939086914, 0.029006048202514647, 0.02886649513244629, 0.027959680557250975, 0.027440256118774414, 0.02772332763671875, 0.028805984497070312, 0.028841375350952148, 0.0283819522857666, 0.029009599685668946, 0.029186368942260742, 0.02895359992980957, 0.028305631637573242, 0.027460191726684572, 0.02865718460083008, 0.029248159408569337, 0.02898908805847168, 0.02859657669067383, 0.02884623908996582, 0.02868003273010254, 0.028637439727783202, 0.029054719924926756, 0.02816819190979004, 0.027586559295654296, 0.028368896484375, 0.02964796829223633, 0.029544416427612304, 0.029079904556274416, 0.0293536319732666, 0.03015715217590332, 0.029644735336303712, 0.02924172782897949, 0.02908768081665039, 0.02881295967102051, 0.02986185646057129, 0.02978611183166504, 0.029253984451293947, 0.02910451126098633, 0.028923904418945313, 0.028820575714111327, 0.028941215515136717, 0.02863702392578125, 0.028960927963256836, 0.02954854393005371, 0.02934351921081543, 0.029108352661132812, 0.028842079162597657, 0.02850201606750488, 0.029405183792114258, 0.029083648681640626, 0.02862607955932617, 0.029102176666259767, 0.028700992584228514, 0.02920902442932129, 0.02855526351928711, 0.02948624038696289, 0.02928112030029297, 0.02916966438293457, 0.028646463394165038, 0.02968390464782715, 0.033193920135498045, 0.029851648330688478, 0.02883078384399414, 0.028375999450683594, 0.02897715187072754, 0.02895257568359375, 0.028948480606079102, 0.028895231246948243, 0.028980831146240234, 0.028827999114990236, 0.02909395217895508, 0.028718751907348634, 0.028744064331054687, 0.02809641647338867, 0.027408447265625, 0.028450815200805665, 0.028719104766845704, 0.028645376205444335, 0.02878873634338379, 0.028874336242675783, 0.029208992004394533, 0.028725248336791992, 0.028847488403320312, 0.028992128372192384, 0.02935807991027832, 0.02922700881958008, 0.028935583114624023, 0.02872175979614258, 0.029138816833496093, 0.028430463790893555, 0.029322879791259766, 0.02927039909362793, 0.029147136688232423, 0.029755392074584962, 0.02978611183166504, 0.02956889533996582, 0.0289117431640625, 0.029095136642456054, 0.02976438331604004, 0.029275711059570313, 0.028612224578857422, 0.02921104049682617, 0.028830272674560547, 0.029618015289306642, 0.029770912170410155, 0.02963337516784668, 0.029515775680541992, 0.02912060737609863, 0.029104095458984375, 0.029357631683349608, 0.029247871398925783, 0.029091136932373047, 0.02904473686218262, 0.029016544342041015, 0.029378240585327148, 0.02939926338195801, 0.02940550422668457, 0.029265920639038087, 0.02960588836669922, 0.029257535934448242, 0.02861280059814453, 0.029497024536132812, 0.02940345573425293, 0.034179039001464843, 0.030367904663085938, 0.028810943603515625, 0.028090143203735353, 0.02850169563293457, 0.028850879669189453, 0.029173759460449217, 0.02861430358886719, 0.02813987159729004, 0.029155328750610353, 0.029187839508056642, 0.029154815673828126, 0.028494176864624025, 0.029010335922241212, 0.02885148811340332, 0.028899328231811523, 0.02884272003173828, 0.028891136169433593, 0.028104352951049804, 0.027584863662719727, 0.028182432174682616, 0.028993535995483398, 0.029151071548461915, 0.029309183120727538, 0.0289300479888916, 0.02843574333190918, 0.02930352020263672, 0.029207744598388673, 0.0287076473236084, 0.028626943588256838, 0.02941241645812988, 0.02975974464416504, 
0.029426368713378906, 0.029050880432128907, 0.028907520294189453, 0.02933737564086914, 0.029036672592163085, 0.02912060737609863, 0.030023551940917968, 0.029465984344482422, 0.029035263061523438, 0.029470720291137696, 0.02952396774291992, 0.028882368087768555, 0.029180479049682618, 0.028692480087280273, 0.029243167877197267, 0.02934726333618164, 0.029621023178100586, 0.02963046455383301, 0.02930588722229004, 0.028713375091552733, 0.029221439361572267, 0.029419519424438476, 0.029476287841796876, 0.029303359985351562, 0.02893414306640625, 0.029252895355224608, 0.029145503997802736, 0.028870975494384766, 0.029280223846435548, 0.029425695419311525, 0.02950553512573242, 0.03312736129760742, 0.029748159408569334, 0.028006399154663086, 0.026856800079345704, 0.02737833595275879, 0.02812678337097168, 0.02760745620727539, 0.02750057601928711, 0.02791561508178711, 0.029446815490722655, 0.028817407608032225, 0.028341567993164063, 0.02755561637878418, 0.028240800857543946, 0.02920992088317871, 0.02898809623718262, 0.02877395248413086, 0.02861440086364746, 0.028535232543945313, 0.028932031631469728, 0.028945791244506837, 0.02862588882446289, 0.028321056365966796, 0.029502143859863283, 0.02894655990600586, 0.028319616317749024, 0.02885206413269043, 0.02914022445678711, 0.028903839111328124, 0.02932307243347168, 0.029600448608398437, 0.029193504333496094, 0.029725408554077147, 0.0293187198638916, 0.02942201614379883, 0.02915123176574707, 0.028974239349365234, 0.02949001693725586, 0.029325023651123047, 0.029028640747070313, 0.02974687957763672, 0.02918022346496582, 0.029005823135375978, 0.028975040435791015, 0.028608415603637697, 0.029369728088378906, 0.02949942398071289, 0.029127424240112304, 0.029284351348876952, 0.028923904418945313, 0.028895231246948243, 0.029483007431030273, 0.029442047119140623, 0.029427711486816405, 0.029190143585205077, 0.029421119689941405, 0.02927644729614258, 0.029136159896850585, 0.029485952377319335, 0.029302783966064453, 0.02924313545227051, 0.029287904739379884, 0.029555360794067384]",tokens/s,34.47211515906512,,, 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,883.032064,566.099968,0.0,163.577856,152.009216,s,1,7.78508740234375,7.78508740234375,0.0,7.78508740234375,7.78508740234375,7.78508740234375,7.78508740234375,[7.78508740234375],,kWh,1.3536580312499305e-05,1.485669180486029e-06,3.8077808240388755e-06,1.8830030317024208e-05,,MB,1278.980096,616.431616,0.0,199.22944,184.525824,s,30,0.3329033584594726,0.011096778615315753,0.00014275998882137242,0.011071135997772217,0.011110374546051025,0.011171863746643066,0.011656904392242432,"[0.011836992263793946, 0.011026816368103027, 0.01105344009399414, 0.011048992156982422, 0.011106847763061523, 0.011109536170959472, 0.011082271575927734, 0.011087008476257324, 0.011071359634399414, 0.011059871673583984, 0.011029631614685058, 0.011215999603271484, 0.011057087898254395, 0.01104252815246582, 0.011117919921875, 0.011081439971923828, 0.011006943702697754, 0.011062239646911621, 0.011106975555419922, 0.011083488464355468, 0.011054047584533691, 0.011097824096679688, 0.011085536003112793, 0.0110513277053833, 0.011014944076538085, 0.011083231925964356, 0.011031968116760254, 0.01104975986480713, 0.011076416015625, 0.011070912361145019]",tokens/s,23069.758249179566,kWh,3.915756430239645e-07,4.316792203271008e-08,2.591988966648476e-07,6.939424617215222e-07,tokens/kWh,368906666.0727447,MB,1292.161024,618.528768,0.0,201.326592,184.528384,s,30,9.873421997070315,0.32911406656901043,0.0023199245646620785,0.3287293243408203,0.3319338562011719,0.3330650299072266,0.33503264831542967,"[0.33546578979492186, 0.3299373779296875, 0.3277629089355469, 0.33123818969726565, 0.3289024353027344, 0.32900994873046874, 0.3289847106933594, 0.32893585205078124, 0.33095077514648436, 0.3285562133789062, 0.327890380859375, 0.33027896118164063, 0.3291770324707031, 0.32653546142578127, 0.32788546752929687, 0.3278814697265625, 0.32602532958984376, 0.33397219848632814, 0.32630191040039064, 0.3262044677734375, 0.33183157348632814, 0.32768792724609375, 0.32703680419921877, 0.3319313659667969, 0.32812942504882814, 0.32684255981445315, 0.33195626831054686, 0.32792254638671875, 0.32675216674804686, 0.3314344787597656]",tokens/s,191.4229940299128,kWh,9.396901539339179e-06,1.0363279809580089e-06,3.7162209503354175e-06,1.4149450470632604e-05,tokens/kWh,4452469.73589239,,s,1890,9.85954900979995,0.005216692597777755,0.00018476969835680808,0.005185024023056031,0.005273603296279907,0.005371990442276001,0.005990755162239072,"[0.005150720119476319, 0.005411488056182861, 0.005408768177032471, 0.005313536167144775, 0.005272575855255127, 0.005238783836364746, 0.005185535907745361, 0.005254464149475098, 
0.005241536140441895, 0.005205632209777832, 0.005243264198303223, 0.005237823963165284, 0.005208191871643067, 0.005226687908172607, 0.005218592166900635, 0.005251423835754394, 0.0052195839881896975, 0.005443776130676269, 0.0052715520858764645, 0.005239264011383056, 0.005365856170654297, 0.005514976024627686, 0.00521449613571167, 0.005206016063690186, 0.005391520023345947, 0.005219007968902588, 0.005245088100433349, 0.00522815990447998, 0.005202303886413575, 0.005214367866516113, 0.005224287986755371, 0.005185535907745361, 0.005187039852142334, 0.005247007846832275, 0.0051940159797668455, 0.005258815765380859, 0.00601097583770752, 0.00637500810623169, 0.005901631832122803, 0.005844639778137207, 0.005298175811767578, 0.005307871818542481, 0.005257952213287354, 0.0052856640815734865, 0.005273632049560547, 0.005253119945526123, 0.0053002238273620605, 0.005298175811767578, 0.0052754878997802734, 0.005261343955993652, 0.005267360210418701, 0.0052483839988708494, 0.005276512145996093, 0.005260735988616943, 0.005195456027984619, 0.005381184101104736, 0.005269311904907227, 0.005238592147827148, 0.005204256057739258, 0.005446720123291016, 0.005266272068023682, 0.005216415882110596, 0.005416800022125244, 0.005074944019317627, 0.005259263992309571, 0.0053014721870422365, 0.005294591903686524, 0.005271200180053711, 0.005281631946563721, 0.005255648136138916, 0.005250847816467285, 0.005242815971374512, 0.005330560207366944, 0.0053093118667602536, 0.00541209602355957, 0.005352287769317627, 0.005212160110473632, 0.0051849279403686525, 0.005196576118469238, 0.0052962880134582516, 0.005182112216949463, 0.005187903881072998, 0.005202400207519531, 0.005159135818481445, 0.005193888187408447, 0.005180511951446533, 0.005169280052185058, 0.0051981120109558104, 0.005182144165039063, 0.005188928127288818, 0.0051695041656494144, 0.0051736001968383785, 0.00518236780166626, 0.0054579200744628905, 0.005321055889129639, 0.005199935913085938, 0.005172959804534912, 0.005180031776428223, 0.005169151782989502, 0.0051996798515319825, 0.005175487995147705, 0.005173247814178467, 0.005197472095489502, 0.00517574405670166, 0.005205920219421387, 0.005197824001312256, 0.005181727886199951, 0.005174464225769043, 0.005179872035980224, 0.005195903778076172, 0.0054048638343811035, 0.005218048095703125, 0.005393472194671631, 0.005314943790435791, 0.005193727970123291, 0.005179615974426269, 0.0051777281761169434, 0.005197792053222656, 0.005279744148254394, 0.005185567855834961, 0.005189631938934326, 0.0052667841911315915, 0.005388768196105957, 0.005207776069641113, 0.005214655876159668, 0.005211967945098877, 0.004976960182189941, 0.005186304092407227, 0.005209055900573731, 0.005223296165466309, 0.005350944042205811, 0.005252863883972168, 0.005197760105133057, 0.005165120124816894, 0.005193535804748535, 0.005180287837982178, 0.0051998720169067385, 0.005173247814178467, 0.005159103870391845, 0.0051806077957153324, 0.005160768032073975, 0.005178175926208496, 0.005191264152526856, 0.0051652159690856935, 0.0051717119216918945, 0.005193471908569336, 0.005158656120300293, 0.005150976181030273, 0.005187583923339844, 0.005228544235229492, 0.0052507839202880855, 0.005176991939544678, 0.005202847957611084, 0.005173247814178467, 0.005187295913696289, 0.005181439876556396, 0.005193439960479737, 0.005168799877166748, 0.005269824028015136, 0.005460063934326172, 0.005154047966003418, 0.005186528205871582, 0.005176608085632324, 0.005173984050750732, 0.00528329610824585, 0.00522208023071289, 0.00517628812789917, 0.005169023990631103, 
0.0052195839881896975, 0.005206783771514892, 0.005181439876556396, 0.005183040142059326, 0.005169600009918213, 0.005183487892150879, 0.005189631938934326, 0.005218272209167481, 0.00517139196395874, 0.005197663784027099, 0.005198016166687012, 0.005175104141235351, 0.00519379186630249, 0.005177279949188232, 0.0052154560089111325, 0.005157663822174073, 0.005187583923339844, 0.005230591773986816, 0.005166560173034668, 0.005193568229675293, 0.005225024223327637, 0.004923967838287353, 0.005162975788116455, 0.005187615871429443, 0.005217728137969971, 0.0051877121925354005, 0.0051816639900207516, 0.005175487995147705, 0.005203936100006103, 0.005175327777862549, 0.005173247814178467, 0.005209248065948486, 0.005164927959442138, 0.005208831787109375, 0.005191904067993164, 0.005238783836364746, 0.005261312007904053, 0.005224448204040527, 0.005483520030975342, 0.005194975852966309, 0.005206143856048584, 0.0052130880355834965, 0.005213024139404297, 0.005230495929718018, 0.005210112094879151, 0.0051998720169067385, 0.005179391860961914, 0.0052079038619995115, 0.005256480216979981, 0.0051803522109985355, 0.005188704013824463, 0.005186399936676025, 0.005187583923339844, 0.005193471908569336, 0.0052165122032165525, 0.005183487892150879, 0.005212160110473632, 0.005182816028594971, 0.005178016185760498, 0.005173247814178467, 0.005181312084197998, 0.005212096214294434, 0.005152959823608399, 0.005201216220855713, 0.005192383766174317, 0.005181439876556396, 0.005202112197875977, 0.005195295810699463, 0.005239039897918701, 0.005195807933807373, 0.0052707200050354005, 0.005200767993927002, 0.006166304111480713, 0.005219552040100097, 0.005241983890533447, 0.0055478401184082034, 0.0053935680389404295, 0.005221439838409424, 0.0052284798622131345, 0.005495744228363037, 0.005262239933013916, 0.006288991928100586, 0.005274015903472901, 0.005247104167938233, 0.004939775943756103, 0.005215839862823486, 0.00523305606842041, 0.005181344032287598, 0.005202015876770019, 0.005199967861175537, 0.005185247898101807, 0.0051896958351135255, 0.005271679878234863, 0.00521830415725708, 0.005193727970123291, 0.005655935764312744, 0.005187615871429443, 0.005290592193603515, 0.005244927883148193, 0.00520198392868042, 0.005184607982635498, 0.005239327907562256, 0.005232960224151611, 0.005244863986968994, 0.005187647819519043, 0.005215712070465088, 0.005161856174468994, 0.0052015681266784665, 0.0052056961059570315, 0.005218624114990234, 0.005221439838409424, 0.00532371187210083, 0.0052053117752075195, 0.005190688133239746, 0.005211487770080566, 0.005191999912261963, 0.005193024158477783, 0.005177792072296143, 0.005209856033325196, 0.005190144062042237, 0.005209983825683594, 0.005188896179199219, 0.005177855968475342, 0.005181087970733643, 0.005155519962310791, 0.00520966386795044, 0.005166880130767822, 0.0051756157875061035, 0.005208415985107422, 0.005255424022674561, 0.00520527982711792, 0.0052434239387512205, 0.0052152638435363765, 0.005192192077636719, 0.00517571210861206, 0.005219520092010498, 0.005163839817047119, 0.005231872081756592, 0.0052130560874938964, 0.005219552040100097, 0.005208672046661377, 0.005255424022674561, 0.005201727867126465, 0.005195775985717774, 0.0053002238273620605, 0.005236832141876221, 0.0052176637649536135, 0.004996575832366943, 0.0053820481300354, 0.005212800025939941, 0.005193439960479737, 0.005187903881072998, 0.005211872100830078, 0.005167359828948974, 0.005196864128112793, 0.005173471927642823, 0.005188320159912109, 0.0051998720169067385, 0.005197824001312256, 0.005363327980041504, 0.005183807849884033, 
0.005226560115814209, 0.005201695919036865, 0.005195168018341065, 0.005202015876770019, 0.005186495780944825, 0.005187583923339844, 0.005352640151977539, 0.005235583782196045, 0.005181151866912842, 0.005206016063690186, 0.00522649621963501, 0.005189792156219482, 0.005197023868560791, 0.0051636481285095214, 0.005234687805175781, 0.00537395191192627, 0.00520908784866333, 0.00520195198059082, 0.0051885762214660645, 0.005224063873291016, 0.005210847854614258, 0.005205664157867431, 0.005175295829772949, 0.00521830415725708, 0.005195775985717774, 0.00536575984954834, 0.005232639789581299, 0.00535756778717041, 0.005164768218994141, 0.005148384094238281, 0.005186207771301269, 0.005171487808227539, 0.005305503845214844, 0.005209856033325196, 0.005163584232330322, 0.005180928230285644, 0.005208735942840576, 0.005208320140838623, 0.005240575790405273, 0.005213664054870605, 0.005238304138183594, 0.0052638077735900875, 0.0052247681617736816, 0.005201312065124511, 0.005180255889892578, 0.005269472122192383, 0.005162752151489257, 0.005243167877197265, 0.0051562881469726565, 0.004923391819000244, 0.005183487892150879, 0.005195487976074218, 0.0051981120109558104, 0.005171199798583984, 0.005218080043792725, 0.005150496006011963, 0.0051838397979736325, 0.005232672214508057, 0.005165120124816894, 0.005306367874145508, 0.005171199798583984, 0.005304384231567383, 0.005425087928771973, 0.005183487892150879, 0.005238143920898437, 0.00515990400314331, 0.005215007781982422, 0.00521673583984375, 0.005196224212646485, 0.005197184085845947, 0.005206495761871338, 0.005183648109436035, 0.005150784015655518, 0.005194975852966309, 0.005202144145965576, 0.005182079792022705, 0.005248672008514405, 0.005228608131408691, 0.005196191787719726, 0.005173056125640869, 0.005215136051177979, 0.005174272060394287, 0.005191679954528809, 0.00517471981048584, 0.0052147841453552245, 0.0051998720169067385, 0.005185279846191406, 0.005207359790802002, 0.005174399852752686, 0.005216063976287842, 0.0052163200378417965, 0.0051649918556213375, 0.005217440128326416, 0.005192543983459473, 0.005242527961730957, 0.0051807999610900875, 0.0052295360565185545, 0.0051998720169067385, 0.005256256103515625, 0.005239168167114258, 0.005299935817718506, 0.0051998400688171385, 0.005187903881072998, 0.005419583797454834, 0.005185535907745361, 0.005306367874145508, 0.005541440010070801, 0.005190080165863037, 0.0052408318519592285, 0.005195775985717774, 0.005252384185791016, 0.005204095840454102, 0.004915520191192627, 0.00519212818145752, 0.00516319990158081, 0.005191679954528809, 0.005196000099182129, 0.0052520642280578615, 0.005184319972991944, 0.005158336162567139, 0.005190207958221436, 0.005447840213775635, 0.005202047824859619, 0.0054098558425903324, 0.005753471851348877, 0.005236767768859864, 0.0052401599884033204, 0.005210783958435059, 0.005252895832061767, 0.005183712005615234, 0.005185535907745361, 0.00516096019744873, 0.005183167934417724, 0.0051753602027893066, 0.005180672168731689, 0.005202943801879883, 0.00516860818862915, 0.005186079978942871, 0.005193471908569336, 0.005218560218811035, 0.005142623901367187, 0.005191584110260009, 0.005265151977539063, 0.005191199779510498, 0.005190368175506592, 0.005170207977294922, 0.005209055900573731, 0.005152768135070801, 0.005201920032501221, 0.005203968048095703, 0.005162303924560547, 0.005223104000091553, 0.005171199798583984, 0.0052899842262268066, 0.0051485438346862795, 0.005184800148010254, 0.005192543983459473, 0.0051875200271606445, 0.005205440044403076, 0.005227168083190918, 0.005182655811309814, 
0.005171999931335449, 0.005191199779510498, 0.005175392150878907, 0.005210495948791504, 0.005226367950439453, 0.005316736221313477, 0.005185696125030517, 0.005168992042541504, 0.00531660795211792, 0.005173247814178467, 0.005197824001312256, 0.0052367358207702636, 0.00524012804031372, 0.005298592090606689, 0.006449088096618653, 0.006764383792877198, 0.005671296119689941, 0.005248447895050049, 0.005290719985961914, 0.00526639986038208, 0.005227231979370117, 0.005207456111907959, 0.005236512184143067, 0.005202847957611084, 0.005263264179229736, 0.0051838397979736325, 0.005168384075164795, 0.005167520046234131, 0.005171199798583984, 0.0051970877647399905, 0.005266143798828125, 0.005167232036590576, 0.0051792640686035155, 0.005181407928466797, 0.0051487040519714354, 0.005185535907745361, 0.0051868481636047365, 0.005163743972778321, 0.005314752101898194, 0.005234655857086182, 0.005176415920257568, 0.005198592185974121, 0.005191167831420898, 0.005165599822998047, 0.005171360015869141, 0.0051868481636047365, 0.005176864147186279, 0.005155712127685547, 0.005190944194793701, 0.005204031944274903, 0.005194303989410401, 0.005179359912872315, 0.005200128078460694, 0.00517852783203125, 0.005143392086029053, 0.005291456222534179, 0.005175327777862549, 0.005155680179595948, 0.005155871868133545, 0.0051710400581359865, 0.0051370558738708496, 0.005167263984680176, 0.005159071922302246, 0.005215871810913086, 0.005150944232940674, 0.005224224090576172, 0.005204192161560059, 0.005167232036590576, 0.005192736148834229, 0.005231135845184326, 0.0051632637977600095, 0.0051753602027893066, 0.005163008213043213, 0.005200128078460694, 0.005164735794067383, 0.005197855949401855, 0.005177440166473389, 0.004907423973083496, 0.005159264087677002, 0.005190815925598145, 0.005184192180633545, 0.005159776210784912, 0.00532374382019043, 0.005303872108459473, 0.00513478422164917, 0.005171199798583984, 0.00556390380859375, 0.005199359893798828, 0.005179903984069824, 0.005173759937286377, 0.005273600101470947, 0.005180448055267334, 0.005190624237060547, 0.0051875200271606445, 0.005164608001708985, 0.005183743953704834, 0.005203711986541748, 0.005154880046844482, 0.005157023906707764, 0.005191296100616455, 0.0051959362030029295, 0.005161407947540283, 0.005170976161956787, 0.005210400104522705, 0.0051487360000610355, 0.005177152156829834, 0.0051645441055297855, 0.005280384063720703, 0.005148064136505127, 0.00516380786895752, 0.005177152156829834, 0.005169151782989502, 0.005171552181243897, 0.00517196798324585, 0.005159840106964111, 0.005176991939544678, 0.005178912162780762, 0.005208384037017822, 0.0051727681159973145, 0.0051896958351135255, 0.0051864638328552246, 0.005189631938934326, 0.005195328235626221, 0.005470047950744629, 0.005312960147857666, 0.005419167995452881, 0.005231872081756592, 0.005193920135498047, 0.00540723180770874, 0.00551913595199585, 0.005177599906921387, 0.0052449598312377926, 0.00517091178894043, 0.005200160026550293, 0.005162879943847656, 0.005181119918823242, 0.005179840087890625, 0.00514799976348877, 0.005190303802490234, 0.005180831909179688, 0.00489462423324585, 0.005177663803100586, 0.005184127807617188, 0.005216671943664551, 0.0051643199920654295, 0.005171328067779541, 0.005183487892150879, 0.00518943977355957, 0.005239168167114258, 0.005180543899536133, 0.005436223983764648, 0.005171264171600342, 0.005162144184112549, 0.00519049596786499, 0.005179647922515869, 0.005158656120300293, 0.00514467191696167, 0.005168479919433594, 0.005149248123168945, 0.005220352172851562, 0.005162240028381348, 
0.00514739179611206, 0.0051530561447143556, 0.005165088176727295, 0.005177120208740235, 0.005166111946105957, 0.005163904190063477, 0.005194079875946045, 0.0051708478927612305, 0.00519923210144043, 0.0052000641822814945, 0.0051593599319458006, 0.0051567997932434086, 0.005181503772735596, 0.0052451519966125485, 0.005168352127075195, 0.005167679786682129, 0.005265408039093018, 0.0051463360786437986, 0.005170623779296875, 0.005135200023651123, 0.005185535907745361, 0.005156479835510254, 0.005196159839630127, 0.00520956802368164, 0.005183551788330078, 0.005218783855438233, 0.00519385576248169, 0.005151840209960937, 0.005186336040496826, 0.0051612801551818846, 0.005193408012390137, 0.005209792137145996, 0.0055033278465271, 0.005211616039276123, 0.005177855968475342, 0.005222400188446045, 0.005200992107391358, 0.005147552013397217, 0.0052509760856628415, 0.005556320190429688, 0.005213215827941895, 0.005422048091888428, 0.0049658560752868655, 0.005988255977630615, 0.006318367958068847, 0.005406464099884033, 0.005258975982666015, 0.005221248149871826, 0.005246240139007569, 0.005316895961761475, 0.0051859841346740725, 0.005203264236450195, 0.005153471946716308, 0.0051773438453674315, 0.005453887939453125, 0.005247935771942138, 0.00521727991104126, 0.005222400188446045, 0.005187808036804199, 0.00564796781539917, 0.005262752056121826, 0.005166207790374756, 0.005209184169769287, 0.0054830718040466304, 0.005197824001312256, 0.005187744140625, 0.005162271976470947, 0.005169727802276611, 0.005158592224121094, 0.005156735897064209, 0.005179840087890625, 0.005173247814178467, 0.005191679954528809, 0.005191679954528809, 0.005203231811523438, 0.00518236780166626, 0.00520582389831543, 0.005204031944274903, 0.005174784183502197, 0.00518393611907959, 0.005201920032501221, 0.0051959362030029295, 0.005196991920471191, 0.005192351818084717, 0.005174975872039795, 0.005151040077209472, 0.005195775985717774, 0.005214208126068115, 0.005148672103881836, 0.005180863857269287, 0.005197504043579102, 0.00525167989730835, 0.005165088176727295, 0.005144512176513672, 0.005178719997406006, 0.0051365442276000975, 0.005158847808837891, 0.0051530561447143556, 0.005142176151275634, 0.005163743972778321, 0.005145887851715088, 0.005189727783203125, 0.0051495361328125, 0.00516096019744873, 0.005173247814178467, 0.004871456146240234, 0.005155551910400391, 0.005248703956604004, 0.005173888206481934, 0.0051494078636169435, 0.005423967838287354, 0.005195136070251465, 0.005264383792877197, 0.005181056022644043, 0.005201344013214111, 0.005190336227416993, 0.005182559967041016, 0.005186528205871582, 0.005221568107604981, 0.005196544170379638, 0.00522982406616211, 0.005203743934631347, 0.005192671775817871, 0.005189504146575928, 0.005206143856048584, 0.005195775985717774, 0.005197824001312256, 0.005218207836151123, 0.0051979198455810545, 0.005384096145629883, 0.005222496032714844, 0.0052341117858886715, 0.005222239971160889, 0.005216864109039307, 0.005230720043182373, 0.0052912960052490235, 0.005264095783233643, 0.005340672016143798, 0.0052128958702087405, 0.005183263778686524, 0.005211711883544922, 0.005212255954742432, 0.0051797437667846676, 0.005181727886199951, 0.00523635196685791, 0.0051918082237243654, 0.00522441577911377, 0.005201920032501221, 0.005348544120788574, 0.0052427520751953125, 0.005200255870819092, 0.005196352005004883, 0.005185535907745361, 0.005275551795959473, 0.005234784126281739, 0.005257408142089844, 0.005301407814025879, 0.005274271965026856, 0.005228352069854736, 0.00522873592376709, 0.005316031932830811, 
0.005216832160949707, 0.005205760002136231, 0.005191232204437256, 0.00518236780166626, 0.005150527954101563, 0.005185472011566162, 0.005183519840240479, 0.004872191905975342, 0.005147776126861572, 0.005176191806793213, 0.005186944007873535, 0.005149312019348144, 0.005173247814178467, 0.005258848190307617, 0.005149087905883789, 0.005153088092803955, 0.005174240112304687, 0.005188064098358154, 0.005165503978729248, 0.005173120021820068, 0.005198880195617676, 0.005261792182922363, 0.005176799774169922, 0.0051847357749938965, 0.005153600215911865, 0.005165535926818848, 0.005226687908172607, 0.005167712211608886, 0.005152448177337647, 0.005225503921508789, 0.005193984031677246, 0.005167840003967285, 0.005191232204437256, 0.0051933760643005375, 0.005163008213043213, 0.005154784202575684, 0.005159776210784912, 0.005197824001312256, 0.005154719829559326, 0.005173312187194824, 0.005299776077270508, 0.005185887813568115, 0.005216224193572998, 0.005146751880645752, 0.005179391860961914, 0.005139743804931641, 0.00522108793258667, 0.005173408031463623, 0.005175295829772949, 0.005195199966430664, 0.005170752048492431, 0.005139296054840088, 0.005157087802886963, 0.005161056041717529, 0.005205984115600586, 0.005166816234588623, 0.005272672176361084, 0.005218688011169433, 0.00516323184967041, 0.005198463916778564, 0.005149983882904053, 0.005185311794281006, 0.005205728054046631, 0.00514083194732666, 0.005159391880035401, 0.00513647985458374, 0.005189631938934326, 0.005167103767395019, 0.005181439876556396, 0.005135424137115478, 0.004917151927947998, 0.00520851182937622, 0.005166592121124268, 0.005136767864227295, 0.005136159896850586, 0.005267360210418701, 0.005170783996582031, 0.005177855968475342, 0.005175295829772949, 0.00526364803314209, 0.005175104141235351, 0.0051833920478820805, 0.005232639789581299, 0.005187583923339844, 0.005205215930938721, 0.005165152072906494, 0.0051287999153137205, 0.005142623901367187, 0.00573641586303711, 0.005765151977539062, 0.00517033576965332, 0.0052082881927490235, 0.005520287990570068, 0.005185344219207764, 0.005216159820556641, 0.005197343826293946, 0.005175776004791259, 0.005201056003570557, 0.005168191909790039, 0.0051703681945800785, 0.005179520130157471, 0.005230463981628418, 0.005141088008880615, 0.005152351856231689, 0.005167520046234131, 0.005146560192108155, 0.0051785922050476075, 0.005225312232971191, 0.005128543853759765, 0.005139552116394043, 0.005131103992462158, 0.005170976161956787, 0.005132127761840821, 0.005179647922515869, 0.005164671897888184, 0.005210336208343506, 0.005226431846618652, 0.005183263778686524, 0.005202559947967529, 0.0052015681266784665, 0.005257215976715088, 0.005165056228637695, 0.005150720119476319, 0.005179391860961914, 0.005182752132415772, 0.005141024112701416, 0.005136032104492188, 0.005163551807403565, 0.005154816150665284, 0.0051652159690856935, 0.005134175777435303, 0.005195775985717774, 0.005158912181854248, 0.0049016962051391605, 0.005140128135681152, 0.005165535926818848, 0.0051402878761291505, 0.005248000144958496, 0.0052641282081604, 0.005181151866912842, 0.005145088195800781, 0.005167103767395019, 0.005187583923339844, 0.0051504321098327635, 0.005155327796936035, 0.0051827201843261715, 0.005171743869781495, 0.005146624088287354, 0.005171199798583984, 0.005148799896240235, 0.005141600131988526, 0.005157663822174073, 0.005154592037200928, 0.005133952140808105, 0.00537391996383667, 0.005202559947967529, 0.005207871913909912, 0.0052432317733764645, 0.005181280136108399, 0.005195615768432617, 0.005152639865875244, 
0.005166399955749512, 0.0051844801902771, 0.005173279762268067, 0.005191648006439209, 0.005322303771972656, 0.005236512184143067, 0.00519820785522461, 0.005511007785797119, 0.0054271998405456545, 0.0051671361923217776, 0.005280064105987549, 0.005337183952331543, 0.005287712097167969, 0.005187039852142334, 0.005200384140014648, 0.0051773757934570315, 0.0051840319633483884, 0.005373631954193115, 0.005168767929077148, 0.005185215950012207, 0.005185696125030517, 0.005169695854187012, 0.005156544208526612, 0.0051511359214782716, 0.005195680141448974, 0.0051775360107421875, 0.005162816047668457, 0.005183712005615234, 0.005157983779907226, 0.005167808055877686, 0.005163167953491211, 0.005293504238128662, 0.005147039890289307, 0.005172224044799805, 0.0051701760292053225, 0.0048855037689208985, 0.005170239925384522, 0.0051497921943664555, 0.005174880027770996, 0.0051420159339904785, 0.005237247943878174, 0.005148799896240235, 0.005163040161132813, 0.00514246416091919, 0.005168543815612793, 0.005159391880035401, 0.005140704154968262, 0.005144544124603271, 0.005129504203796387, 0.005163616180419922, 0.005187647819519043, 0.005138495922088623, 0.005164703845977783, 0.005120351791381836, 0.005169151782989502, 0.005150976181030273, 0.005226240158081054, 0.005142752170562744, 0.005152544021606445, 0.005162879943847656, 0.00514467191696167, 0.005140575885772705, 0.005189568042755127, 0.00555836820602417, 0.005230815887451172, 0.005350719928741455, 0.005148575782775879, 0.005148672103881836, 0.005159200191497803, 0.005165247917175293, 0.005150720119476319, 0.005224063873291016, 0.005165056228637695, 0.005191199779510498, 0.005170271873474121, 0.00515772819519043, 0.005157792091369629, 0.005122176170349121, 0.005154463768005371, 0.005173471927642823, 0.005122144222259522, 0.005207935810089112, 0.005176576137542724, 0.00514035177230835, 0.005168032169342041, 0.005158271789550781, 0.005166816234588623, 0.005184576034545898, 0.00519155216217041, 0.005158400058746338, 0.005140960216522217, 0.005155136108398437, 0.005152448177337647, 0.005158912181854248, 0.005152128219604492, 0.005169600009918213, 0.005181248188018799, 0.005148928165435791, 0.004884384155273438, 0.005151167869567871, 0.005190847873687744, 0.005149151802062988, 0.005757120132446289, 0.005271359920501709, 0.0054579200744628905, 0.006184127807617188, 0.00550380802154541, 0.007950335979461669, 0.005472256183624268, 0.006934463977813721, 0.005181568145751953, 0.005223872184753418, 0.005369984149932862, 0.005216256141662597, 0.005211775779724121, 0.005146463871002197, 0.005163839817047119, 0.005158463954925537, 0.005194047927856446, 0.005149216175079346, 0.005174623966217041, 0.005185344219207764, 0.005151264190673828, 0.005297887802124024, 0.005228831768035889, 0.005175456047058106, 0.005218143939971924, 0.005139999866485596, 0.005175519943237305, 0.005167327880859375, 0.005185567855834961, 0.005191679954528809, 0.005183487892150879, 0.005158080101013184, 0.005169695854187012, 0.005187808036804199, 0.005157023906707764, 0.0051970877647399905, 0.0051855998039245605, 0.005149280071258545, 0.005235743999481201, 0.0051693120002746585, 0.00516918420791626, 0.005210912227630616, 0.005167327880859375, 0.00525439977645874, 0.005150559902191162, 0.005149695873260498, 0.005162720203399658, 0.005136320114135742, 0.005146656036376953, 0.005152703762054444, 0.005158720016479493, 0.00522057580947876, 0.005220352172851562, 0.005341055870056152, 0.005210400104522705, 0.005180511951446533, 0.005159679889678955, 0.0051560640335083004, 0.005152575969696045, 
0.004915200233459473, 0.005236320018768311, 0.0051552319526672365, 0.005167103767395019, 0.005154208183288574, 0.0051975998878479, 0.0051495041847229, 0.005175648212432861, 0.005161952018737793, 0.005142303943634033, 0.005173151969909668, 0.005176415920257568, 0.005158815860748291, 0.005146175861358643, 0.005152639865875244, 0.005177919864654541, 0.005123871803283692, 0.00516096019744873, 0.0054065918922424315, 0.005175648212432861, 0.005167103767395019, 0.005167168140411377, 0.005207647800445556, 0.005162559986114502, 0.0051653761863708495, 0.00520854377746582, 0.005156864166259765, 0.005205183982849121, 0.005208255767822266, 0.0051699519157409664, 0.005175168037414551, 0.005179135799407959, 0.005175551891326904, 0.005157087802886963, 0.005166848182678222, 0.0051998720169067385, 0.005267039775848388, 0.005175199985504151, 0.005143040180206298, 0.005171199798583984, 0.005154560089111328, 0.005168384075164795, 0.005155136108398437, 0.005165760040283203, 0.005175424098968506, 0.005171296119689942, 0.005142303943634033, 0.005165056228637695, 0.005252799987792969, 0.005189407825469971, 0.005142303943634033, 0.005165823936462402, 0.005174655914306641, 0.005150559902191162, 0.005143328189849854, 0.005150784015655518, 0.005173215866088867, 0.005152671813964844, 0.005158976078033447, 0.00522649621963501, 0.005147712230682373, 0.005187808036804199, 0.005188320159912109, 0.0049285759925842285, 0.005169216156005859, 0.005181536197662353, 0.005204671859741211, 0.005168288230895996, 0.0051559357643127446, 0.0051785922050476075, 0.005181056022644043, 0.005159840106964111, 0.005153120040893555, 0.00518336009979248, 0.005221695899963379, 0.005144192218780518, 0.005163968086242676, 0.005202079772949219, 0.005150015830993652, 0.005164735794067383, 0.00518230390548706, 0.005199584007263183, 0.005199423789978027, 0.005196544170379638, 0.00517900800704956, 0.005177696228027343, 0.005157983779907226, 0.005167103767395019, 0.0052336640357971195, 0.0051833920478820805, 0.00518668794631958, 0.0051454720497131345, 0.00514902400970459, 0.005215551853179932, 0.005212063789367676, 0.005167295932769775, 0.005169407844543457, 0.005189151763916016, 0.005136864185333252, 0.0051550078392028804, 0.005192704200744629, 0.005163839817047119, 0.005169151782989502, 0.005191584110260009, 0.005171296119689942, 0.005324831962585449, 0.0051588802337646485, 0.0051753602027893066, 0.005218239784240722, 0.005169151782989502, 0.0051643199920654295, 0.0051494078636169435, 0.005174335956573487, 0.005138879776000977, 0.005149184226989746, 0.005134335994720459, 0.00516099214553833, 0.005171167850494385, 0.005138432025909424, 0.0051420159339904785, 0.005155327796936035, 0.005164927959442138, 0.0051625919342041015, 0.005146687984466553, 0.005217887878417969, 0.0051365442276000975, 0.004867424011230468, 0.005165279865264893, 0.005173696041107178, 0.00513801622390747, 0.005124512195587158, 0.005171199798583984, 0.005170720100402832, 0.005155551910400391, 0.005152703762054444, 0.005341343879699707, 0.005157983779907226, 0.005142752170562744, 0.007608672142028808, 0.006129087924957276, 0.0061179518699645994, 0.005214208126068115, 0.005213600158691406, 0.005186016082763672, 0.00518998384475708, 0.005197472095489502, 0.005161087989807129, 0.005196032047271729, 0.005247104167938233, 0.005143712043762207, 0.005387904167175293, 0.00517526388168335, 0.005254335880279541, 0.005177023887634277, 0.005181439876556396, 0.005191743850708008, 0.005162911891937256, 0.005183135986328125, 0.00517139196395874, 0.0051734399795532226, 0.005142528057098389, 
0.005168767929077148, 0.005222784042358398, 0.005150720119476319, 0.005150911808013916, 0.005177152156829834, 0.005129504203796387, 0.005350111961364746, 0.005146592140197754, 0.005196159839630127, 0.005408415794372559, 0.0056852478981018065, 0.0051693120002746585, 0.005168992042541504, 0.005173120021820068, 0.0051877121925354005, 0.005130464076995849, 0.005397632122039795, 0.005176000118255615, 0.00516707181930542, 0.005181344032287598, 0.0051569600105285645, 0.005143712043762207, 0.005147488117218018, 0.005148223876953125, 0.005175871849060059, 0.005144447803497315, 0.005172704219818115, 0.0051840319633483884, 0.004954368114471436, 0.005183135986328125, 0.0051938238143920895, 0.005202176094055176, 0.005166719913482666, 0.005167488098144531, 0.005171199798583984, 0.005182496070861816, 0.005163167953491211, 0.005176383972167969, 0.005179168224334717, 0.0051829757690429685, 0.005169631958007813, 0.005185344219207764, 0.005345823764801025, 0.00521507215499878, 0.005168288230895996, 0.0052055039405822755, 0.005163167953491211, 0.005179200172424316, 0.00517958402633667, 0.005171264171600342, 0.005197760105133057, 0.005211520195007324, 0.005196288108825684, 0.005213823795318604, 0.005227007865905762, 0.005232639789581299, 0.005193727970123291, 0.005187583923339844, 0.005195424079895019, 0.005380127906799316, 0.005243264198303223, 0.005246655941009521, 0.005244319915771485, 0.005245791912078857, 0.005244927883148193, 0.005241951942443848, 0.0051864638328552246, 0.005232416152954102, 0.0054347519874572755, 0.005196640014648438, 0.005183648109436035, 0.0051641278266906735, 0.005159423828125, 0.005173503875732422, 0.005170591831207275, 0.005143455982208252, 0.005149727821350098, 0.0051677761077880855, 0.005177055835723877, 0.00514035177230835, 0.005177311897277832, 0.005177792072296143, 0.005168191909790039, 0.005154079914093017, 0.005191328048706055, 0.005230336189270019, 0.005154911994934082, 0.00517468786239624, 0.005167871952056885, 0.005175487995147705, 0.005174592018127442, 0.004898816108703613, 0.00518998384475708, 0.005154463768005371, 0.005165056228637695, 0.005197728157043457, 0.005228640079498291, 0.005154880046844482, 0.005248960018157959, 0.0051684479713439944, 0.005157023906707764, 0.00516764783859253, 0.0053002238273620605, 0.005154816150665284, 0.005173247814178467, 0.005183487892150879, 0.005169151782989502, 0.005144927978515625, 0.005178336143493652, 0.005182112216949463, 0.00516703987121582, 0.00516099214553833, 0.005184639930725097, 0.005153312206268311, 0.005160575866699219, 0.005163360118865967, 0.005180895805358887, 0.005153759956359864, 0.005202144145965576, 0.005262944221496582, 0.005202239990234375, 0.0053012480735778805, 0.0052130560874938964, 0.005158912181854248, 0.005185376167297364, 0.005157023906707764, 0.005189888000488281, 0.005168896198272705, 0.005172544002532959, 0.00532755184173584, 0.005173215866088867, 0.0051627840995788574, 0.005220511913299561, 0.0051528639793396, 0.005169151782989502, 0.005175295829772949, 0.005212160110473632, 0.005201920032501221, 0.005185535907745361, 0.005183743953704834, 0.005207808017730713, 0.005181439876556396, 0.005275680065155029, 0.005171167850494385, 0.005184991836547852, 0.005177440166473389, 0.0051859841346740725, 0.005162047863006592, 0.005184832096099853, 0.005176959991455078, 0.0051868481636047365, 0.005184224128723144, 0.005206016063690186, 0.005176767826080322, 0.004915200233459473, 0.005197184085845947, 0.005175487995147705, 0.005189536094665527, 0.005225279808044434, 0.005172192096710205, 0.005208831787109375, 
0.005159071922302246, 0.005179232120513916, 0.005392384052276611, 0.005160704135894776, 0.005337344169616699, 0.00520966386795044, 0.0051775360107421875, 0.007284192085266114, 0.005440447807312012, 0.007069695949554444, 0.005265247821807861, 0.005255072116851806, 0.005205344200134277, 0.005159872055053711, 0.00517087984085083, 0.005206143856048584, 0.005158207893371582, 0.005165760040283203, 0.005183487892150879, 0.00516096019744873, 0.005150720119476319, 0.005166175842285156, 0.00518236780166626, 0.0051790719032287595, 0.005164864063262939, 0.005183008193969727, 0.005274591922760009, 0.005146624088287354, 0.00527561616897583, 0.005171584129333496, 0.00518723201751709, 0.005187263965606689, 0.005226784229278565, 0.0051487040519714354, 0.005170720100402832, 0.0052436480522155765, 0.005152480125427246, 0.005165056228637695, 0.005181536197662353, 0.005189536094665527, 0.005158656120300293, 0.005189504146575928, 0.005167808055877686, 0.005146111965179443, 0.005159103870391845, 0.005160287857055664, 0.005194528102874756, 0.0051792640686035155, 0.00517139196395874, 0.0051866559982299805, 0.005176000118255615, 0.005194975852966309, 0.005210495948791504, 0.005226304054260254, 0.005495423793792724, 0.005228544235229492, 0.005792287826538086, 0.005217311859130859, 0.005189888000488281, 0.005234975814819336, 0.005176000118255615, 0.005201791763305664, 0.005218175888061524, 0.005180511951446533, 0.005180319786071777, 0.005215839862823486, 0.005171487808227539, 0.005547135829925537, 0.005226624011993408, 0.005186431884765625, 0.005189248085021973, 0.005196159839630127, 0.00518342399597168, 0.005162176132202149, 0.005179679870605468, 0.005200607776641846, 0.005158527851104737, 0.005184991836547852, 0.005229119777679443, 0.005163040161132813, 0.005161151885986328, 0.005189184188842773, 0.005202367782592774, 0.005183487892150879, 0.00519379186630249, 0.005228415966033935, 0.005191743850708008, 0.005201920032501221, 0.005213503837585449, 0.00522105598449707, 0.005185440063476563, 0.0051979198455810545, 0.005208255767822266, 0.005162112236022949, 0.005160736083984375, 0.005200287818908691, 0.005165023803710938, 0.005199967861175537, 0.005144224166870117, 0.005311456203460694, 0.005190847873687744, 0.005167744159698486, 0.005175295829772949, 0.005153984069824219, 0.005149759769439697, 0.005195871829986572, 0.005144224166870117, 0.0051567997932434086, 0.005161024093627929, 0.0051814718246459964, 0.005166783809661866, 0.0051528000831604, 0.0051632637977600095, 0.005171552181243897, 0.005147903919219971, 0.005173567771911621, 0.0051794881820678715, 0.005174431800842285, 0.005161407947540283, 0.004921279907226563, 0.0051791038513183596, 0.0051736001968383785, 0.005234687805175781, 0.005179391860961914, 0.0051402878761291505, 0.005167295932769775, 0.005171199798583984, 0.005142623901367187, 0.005154623985290528, 0.005165247917175293, 0.0051979198455810545, 0.005160575866699219, 0.005168735980987549, 0.00519868803024292, 0.00516703987121582, 0.005153600215911865, 0.0051801600456237796, 0.00519375991821289, 0.005147071838378906, 0.005162528038024902, 0.005195615768432617, 0.00518390417098999, 0.005167103767395019, 0.0051775679588317875, 0.005193503856658935, 0.005173247814178467, 0.00516483211517334, 0.005160448074340821, 0.005157599925994873, 0.005171199798583984, 0.005182688236236572, 0.005298367977142334, 0.005212768077850342, 0.005359519958496093, 0.005167263984680176, 0.005150144100189209, 0.005180928230285644, 0.005187903881072998, 0.005172031879425049, 0.005174431800842285, 0.005182144165039063, 
0.0052306241989135745, 0.005207456111907959, 0.005161503791809082, 0.0051998720169067385, 0.005158976078033447, 0.0051693120002746585, 0.005191648006439209, 0.005164095878601074, 0.005186016082763672, 0.0051838397979736325, 0.005163008213043213, 0.005175039768218994, 0.005208479881286621, 0.00519916820526123, 0.005173791885375976, 0.005179327964782715, 0.0051764798164367675, 0.005349599838256836, 0.005167520046234131, 0.005211743831634521, 0.005157343864440918, 0.004942336082458496, 0.005173344135284424, 0.005253215789794922, 0.005152416229248047, 0.0051485438346862795, 0.005187551975250244, 0.0051756157875061035, 0.005152575969696045, 0.00514847993850708, 0.005212543964385986, 0.0051660799980163576, 0.005157375812530517, 0.005140448093414306, 0.005338687896728516, 0.005170400142669678, 0.005189151763916016, 0.005439775943756104, 0.00729852819442749, 0.007143167972564697, 0.005278367996215821, 0.005199584007263183, 0.005281504154205322, 0.005315199851989746, 0.005181439876556396, 0.005173247814178467, 0.0051866559982299805, 0.005159840106964111, 0.005183008193969727, 0.0051777281761169434, 0.005175392150878907, 0.005167103767395019, 0.005201248168945313, 0.00518336009979248, 0.0051877121925354005, 0.005270431995391846, 0.005183231830596924, 0.005201920032501221, 0.0051693120002746585, 0.005229983806610107, 0.005199903964996338, 0.005167520046234131, 0.005189631938934326, 0.005193727970123291, 0.005183040142059326, 0.005181888103485108, 0.005171072006225586, 0.005309663772583008, 0.005188352108001709, 0.005187744140625, 0.005214208126068115, 0.005160672187805176, 0.0052165441513061525, 0.0052367358207702636, 0.005277440071105957, 0.005222047805786133, 0.00520252799987793, 0.005175295829772949, 0.005163167953491211, 0.005154655933380127, 0.005195775985717774, 0.005178463935852051, 0.005245855808258057, 0.005189280033111573, 0.004917376041412354, 0.0051528959274292, 0.005154560089111328, 0.005193727970123291, 0.005150047779083252, 0.005135007858276367, 0.005175295829772949, 0.005436863899230957, 0.005181024074554444, 0.005208255767822266, 0.005221183776855469, 0.005176896095275879, 0.005163424015045166, 0.005197824001312256, 0.005197824001312256, 0.005179232120513916, 0.005175456047058106, 0.005196832180023194, 0.005172128200531006, 0.005174399852752686, 0.005184127807617188, 0.005146848201751709, 0.005176832199096679, 0.005317215919494629, 0.005302271842956543, 0.005174431800842285, 0.005192543983459473, 0.005192768096923828, 0.00515123176574707, 0.00517574405670166, 0.005287936210632324, 0.005391424179077149, 0.005502111911773681, 0.005217311859130859, 0.0051943678855896, 0.005187551975250244, 0.005189792156219482, 0.005154816150665284, 0.0051785922050476075, 0.0051660480499267575, 0.0051847038269042965, 0.005194111824035645, 0.0051857600212097165, 0.005205471992492675, 0.005187327861785889, 0.005197824001312256, 0.005194560050964355, 0.005257215976715088, 0.0052053117752075195, 0.0051493759155273435, 0.005175327777862549, 0.005226240158081054, 0.005174784183502197, 0.0051803522109985355, 0.005146399974822998, 0.005165056228637695, 0.005165056228637695, 0.00515664005279541, 0.005152991771697998, 0.005189631938934326, 0.005154143810272216, 0.005355936050415039, 0.0052221441268920895, 0.004896768093109131, 0.005176640033721923, 0.005147327899932861, 0.0051727681159973145, 0.005183104038238525, 0.005206399917602539, 0.005183263778686524, 0.005169856071472168, 0.005185535907745361, 0.00516096019744873, 0.005179391860961914, 0.005184544086456299, 0.005216864109039307, 0.005177216053009033, 
0.005186048030853272, 0.0051695041656494144, 0.0051847357749938965, 0.005194176197052002, 0.005181695938110352, 0.005183231830596924, 0.005165311813354492, 0.005189375877380371, 0.005150720119476319, 0.005160352230072021, 0.005173056125640869, 0.005185503959655762, 0.005149695873260498, 0.0051976318359375, 0.005189631938934326, 0.005162816047668457, 0.005199999809265137, 0.005404736042022705, 0.005160223960876465, 0.005194464206695557, 0.005220352172851562, 0.005185056209564209, 0.005192160129547119, 0.005256383895874023, 0.0052351679801940915, 0.005189824104309082, 0.005166656017303467, 0.00521072006225586, 0.005154816150665284, 0.005192800045013428, 0.005214560031890869, 0.005143104076385498, 0.005170591831207275, 0.005181407928466797, 0.005173535823822022, 0.005161407947540283, 0.005161983966827393, 0.005206943988800049, 0.005150112152099609, 0.005178175926208496, 0.005170976161956787, 0.005188608169555664, 0.005161439895629883, 0.005171679973602295, 0.0051567997932434086, 0.005166751861572266, 0.005155295848846436, 0.005174752235412598, 0.005186079978942871, 0.004934463977813721, 0.005166783809661866, 0.005283520221710205, 0.005169631958007813, 0.005148831844329834, 0.005154496192932129, 0.005181600093841552, 0.0051521601676940915, 0.005171648025512695, 0.005165279865264893, 0.00515664005279541, 0.0051446080207824705, 0.005154784202575684, 0.005193568229675293, 0.005152927875518799, 0.0051784000396728515, 0.00532089614868164, 0.0051924800872802735, 0.0051979517936706545, 0.007595295906066894, 0.006425183773040771, 0.005787231922149658, 0.005285696029663086, 0.005212768077850342, 0.005184576034545898, 0.0051987838745117185, 0.0053309440612792965, 0.005216256141662597, 0.005181439876556396, 0.00520908784866333, 0.005300992012023926, 0.005184959888458252, 0.005198431968688965, 0.005176991939544678, 0.005196352005004883, 0.005163008213043213, 0.005177663803100586, 0.005152128219604492, 0.005190207958221436, 0.005190656185150146, 0.00515558385848999, 0.005178976058959961, 0.005203872203826904, 0.005155327796936035, 0.005160064220428467, 0.005177984237670899, 0.005193632125854492, 0.00523686408996582, 0.0051857600212097165, 0.005181439876556396, 0.005150303840637207, 0.0051452798843383786, 0.005145919799804687, 0.005183231830596924, 0.005273471832275391, 0.005179872035980224, 0.005177599906921387, 0.005164192199707031, 0.005190688133239746, 0.005201791763305664, 0.0051404800415039064, 0.005193408012390137, 0.005192160129547119]",tokens/s,191.69233786671413,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,6677.700608,9395.109888,0.0,8992.587776,8404.320768,s,1,14.3119267578125,14.3119267578125,0.0,14.3119267578125,14.3119267578125,14.3119267578125,14.3119267578125,[14.3119267578125],,kWh,0.0002046075615208186,2.2562483415406577e-05,9.204312918997815e-05,0.0003192131741262033,,MB,1619.161088,9409.789952,0.0,8992.587776,7879.473152,s,10,3.303888031005859,0.33038880310058594,0.0006076718878471902,0.3303198852539062,0.33122945556640626,0.3312737854003906,0.3313092492675781,"[0.3298011779785156, 0.329601806640625, 0.3309787292480469, 0.33055328369140624, 0.329951171875, 0.32972183227539065, 0.33065582275390626, 0.331318115234375, 0.33008648681640623, 0.3312196044921875]",tokens/s,774.8446605863381,kWh,9.675382279835982e-06,1.0666945124804357e-06,6.436107299419694e-06,1.7178184091736108e-05,tokens/kWh,14902622.921776328,MB,1629.446144,9409.789952,0.0,8992.587776,8125.43744,s,10,22.351047851562498,2.23510478515625,0.0015041533697638238,2.2351145019531247,2.2368691650390624,2.2372614868164065,2.2375753442382815,"[2.23553662109375, 2.2334091796875, 2.2335615234375, 2.232909912109375, 2.2346923828125, 2.236781982421875, 2.23584423828125, 2.234271728515625, 2.236386474609375, 2.23765380859375]",tokens/s,28.186597969989958,kWh,6.525848447266902e-05,7.198317251737343e-06,4.326657135157829e-05,0.00011572337307598465,tokens/kWh,544401.6910795871,,s,630,22.346785312652592,0.035471087797861255,0.0003944902893675853,0.03546238327026367,0.03590748634338379,0.035958040809631345,0.036100016441345216,"[0.03611561584472656, 0.035322719573974606, 0.03499155044555664, 0.03482681655883789, 0.03479142379760742, 0.034813568115234374, 0.034775550842285154, 0.034897407531738284, 0.03499046325683594, 0.034992031097412106, 0.03495529556274414, 0.034985504150390624, 0.03501417541503906, 0.0350379524230957, 0.03497190475463867, 0.035225601196289064, 0.03523788833618164, 0.03515177536010742, 0.0351253433227539, 0.035147712707519534, 0.0352413444519043, 0.0352611198425293, 0.03953049468994141, 0.03504086303710938, 0.03512361526489258, 0.03519686508178711, 0.03546265411376953, 0.03545695877075195, 0.035312030792236326, 0.03532755279541016, 0.03533891296386719, 0.03538486480712891, 0.03536297607421875, 0.03535494232177734, 0.03538739013671875, 0.03547068786621094, 0.03542697525024414, 0.035393024444580076, 0.03541366577148437, 0.03542512130737305, 0.03543040084838867, 0.03553481674194336, 0.03560860824584961, 0.035633056640625, 0.0356844482421875, 0.03592601776123047, 0.03561798477172851, 0.035752544403076174, 0.03599929428100586, 0.03580176162719727, 0.035800769805908204, 0.03576249694824219, 0.035757568359375, 0.03576371383666992, 0.035783679962158206, 0.035866561889648436, 0.03588412857055664, 0.03592291259765625, 0.03587686538696289, 0.03588614273071289, 0.03587699127197266, 0.035883617401123044, 0.035909854888916015, 0.03591017532348633, 0.035198974609375, 0.03491430282592774, 0.034856449127197264, 0.0348042221069336, 0.03484467315673828, 0.03483647918701172, 0.03499129486083984, 0.03498031997680664, 0.035012481689453125, 0.03502656173706055, 0.03507491302490234, 0.035074047088623043, 0.035125247955322264, 0.035067680358886716, 0.03525564956665039, 0.0352490234375, 0.035149822235107424, 0.0350904312133789, 0.035176448822021485, 0.0352542724609375, 0.03522918319702149, 0.03527427291870117, 0.03537200164794922, 0.03537100982666016, 0.03538534545898438, 0.03536076736450195, 0.035438495635986327, 0.035444671630859376, 0.0354733772277832, 0.03547564697265625, 
0.035481342315673826, 0.035428287506103516, 0.03535036849975586, 0.03540127944946289, 0.035523040771484375, 0.035504512786865235, 0.03551846313476562, 0.03546323013305664, 0.035530048370361327, 0.0355805778503418, 0.03556764984130859, 0.03560147094726562, 0.035681217193603516, 0.03563724899291992, 0.03568230438232422, 0.03577241516113281, 0.03572057723999023, 0.03585696029663086, 0.035780670166015625, 0.03572326278686523, 0.03574480056762695, 0.03575228881835937, 0.035799678802490235, 0.035850238800048825, 0.0358809585571289, 0.035859745025634764, 0.03602025604248047, 0.035902145385742185, 0.03590889739990234, 0.03595337677001953, 0.03593011093139648, 0.03598521423339844, 0.03565203094482422, 0.03506790542602539, 0.0349306869506836, 0.034887680053710936, 0.034871295928955076, 0.03502643203735351, 0.03503155136108398, 0.03499971389770508, 0.03493948745727539, 0.0350022087097168, 0.03507011032104492, 0.03508019256591797, 0.035055198669433595, 0.035133216857910154, 0.03515251159667969, 0.03521945571899414, 0.035166206359863283, 0.035110912322998046, 0.035116512298583986, 0.035211071014404294, 0.0352283821105957, 0.03530547332763672, 0.035253662109375, 0.035224159240722655, 0.03521046447753906, 0.0353144645690918, 0.03549699020385742, 0.035462112426757814, 0.03533004760742187, 0.03551027297973633, 0.035399681091308595, 0.035429473876953124, 0.03549481582641602, 0.03547116851806641, 0.03548384094238281, 0.035552928924560544, 0.03554256057739258, 0.035464000701904294, 0.03548713684082031, 0.0355272331237793, 0.03567407989501953, 0.035700096130371096, 0.03561747360229492, 0.03560195159912109, 0.03565411376953125, 0.035678207397460936, 0.03576755142211914, 0.035732223510742185, 0.0357498893737793, 0.03572531127929687, 0.03572326278686523, 0.035702686309814456, 0.03576959991455078, 0.03593081665039063, 0.03593603134155274, 0.03589897537231445, 0.03585923385620117, 0.03594387054443359, 0.03590134429931641, 0.03592416000366211, 0.03593638229370117, 0.035969375610351566, 0.03594854354858398, 0.03576345443725586, 0.03512137603759766, 0.03491088104248047, 0.034815521240234376, 0.03485129547119141, 0.034852638244628906, 0.03483875274658203, 0.03491635131835937, 0.03504127883911133, 0.03506995010375977, 0.03506083297729492, 0.03506003189086914, 0.03503887939453125, 0.03511737442016601, 0.035204959869384767, 0.03516700744628906, 0.03509612655639648, 0.03509804916381836, 0.03518544006347656, 0.03522982406616211, 0.03524822235107422, 0.035246047973632816, 0.03522768020629883, 0.03529523086547852, 0.03532185745239258, 0.03529872131347656, 0.03539971160888672, 0.035495967864990235, 0.035503807067871096, 0.03549590301513672, 0.035490463256835934, 0.03538761520385742, 0.03536227035522461, 0.0353259506225586, 0.03540636825561523, 0.03555052947998047, 0.03552473449707031, 0.03555583953857422, 0.03555129623413086, 0.03555286407470703, 0.03563766479492188, 0.035628448486328124, 0.03559894561767578, 0.03564479827880859, 0.03562150573730469, 0.03561062240600586, 0.035768318176269534, 0.03579084777832031, 0.03582313537597656, 0.03572579193115234, 0.035737598419189456, 0.035729408264160156, 0.03570687866210937, 0.03589734268188476, 0.03592806243896484, 0.03588710403442383, 0.03589286422729492, 0.03592025756835938, 0.03588406372070312, 0.03583894348144531, 0.03586975860595703, 0.03589011383056641, 0.03590335845947266, 0.03582777786254883, 0.03510486221313477, 0.03497983932495117, 0.03491839981079101, 0.034904064178466795, 0.03495116806030273, 0.034971649169921876, 0.034988033294677735, 0.03504742431640625, 
0.035110080718994144, 0.03507283020019531, 0.03504947280883789, 0.0350164794921875, 0.03506198501586914, 0.03516767883300781, 0.035138111114501956, 0.035141632080078124, 0.03513923263549805, 0.03505145645141602, 0.03512771224975586, 0.035227649688720705, 0.035239486694335936, 0.03535712051391601, 0.035250175476074216, 0.03546931076049804, 0.035266559600830076, 0.03531078338623047, 0.03548617553710937, 0.035512672424316404, 0.03561881637573242, 0.03550531387329101, 0.035461982727050784, 0.03547465515136719, 0.035427104949951174, 0.03551596832275391, 0.035615169525146484, 0.03555516815185547, 0.03555942535400391, 0.03551043319702148, 0.03555737686157227, 0.0355676155090332, 0.035538944244384765, 0.035636287689208984, 0.03564230346679687, 0.035667167663574216, 0.035633953094482425, 0.03567001724243164, 0.03582534408569336, 0.03578009414672852, 0.035702945709228516, 0.035794815063476565, 0.035667743682861325, 0.03577718353271484, 0.03589971160888672, 0.03595267105102539, 0.035931232452392575, 0.03583273696899414, 0.03593830490112305, 0.03588710403442383, 0.035896766662597654, 0.03588768005371094, 0.03596249771118164, 0.03601446533203125, 0.035624801635742186, 0.03511299133300781, 0.034950496673583985, 0.03485308837890625, 0.03489865493774414, 0.03500217437744141, 0.03500435256958008, 0.03491766357421875, 0.034982654571533205, 0.035118495941162106, 0.035116928100585934, 0.0351231689453125, 0.03520406341552734, 0.03523788833618164, 0.03536806488037109, 0.035285888671875, 0.03519075012207031, 0.03510204696655273, 0.03508035278320312, 0.03517494583129883, 0.03518873596191406, 0.03534643173217773, 0.03536896133422852, 0.035363937377929686, 0.03538217544555664, 0.03538691329956055, 0.03537148666381836, 0.03540172958374024, 0.03657523345947265, 0.03533824157714844, 0.03534783935546875, 0.03532863998413086, 0.03546271896362305, 0.0354015998840332, 0.03540825653076172, 0.03542153549194336, 0.035412830352783205, 0.03560835266113281, 0.03558832168579101, 0.03560038375854492, 0.03616767883300781, 0.03578870391845703, 0.03569782257080078, 0.035680702209472656, 0.035641185760498045, 0.035625633239746095, 0.03577446365356445, 0.035844097137451174, 0.03582534408569336, 0.03572329711914062, 0.035771903991699217, 0.03585273742675781, 0.035813472747802735, 0.035784961700439454, 0.03582361602783203, 0.03580518341064453, 0.03583795166015625, 0.03598518371582031, 0.035944671630859376, 0.035880321502685546, 0.035936702728271486, 0.035950206756591795, 0.03601670455932617, 0.035854656219482424, 0.03525820922851562, 0.035017345428466795, 0.034928638458251955, 0.03493641662597656, 0.03496182250976562, 0.03490195083618164, 0.03500624084472656, 0.035041534423828125, 0.035043361663818356, 0.03500851058959961, 0.03507388687133789, 0.0352011833190918, 0.0352542724609375, 0.03518454360961914, 0.03517449569702148, 0.0351723518371582, 0.035141632080078124, 0.035151870727539065, 0.03516416168212891, 0.03528073501586914, 0.03533363342285156, 0.03712243270874024, 0.03521155166625976, 0.03518624114990234, 0.035366878509521485, 0.03535468673706055, 0.035368801116943356, 0.035457599639892576, 0.03545087814331055, 0.035350494384765625, 0.03530140686035156, 0.03534643173217773, 0.035340286254882815, 0.035366912841796876, 0.035495616912841796, 0.03554329681396484, 0.035519550323486325, 0.035525409698486325, 0.03556963348388672, 0.035671680450439454, 0.035619071960449215, 0.03556156921386719, 0.03553923034667969, 0.03557331085205078, 0.035640926361083985, 0.03560726547241211, 0.03575001525878906, 0.03575398254394531, 
0.035784702301025394, 0.03579289627075195, 0.0358458251953125, 0.035873088836669925, 0.03595199966430664, 0.03606182479858398, 0.036036609649658206, 0.03598697662353516, 0.035967456817626954, 0.03589734268188476, 0.03590963363647461, 0.03585004806518555, 0.035874942779541015, 0.035993663787841794, 0.035649600982666015, 0.035090110778808595, 0.03489107131958008, 0.034861759185791014, 0.03486751937866211, 0.03487535858154297, 0.034977535247802734, 0.03508047866821289, 0.03501670455932617, 0.03508768081665039, 0.03515871810913086, 0.03511705780029297, 0.03511203384399414, 0.03506419372558594, 0.035121696472167965, 0.03516211318969727, 0.03523891067504883, 0.03515692901611328, 0.0351396484375, 0.035155200958251955, 0.03518467330932617, 0.03532668685913086, 0.03544268798828125, 0.03540505599975586, 0.035427070617675784, 0.03546441650390625, 0.035819393157958984, 0.0353719367980957, 0.035350528717041016, 0.03532745742797851, 0.03535516738891602, 0.03531161499023437, 0.035363903045654295, 0.03547846221923828, 0.035499519348144534, 0.035506431579589846, 0.035553535461425784, 0.03559423828125, 0.03561414337158203, 0.03556204986572266, 0.03566694259643555, 0.03564031982421875, 0.03564748764038086, 0.0356453742980957, 0.035700801849365235, 0.03560038375854492, 0.03556556701660156, 0.03565740966796875, 0.035720958709716796, 0.035797054290771485, 0.03572582244873047, 0.03580825424194336, 0.03587583923339844, 0.035972415924072264, 0.03591020965576172, 0.03585036849975586, 0.03584819030761719, 0.03583737564086914, 0.03583443069458008, 0.035865695953369144, 0.03601619338989258, 0.035982177734375, 0.03601753616333008, 0.03587929534912109, 0.03518716812133789, 0.035022113800048826, 0.03488022232055664, 0.03488972854614258, 0.034903102874755856, 0.03498899078369141, 0.03500851058959961, 0.035103904724121095, 0.03513020706176758, 0.035092479705810545, 0.035071678161621093, 0.035176193237304684, 0.03530400085449219, 0.035266559600830076, 0.035225601196289064, 0.03518054580688477, 0.03521104049682617, 0.035200801849365235, 0.03516870498657226, 0.035209217071533204, 0.03527475357055664, 0.03539763259887695, 0.03545481491088867, 0.03540188980102539, 0.035390846252441405, 0.035404415130615235, 0.03546726226806641, 0.03550207901000976, 0.0354442253112793, 0.03533875274658203, 0.035281982421875, 0.035344894409179685, 0.035505790710449216, 0.035638080596923825, 0.03551747131347656, 0.03565667343139649, 0.0356126708984375, 0.035563518524169925, 0.0355365104675293, 0.03563967895507812, 0.0361267204284668, 0.03568611145019531, 0.03566211318969727, 0.035661823272705076, 0.035688449859619144, 0.03576623916625977, 0.035743358612060544, 0.03576387023925781, 0.03575177764892578, 0.0356965446472168, 0.03581235122680664, 0.0360079345703125, 0.03590732955932617, 0.035901695251464846, 0.035983360290527344, 0.03599257659912109, 0.035961856842041014, 0.035929088592529294, 0.03586288070678711, 0.03581171035766602, 0.03592425537109375, 0.0359813117980957, 0.03571699142456055, 0.03508278274536133, 0.034995521545410156, 0.03491910552978516, 0.03491635131835937, 0.03492169570922852, 0.035005214691162106, 0.03504684829711914, 0.035153888702392576, 0.03518320083618164, 0.03514777755737305, 0.035167808532714846, 0.03519942474365234, 0.03516416168212891, 0.03512934494018555, 0.035125247955322264, 0.035200416564941404, 0.035268318176269534, 0.03516096115112305, 0.03512646484375, 0.03531039810180664, 0.03540105438232422, 0.035430622100830075, 0.035307167053222656, 0.0352509765625, 0.0353155517578125, 0.03530972671508789, 0.03540787124633789, 
0.03544063949584961, 0.0353768310546875, 0.035385665893554685, 0.0353702392578125, 0.035368896484375, 0.035324737548828124, 0.03854441452026367, 0.035313919067382814, 0.03555196762084961, 0.03556310272216797, 0.035602241516113284, 0.03553750228881836, 0.03568780899047851, 0.03569113540649414, 0.03567196655273437, 0.03561276626586914, 0.035631103515625, 0.035606529235839846, 0.03565334320068359, 0.03572969436645508, 0.03575193786621094, 0.03587071990966797, 0.0359090576171875, 0.03579251098632812, 0.03580140686035156, 0.035815326690673825, 0.03581983947753906, 0.035993953704833985, 0.0359375991821289, 0.035918495178222654, 0.035989406585693356, 0.03597740936279297, 0.03591513442993164, 0.03592665481567383, 0.03589734268188476]",tokens/s,28.191974424316793,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4192.882688,5932.711936,0.0,5530.189824,5138.859008,s,1,11.4970068359375,11.4970068359375,0.0,11.4970068359375,11.4970068359375,11.4970068359375,11.4970068359375,[11.4970068359375],,kWh,0.00013080160697921504,1.441463058961622e-05,5.486587722602909e-05,0.00020008211479486036,,MB,1499.344896,5951.586304,0.0,5534.384128,4845.009408,s,10,2.131956008911133,0.21319560089111328,0.00040749075404557234,0.21321597290039063,0.21354927215576172,0.21370473556518554,0.2138291062927246,"[0.21237542724609376, 0.2135147247314453, 0.21265402221679688, 0.2134705352783203, 0.21304867553710938, 0.21324137878417967, 0.2134222412109375, 0.21319056701660155, 0.21386019897460937, 0.21317823791503906]",tokens/s,1200.7752455021268,kWh,6.292090230230224e-06,6.939105799812934e-07,4.1615104214050676e-06,1.1147511231616583e-05,tokens/kWh,22964767.17367483,MB,1527.394304,5951.586304,0.0,5534.384128,5014.504448,s,10,19.475051879882812,1.9475051879882812,0.0027264220118194066,1.9476490478515625,1.951285803222656,1.951456805419922,1.9515936071777344,"[1.9490078125, 1.946654541015625, 1.947730712890625, 1.9491907958984376, 1.9475673828125, 1.951247802734375, 1.9516278076171876, 1.9446865234375, 1.9438685302734375, 1.943469970703125]",tokens/s,32.34907942149168,kWh,5.678331705684841e-05,6.263050281785705e-06,3.6924411336194775e-05,9.997077867482889e-05,tokens/kWh,630184.1481591103,,s,630,19.472105400085425,0.03090810380965945,0.0003773029946473923,0.03083318328857422,0.03122623672485352,0.03135997190475464,0.03303018272399903,"[0.03138403129577637, 0.03111564826965332, 0.030939136505126953, 0.0309552001953125, 0.030908735275268554, 0.030740480422973632, 0.031003711700439453, 0.03102729606628418, 0.031169376373291015, 0.03091036796569824, 0.030877952575683595, 0.031092575073242187, 0.03089731216430664, 0.03069753646850586, 0.031061952590942382, 0.031156192779541014, 0.03148252868652344, 0.03133161544799805, 0.03131692886352539, 0.031448863983154295, 0.031308000564575195, 0.031045631408691408, 0.03092076873779297, 0.03099625587463379, 0.030933151245117186, 
0.030723360061645506, 0.030726879119873048, 0.031092735290527345, 0.03082415962219238, 0.03087798309326172, 0.030848255157470705, 0.030613664627075196, 0.030671775817871092, 0.03071334457397461, 0.030737632751464843, 0.0308623046875, 0.030865407943725585, 0.030943231582641603, 0.030950592041015624, 0.03105446434020996, 0.03109222412109375, 0.031152416229248046, 0.030975679397583007, 0.031020959854125976, 0.030833471298217775, 0.030650367736816408, 0.03064998435974121, 0.0307872314453125, 0.030745311737060545, 0.030740480422973632, 0.030794815063476564, 0.030739328384399415, 0.031125856399536134, 0.03100764846801758, 0.030822784423828124, 0.030929344177246094, 0.030697439193725588, 0.03123740768432617, 0.030673376083374025, 0.030542112350463866, 0.030976192474365234, 0.03066169548034668, 0.030591039657592772, 0.03115395164489746, 0.031012128829956055, 0.03154774475097656, 0.031365472793579104, 0.03088342475891113, 0.03098896026611328, 0.031220832824707032, 0.031656063079833985, 0.0309215030670166, 0.031169727325439454, 0.03077350425720215, 0.030859840393066405, 0.030698495864868162, 0.030714879989624022, 0.03068864059448242, 0.030999168395996094, 0.03094118309020996, 0.030863616943359377, 0.030868959426879884, 0.03086774444580078, 0.030811328887939454, 0.030796768188476563, 0.03086729621887207, 0.03097916793823242, 0.03069580841064453, 0.031201824188232422, 0.03062713623046875, 0.030692031860351562, 0.03061507225036621, 0.030716384887695312, 0.030604480743408203, 0.030603904724121094, 0.03057792091369629, 0.030825504302978514, 0.031970943450927734, 0.031086879730224608, 0.030713024139404296, 0.03075155258178711, 0.030662176132202148, 0.030982656478881834, 0.03061756706237793, 0.030955263137817383, 0.03053011131286621, 0.03085276794433594, 0.030816287994384767, 0.030840831756591795, 0.030721792221069334, 0.030869728088378907, 0.030838815689086915, 0.031139839172363282, 0.030895328521728514, 0.030962272644042967, 0.030810304641723633, 0.03089379119873047, 0.03072960090637207, 0.03096259117126465, 0.030698976516723632, 0.030978591918945312, 0.03094528007507324, 0.03095961570739746, 0.03067692756652832, 0.030928991317749024, 0.030770816802978516, 0.031194719314575195, 0.03131839942932129, 0.030769151687622072, 0.030723968505859376, 0.030691423416137696, 0.03057872009277344, 0.03057254409790039, 0.030663711547851562, 0.030667743682861327, 0.030908416748046875, 0.03126067161560059, 0.03254188919067383, 0.03114793586730957, 0.031111648559570312, 0.03105836868286133, 0.030833984375, 0.030697792053222657, 0.030799552917480467, 0.030866048812866212, 0.03092678451538086, 0.031197311401367188, 0.03098624038696289, 0.030873504638671875, 0.030750335693359374, 0.03073276710510254, 0.030892032623291016, 0.030648448944091796, 0.030475776672363283, 0.031000959396362306, 0.030690879821777345, 0.03082080078125, 0.030864831924438476, 0.03066707229614258, 0.031107200622558593, 0.030888063430786133, 0.030846752166748048, 0.030908639907836915, 0.03086079978942871, 0.03067955207824707, 0.031297216415405275, 0.03084115219116211, 0.03086489677429199, 0.03079846382141113, 0.03067027282714844, 0.030996000289916992, 0.030916864395141602, 0.031193471908569335, 0.031168800354003906, 0.030942975997924806, 0.030925056457519532, 0.031063583374023436, 0.03097555160522461, 0.03111939239501953, 0.031128448486328127, 0.030780672073364258, 0.030732608795166014, 0.030716384887695312, 0.030664703369140626, 0.0306997127532959, 0.030689056396484376, 0.03081216049194336, 0.03099443244934082, 0.031192928314208983, 0.031141439437866212, 
0.03095187187194824, 0.030805728912353517, 0.030751007080078125, 0.030810111999511718, 0.030724096298217773, 0.030713855743408205, 0.03073404884338379, 0.03331452941894531, 0.0312956485748291, 0.0313143367767334, 0.03097382354736328, 0.030821504592895507, 0.031145984649658204, 0.03074355125427246, 0.030713855743408205, 0.030707008361816408, 0.030671552658081056, 0.030650272369384765, 0.03087984085083008, 0.03077120018005371, 0.030688480377197267, 0.03067568016052246, 0.031012928009033203, 0.031033344268798828, 0.031227647781372072, 0.03096406364440918, 0.030875680923461914, 0.030689151763916015, 0.030885887145996094, 0.03225804901123047, 0.032092159271240234, 0.030881759643554687, 0.030906272888183595, 0.030924287796020508, 0.030847423553466796, 0.03101286315917969, 0.030727775573730468, 0.030721920013427734, 0.030792640686035155, 0.030730016708374025, 0.030793727874755858, 0.030832767486572266, 0.030977920532226564, 0.030852895736694336, 0.030776639938354493, 0.030907007217407228, 0.030773536682128906, 0.030930944442749023, 0.03100876808166504, 0.030835039138793947, 0.030635808944702147, 0.03146675109863281, 0.030724735260009767, 0.030685184478759765, 0.030731552124023436, 0.0311549129486084, 0.03092403221130371, 0.030651136398315428, 0.030527488708496094, 0.03060326385498047, 0.03076736068725586, 0.030744319915771486, 0.031062496185302733, 0.030982240676879883, 0.030838655471801757, 0.030773279190063476, 0.03333059310913086, 0.03091219139099121, 0.031012096405029298, 0.03088889694213867, 0.03096828842163086, 0.030636287689208983, 0.030602783203125, 0.030749311447143556, 0.030619487762451172, 0.031032703399658204, 0.03376924896240235, 0.03111612892150879, 0.031205375671386718, 0.031022592544555663, 0.030740991592407226, 0.03075472068786621, 0.030719200134277345, 0.03061849594116211, 0.03054198455810547, 0.03073212814331055, 0.030583135604858397, 0.03058038330078125, 0.03078963279724121, 0.03047785568237305, 0.030513792037963866, 0.030547807693481446, 0.030727968215942383, 0.030680736541748046, 0.030722623825073243, 0.030656511306762696, 0.030558528900146483, 0.030648000717163087, 0.030663808822631835, 0.030637088775634765, 0.030641984939575196, 0.03066268730163574, 0.030785375595092774, 0.031002656936645508, 0.030873727798461915, 0.030737920761108397, 0.03071232032775879, 0.030666175842285155, 0.030619359970092772, 0.030538591384887695, 0.030662208557128905, 0.030765087127685546, 0.031100896835327147, 0.03136556816101074, 0.03139993667602539, 0.031086591720581053, 0.031030784606933592, 0.03098415946960449, 0.03101136016845703, 0.030892032623291016, 0.031137472152709962, 0.0312260799407959, 0.03175635147094726, 0.030660512924194337, 0.030566144943237304, 0.031170560836791993, 0.030892032623291016, 0.030730112075805664, 0.030985952377319336, 0.031047807693481446, 0.030855743408203126, 0.030847808837890626, 0.030616447448730467, 0.03071183967590332, 0.030742528915405274, 0.030857215881347655, 0.030739519119262697, 0.030894111633300782, 0.03105580711364746, 0.03114512062072754, 0.03116422462463379, 0.03123404884338379, 0.031516895294189454, 0.031343711853027346, 0.03100652885437012, 0.031087488174438477, 0.03106790351867676, 0.03115839958190918, 0.031205087661743163, 0.031248319625854493, 0.0313656005859375, 0.03125014305114746, 0.031148319244384767, 0.031172607421875, 0.031236095428466795, 0.03153081512451172, 0.03127884864807129, 0.0312611198425293, 0.03118601608276367, 0.030994400024414063, 0.031202463150024413, 0.03128534317016601, 0.03132156753540039, 0.030947296142578126, 
0.030832895278930662, 0.03063747215270996, 0.03063155174255371, 0.030641120910644533, 0.03055996894836426, 0.030619935989379884, 0.030862432479858398, 0.031069087982177734, 0.03175836753845215, 0.031164384841918944, 0.030658079147338868, 0.030644704818725586, 0.030723360061645506, 0.030822975158691406, 0.03069148826599121, 0.030893632888793945, 0.030759231567382812, 0.030678144454956056, 0.03056947135925293, 0.03067241668701172, 0.030593536376953126, 0.030548032760620118, 0.030702592849731446, 0.030714784622192383, 0.03131158447265625, 0.031035167694091797, 0.03103385543823242, 0.03079897689819336, 0.03077212715148926, 0.030610496520996094, 0.030593088150024414, 0.030689311981201173, 0.03309996795654297, 0.03138419151306152, 0.031121568679809572, 0.030979936599731445, 0.031038944244384765, 0.03080246353149414, 0.03081046485900879, 0.03120502471923828, 0.030864416122436525, 0.03123094367980957, 0.030881792068481444, 0.030680639266967773, 0.030927616119384764, 0.030713375091552735, 0.03078160095214844, 0.030656063079833984, 0.03084332847595215, 0.030570816040039063, 0.03059052848815918, 0.030580671310424804, 0.030691328048706053, 0.030677183151245117, 0.030689184188842773, 0.030754911422729493, 0.030914560317993164, 0.03089948844909668, 0.031166175842285156, 0.031116287231445314, 0.030875104904174805, 0.03071414375305176, 0.03077756881713867, 0.030709920883178712, 0.030760831832885742, 0.030764543533325195, 0.0312076473236084, 0.030672576904296874, 0.03086534309387207, 0.030761632919311523, 0.030786592483520506, 0.03092937660217285, 0.030894559860229494, 0.03065247917175293, 0.03083798408508301, 0.030722368240356446, 0.03079190444946289, 0.033167583465576175, 0.03257855987548828, 0.03202764892578125, 0.03097932815551758, 0.03148915290832519, 0.030866527557373048, 0.030721759796142577, 0.03085113525390625, 0.030675712585449218, 0.030762239456176756, 0.03130486488342285, 0.031278144836425784, 0.031071168899536133, 0.03080726432800293, 0.030693151473999022, 0.030670848846435547, 0.0306112003326416, 0.030815999984741212, 0.030689792633056642, 0.03071107292175293, 0.030828960418701173, 0.03069811248779297, 0.030613183975219727, 0.03057459259033203, 0.030682655334472658, 0.030519519805908203, 0.030658815383911135, 0.030514720916748048, 0.030591455459594727, 0.030693376541137695, 0.030483648300170897, 0.0305467529296875, 0.030660192489624025, 0.03077996826171875, 0.030799903869628907, 0.03077939224243164, 0.030742271423339844, 0.030721248626708983, 0.030918880462646483, 0.031023744583129884, 0.030916032791137697, 0.031184864044189454, 0.031047487258911134, 0.030726943969726563, 0.03078758430480957, 0.03113529586791992, 0.031267263412475585, 0.03133612823486328, 0.03117228889465332, 0.031244800567626952, 0.03101625633239746, 0.030864191055297852, 0.03072204780578613, 0.030756864547729492, 0.03067225646972656, 0.030762943267822265, 0.03068998336791992, 0.030896127700805662, 0.03075926399230957, 0.030733312606811523, 0.030663616180419923, 0.031127487182617188, 0.03326508712768555, 0.031123680114746095, 0.0308569278717041, 0.030884159088134765, 0.030733760833740235, 0.030692319869995117, 0.03062348747253418, 0.03069740867614746, 0.030741600036621092, 0.03094828796386719, 0.03085087966918945, 0.03140912055969238, 0.03101081657409668, 0.031214815139770508, 0.031017759323120116, 0.031066112518310547, 0.0332196159362793, 0.03135324859619141, 0.03120979118347168, 0.031236352920532225, 0.03103059196472168, 0.03139983940124512, 0.030933792114257813, 0.030672895431518556, 0.030639616012573243, 
0.030736000061035156, 0.030516096115112305, 0.03055615997314453, 0.030656511306762696, 0.030539775848388673, 0.030484479904174806, 0.03067625617980957, 0.030622432708740235, 0.031156320571899414, 0.030759904861450197, 0.031025184631347656, 0.030624736785888673, 0.03068022346496582, 0.030653215408325194, 0.031073280334472656, 0.030976512908935546, 0.03109119987487793, 0.03084492874145508, 0.030699520111083983, 0.030701568603515625, 0.030519519805908203, 0.030592832565307617, 0.030678367614746092, 0.030581375122070313, 0.03050912094116211, 0.03045984077453613, 0.03049238395690918, 0.03052195167541504, 0.030570207595825197, 0.030504608154296876, 0.030593408584594726, 0.03065235137939453, 0.030373888015747072, 0.03076300811767578, 0.030554176330566406, 0.030619071960449218, 0.030540288925170897, 0.030803136825561524, 0.030659391403198243, 0.03061529541015625, 0.030503231048583983, 0.030499040603637697, 0.030802751541137697, 0.031030176162719726, 0.03285932922363281, 0.030814048767089843, 0.030980096817016602, 0.030954816818237304, 0.03103299140930176, 0.031160959243774412, 0.030939136505126953, 0.030940223693847656, 0.03084998321533203, 0.030835872650146486, 0.03078633689880371, 0.03091974449157715, 0.030743167877197265, 0.03068899154663086, 0.030890655517578126, 0.031322111129760744, 0.030709760665893555, 0.030592191696166993, 0.030644800186157225, 0.030539104461669922, 0.03068172836303711, 0.030566688537597655, 0.030735904693603516, 0.030465663909912108, 0.03106287956237793, 0.0312108154296875, 0.031085248947143554, 0.0309616641998291, 0.031055200576782228, 0.030933664321899413, 0.03119923210144043, 0.031269887924194335, 0.0308887996673584, 0.030717472076416015, 0.03085171127319336, 0.030610719680786134, 0.030677728652954102, 0.030769472122192384, 0.030813024520874022, 0.030763872146606447, 0.031039487838745116, 0.030830528259277342, 0.030891199111938477, 0.030751232147216798, 0.030699935913085938, 0.03072934341430664, 0.030757728576660155, 0.031055871963500976, 0.030719839096069335, 0.03056656074523926, 0.030636032104492186, 0.030465183258056642, 0.030810976028442384, 0.030807104110717774, 0.030715007781982422, 0.03054684829711914, 0.030610143661499025, 0.03068742370605469, 0.030676671981811524, 0.030744352340698243, 0.03084137535095215, 0.031059648513793944, 0.031239776611328124, 0.031083200454711916, 0.03116422462463379, 0.031084768295288084, 0.031149824142456053, 0.030932544708251953]",tokens/s,32.353974419080295,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,820.568064,545.128448,0.0,159.383552,141.760512,s,1,7.52593798828125,7.52593798828125,0.0,7.52593798828125,7.52593798828125,7.52593798828125,7.52593798828125,[7.52593798828125],,kWh,1.1929482570864517e-05,1.3086369682674254e-06,3.796391925936504e-06,1.7034511465068445e-05,,MB,1316.429824,639.500288,0.0,222.298112,184.771584,s,24,0.19275971317291263,0.008031654715538024,0.00013685574718607727,0.007994096040725707,0.008121190452575684,0.00816301121711731,0.008521267213821411,"[0.008138912200927734, 0.008043968200683593, 0.00794432020187378, 0.008167263984680175, 0.007943039894104003, 0.008028287887573243, 0.007926688194274903, 0.007997663974761962, 0.007930496215820312, 0.00798860788345337, 0.007970687866210937, 0.008079839706420898, 0.008044832229614258, 0.00800051212310791, 0.007990047931671143, 0.00800233554840088, 0.007972832202911378, 0.008038911819458008, 0.007977888107299805, 0.007990528106689453, 0.007970464229583741, 0.008627008438110352, 0.008008928298950195, 0.007975647926330567]",tokens/s,31873.88017375086,kWh,2.3563470313084127e-07,2.5986435966379434e-08,1.3603902910732685e-07,3.976601682045476e-07,tokens/kWh,643765758.9792078,MB,1355.763712,639.500288,0.0,222.298112,184.774144,s,24,10.049942413330077,0.4187476005554199,0.0027859503056213895,0.41757308959960937,0.42247306213378905,0.4248175552368164,0.4264380285644531,"[0.4252063293457031, 0.422614501953125, 0.4163011474609375, 0.42214303588867186, 0.41771307373046873, 0.4173616027832031, 0.4268059387207031, 0.4166664428710937, 0.42110736083984374, 0.4180314025878906, 0.4173348693847656, 0.4180187377929688, 0.41661920166015626, 0.420090087890625, 0.41743310546875, 0.41777484130859377, 0.41942611694335935, 0.4169107666015625, 0.417165283203125, 0.4180433349609375, 0.4169783935546875, 0.416890869140625, 0.4167825317382812, 0.4165234375]",tokens/s,150.44862326718493,kWh,1.2079631277075414e-05,1.3321726510641887e-06,4.540700823393451e-06,1.7952504751533054e-05,tokens/kWh,3509259.619865585,,s,1512,10.038981662273391,0.006639538136424211,0.00017462487923766267,0.006607951879501342,0.006700783967971802,0.006798179388046265,0.007426039247512819,"[0.006641183853149414, 0.006812128067016601, 0.006711391925811767, 0.006688672065734863, 0.00665334415435791, 0.006682591915130615, 0.006682432174682617, 0.006789951801300049, 0.006936639785766602, 0.0068197760581970215, 0.006799359798431396, 0.006815743923187256, 0.0068681597709655765, 0.006699552059173584, 0.006713856220245362, 0.006800191879272461, 0.006654176235198974, 0.006678847789764404, 0.006679103851318359, 0.006706624031066894, 0.006943168163299561, 0.00667628812789917, 0.006628767967224121, 0.006635392189025879, 0.006663360118865967, 0.006593855857849121, 0.006654367923736572, 0.006604800224304199, 0.0066427202224731445, 0.0065996479988098145, 0.006651904106140137, 0.006574368000030518, 0.006586080074310303, 0.006728864192962646, 0.006587232112884521, 0.006625279903411865, 0.00662716817855835, 0.006565919876098633, 0.006594687938690186, 0.007081952095031738, 0.008980575561523438, 0.008057951927185059, 0.007193280220031738, 0.006678463935852051, 0.006615007877349853, 0.00663372802734375, 0.006608640193939209, 0.006598911762237549, 0.006602752208709717, 0.006612991809844971, 0.006622687816619873, 0.006638239860534668, 0.006635680198669434, 0.006608128070831299, 0.006611423969268799, 0.006578464031219483, 0.006588128089904785, 0.006612800121307373, 0.006669888019561768, 0.006633952140808106, 0.006625184059143066, 
0.006615295886993409, 0.006639616012573242, 0.006566175937652588, 0.006580575942993164, 0.006629280090332031, 0.006659840106964111, 0.006896768093109131, 0.006875008106231689, 0.006742527961730957, 0.006738431930541992, 0.006645760059356689, 0.006659904003143311, 0.00661900806427002, 0.006760767936706543, 0.006707263946533203, 0.006625216007232666, 0.006649856090545654, 0.0066416640281677245, 0.006606847763061524, 0.006735455989837646, 0.006733280181884765, 0.0067123198509216305, 0.008386495590209962, 0.007657536029815674, 0.007921279907226563, 0.006658559799194336, 0.006645567893981933, 0.006628511905670166, 0.006582208156585693, 0.0066139202117919925, 0.006631423950195312, 0.006608416080474853, 0.006684127807617187, 0.006584352016448975, 0.006601183891296387, 0.00662169599533081, 0.006621183872222901, 0.006590240001678467, 0.006864543914794922, 0.006619647979736328, 0.006606912136077881, 0.006627327919006347, 0.006572095870971679, 0.006583744049072265, 0.00658892822265625, 0.006627679824829102, 0.006626976013183594, 0.0066208958625793455, 0.006614751815795898, 0.006629280090332031, 0.00655836820602417, 0.0065679998397827144, 0.0065924482345581055, 0.00656387186050415, 0.006553023815155029, 0.006672927856445313, 0.006543360233306885, 0.0065550079345703125, 0.006599391937255859, 0.00655951976776123, 0.006576096057891846, 0.006584256172180176, 0.006526527881622314, 0.006688767910003662, 0.006584256172180176, 0.006536511898040771, 0.006568511962890625, 0.006607999801635742, 0.006587103843688965, 0.006613279819488525, 0.0066641921997070315, 0.006752255916595459, 0.006623231887817383, 0.00659222412109375, 0.006595104217529297, 0.00661081600189209, 0.006573952198028564, 0.00659660816192627, 0.00662278413772583, 0.006635968208312988, 0.006615039825439453, 0.006621183872222901, 0.0066455998420715335, 0.006594143867492676, 0.006773312091827393, 0.006558047771453857, 0.006591936111450195, 0.006648032188415527, 0.006559584140777588, 0.006582240104675293, 0.006635039806365967, 0.006580895900726318, 0.006610432147979736, 0.006603136062622071, 0.006547359943389892, 0.006740320205688477, 0.006604671955108643, 0.006565599918365478, 0.006604320049285889, 0.006573919773101807, 0.0065977277755737305, 0.006621183872222901, 0.006547008037567139, 0.006579552173614502, 0.006668543815612793, 0.006572703838348389, 0.006736000061035156, 0.006639488220214844, 0.0065755839347839354, 0.006564383983612061, 0.006584479808807373, 0.006557119846343994, 0.006617856025695801, 0.0066184959411621095, 0.006545216083526611, 0.0065495362281799314, 0.0066152639389038086, 0.006548863887786865, 0.0065895037651062015, 0.0064997758865356445, 0.006603104114532471, 0.006596864223480225, 0.00652623987197876, 0.006568511962890625, 0.006567840099334717, 0.006581855773925781, 0.00655731201171875, 0.006591008186340332, 0.006469632148742676, 0.00657203197479248, 0.006584415912628174, 0.006566080093383789, 0.006588128089904785, 0.006592864036560058, 0.006574975967407227, 0.006581024169921875, 0.006613120079040528, 0.006612544059753418, 0.0066063361167907715, 0.006576000213623047, 0.006611904144287109, 0.006598176002502442, 0.006615744113922119, 0.00657097578048706, 0.006673215866088867, 0.00664134407043457, 0.006584640026092529, 0.006602752208709717, 0.006566976070404053, 0.006593472003936768, 0.0066119999885559085, 0.006553631782531738, 0.006640575885772705, 0.0066068158149719235, 0.006598559856414795, 0.006568384170532226, 0.006667551994323731, 0.0068468799591064455, 0.006731455802917481, 0.006660511970520019, 0.006598559856414795, 
0.0066540160179138184, 0.006725312232971192, 0.006670591831207276, 0.0068195838928222655, 0.0066128320693969725, 0.006578720092773437, 0.006554944038391113, 0.006603328227996827, 0.006628416061401367, 0.006616000175476074, 0.006610208034515381, 0.006607264041900635, 0.006756991863250733, 0.006596415996551514, 0.008675200462341309, 0.00791974401473999, 0.007611839771270752, 0.006623839855194092, 0.006666079998016357, 0.006680607795715332, 0.006557663917541504, 0.006641536235809326, 0.006574207782745361, 0.00663759994506836, 0.006690783977508545, 0.00658841609954834, 0.006798848152160645, 0.00662332820892334, 0.0065929279327392575, 0.006656000137329102, 0.00653872013092041, 0.006692671775817871, 0.006582399845123291, 0.006609824180603028, 0.006595456123352051, 0.006600959777832031, 0.006758528232574463, 0.006936672210693359, 0.0066112642288208, 0.006666304111480713, 0.0066349759101867675, 0.006825471878051758, 0.0066447358131408694, 0.006743872165679931, 0.006625408172607422, 0.006588479995727539, 0.0066126399040222165, 0.006590015888214111, 0.006648608207702637, 0.0065886077880859375, 0.006571839809417725, 0.006604383945465088, 0.0066332478523254395, 0.006586048126220703, 0.006585343837738037, 0.006639328002929688, 0.006613215923309326, 0.006665887832641601, 0.006597280025482178, 0.0065981121063232425, 0.00660038423538208, 0.0065699200630187984, 0.006606624126434326, 0.006599775791168213, 0.006618912220001221, 0.006585343837738037, 0.006575136184692383, 0.006569664001464844, 0.0065679998397827144, 0.006545375823974609, 0.006555456161499024, 0.006613183975219727, 0.006616864204406738, 0.006613408088684082, 0.006598368167877198, 0.006576096057891846, 0.00661843204498291, 0.006625472068786621, 0.0065829439163208004, 0.006584479808807373, 0.00665177583694458, 0.0066735677719116215, 0.0066100478172302244, 0.006614783763885498, 0.0066022400856018066, 0.006598368167877198, 0.006612063884735108, 0.006639391899108887, 0.006643136024475097, 0.006701632022857666, 0.006604800224304199, 0.006625247955322266, 0.006672544002532959, 0.006476319789886474, 0.006558015823364258, 0.00668233585357666, 0.006669407844543457, 0.0066377601623535155, 0.006614719867706299, 0.00657206392288208, 0.006604800224304199, 0.006609375953674316, 0.00652953577041626, 0.006582272052764892, 0.0066232957839965825, 0.006635807991027832, 0.0065957121849060055, 0.006568831920623779, 0.006588096141815185, 0.006602911949157715, 0.006561823844909668, 0.006554463863372803, 0.00663647985458374, 0.006569983959197998, 0.0066126718521118165, 0.006603072166442871, 0.006586304187774658, 0.0065801281929016115, 0.0065927357673645015, 0.006554656028747559, 0.006601696014404297, 0.006637728214263916, 0.006612736225128174, 0.006642816066741944, 0.006628064155578614, 0.006651999950408935, 0.006638912200927735, 0.006660960197448731, 0.0065883522033691405, 0.006662144184112549, 0.00658351993560791, 0.0065883522033691405, 0.006595583915710449, 0.006615104198455811, 0.0066332478523254395, 0.006647808074951172, 0.00660211181640625, 0.006654463768005371, 0.006655424118041992, 0.0065809922218322756, 0.0065764799118041995, 0.006783616065979004, 0.006590623855590821, 0.006634592056274414, 0.006574975967407227, 0.006685088157653809, 0.006771455764770508, 0.0067704000473022465, 0.006561759948730469, 0.0066293439865112306, 0.006635551929473877, 0.006737919807434082, 0.006670335769653321, 0.006612256050109864, 0.006617824077606201, 0.0065756158828735355, 0.006488192081451416, 0.006605855941772461, 0.006587456226348877, 0.006616864204406738, 
0.007112959861755371, 0.006700799942016601, 0.008462016105651855, 0.007416128158569336, 0.0074898238182067875, 0.0090928316116333, 0.007427264213562012, 0.0066828479766845705, 0.006692704200744629, 0.006664608001708984, 0.00666864013671875, 0.006612991809844971, 0.006915743827819824, 0.006625631809234619, 0.006667391777038574, 0.006604703903198242, 0.006700160026550293, 0.006585951805114746, 0.006713312149047851, 0.006631423950195312, 0.006598720073699951, 0.006588704109191894, 0.00666806411743164, 0.00658841609954834, 0.006618591785430908, 0.007195328235626221, 0.006635583877563476, 0.006625152111053466, 0.006660160064697266, 0.006701183795928955, 0.0066529598236083985, 0.00667855978012085, 0.0065933442115783695, 0.006662144184112549, 0.006624320030212402, 0.006623487949371338, 0.006597311973571778, 0.006608416080474853, 0.006673888206481933, 0.006626304149627686, 0.0065963840484619144, 0.006625504016876221, 0.006619135856628418, 0.006817791938781738, 0.006619135856628418, 0.006635519981384277, 0.006673759937286377, 0.006617087841033936, 0.006633567810058594, 0.006683199882507324, 0.0066754879951477055, 0.006659039974212647, 0.0066928319931030275, 0.0066555519104003905, 0.006635263919830322, 0.006660416126251221, 0.0066154561042785645, 0.006617087841033936, 0.006635519981384277, 0.006509312152862549, 0.006601920127868652, 0.006623104095458985, 0.006593311786651611, 0.006592512130737305, 0.006606304168701172, 0.006607264041900635, 0.006613376140594482, 0.006671807765960693, 0.006635295867919922, 0.006600671768188476, 0.006621856212615967, 0.006612512111663818, 0.006619040012359619, 0.006578656196594238, 0.006581600189208984, 0.0065829439163208004, 0.006576128005981445, 0.006590464115142822, 0.006604288101196289, 0.006616960048675537, 0.006620128154754639, 0.006647168159484863, 0.006610623836517334, 0.0066269440650939945, 0.0066499199867248535, 0.006574560165405274, 0.006590911865234375, 0.006635007858276368, 0.00666806411743164, 0.006605375766754151, 0.006649504184722901, 0.006584832191467285, 0.006612991809844971, 0.006634880065917969, 0.0065972480773925785, 0.006623392105102539, 0.006553215980529785, 0.006568160057067871, 0.0066334400177001955, 0.006588096141815185, 0.006561471939086914, 0.006611072063446045, 0.006604928016662598, 0.006606656074523926, 0.006635519981384277, 0.006599264144897461, 0.006610720157623291, 0.0065784001350402835, 0.0065920958518981936, 0.006619616031646729, 0.006614943981170654, 0.00658025598526001, 0.00670249605178833, 0.006637919902801514, 0.006610496044158936, 0.006615647792816162, 0.006604896068572998, 0.006541279792785645, 0.006567679882049561, 0.006580512046813965, 0.0066007041931152345, 0.006618815898895264, 0.0065411520004272465, 0.0065766720771789555, 0.006611711978912353, 0.006601151943206787, 0.0066277761459350585, 0.006660096168518067, 0.00657203197479248, 0.0066112961769104, 0.006664896011352539, 0.006620128154754639, 0.006600671768188476, 0.006912032127380371, 0.006774784088134766, 0.006666431903839112, 0.0066477122306823734, 0.0066210880279541015, 0.007407936096191406, 0.00771449613571167, 0.006865119934082032, 0.006692224025726319, 0.006666719913482666, 0.006668352127075196, 0.006631584167480469, 0.006618847846984863, 0.006660096168518067, 0.006652224063873291, 0.0066005120277404785, 0.006710720062255859, 0.006804096221923828, 0.006688576221466064, 0.006633471965789795, 0.00664086389541626, 0.006652160167694092, 0.00657040023803711, 0.006631552219390869, 0.006624703884124756, 0.0066418561935424805, 0.006738592147827149, 0.006758111953735352, 
0.006627327919006347, 0.006635519981384277, 0.00661734390258789, 0.006784768104553223, 0.006635871887207032, 0.00658355188369751, 0.0065929279327392575, 0.0066109437942504885, 0.0065680317878723145, 0.006567840099334717, 0.006631423950195312, 0.00676803207397461, 0.006599264144897461, 0.006621183872222901, 0.006643904209136963, 0.006863776206970215, 0.00662169599533081, 0.006582335948944092, 0.0066457920074462894, 0.006574399948120117, 0.006578271865844727, 0.006599743843078613, 0.0065730881690979, 0.006551136016845703, 0.006659711837768555, 0.006800096035003662, 0.0068219838142395016, 0.006694816112518311, 0.006643712043762207, 0.006555647850036621, 0.006553599834442139, 0.0066167678833007814, 0.006852767944335938, 0.0066162881851196285, 0.0065732159614562986, 0.006598656177520752, 0.0065963840484619144, 0.006553023815155029, 0.0065636482238769535, 0.006601471900939942, 0.006603007793426514, 0.006588160037994385, 0.006634943962097168, 0.006616864204406738, 0.0066278080940246585, 0.006597983837127686, 0.006591455936431885, 0.006585408210754395, 0.0066097922325134275, 0.0065679998397827144, 0.006914048194885254, 0.006866943836212158, 0.0065697598457336425, 0.006625504016876221, 0.006590176105499268, 0.006608479976654052, 0.006565760135650635, 0.006615551948547363, 0.00679699182510376, 0.006604544162750244, 0.006642208099365234, 0.006590688228607177, 0.006590303897857666, 0.0065989441871643064, 0.0065784001350402835, 0.0066332478523254395, 0.006632639884948731, 0.006595456123352051, 0.006644768238067627, 0.006572095870971679, 0.006671072006225586, 0.006650015830993652, 0.006588160037994385, 0.006623263835906982, 0.006563744068145752, 0.006583680152893066, 0.006621535778045654, 0.006650432109832764, 0.0065552000999450685, 0.00663750410079956, 0.006633984088897705, 0.0065755839347839354, 0.006596479892730713, 0.006643871784210205, 0.006603328227996827, 0.006608479976654052, 0.0066146240234375, 0.006537248134613037, 0.00659168004989624, 0.006630112171173095, 0.00661900806427002, 0.006603040218353271, 0.006600927829742431, 0.006593376159667969, 0.006650303840637207, 0.006648255825042725, 0.006582528114318848, 0.006631167888641357, 0.006648064136505127, 0.006582015991210937, 0.0066416640281677245, 0.0065998082160949705, 0.006648736000061035, 0.006636576175689697, 0.00656441593170166, 0.006592991828918457, 0.006629280090332031, 0.006602752208709717, 0.00662278413772583, 0.006640063762664795, 0.006563839912414551, 0.006622367858886718, 0.006622047901153565, 0.006562975883483887, 0.006566751956939698, 0.006635519981384277, 0.00658841609954834, 0.006625631809234619, 0.006575776100158691, 0.006567200183868408, 0.0066072320938110355, 0.006541664123535156, 0.006842400074005127, 0.00677788782119751, 0.006572991847991943, 0.006591904163360596, 0.006601439952850342, 0.006590144157409668, 0.006596767902374267, 0.0066128640174865725, 0.006612991809844971, 0.006600863933563232, 0.006637087821960449, 0.006613823890686035, 0.006616735935211181, 0.006598015785217285, 0.006573791980743408, 0.0065937919616699215, 0.006636384010314941, 0.006548575878143311, 0.006612576007843017, 0.006631552219390869, 0.006602015972137451, 0.006577023983001709, 0.006620543956756592, 0.006574687957763672, 0.006633120059967041, 0.0066902079582214355, 0.006865856170654297, 0.006679711818695068, 0.006599071979522705, 0.006668352127075196, 0.0067090878486633304, 0.006670176029205322, 0.006645055770874023, 0.006687295913696289, 0.006652192115783691, 0.006616576194763184, 0.0066360321044921875, 0.006686528205871582, 0.006645088195800781, 
0.006648736000061035, 0.006639552116394043, 0.006781280040740967, 0.006659743785858155, 0.006649759769439698, 0.0066306557655334475, 0.0066260800361633305, 0.006660160064697266, 0.006604800224304199, 0.006647808074951172, 0.006671648025512695, 0.006654208183288574, 0.006662367820739746, 0.0066538238525390625, 0.006627488136291504, 0.006654176235198974, 0.006650047779083252, 0.006624639987945557, 0.006816192150115967, 0.006659999847412109, 0.006630847930908203, 0.006635392189025879, 0.006635935783386231, 0.00670963191986084, 0.00663270378112793, 0.006609663963317871, 0.00659987211227417, 0.006636352062225342, 0.006658080101013183, 0.006600671768188476, 0.006609920024871826, 0.0066976318359375, 0.006534656047821045, 0.006577216148376465, 0.0065491838455200195, 0.006563936233520508, 0.0065821118354797365, 0.006553760051727295, 0.006583680152893066, 0.006617728233337402, 0.006543360233306885, 0.006565887928009034, 0.006581823825836181, 0.00654585599899292, 0.006629183769226075, 0.006604991912841797, 0.006584320068359375, 0.0065780158042907716, 0.006557951927185059, 0.0065616960525512694, 0.006588511943817139, 0.0065474557876586915, 0.006502336025238037, 0.006599008083343506, 0.006581952095031738, 0.006577055931091308, 0.006594336032867432, 0.00657203197479248, 0.006569983959197998, 0.006651904106140137, 0.0065732159614562986, 0.006587135791778565, 0.00660211181640625, 0.006565951824188233, 0.006592160224914551, 0.006604991912841797, 0.006575232028961182, 0.006567423820495606, 0.006598495960235596, 0.006536928176879883, 0.0065830078125, 0.006582176208496094, 0.006715392112731934, 0.0066128640174865725, 0.006581855773925781, 0.006595104217529297, 0.006592671871185302, 0.00655344009399414, 0.006616064071655273, 0.006568960189819336, 0.006563839912414551, 0.006583392143249512, 0.006593440055847168, 0.006567679882049561, 0.006604703903198242, 0.006599008083343506, 0.006607071876525879, 0.006606624126434326, 0.006598207950592041, 0.006599071979522705, 0.006663648128509522, 0.006582848072052002, 0.006586368083953857, 0.006645503997802734, 0.006599936008453369, 0.006664512157440186, 0.006625247955322266, 0.006590240001678467, 0.006601664066314697, 0.006612576007843017, 0.006590015888214111, 0.006607711791992188, 0.006617119789123535, 0.006606527805328369, 0.006579584121704102, 0.006646592140197754, 0.0065799360275268556, 0.006588191986083985, 0.006632031917572022, 0.006620351791381836, 0.00663212776184082, 0.006895296096801758, 0.006734272003173828, 0.006638688087463379, 0.0066507840156555175, 0.006503903865814209, 0.006670015811920166, 0.006634463787078857, 0.00658841609954834, 0.0066375679969787596, 0.006636864185333252, 0.006564608097076416, 0.00658355188369751, 0.0066423678398132326, 0.006685855865478515, 0.00666710376739502, 0.006606624126434326, 0.0065784001350402835, 0.0067933440208435055, 0.00665177583694458, 0.006612991809844971, 0.006605823993682861, 0.006605216026306152, 0.00659116792678833, 0.0068009281158447265, 0.006621471881866455, 0.006605216026306152, 0.006637248039245605, 0.006590144157409668, 0.0066112642288208, 0.006630847930908203, 0.006649439811706543, 0.006707712173461914, 0.006802207946777343, 0.006661727905273438, 0.007043136119842529, 0.0067164478302001954, 0.006676928043365478, 0.006636352062225342, 0.006711391925811767, 0.007634592056274414, 0.006659776210784912, 0.006606592178344727, 0.006637248039245605, 0.006587264060974121, 0.006696959972381592, 0.006713535785675049, 0.006595456123352051, 0.006602784156799316, 0.0066200637817382816, 0.006569151878356934, 
0.006594687938690186, 0.006607615947723388, 0.006585887908935547, 0.006637087821960449, 0.006833024024963379, 0.0066109437942504885, 0.006586368083953857, 0.006657599925994873, 0.006617311954498291, 0.006685184001922607, 0.006647168159484863, 0.006620575904846191, 0.006619455814361572, 0.006646399974822998, 0.006602208137512207, 0.0065848641395568844, 0.006617087841033936, 0.006495200157165528, 0.0065474557876586915, 0.006608799934387207, 0.006608607769012451, 0.006596992015838623, 0.00659065580368042, 0.006680384159088135, 0.006594560146331787, 0.0065998082160949705, 0.0066097922325134275, 0.006610655784606933, 0.006648096084594726, 0.006667679786682129, 0.006607071876525879, 0.006556032180786132, 0.006590400218963623, 0.006588031768798828, 0.006556096076965332, 0.006567935943603515, 0.006729887962341309, 0.006598495960235596, 0.00660591983795166, 0.006603072166442871, 0.006582880020141601, 0.006596000194549561, 0.006613408088684082, 0.0065617280006408695, 0.006627488136291504, 0.006653215885162354, 0.00656879997253418, 0.006616384029388428, 0.00659222412109375, 0.006572991847991943, 0.006599967956542969, 0.006720416069030762, 0.006608704090118408, 0.006657792091369629, 0.006570240020751953, 0.006602464199066162, 0.006598911762237549, 0.006587456226348877, 0.006587071895599365, 0.00660316801071167, 0.00660643196105957, 0.006715519905090332, 0.007143583774566651, 0.006592127799987793, 0.0066360321044921875, 0.006657919883728027, 0.006632512092590332, 0.006601823806762695, 0.006748191833496094, 0.006583648204803467, 0.006613471984863281, 0.00658841609954834, 0.006556960105895996, 0.006576863765716553, 0.006635519981384277, 0.0065924482345581055, 0.0065495681762695315, 0.006639455795288086, 0.0066044158935546875, 0.006722015857696533, 0.00651043176651001, 0.006611328125, 0.006616032123565674, 0.006607776165008545, 0.006571328163146973, 0.006611328125, 0.006574304103851319, 0.0065616960525512694, 0.00658784008026123, 0.006592415809631348, 0.006550335884094238, 0.006611104011535644, 0.006571968078613281, 0.006573567867279053, 0.006574431896209717, 0.0065474238395690915, 0.00656009578704834, 0.006558400154113769, 0.0066500802040100095, 0.006733727931976318, 0.006734623908996582, 0.006643807888031006, 0.0066416640281677245, 0.006649824142456054, 0.006608160018920899, 0.0066260480880737304, 0.006668288230895996, 0.006561791896820069, 0.0066826238632202144, 0.00658841609954834, 0.006589536190032959, 0.0065560641288757325, 0.00662169599533081, 0.006604991912841797, 0.0065630397796630855, 0.006574687957763672, 0.006553567886352539, 0.0065946559906005855, 0.0065634560585021975, 0.006577568054199219, 0.006599679946899414, 0.006609087944030762, 0.006768415927886963, 0.00661840009689331, 0.0065972480773925785, 0.006585951805114746, 0.006696640014648437, 0.006677248001098633, 0.006647808074951172, 0.006639616012573242, 0.006612991809844971, 0.0066641921997070315, 0.006700640201568604, 0.0066113600730896, 0.0066583681106567386, 0.006729407787322998, 0.00674732780456543, 0.006783360004425048, 0.006846911907196045, 0.0066145920753479005, 0.006641536235809326, 0.006670335769653321, 0.006598527908325195, 0.006521567821502685, 0.0065961918830871585, 0.006559296131134033, 0.0071197118759155275, 0.0068906559944152835, 0.0066135039329528805, 0.006589888095855713, 0.007166304111480713, 0.006605375766754151, 0.0065608639717102055, 0.006565023899078369, 0.006574016094207763, 0.006584127902984619, 0.00658022403717041, 0.006619455814361572, 0.006562751770019531, 0.006576896190643311, 0.0065598077774047855, 
0.0065435199737548825, 0.006587808132171631, 0.006575839996337891, 0.006562687873840332, 0.006628960132598877, 0.006553952217102051, 0.006575967788696289, 0.006602752208709717, 0.0065413122177124024, 0.0065978879928588864, 0.006602975845336914, 0.006657887935638427, 0.006585023880004883, 0.006639423847198486, 0.006582560062408448, 0.006582176208496094, 0.006565887928009034, 0.006578303813934326, 0.006578048229217529, 0.006600959777832031, 0.006553120136260986, 0.006549727916717529, 0.00657203197479248, 0.0065474557876586915, 0.006585792064666748, 0.0065268797874450685, 0.006572095870971679, 0.006586719989776611, 0.006575520038604736, 0.006583136081695556, 0.006583871841430664, 0.006561600208282471, 0.006617280006408691, 0.006554175853729248, 0.006543456077575683, 0.006616640090942383, 0.006574304103851319, 0.0065474557876586915, 0.006578432083129883, 0.006945663928985595, 0.008235967636108399, 0.007557055950164795, 0.006617087841033936, 0.0066228160858154295, 0.006580287933349609, 0.006501823902130127, 0.006617663860321045, 0.00658022403717041, 0.006595647811889648, 0.006582496166229248, 0.006554111957550048, 0.00679750394821167, 0.006587615966796875, 0.006597439765930176, 0.006591968059539795, 0.006637983798980713, 0.006563615798950195, 0.006592864036560058, 0.006607999801635742, 0.006581471920013428, 0.006647615909576416, 0.006573279857635498, 0.006619775772094726, 0.006646912097930908, 0.006570496082305908, 0.006580607891082764, 0.006628767967224121, 0.006554175853729248, 0.006631648063659668, 0.006637375831604004, 0.006641183853149414, 0.006595039844512939, 0.0066375679969787596, 0.0065690879821777345, 0.006581120014190674, 0.006613088130950928, 0.006604351997375488, 0.006586719989776611, 0.00659660816192627, 0.006615039825439453, 0.006608320236206055, 0.006588992118835449, 0.006619135856628418, 0.006619135856628418, 0.0066007041931152345, 0.00661513614654541, 0.006616991996765137, 0.006811615943908692, 0.006645535945892334, 0.006781119823455811, 0.006608448028564453, 0.006628928184509278, 0.0066416640281677245, 0.006565055847167969, 0.006639359951019287, 0.006756351947784424, 0.006588191986083985, 0.006596960067749023, 0.006631296157836914, 0.006567647933959961, 0.006613311767578125, 0.006598656177520752, 0.0065779838562011715, 0.0065474557876586915, 0.0065797438621521, 0.006525536060333252, 0.0065823040008544925, 0.00658022403717041, 0.006656000137329102, 0.006630527973175049, 0.006576767921447754, 0.006580480098724365, 0.006612991809844971, 0.006598656177520752, 0.00657203197479248, 0.006588064193725586, 0.0066112961769104, 0.006630559921264648, 0.006768896102905274, 0.006608672142028808, 0.006591296195983887, 0.006581920146942139, 0.006515039920806885, 0.006796512126922607, 0.006628416061401367, 0.006600416183471679, 0.006602943897247315, 0.006663616180419922, 0.006613408088684082, 0.0066293439865112306, 0.006643648147583008, 0.006606912136077881, 0.006609151840209961, 0.0065896639823913574, 0.0065779838562011715, 0.00659276819229126, 0.006596864223480225, 0.006555776119232178, 0.00660425615310669, 0.006584767818450928, 0.006811840057373047, 0.006772287845611572, 0.006604320049285889, 0.006619743824005127, 0.006637728214263916, 0.006630559921264648, 0.006595136165618897, 0.006572480201721191, 0.006557184219360352, 0.00659443187713623, 0.00660748815536499, 0.006561759948730469, 0.006586368083953857, 0.00662940788269043, 0.006662144184112549, 0.0066459841728210445, 0.006606016159057617, 0.006586976051330566, 0.006612991809844971, 0.006621183872222901, 0.006625088214874267, 
0.006584512233734131, 0.00659660816192627, 0.006619135856628418, 0.006583392143249512, 0.006581344127655029, 0.006561215877532959, 0.006589024066925049, 0.006592639923095703, 0.006602176189422607, 0.006643231868743897, 0.006535168170928955, 0.006608511924743652, 0.006622655868530274, 0.006646719932556153, 0.006543360233306885, 0.006658048152923584, 0.006565887928009034, 0.006590464115142822, 0.006987103939056397, 0.006632224082946777, 0.006834047794342041, 0.006858751773834228, 0.006660096168518067, 0.006903808116912841, 0.006930784225463867, 0.006649504184722901, 0.006750207901000976, 0.006606847763061524, 0.006749216079711914, 0.006601696014404297, 0.00667852783203125, 0.006674304008483887, 0.006574079990386963, 0.006600863933563232, 0.006619103908538818, 0.006615039825439453, 0.006647808074951172, 0.0066162881851196285, 0.006687520027160644, 0.006627327919006347, 0.006583903789520264, 0.006623648166656494, 0.006568064212799072, 0.006682208061218262, 0.0065684161186218264, 0.006557151794433593, 0.006601056098937988, 0.0066267199516296385, 0.006599264144897461, 0.00658841609954834, 0.006571167945861816, 0.006593183994293213, 0.0065437121391296385, 0.0065615358352661135, 0.006594079971313477, 0.006561791896820069, 0.006699391841888428, 0.006586559772491455, 0.0066488637924194335, 0.00656057596206665, 0.006578112125396728, 0.006545631885528564, 0.006575520038604736, 0.006553760051727295, 0.006525184154510498, 0.006584383964538574, 0.006600831985473633, 0.006562816143035889, 0.00657919979095459, 0.006567935943603515, 0.006545536041259766, 0.006584479808807373, 0.006594272136688233, 0.006491871833801269, 0.006559360027313233, 0.0068485760688781734, 0.006633408069610595, 0.006724256038665772, 0.0065484800338745115, 0.0065809922218322756, 0.006666495800018311, 0.0065781760215759275, 0.006604800224304199, 0.006631423950195312, 0.006563839912414551, 0.0066080641746520995, 0.006605631828308105, 0.006590208053588867, 0.006637792110443116, 0.006598688125610351, 0.006565120220184326, 0.006553631782531738, 0.006556384086608887, 0.006551199913024902, 0.006616608142852783, 0.006601535797119141, 0.006582623958587647, 0.006629024028778076, 0.00659660816192627, 0.006610752105712891, 0.006705247879028321, 0.0066109437942504885, 0.006600575923919678, 0.006617248058319092, 0.006592703819274902, 0.006637440204620361, 0.006604640007019043, 0.006559264183044433, 0.006591104030609131, 0.006585536003112793, 0.0065905599594116215, 0.006583263874053955, 0.006589536190032959, 0.006602528095245361, 0.006619935989379883, 0.006588704109191894, 0.006612063884735108, 0.006635903835296631, 0.0066472959518432614, 0.0065790400505065915, 0.006631423950195312, 0.006647264003753662, 0.00658457612991333, 0.006654047966003418, 0.006627520084381103, 0.006592512130737305, 0.006610400199890136, 0.0066730880737304685, 0.006788959980010986, 0.0065781760215759275, 0.006569983959197998, 0.006727712154388428, 0.006565887928009034, 0.006621151924133301, 0.006575232028961182, 0.006593728065490722, 0.006480127811431885, 0.006610208034515381, 0.006625984191894531, 0.006625311851501465, 0.006591807842254638, 0.006660799980163574, 0.006595647811889648, 0.006587327957153321, 0.006616864204406738, 0.006670911788940429, 0.0066126399040222165, 0.006753376007080078, 0.006607647895812988, 0.006633600234985351, 0.0065865921974182125, 0.006575776100158691, 0.006631616115570068, 0.006631487846374511, 0.006596799850463867, 0.006598656177520752, 0.006645535945892334, 0.006586112022399902, 0.0066564159393310545, 0.006629471778869629, 0.006653696060180664, 
0.006606751918792724, 0.006645760059356689, 0.0065779838562011715, 0.006596704006195068, 0.006581535816192627, 0.006580895900726318, 0.006674592018127441, 0.00656713581085205, 0.006583295822143555, 0.0066557440757751465, 0.006560160160064697, 0.0067265920639038084, 0.006652160167694092, 0.006672832012176513, 0.0066068158149719235, 0.0066109437942504885, 0.0065641279220581055, 0.0065979199409484865, 0.006610591888427734, 0.0065625600814819334, 0.006588479995727539, 0.006621183872222901, 0.006590752124786377, 0.0065833601951599124, 0.006631455898284912, 0.006581888198852539, 0.006626304149627686, 0.006581888198852539, 0.0065842242240905765, 0.006616864204406738, 0.006566592216491699, 0.006584320068359375, 0.006593920230865478, 0.006565695762634278, 0.006566720008850098, 0.006582272052764892, 0.006716991901397705, 0.006557375907897949, 0.0064967041015625, 0.0065780158042907716, 0.006547679901123047, 0.0066295042037963865, 0.006549503803253174, 0.00657155179977417, 0.006580031871795654, 0.006546080112457275, 0.006552512168884277, 0.006547967910766602, 0.006525023937225342, 0.006594336032867432, 0.006537568092346191, 0.006545792102813721, 0.006594207763671875, 0.006598879814147949, 0.0065512962341308595, 0.006661248207092285, 0.006550399780273437, 0.006589951992034912, 0.006619647979736328, 0.006564191818237305, 0.006548799991607666, 0.006652607917785645, 0.006544672012329102, 0.006658432006835938, 0.0065474557876586915, 0.006850080013275146, 0.006797632217407226, 0.006615488052368164, 0.006610400199890136, 0.006605055809020996, 0.0066061758995056155, 0.00662553596496582, 0.006631360054016113, 0.006555232048034668, 0.006597504138946533, 0.0065855679512023925, 0.006603519916534423, 0.006592544078826904, 0.006612607955932617, 0.0065756158828735355, 0.006558559894561768, 0.006594272136688233, 0.006566207885742187, 0.006621183872222901, 0.006624671936035156, 0.006585023880004883, 0.006600031852722168, 0.006582143783569336, 0.0066771841049194335, 0.006598688125610351, 0.006582240104675293, 0.0066059517860412595, 0.006600927829742431, 0.006869919776916504, 0.006659840106964111, 0.006670559883117676, 0.0066022400856018066, 0.006633279800415039, 0.006879712104797363, 0.006625311851501465, 0.006605120182037354, 0.006553919792175293, 0.006616159915924073, 0.006584959983825684, 0.0066003198623657226, 0.006808256149291992, 0.006601952075958252, 0.006570335865020752, 0.00655683183670044, 0.006578720092773437, 0.006594175815582275, 0.006593311786651611, 0.006695072174072266, 0.006844255924224853, 0.006612991809844971, 0.006604959964752197, 0.006613855838775635, 0.006572095870971679, 0.006593311786651611, 0.006592415809631348, 0.006600959777832031, 0.006635200023651123, 0.0066112642288208, 0.006688767910003662, 0.006582240104675293, 0.006586400032043457, 0.006545728206634522, 0.006588096141815185, 0.0065801281929016115, 0.006610752105712891, 0.006555935859680176, 0.006607903957366944, 0.006538559913635254, 0.006577824115753174, 0.006643712043762207, 0.0065567679405212406, 0.006720416069030762, 0.006594560146331787, 0.006584640026092529, 0.0066367039680480956, 0.006573919773101807, 0.006584959983825684, 0.006602880001068115, 0.006569119930267334, 0.006543968200683594, 0.006682047843933106, 0.0065668802261352535, 0.006569600105285645, 0.006637728214263916, 0.006651711940765381, 0.006582208156585693, 0.006664512157440186, 0.0065469760894775395, 0.006617504119873047, 0.006520832061767578, 0.00657203197479248, 0.006615039825439453, 0.006557695865631104, 0.006573728084564209, 0.0066111359596252445, 
0.006551936149597168, 0.006590623855590821, 0.006652768135070801, 0.006598495960235596]",tokens/s,150.61288593464724,,, 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,2221.080576,2971.533312,0.0,2569.0112,2295.745536,s,1,9.5031572265625,9.5031572265625,0.0,9.5031572265625,9.5031572265625,9.5031572265625,9.5031572265625,[9.5031572265625],,kWh,7.219568145411965e-05,7.95628858616086e-06,2.9456690232021465e-05,0.00010960866027230197,,MB,2304.786432,2988.310528,0.0,2571.108352,2282.97216,s,10,1.0099354858398437,0.10099354858398438,0.0001945934438602146,0.10095251083374024,0.10126512298583984,0.10132692794799805,0.10137637191772461,"[0.10138873291015625, 0.10083219146728516, 0.10109117126464844, 0.10074697875976563, 0.10088448333740234, 0.10104582214355469, 0.10125138854980469, 0.10095954895019531, 0.1007896957397461, 0.10094547271728516]",tokens/s,2534.8153777081625,kWh,3.0032570032734016e-06,3.312028529614683e-07,1.994188103285276e-06,5.328647959520145e-06,tokens/kWh,48042205.44211993,MB,2315.579392,2988.310528,0.0,2571.108352,2390.926848,s,10,19.095451049804687,1.9095451049804688,0.009148052091526568,1.9095098266601562,1.9176468872070314,1.9235252868652344,1.9282280065917967,"[1.900849853515625, 1.9160811767578125, 1.9119996337890626, 1.902387939453125, 1.90702001953125, 1.898927734375, 1.8999140625, 1.9125263671875, 1.916340576171875, 1.9294036865234374]",tokens/s,32.99215076705107,kWh,5.573187321797787e-05,6.146954398723299e-06,2.7325779797118865e-05,8.920460741382004e-05,tokens/kWh,706241.5476786203,,s,630,19.09265004920959,0.03030579372890412,0.0006671665928267241,0.03016500759124756,0.030761616897583006,0.031015484905242918,0.03309274314880372,"[0.03054921531677246, 0.03156048011779785, 0.029964223861694336, 0.029925439834594728, 0.029822975158691405, 0.029687103271484376, 0.030049055099487305, 0.030129600524902343, 0.030171615600585937, 0.030486080169677736, 0.03037228775024414, 0.03054182434082031, 0.029945856094360353, 0.029996671676635743, 0.030187711715698243, 0.029870271682739258, 0.02984060859680176, 0.029811487197875977, 0.029720064163208007, 0.02971494483947754, 0.030095359802246095, 0.030085119247436523, 0.02993971252441406, 0.02979145622253418, 0.033129566192626955, 0.03002457618713379, 0.029817663192749023, 0.02998271942138672, 0.029761503219604492, 0.02995612716674805, 0.02978201675415039, 0.029808576583862306, 0.029663679122924804, 0.02983456039428711, 0.02961199951171875, 0.03002764892578125, 0.029927488327026366, 0.029725088119506835, 0.029642208099365235, 0.029681568145751954, 0.02983590316772461, 0.029665279388427734, 0.030373888015747072, 0.030638080596923828, 0.030865407943725585, 0.03057049560546875, 0.02987331199645996, 0.030073663711547852, 0.029848672866821288, 0.029933759689331055, 0.0300183048248291, 0.030051551818847656, 0.029941951751708985, 0.03049305534362793, 0.030148832321166993, 0.030372991561889648, 0.030472192764282226, 0.033530750274658204, 0.030953439712524414, 0.030113759994506835, 0.030029375076293944, 
0.030099199295043944, 0.03003878402709961, 0.030181600570678712, 0.030004032135009767, 0.0298874568939209, 0.030159872055053712, 0.029893632888793945, 0.029998271942138673, 0.029786272048950194, 0.029983392715454103, 0.03022643280029297, 0.03139583969116211, 0.03068681526184082, 0.03128505516052246, 0.0306713924407959, 0.030690656661987305, 0.030343488693237306, 0.030392736434936524, 0.030328832626342773, 0.030361600875854492, 0.030199071884155274, 0.030436063766479494, 0.030308351516723633, 0.030455808639526367, 0.03036774444580078, 0.030203903198242187, 0.03006233596801758, 0.030193920135498046, 0.030020959854125978, 0.030167360305786133, 0.03014896011352539, 0.03023052787780762, 0.030061567306518554, 0.03031532859802246, 0.029996736526489258, 0.030150880813598634, 0.030144800186157228, 0.030158367156982422, 0.03011222457885742, 0.030127424240112305, 0.030020288467407227, 0.030050304412841795, 0.03012575912475586, 0.03022879981994629, 0.031086591720581053, 0.031111167907714843, 0.035161598205566406, 0.030585344314575196, 0.030191680908203126, 0.030158784866333006, 0.029965984344482423, 0.030128480911254883, 0.030109695434570313, 0.030291967391967774, 0.030188896179199218, 0.03106790351867676, 0.03024947166442871, 0.03031491279602051, 0.03030601692199707, 0.030161184310913088, 0.030096416473388673, 0.03271974563598633, 0.0306048641204834, 0.030443775177001954, 0.03029203224182129, 0.030540895462036134, 0.030256992340087892, 0.03018342399597168, 0.030055999755859375, 0.030135839462280274, 0.03020060729980469, 0.030302335739135742, 0.030250463485717773, 0.030145055770874025, 0.03016089630126953, 0.030048255920410157, 0.03279052734375, 0.032575775146484375, 0.030398176193237304, 0.03017728042602539, 0.030228256225585937, 0.030105823516845702, 0.030011392593383788, 0.03017318344116211, 0.030355424880981446, 0.03045583915710449, 0.030516992568969725, 0.030088800430297852, 0.030472864151000978, 0.029937536239624023, 0.03017740821838379, 0.03013222312927246, 0.030648319244384766, 0.0301711368560791, 0.030629280090332032, 0.030375808715820313, 0.030376672744750977, 0.030131711959838867, 0.03030271911621094, 0.030115711212158204, 0.030749919891357422, 0.030767648696899415, 0.03076134490966797, 0.030521343231201172, 0.03061555290222168, 0.030485504150390624, 0.03086137580871582, 0.030942144393920897, 0.03115318489074707, 0.030364639282226564, 0.03024870491027832, 0.030377216339111328, 0.03002470397949219, 0.029833215713500977, 0.03019366455078125, 0.029855743408203125, 0.029954048156738283, 0.029918848037719728, 0.03044940757751465, 0.030228160858154298, 0.029983423233032228, 0.029818687438964844, 0.02999955177307129, 0.02984976005554199, 0.030144351959228516, 0.030101503372192383, 0.03003183937072754, 0.029865087509155272, 0.029916704177856444, 0.029835039138793946, 0.030028480529785156, 0.030778783798217774, 0.03315568161010742, 0.030360607147216796, 0.030677152633666993, 0.030143295288085938, 0.030232576370239257, 0.03017932891845703, 0.030011327743530273, 0.03004217529296875, 0.030005247116088866, 0.03011315155029297, 0.030034175872802736, 0.029960575103759764, 0.031031295776367186, 0.029908479690551756, 0.0298603515625, 0.030142463684082032, 0.0301711368560791, 0.030468095779418947, 0.03007619285583496, 0.0301711368560791, 0.030050432205200196, 0.03017353630065918, 0.03008950424194336, 0.030255071640014647, 0.030178560256958007, 0.030155519485473632, 0.02995814323425293, 0.03005619239807129, 0.030120447158813478, 0.030197504043579102, 0.03015679931640625, 0.03018137550354004, 
0.03016499137878418, 0.030056447982788087, 0.030007295608520508, 0.030021631240844726, 0.030449056625366212, 0.03017788887023926, 0.03009561538696289, 0.030420448303222655, 0.02986627197265625, 0.02998681640625, 0.030066656112670898, 0.030152223587036134, 0.030509695053100586, 0.030508928298950196, 0.03007689666748047, 0.030042144775390626, 0.029970432281494142, 0.030109695434570313, 0.03003116798400879, 0.03012022399902344, 0.03022480010986328, 0.03041823959350586, 0.02997644805908203, 0.029997888565063476, 0.030027776718139648, 0.02998681640625, 0.029783552169799804, 0.03023711967468262, 0.029800447463989257, 0.029882368087768556, 0.02992742347717285, 0.029775871276855468, 0.02994380760192871, 0.0300230712890625, 0.03052726364135742, 0.03529014587402344, 0.03076406478881836, 0.030478111267089845, 0.030553056716918946, 0.030278879165649412, 0.03029884719848633, 0.030084480285644533, 0.030296192169189454, 0.030871456146240234, 0.03019843292236328, 0.029902336120605468, 0.029892288208007812, 0.030089792251586915, 0.030341087341308595, 0.030093599319458007, 0.030234624862670898, 0.03042323112487793, 0.030168447494506836, 0.029996543884277343, 0.029941696166992188, 0.030202272415161133, 0.0305545597076416, 0.030226591110229493, 0.03015065574645996, 0.029945535659790037, 0.030046112060546876, 0.029823392868041993, 0.030246912002563478, 0.03042246437072754, 0.029921855926513672, 0.029797887802124022, 0.029948415756225585, 0.029978464126586914, 0.030351327896118163, 0.030320831298828125, 0.030130176544189452, 0.030015487670898438, 0.029945856094360353, 0.030424800872802735, 0.02993724822998047, 0.029968671798706055, 0.03007529640197754, 0.030324735641479493, 0.030508447647094726, 0.030224992752075196, 0.030229856491088867, 0.030159231185913085, 0.030214208602905274, 0.030066911697387694, 0.030475776672363283, 0.0300546875, 0.03030243110656738, 0.030406656265258788, 0.030509056091308592, 0.030461952209472655, 0.031100767135620117, 0.030404767990112304, 0.030274879455566405, 0.03035411262512207, 0.03014575958251953, 0.030093311309814453, 0.029911840438842773, 0.030838720321655272, 0.030056512832641602, 0.030389631271362304, 0.029976320266723634, 0.03000614356994629, 0.029806400299072267, 0.029993152618408202, 0.029861888885498046, 0.03001753616333008, 0.029732864379882814, 0.0299683837890625, 0.029990976333618163, 0.030044095993041992, 0.029958368301391602, 0.030263071060180665, 0.02998601531982422, 0.030079776763916016, 0.0300031681060791, 0.029958175659179687, 0.029831167221069335, 0.030002559661865234, 0.029896448135375977, 0.029930368423461914, 0.029863136291503906, 0.029876735687255858, 0.029881664276123047, 0.030014432907104493, 0.029926687240600585, 0.029971168518066405, 0.029833215713500977, 0.0300579833984375, 0.029760000228881835, 0.029927391052246094, 0.030165023803710937, 0.03079487991333008, 0.03054476737976074, 0.03094937515258789, 0.030383520126342774, 0.030536287307739256, 0.029999103546142578, 0.029820928573608397, 0.029799583435058594, 0.029799264907836916, 0.029810367584228517, 0.029897024154663086, 0.030021631240844726, 0.030345216751098632, 0.03019161605834961, 0.0307936954498291, 0.03300259017944336, 0.03024508857727051, 0.030078752517700195, 0.03002774429321289, 0.029844160079956054, 0.029868064880371095, 0.029753599166870117, 0.030116191864013674, 0.03012944030761719, 0.03030054473876953, 0.03016294479370117, 0.030074880599975585, 0.029949056625366212, 0.03061030387878418, 0.030102783203125, 0.030255935668945313, 0.030051456451416016, 0.02987091255187988, 
0.029930879592895508, 0.0298540153503418, 0.029976896286010742, 0.02988153648376465, 0.030042591094970702, 0.029804704666137695, 0.029923263549804686, 0.029866239547729493, 0.030113183975219726, 0.029882272720336913, 0.030499488830566406, 0.03010665512084961, 0.030299135208129883, 0.029921279907226563, 0.02994175910949707, 0.02972876739501953, 0.029900800704956054, 0.030011392593383788, 0.03003183937072754, 0.030074304580688476, 0.030087711334228516, 0.029920320510864257, 0.02994598388671875, 0.02985206413269043, 0.030085599899291993, 0.02995609664916992, 0.029951839447021483, 0.02984307289123535, 0.03008064079284668, 0.03030953598022461, 0.030882879257202147, 0.030755199432373048, 0.03106972885131836, 0.030560768127441407, 0.030312736511230468, 0.030074880599975585, 0.030141824722290038, 0.03005062484741211, 0.030083391189575197, 0.029968000411987303, 0.030078624725341795, 0.029737695693969727, 0.030357152938842773, 0.0300948486328125, 0.030398591995239258, 0.030302944183349608, 0.03014656066894531, 0.03170822334289551, 0.030981056213378905, 0.030096736907958985, 0.03022710418701172, 0.03016022491455078, 0.030328832626342773, 0.030453760147094725, 0.030078975677490235, 0.03003392028808594, 0.029890560150146486, 0.029976287841796876, 0.02988060760498047, 0.030480384826660156, 0.030632991790771485, 0.03049776077270508, 0.030533023834228516, 0.030364032745361327, 0.030279199600219728, 0.030115520477294922, 0.029983135223388673, 0.030253664016723632, 0.03149001693725586, 0.029876256942749025, 0.029861888885498046, 0.02995609664916992, 0.030037792205810546, 0.03007097625732422, 0.02997020721435547, 0.030375551223754883, 0.030144895553588867, 0.030171392440795898, 0.029894943237304687, 0.029969823837280272, 0.02995020866394043, 0.030178815841674804, 0.030050880432128908, 0.030220256805419923, 0.03022060775756836, 0.030766815185546876, 0.030053535461425782, 0.029951168060302735, 0.02995686340332031, 0.030043039321899414, 0.029931552886962892, 0.02992911911010742, 0.030128671646118165, 0.03096143913269043, 0.03058687973022461, 0.030474079132080077, 0.030230688095092773, 0.030029375076293944, 0.029937408447265626, 0.030601919174194334, 0.030246400833129884, 0.030171424865722656, 0.03003209686279297, 0.03033817672729492, 0.030047040939331054, 0.03043084716796875, 0.03014236831665039, 0.03001955223083496, 0.03911673736572266, 0.03109939193725586, 0.03051971244812012, 0.030363359451293946, 0.03004185676574707, 0.030048511505126954, 0.029826271057128907, 0.030460895538330077, 0.030234495162963868, 0.03098431968688965, 0.03185577583312988, 0.030516063690185547, 0.030228479385375977, 0.030062591552734375, 0.03012403106689453, 0.03016294479370117, 0.03013222312927246, 0.030109920501708985, 0.03016476821899414, 0.030093311309814453, 0.030123455047607422, 0.030063167572021484, 0.0301977596282959, 0.0301562557220459, 0.030087711334228516, 0.030123584747314452, 0.030226335525512696, 0.03090224075317383, 0.030971712112426757, 0.030229248046875, 0.03099616050720215, 0.030443328857421875, 0.03061305618286133, 0.030415264129638672, 0.03034320068359375, 0.03021574401855469, 0.0302128963470459, 0.03097372817993164, 0.030591360092163088, 0.03018956756591797, 0.030303583145141602, 0.030355968475341798, 0.03029007911682129, 0.03034441566467285, 0.03030672073364258, 0.03014691162109375, 0.03042905616760254, 0.030750368118286135, 0.03122768020629883, 0.030703968048095703, 0.03048281669616699, 0.03038787269592285, 0.030285472869873046, 0.0301079044342041, 0.03034307289123535, 0.03019865608215332, 0.030170783996582032, 
0.03138076782226563, 0.03031318473815918, 0.030070783615112305, 0.030664703369140626, 0.03019164848327637, 0.030754783630371093, 0.030709760665893555, 0.030436351776123048, 0.030349855422973634, 0.030467584609985353, 0.030348255157470704, 0.03019340705871582, 0.030138015747070312, 0.030930463790893554, 0.030370271682739258, 0.030306304931640625, 0.030390272140502928, 0.03056572723388672, 0.030917280197143553, 0.030848127365112304, 0.03551219177246094, 0.031756416320800784, 0.03086630439758301, 0.030818304061889647, 0.030693216323852537, 0.030423200607299805, 0.030256511688232422, 0.030216127395629882, 0.030240575790405275, 0.030169984817504884, 0.030270944595336913, 0.030161439895629884, 0.030391551971435546, 0.030083839416503905, 0.030261247634887696, 0.03040598487854004, 0.03056902313232422, 0.030469951629638673, 0.030394655227661133, 0.030410751342773438, 0.031208736419677734, 0.030642240524291993, 0.03040073585510254, 0.030658143997192383, 0.0316011848449707, 0.030331199645996093, 0.030466047286987305, 0.03023583984375, 0.030343231201171876, 0.030421728134155272, 0.030435359954833985, 0.030367839813232423, 0.030322591781616212, 0.031062015533447264, 0.03077529525756836, 0.03157920074462891, 0.030419904708862303, 0.0301231689453125, 0.030159711837768555, 0.030216192245483397, 0.03032636833190918, 0.030163360595703126, 0.03060259246826172, 0.030110368728637694, 0.030420192718505858, 0.030445472717285156, 0.03047104072570801, 0.030580896377563477, 0.030455263137817382, 0.030857599258422852, 0.030785535812377928, 0.030432992935180665, 0.03066067123413086, 0.030871776580810546, 0.03095267105102539, 0.03050716781616211]",tokens/s,32.9969909036321,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4190.732288,4733.140992,0.0,4347.396096,4328.833024,s,1,9.8930439453125,9.8930439453125,0.0,9.8930439453125,9.8930439453125,9.8930439453125,9.8930439453125,[9.8930439453125],,kWh,8.098068752087784e-05,8.922825388411807e-06,2.748141087399736e-05,0.00011738492378328701,,MB,1414.971392,5276.30336,0.0,4859.101184,4756.928512,s,10,3.530581481933594,0.3530581481933594,0.0018155841052337968,0.3533706817626953,0.3545710174560547,0.3546693588256836,0.3547480319213867,"[0.34808499145507815, 0.3536143493652344, 0.35278164672851564, 0.3526377868652344, 0.35275408935546876, 0.35312701416015624, 0.3543751220703125, 0.3545491638183594, 0.35388961791992185, 0.3547677001953125]",tokens/s,725.0930230897728,kWh,1.0284359589079408e-05,1.1341899689598079e-06,6.870513159242885e-06,1.82890627172821e-05,tokens/kWh,13997436.826442445,MB,1435.66848,5318.2464,0.0,4901.044224,4876.091904,s,10,20.641902832031253,2.0641902832031254,0.004866000710389721,2.0631949462890624,2.071058349609375,2.0724656982421874,2.0735915771484374,"[2.061962158203125, 2.06210205078125, 2.05590087890625, 2.06020751953125, 2.0651337890625, 2.065587890625, 2.062381103515625, 2.0640087890625, 2.073873046875, 
2.07074560546875]",tokens/s,30.52044208939846,kWh,6.040362391050403e-05,6.662495426295081e-06,4.0015357682752146e-05,0.00010708147701955125,tokens/kWh,588337.0472046933,,s,630,20.638977506637556,0.032760281756567575,0.0007433466721609787,0.03258014297485351,0.03337887878417969,0.03379831409454346,0.03657866466522217,"[0.03709747314453125, 0.03393535995483398, 0.03278553771972656, 0.03249558258056641, 0.03247990417480469, 0.03242160034179688, 0.032415744781494144, 0.03283520126342773, 0.03256991958618164, 0.032957855224609374, 0.03213164901733399, 0.03232172775268555, 0.032433662414550785, 0.03258819198608399, 0.03224550247192383, 0.03203740692138672, 0.03219660949707031, 0.03209625625610352, 0.031987712860107424, 0.03207372665405273, 0.03226556777954102, 0.032217758178710934, 0.03198361587524414, 0.031991264343261716, 0.032124702453613284, 0.03251891326904297, 0.03276595306396484, 0.03251161575317383, 0.032489856719970706, 0.03341516876220703, 0.03328204727172852, 0.033103809356689454, 0.03284096145629883, 0.032815166473388675, 0.032701183319091796, 0.03264921569824219, 0.032589824676513675, 0.03294940948486328, 0.034012001037597654, 0.03291340637207031, 0.03273318481445313, 0.03251200103759765, 0.032405502319335935, 0.03244646453857422, 0.03236246490478516, 0.0325263671875, 0.03273318481445313, 0.03280652618408203, 0.03240793609619141, 0.03260006332397461, 0.03274956893920898, 0.03288639831542969, 0.03270899200439453, 0.03256729507446289, 0.03262054443359375, 0.032677886962890625, 0.032694175720214845, 0.03284182357788086, 0.03296646499633789, 0.03320435333251953, 0.03344595336914063, 0.03323699188232422, 0.033312545776367185, 0.036805633544921876, 0.03364396667480469, 0.032551231384277346, 0.032532737731933596, 0.032222431182861326, 0.032104736328125, 0.03505350494384766, 0.032072254180908207, 0.032073055267333984, 0.03208668899536133, 0.03194636726379395, 0.03219289779663086, 0.032249855041503905, 0.03263865661621094, 0.03274579238891601, 0.03233996963500976, 0.03244358444213867, 0.03246982574462891, 0.03243318557739258, 0.03233004760742188, 0.032313022613525394, 0.03223030471801758, 0.0319458236694336, 0.03219123077392578, 0.032514270782470704, 0.032238719940185546, 0.032322433471679686, 0.03264921569824219, 0.033312767028808594, 0.03471974563598633, 0.032909313201904294, 0.0328331184387207, 0.032782752990722655, 0.0325549430847168, 0.03321203231811523, 0.033048255920410156, 0.032610240936279296, 0.032307937622070314, 0.03219375991821289, 0.03252076721191406, 0.03283590316772461, 0.03258070373535156, 0.03241209411621094, 0.03245308685302734, 0.03241164779663086, 0.03238854217529297, 0.03261088180541992, 0.03270655822753906, 0.032406753540039065, 0.03257948684692383, 0.03257228851318359, 0.03253680038452148, 0.03320195388793945, 0.03232153701782227, 0.03241308975219727, 0.032490081787109375, 0.03286220932006836, 0.032734657287597654, 0.033080894470214846, 0.033446910858154294, 0.03364435195922852, 0.03334531021118164, 0.03343939208984375, 0.03655196762084961, 0.03382755279541016, 0.03250380706787109, 0.032104415893554686, 0.0320382080078125, 0.03219708633422851, 0.03209369659423828, 0.03189632034301758, 0.03209011077880859, 0.032017505645751954, 0.03183625602722168, 0.03192915153503418, 0.03196313667297363, 0.03199385643005371, 0.03195199966430664, 0.03220159912109375, 0.03204095840454101, 0.0324136962890625, 0.03254886245727539, 0.032315391540527344, 0.03237887954711914, 0.032129024505615236, 0.0324956169128418, 0.03234307098388672, 0.032489952087402345, 0.03230060958862305, 
0.03249382400512695, 0.03372265625, 0.03281046295166016, 0.03328505706787109, 0.03340399932861328, 0.03317180633544922, 0.032715328216552736, 0.03256099319458008, 0.032708766937255856, 0.03277414321899414, 0.03250307083129883, 0.03227926254272461, 0.03245260620117187, 0.03264460754394531, 0.03247872161865235, 0.032258560180664066, 0.0324754867553711, 0.03257075119018555, 0.03249027252197265, 0.03256934356689453, 0.032704513549804685, 0.03267532730102539, 0.032635009765625, 0.032539009094238285, 0.032345344543457034, 0.03232201766967773, 0.03285635375976562, 0.03297484970092773, 0.03282124710083008, 0.03266883087158203, 0.03293065643310547, 0.03321187210083008, 0.03331484985351563, 0.03324553680419922, 0.03312656021118164, 0.03320012664794922, 0.03302521514892578, 0.0369906234741211, 0.033732608795166014, 0.032481632232666015, 0.03229695892333984, 0.03199523162841797, 0.032141822814941406, 0.03224387359619141, 0.032315391540527344, 0.03220275115966797, 0.03202191925048828, 0.032397918701171875, 0.03228220748901367, 0.03231375885009766, 0.03208969497680664, 0.032362911224365236, 0.032010238647460935, 0.031909536361694336, 0.03227068710327148, 0.03234931182861328, 0.032502655029296876, 0.03243417739868164, 0.03244236755371094, 0.03255862426757813, 0.032532958984375, 0.03266099166870117, 0.032584190368652344, 0.03217407989501953, 0.03261030578613281, 0.03342115020751953, 0.03332012939453125, 0.033101791381835936, 0.032838623046875, 0.03277212905883789, 0.0328007698059082, 0.03251200103759765, 0.032217086791992186, 0.03252547073364258, 0.03243299102783203, 0.03262464141845703, 0.03261644744873047, 0.032686080932617184, 0.032290817260742184, 0.032310878753662106, 0.032790943145751955, 0.03280281448364258, 0.032726207733154294, 0.032504096984863284, 0.03242243194580078, 0.03246284866333008, 0.032575328826904296, 0.032981151580810546, 0.032942081451416014, 0.03328192138671875, 0.03302617645263672, 0.03263897705078125, 0.0328724479675293, 0.033331199645996096, 0.033212223052978516, 0.033095359802246094, 0.03306857681274414, 0.03314150238037109, 0.03316144180297852, 0.03344998550415039, 0.03671859359741211, 0.0339046401977539, 0.03253657531738281, 0.03209625625610352, 0.03281843185424805, 0.032131263732910156, 0.032369216918945315, 0.03267116928100586, 0.03231727981567383, 0.032074016571044923, 0.03213302230834961, 0.03206371307373047, 0.031983871459960934, 0.032344127655029295, 0.032069633483886716, 0.032091136932373046, 0.031970304489135744, 0.03186428833007812, 0.032213024139404293, 0.032333728790283206, 0.03230985641479492, 0.03608332824707031, 0.0322850570678711, 0.032307201385498044, 0.032198654174804685, 0.03215276718139649, 0.0324892807006836, 0.032876991271972654, 0.03332729721069336, 0.03352387237548828, 0.03366320037841797, 0.033371295928955075, 0.032893665313720705, 0.0325959358215332, 0.032567455291748044, 0.0324851188659668, 0.03266790390014648, 0.03300262451171875, 0.03253120040893555, 0.03257356643676758, 0.03249356842041016, 0.03242598342895508, 0.0324505615234375, 0.03294607925415039, 0.0327064323425293, 0.032704032897949216, 0.032680511474609375, 0.03231868743896484, 0.032643264770507815, 0.03358560180664062, 0.03337766265869141, 0.032815200805664066, 0.03265824127197266, 0.032665184020996094, 0.03240761566162109, 0.0324747200012207, 0.032940799713134766, 0.03328169631958008, 0.033460575103759764, 0.03339263916015625, 0.03323881530761719, 0.033389823913574215, 0.033190238952636716, 0.037392383575439454, 0.0341416015625, 0.032852191925048825, 0.03213078308105469, 
0.03251228713989258, 0.03214796829223633, 0.03206921768188477, 0.032043296813964846, 0.03223551940917969, 0.03242393493652344, 0.03222630310058594, 0.03215359878540039, 0.032246784210205076, 0.03202227020263672, 0.03210230255126953, 0.03182995223999024, 0.03213721466064453, 0.03213558578491211, 0.035198974609375, 0.03220857620239258, 0.032145729064941404, 0.032529857635498045, 0.03262300872802734, 0.032536384582519534, 0.03315260696411133, 0.03275990295410156, 0.03257206344604492, 0.032400672912597656, 0.03307593536376953, 0.03324102401733398, 0.03336608123779297, 0.033142784118652346, 0.033113407135009765, 0.0327599983215332, 0.03277667236328125, 0.03268956756591797, 0.03245471954345703, 0.0325043830871582, 0.032562206268310544, 0.03443545532226563, 0.03288124847412109, 0.03251609420776367, 0.03251577758789063, 0.032721153259277345, 0.03273529434204102, 0.03288883209228516, 0.032672767639160154, 0.03306089782714844, 0.03302441787719727, 0.03280543899536133, 0.032685951232910154, 0.032523551940917966, 0.03234239959716797, 0.03264969635009766, 0.03259187316894531, 0.032464897155761716, 0.03242367935180664, 0.0326247673034668, 0.032942207336425784, 0.033148929595947264, 0.03320364761352539, 0.033462848663330075, 0.033355777740478515, 0.03635200119018555, 0.033727519989013674, 0.03310195159912109, 0.03232976150512695, 0.03192300796508789, 0.03212083053588867, 0.032059391021728514, 0.03226009750366211, 0.032215038299560544, 0.03208367919921875, 0.032237377166748044, 0.03256537628173828, 0.03272739028930664, 0.03260416030883789, 0.032557056427001956, 0.03233977508544922, 0.03236201477050781, 0.032202590942382814, 0.03238777542114258, 0.03241791915893555, 0.03223961639404297, 0.0323785285949707, 0.03225225448608399, 0.03240950393676758, 0.03226134490966797, 0.03245872116088867, 0.03296716690063477, 0.03303260803222656, 0.03326566314697266, 0.03358902359008789, 0.03334304046630859, 0.03299395370483398, 0.033070655822753904, 0.03264761734008789, 0.032606208801269534, 0.03260784149169922, 0.03305900955200195, 0.034622783660888674, 0.03278672027587891, 0.03254083251953125, 0.032448993682861325, 0.032838687896728516, 0.03274646377563477, 0.03257958221435547, 0.03234406280517578, 0.032368576049804684, 0.03257452774047852, 0.032655841827392576, 0.03252073669433594, 0.032405502319335935, 0.03231526565551758, 0.032505279541015626, 0.03261062240600586, 0.03251033782958984, 0.032626686096191404, 0.03252633666992188, 0.0324587516784668, 0.03323283386230469, 0.03334969711303711, 0.03331817626953125, 0.03311027145385742, 0.03315536117553711, 0.033167552947998044, 0.03646054458618164, 0.03377356719970703, 0.03263897705078125, 0.032317150115966795, 0.03225219345092773, 0.03221836853027344, 0.032303871154785155, 0.03235583877563476, 0.032141822814941406, 0.032132545471191404, 0.032335617065429687, 0.032193344116210935, 0.032380222320556644, 0.03220479965209961, 0.032228031158447266, 0.03228044891357422, 0.03237081527709961, 0.0323105583190918, 0.03227872085571289, 0.032143230438232424, 0.03261670303344726, 0.03231785583496094, 0.032349857330322265, 0.032448863983154295, 0.032643070220947264, 0.032352256774902347, 0.032282623291015625, 0.033345535278320314, 0.03333865737915039, 0.03321929550170898, 0.03385715103149414, 0.033089473724365236, 0.03321878433227539, 0.03288633728027344, 0.03274563217163086, 0.0327562255859375, 0.032732383728027344, 0.03251484680175781, 0.03265315246582031, 0.032422046661376956, 0.032423137664794925, 0.03267382431030273, 0.032677921295166015, 0.03250249481201172, 0.03272009658813477, 
0.03262543869018555, 0.03262607955932617, 0.03259043121337891, 0.03258367919921875, 0.032849918365478514, 0.03263692855834961, 0.03247484970092773, 0.03248963165283203, 0.032978271484375, 0.03302819061279297, 0.03284243011474609, 0.03309072113037109, 0.03381856155395508, 0.03366799926757812, 0.03351254272460937, 0.03322915267944336, 0.0332949104309082, 0.03328335952758789, 0.03658956909179688, 0.0339046401977539, 0.03293328094482422, 0.032449119567871096, 0.03205027389526367, 0.03222975921630859, 0.03230364990234375, 0.03249110412597656, 0.03195536041259766, 0.0324851188659668, 0.0321550407409668, 0.0321135025024414, 0.03204691314697266, 0.032073921203613284, 0.035216480255126956, 0.03612294387817383, 0.033390430450439455, 0.03267046356201172, 0.032349342346191405, 0.0323326416015625, 0.032304737091064455, 0.032444831848144534, 0.032593822479248045, 0.03263497543334961, 0.032307201385498044, 0.03248108673095703, 0.032729278564453124, 0.032904224395751955, 0.03279510498046875, 0.03323100662231445, 0.033204383850097656, 0.03339219284057617, 0.03311270523071289, 0.03266057586669922, 0.03252067184448242, 0.03264716720581055, 0.03262508773803711, 0.03253452682495117, 0.032717823028564456, 0.0333007698059082, 0.03273596954345703, 0.0324587516784668, 0.03226419067382812, 0.03251721572875976, 0.03267881774902344, 0.03329203033447266, 0.03300787353515625, 0.033748382568359374, 0.032850528717041014, 0.0328540153503418, 0.032619873046875, 0.032543392181396485, 0.032292865753173826, 0.03240140914916992, 0.032860160827636715, 0.03325337600708008, 0.03294566345214844, 0.032653182983398435, 0.033642879486083986, 0.03470284652709961, 0.033954559326171876, 0.03315456008911133, 0.03314227294921875, 0.0365939826965332, 0.03401347351074219, 0.03277532958984375, 0.03221347045898437, 0.03236284637451172, 0.032282657623291015, 0.03247923278808594, 0.03245004653930664, 0.0323322868347168, 0.03216316986083984, 0.032395679473876955, 0.03241948699951172, 0.0323109130859375, 0.03236963272094726, 0.03221299362182617, 0.032316959381103516, 0.03249609756469726, 0.032315391540527344, 0.0325591049194336, 0.03313808059692383, 0.03216374588012695, 0.0322050552368164, 0.032185985565185544, 0.03225225448608399, 0.03256367874145508, 0.03296585464477539, 0.032832160949707034, 0.03260224151611328, 0.03488956832885742, 0.0343205451965332, 0.032643070220947264, 0.033091583251953126, 0.03314611053466797, 0.03303504180908203, 0.03336102294921875, 0.03271462249755859, 0.03256524658203125, 0.032390113830566405, 0.03276569747924805, 0.032333248138427736, 0.032193344116210935, 0.03291718292236328, 0.03281951904296875, 0.03265459060668945, 0.032563072204589844, 0.032675743103027344, 0.03274646377563477, 0.0330399055480957, 0.03291555023193359, 0.03441907119750977, 0.032794624328613284, 0.033570816040039066, 0.03240454483032226, 0.032508129119873046, 0.03248611068725586, 0.03311312103271485, 0.03307734298706055, 0.03322560119628906, 0.03362815856933594, 0.03358310317993164, 0.033452030181884765, 0.03330867385864258, 0.0331038703918457]",tokens/s,30.52476799286155,,, 
4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,921.37472,704.512,0.0,301.989888,282.769408,s,1,8.04277490234375,8.04277490234375,0.0,8.04277490234375,8.04277490234375,8.04277490234375,8.04277490234375,[8.04277490234375],,kWh,2.0720848983319228e-05,2.2783673831543764e-06,6.733060941954694e-06,2.9732277308428298e-05,,MB,1366.675456,759.037952,0.0,341.835776,318.94528,s,14,0.2002993593215942,0.014307097094399587,8.06749770444019e-05,0.014297120094299317,0.01440555181503296,0.01443976788520813,0.014484113702774047,"[0.01417305564880371, 0.014395359992980958, 0.014326815605163573, 0.01431820774078369, 0.014268287658691406, 0.014409919738769532, 0.014273728370666504, 0.01430668830871582, 0.014238431930541992, 0.014243264198303223, 0.014495200157165527, 0.014287551879882813, 0.01422764778137207, 0.014335200309753418]",tokens/s,17893.217492751162,kWh,4.170974873333459e-07,4.599549312020797e-08,2.3557915671720435e-07,6.986721371707583e-07,tokens/kWh,366409344.784609,MB,1408.032768,784.203776,0.0,367.0016,318.94784,s,14,10.545158020019532,0.7532255728585378,0.003910232846172717,0.75265087890625,0.7593061340332031,0.7609104156494141,0.7612968292236328,"[0.7536522216796875, 0.7538075561523437, 0.7518518676757813, 0.7613934326171875, 0.7606503295898438, 0.7483592529296875, 0.7503121948242187, 0.7507894897460937, 0.7510004272460937, 0.756169677734375, 0.7534498901367187, 0.7554207153320313, 0.7490953369140625, 0.7492056274414063]",tokens/s,83.6402828981378,kWh,2.1787164770999728e-05,2.402769371045509e-06,8.188256947424533e-06,3.2378191089469776e-05,tokens/kWh,1945754.159826712,,s,882,10.538304902076717,0.011948191498953199,0.0002437617661121316,0.011897696018218995,0.012077350521087648,0.012219184350967407,0.01281525612831115,"[0.011692031860351563, 0.01204252815246582, 0.011908608436584473, 0.011903200149536133, 0.012060895919799804, 0.011860960006713868, 0.011899135589599609, 0.011876704216003418, 0.011962592124938965, 0.01215897560119629, 0.011917183876037598, 0.011890815734863282, 0.011902432441711425, 0.012107775688171387, 0.011956064224243165, 0.01191801643371582, 0.011884544372558594, 0.011898207664489745, 0.011903264045715333, 0.012038528442382812, 0.011907072067260742, 0.011917216300964355, 0.011914624214172363, 0.011879136085510254, 0.011894399642944337, 0.011923839569091797, 0.011989055633544922, 0.011979007720947265, 0.011927359580993653, 0.011917183876037598, 0.011907039642333984, 0.011904704093933105, 0.011885024070739747, 0.011911040306091308, 0.011923456192016601, 0.011933376312255859, 0.011868479728698731, 0.0118538236618042, 0.011939616203308105, 0.011913439750671388, 0.011933695793151856, 0.011921407699584961, 0.012056384086608887, 0.011862208366394043, 0.011866111755371094, 0.011859935760498047, 0.011921440124511719, 0.011886591911315919, 0.01187987232208252, 0.011901056289672852, 0.011892864227294922, 0.011933759689331054, 
0.012126463890075684, 0.012124159812927245, 0.012277759552001954, 0.012025856018066406, 0.012150655746459961, 0.012007552146911621, 0.012220224380493165, 0.012260767936706543, 0.012011872291564942, 0.012007103919982911, 0.011969408035278321, 0.011735615730285644, 0.011891039848327636, 0.011890560150146485, 0.011892352104187012, 0.011861727714538575, 0.011846431732177734, 0.011813216209411621, 0.011990880012512207, 0.011879743576049805, 0.011980928421020508, 0.011894656181335448, 0.011891200065612792, 0.011864095687866212, 0.011857888221740723, 0.011904447555541993, 0.011839776039123536, 0.011841535568237305, 0.011874591827392578, 0.011839679718017578, 0.011884160041809082, 0.01188425636291504, 0.012135168075561524, 0.011947168350219726, 0.012030528068542481, 0.011986975669860839, 0.011866080284118652, 0.0118538236618042, 0.011885727882385254, 0.011901023864746094, 0.011901344299316406, 0.011848031997680664, 0.01216102409362793, 0.012793824195861817, 0.013008352279663085, 0.012124320030212403, 0.01202732753753662, 0.011979743957519532, 0.011980799674987793, 0.011954175949096679, 0.012251487731933594, 0.011917280197143556, 0.01192899227142334, 0.011921376228332519, 0.011970591545104981, 0.011851136207580566, 0.01188150405883789, 0.012036191940307616, 0.011915040016174317, 0.011974656105041503, 0.012009471893310546, 0.011875391960144043, 0.011847840309143067, 0.011863871574401856, 0.011817952156066895, 0.01225113582611084, 0.011861599922180176, 0.01191977596282959, 0.011921407699584961, 0.011900927543640137, 0.01195638370513916, 0.01192124843597412, 0.011910816192626954, 0.011861632347106934, 0.011694080352783203, 0.01185974407196045, 0.011907648086547852, 0.011902015686035156, 0.011937472343444824, 0.011889472007751464, 0.011884320259094238, 0.01192524814605713, 0.011905311584472656, 0.01190278434753418, 0.011915648460388183, 0.011872032165527343, 0.01187161636352539, 0.011930368423461913, 0.011868255615234375, 0.012302207946777343, 0.011933504104614258, 0.011925472259521484, 0.011886112213134766, 0.011907999992370605, 0.011947104454040527, 0.011946656227111817, 0.011960512161254882, 0.012062784194946288, 0.011978400230407716, 0.011857664108276367, 0.011872832298278808, 0.011958304405212403, 0.011901023864746094, 0.011961983680725099, 0.011857888221740723, 0.01193513584136963, 0.011842464447021485, 0.011888480186462403, 0.011896896362304687, 0.011919455528259277, 0.011855456352233886, 0.011909536361694336, 0.011894399642944337, 0.011872159957885741, 0.01203865623474121, 0.011932928085327149, 0.011924192428588868, 0.012046303749084473, 0.011906944274902343, 0.011903136253356934, 0.011832863807678222, 0.011840127944946289, 0.01190608024597168, 0.011883328437805176, 0.01189094352722168, 0.011958016395568847, 0.012033280372619628, 0.01197708797454834, 0.011979167938232421, 0.01198915195465088, 0.01199459171295166, 0.012038111686706543, 0.012027263641357422, 0.011950528144836426, 0.011936223983764649, 0.01188684844970703, 0.011951647758483886, 0.011711135864257812, 0.011921407699584961, 0.011892736434936524, 0.011962207794189453, 0.012023488044738769, 0.012240639686584473, 0.012095295906066894, 0.012069631576538085, 0.012449024200439453, 0.011981247901916504, 0.012123616218566894, 0.011967488288879394, 0.011998687744140625, 0.011974656105041503, 0.011952672004699707, 0.011974016189575195, 0.01195852756500244, 0.011941887855529786, 0.011913599967956543, 0.011912639617919923, 0.011936575889587402, 0.011955967903137207, 0.011937888145446777, 0.011921088218688964, 0.012030176162719727, 
0.012138719558715821, 0.012146464347839355, 0.012175071716308593, 0.012674880027770997, 0.012247072219848634, 0.012374527931213379, 0.012199104309082031, 0.01208841609954834, 0.012052127838134766, 0.012053824424743653, 0.012464287757873536, 0.012040736198425293, 0.011993087768554688, 0.011982175827026367, 0.011950143814086914, 0.01194480037689209, 0.011962112426757812, 0.011986240386962891, 0.011982848167419433, 0.01195036792755127, 0.011988608360290528, 0.011938591957092286, 0.011927103996276856, 0.011905632019042968, 0.01186185646057129, 0.011935487747192382, 0.01196671962738037, 0.011950431823730468, 0.012367520332336426, 0.014336000442504883, 0.01290662384033203, 0.011906815528869628, 0.011874272346496581, 0.011855104446411132, 0.011908127784729003, 0.011932767868041993, 0.012131104469299317, 0.011961759567260742, 0.011622495651245117, 0.011834336280822753, 0.011873472213745117, 0.011921631813049316, 0.011876895904541015, 0.011904640197753906, 0.011858176231384278, 0.012155903816223144, 0.011958271980285644, 0.011940159797668458, 0.011934399604797363, 0.011862048149108887, 0.011895872116088868, 0.012032928466796875, 0.011889792442321778, 0.011883392333984374, 0.011861023902893066, 0.01224726390838623, 0.011946304321289063, 0.011934144020080566, 0.011948287963867188, 0.011921216011047363, 0.012070367813110352, 0.01192908763885498, 0.011881440162658692, 0.01185756778717041, 0.011878751754760742, 0.012084320068359375, 0.012125087738037109, 0.011866111755371094, 0.011843263626098633, 0.011833663940429687, 0.011827199935913087, 0.012457183837890625, 0.014330656051635741, 0.015462400436401368, 0.01469865608215332, 0.01203388786315918, 0.011980575561523437, 0.011854047775268555, 0.011837311744689941, 0.011900735855102538, 0.011835712432861328, 0.011851008415222169, 0.011860735893249512, 0.01185923194885254, 0.011866880416870116, 0.011891743659973145, 0.01188755226135254, 0.011894783973693847, 0.011820992469787598, 0.01183955192565918, 0.011831487655639648, 0.011773280143737793, 0.01214310359954834, 0.012109984397888183, 0.011928928375244141, 0.012014047622680664, 0.01193939208984375, 0.011926207542419433, 0.011910016059875489, 0.012016544342041016, 0.011915519714355468, 0.011742688179016114, 0.011864512443542481, 0.011883968353271485, 0.0118438720703125, 0.011858176231384278, 0.011776032447814942, 0.011882495880126954, 0.011902208328247071, 0.011840255737304687, 0.01185587215423584, 0.011863679885864257, 0.011919808387756348, 0.01185763168334961, 0.01197001552581787, 0.01186012840270996, 0.011819552421569825, 0.012113471984863282, 0.012366335868835449, 0.01184556770324707, 0.011817184448242188, 0.01181884765625, 0.011816608428955077, 0.011864416122436524, 0.011826272010803223, 0.011809120178222656, 0.011817343711853027, 0.01207750415802002, 0.011910911560058593, 0.01186406421661377, 0.011906975746154786, 0.011826496124267578, 0.01180678367614746, 0.012012031555175781, 0.011845888137817383, 0.011836959838867187, 0.01186246395111084, 0.011835455894470215, 0.011857151985168457, 0.011802463531494141, 0.011817824363708497, 0.011761440277099609, 0.011798751831054688, 0.011921407699584961, 0.011841535568237305, 0.011825023651123047, 0.011859359741210938, 0.011854080200195313, 0.011848352432250976, 0.011853983879089355, 0.011861599922180176, 0.011956416130065917, 0.011899999618530274, 0.011975456237792968, 0.01186201572418213, 0.012060895919799804, 0.011820704460144042, 0.01183302402496338, 0.01184607982635498, 0.011808575630187989, 0.011825087547302247, 0.011804927825927734, 0.011810815811157227, 
0.011841535568237305, 0.011667551994323731, 0.011827103614807129, 0.012191743850708007, 0.011884384155273438, 0.011950240135192872, 0.01204355239868164, 0.011836288452148437, 0.011818079948425294, 0.011887359619140624, 0.011905023574829102, 0.011873696327209473, 0.012290271759033203, 0.011935168266296387, 0.011957183837890625, 0.011938912391662598, 0.011973919868469239, 0.012003071784973144, 0.011935711860656738, 0.01192294406890869, 0.01183580780029297, 0.01196348762512207, 0.01190390396118164, 0.011818143844604492, 0.011839808464050293, 0.011835935592651368, 0.011789376258850098, 0.0118220157623291, 0.011843071937561036, 0.011882847785949707, 0.01184943962097168, 0.011816800117492676, 0.01183743953704834, 0.011833951950073243, 0.011902976036071777, 0.011908448219299317, 0.011860639572143554, 0.011925503730773926, 0.01189408016204834, 0.011888928413391113, 0.011837504386901855, 0.011833663940429687, 0.011911199569702149, 0.011874303817749024, 0.011865887641906738, 0.011855263710021973, 0.011831968307495118, 0.012014880180358886, 0.01186086368560791, 0.011871328353881836, 0.011845600128173829, 0.011850687980651855, 0.011859968185424804, 0.012001215934753418, 0.01192966365814209, 0.011916959762573242, 0.011915712356567383, 0.011890048027038575, 0.011985695838928223, 0.011981823921203612, 0.011926272392272948, 0.011932703971862793, 0.011934687614440918, 0.012001279830932618, 0.011705535888671875, 0.01189692783355713, 0.01188259220123291, 0.011833087921142578, 0.011819904327392577, 0.011849727630615235, 0.011911199569702149, 0.01190294361114502, 0.011933216094970703, 0.011977184295654296, 0.011945055961608888, 0.011845696449279785, 0.011851967811584473, 0.011870880126953125, 0.011937952041625977, 0.011855711936950683, 0.01186201572418213, 0.01179980754852295, 0.01191004753112793, 0.011969408035278321, 0.01232585620880127, 0.011900927543640137, 0.011912480354309082, 0.012018400192260742, 0.012058688163757323, 0.01197817611694336, 0.011962528228759766, 0.0119169921875, 0.012053312301635742, 0.011939680099487305, 0.011943936347961426, 0.012390560150146484, 0.012046175956726074, 0.01193331241607666, 0.011901503562927246, 0.011874112129211425, 0.011881471633911133, 0.011940608024597168, 0.011858240127563476, 0.011861503601074219, 0.011895584106445313, 0.011871040344238281, 0.011846431732177734, 0.011910688400268555, 0.011847711563110352, 0.011851424217224122, 0.011815744400024414, 0.011871487617492675, 0.011958175659179688, 0.011833632469177246, 0.011851519584655761, 0.011868960380554199, 0.011853887557983398, 0.011894687652587891, 0.011884991645812987, 0.011923104286193848, 0.011960320472717285, 0.011825152397155762, 0.011827199935913087, 0.0118538236618042, 0.011898176193237305, 0.011844287872314454, 0.011849280357360839, 0.01166425609588623, 0.011952159881591796, 0.011987008094787598, 0.011856672286987304, 0.01187497615814209, 0.01183574390411377, 0.011833344459533691, 0.011923456192016601, 0.011841535568237305, 0.01187775993347168, 0.011874303817749024, 0.011856512069702148, 0.011863391876220703, 0.0118503999710083, 0.011890687942504884, 0.011872256278991699, 0.01189087963104248, 0.011883904457092285, 0.011812640190124511, 0.011936415672302246, 0.011890687942504884, 0.011866111755371094, 0.011841535568237305, 0.011819007873535157, 0.011872256278991699, 0.011830656051635743, 0.011862272262573243, 0.011825440406799316, 0.011895999908447266, 0.011832192420959472, 0.011816287994384765, 0.011977408409118652, 0.012024191856384278, 0.011995807647705078, 0.011889439582824707, 0.011905376434326171, 
0.011934975624084472, 0.011867008209228515, 0.01183846378326416, 0.011825887680053711, 0.011796480178833007, 0.011876352310180664, 0.012084863662719726, 0.011925888061523438, 0.012197888374328614, 0.011964415550231934, 0.011872256278991699, 0.011838496208190918, 0.011821056365966797, 0.011881440162658692, 0.011847519874572754, 0.011814847946166992, 0.012113759994506836, 0.012700032234191895, 0.011900863647460937, 0.01194921588897705, 0.011882752418518066, 0.011926495552062988, 0.01197862434387207, 0.011907103538513184, 0.012443424224853516, 0.011929535865783691, 0.011855680465698243, 0.011620351791381836, 0.01185103988647461, 0.012315360069274903, 0.011886303901672363, 0.011949536323547363, 0.011862848281860352, 0.011860320091247559, 0.012065919876098632, 0.011846176147460937, 0.011909119606018067, 0.011974656105041503, 0.012021920204162598, 0.011819040298461914, 0.011916640281677247, 0.011883296012878419, 0.011871775627136231, 0.011870368003845215, 0.011894335746765137, 0.01182691192626953, 0.011827584266662597, 0.011849408149719238, 0.01204089641571045, 0.012224448204040528, 0.011959424018859863, 0.011932448387145996, 0.011905152320861817, 0.011878687858581543, 0.0120481595993042, 0.01191267204284668, 0.011927968025207519, 0.01184774398803711, 0.013144160270690918, 0.01191107177734375, 0.011904159545898437, 0.011866239547729493, 0.01194262409210205, 0.011883520126342773, 0.011872480392456055, 0.011886688232421875, 0.01189139175415039, 0.011888640403747559, 0.011820416450500488, 0.01184217643737793, 0.011859295845031738, 0.011799200057983399, 0.011833663940429687, 0.011844639778137206, 0.011838303565979005, 0.011877344131469727, 0.011875167846679687, 0.012480511665344238, 0.01296780776977539, 0.012475935935974122, 0.012116576194763183, 0.01213644790649414, 0.012052736282348632, 0.01199078369140625, 0.013180512428283692, 0.011975071907043456, 0.012094783782958984, 0.01192416000366211, 0.012025216102600097, 0.011881471633911133, 0.01214025592803955, 0.012024127960205078, 0.012037568092346192, 0.011983424186706544, 0.012060959815979004, 0.012039903640747071, 0.012123519897460938, 0.01210870361328125, 0.012320480346679687, 0.012267519950866699, 0.012171263694763184, 0.012041343688964843, 0.01205238437652588, 0.01203667163848877, 0.011997599601745606, 0.012055744171142578, 0.012006208419799805, 0.012015263557434082, 0.011980832099914551, 0.011940159797668458, 0.011919360160827636, 0.011891008377075196, 0.011849023818969727, 0.011899264335632324, 0.011919232368469238, 0.011810943603515625, 0.011824928283691406, 0.011852255821228027, 0.011965184211730958, 0.011920384407043457, 0.011888192176818848, 0.011835840225219727, 0.011884415626525879, 0.012038271903991699, 0.012156928062438965, 0.011884896278381348, 0.011842528343200683, 0.011868000030517578, 0.011855903625488282, 0.011799360275268555, 0.011876352310180664, 0.012034048080444336, 0.01188640022277832, 0.01185971164703369, 0.011831744194030762, 0.01186201572418213, 0.011992287635803223, 0.01187945556640625, 0.01188428783416748, 0.011941184043884277, 0.011905728340148925, 0.011905152320861817, 0.011851455688476563, 0.011874496459960937, 0.011902591705322266, 0.011916704177856445, 0.01183017635345459, 0.011984992027282714, 0.011886560440063476, 0.011888544082641601, 0.01188156795501709, 0.01192972755432129, 0.011850591659545898, 0.011694368362426757, 0.011874303817749024, 0.01206991958618164, 0.011904159545898437, 0.01200928020477295, 0.011878591537475586, 0.011856736183166504, 0.011854880332946778, 0.011856927871704101, 0.011877535820007324, 
0.01190067195892334, 0.01190886402130127, 0.012005632400512695, 0.012018879890441895, 0.012059359550476075, 0.012025856018066406, 0.012578847885131837, 0.012095135688781738, 0.012386688232421875, 0.012183584213256837, 0.012106080055236817, 0.01204963207244873, 0.01192784023284912, 0.012005536079406739, 0.011868255615234375, 0.011963583946228028, 0.011854559898376464, 0.011992992401123047, 0.012199423789978027, 0.012047264099121094, 0.011910783767700195, 0.011898464202880859, 0.011895263671875, 0.011954400062561035, 0.011859328269958495, 0.01194758415222168, 0.011834048271179199, 0.011859519958496094, 0.011860032081604004, 0.011858783721923827, 0.01189363193511963, 0.011844415664672852, 0.01185587215423584, 0.01181430435180664, 0.0118156156539917, 0.011866016387939453, 0.011816351890563966, 0.01194380760192871, 0.011856831550598145, 0.011897952079772948, 0.012546815872192384, 0.012182656288146973, 0.012481344223022462, 0.012572928428649903, 0.012324128150939941, 0.011970560073852539, 0.011870688438415527, 0.011980287551879883, 0.011919360160827636, 0.01185638427734375, 0.012017024040222169, 0.011813632011413574, 0.011843456268310546, 0.011585536003112793, 0.011890720367431641, 0.011900896072387696, 0.01186201572418213, 0.011872256278991699, 0.01184768009185791, 0.011816191673278808, 0.011836159706115722, 0.01187987232208252, 0.01189743995666504, 0.011888192176818848, 0.011835871696472168, 0.011854816436767578, 0.011842528343200683, 0.01186131191253662, 0.011813568115234376, 0.011894720077514648, 0.011838944435119628, 0.011786848068237306, 0.011810527801513673, 0.011834848403930663, 0.011969408035278321, 0.011865599632263184, 0.011805343627929687, 0.01184540843963623, 0.011827424049377441, 0.01185155200958252, 0.011878335952758789, 0.011882559776306152, 0.011845312118530273, 0.011888287544250488, 0.011963040351867675, 0.011949888229370117, 0.011876543998718262, 0.011972384452819824, 0.011996416091918946, 0.011910112380981445, 0.01188003158569336, 0.011894335746765137, 0.01182969570159912, 0.011852191925048829, 0.011882495880126954, 0.01191107177734375, 0.011900287628173828, 0.01199177646636963, 0.011882623672485352, 0.011867456436157227, 0.011894432067871094, 0.011862431526184082, 0.012167936325073242, 0.012075967788696289, 0.011940671920776367, 0.011982687950134277, 0.011886752128601075, 0.011907072067260742, 0.011901056289672852, 0.011835264205932618, 0.011925503730773926, 0.011893024444580078, 0.011860896110534667, 0.01187497615814209, 0.011849184036254883, 0.011887200355529785, 0.01166534423828125, 0.01191868782043457, 0.011839648246765137, 0.012132672309875489, 0.011874496459960937, 0.011960479736328125, 0.011868032455444336, 0.011888607978820801, 0.011871423721313477, 0.011862848281860352, 0.01186201572418213, 0.011843584060668945, 0.011827391624450683, 0.01190675163269043, 0.011897024154663085, 0.01190659236907959, 0.011856287956237792, 0.011895936012268066, 0.011861087799072266, 0.011806495666503906, 0.011814911842346192, 0.011966560363769531, 0.011827456474304198, 0.011851327896118164, 0.01184607982635498, 0.01186575984954834, 0.011851840019226074, 0.011836447715759278, 0.011834336280822753, 0.011827327728271485, 0.011806207656860352, 0.01177836799621582, 0.01184563159942627, 0.011833696365356446, 0.011861663818359375, 0.011857919692993164, 0.011861632347106934, 0.011831007957458495, 0.011815903663635254, 0.011806400299072265, 0.011810624122619628, 0.011807040214538574, 0.011847200393676758, 0.011809056282043456, 0.011784607887268067, 0.01182479953765869, 0.011822912216186523, 
0.011847871780395508, 0.011840895652770996, 0.011834336280822753, 0.011920639991760254, 0.011893152236938476, 0.011894783973693847, 0.011878399848937989, 0.011941535949707031, 0.01223305606842041, 0.011988991737365723, 0.012142592430114747, 0.012299936294555664, 0.012131872177124024, 0.011959103584289551, 0.011923359870910645, 0.011902751922607422]",tokens/s,83.69467463654325,,, 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1527.447552,1868.43136,0.0,1465.909248,1358.169088,s,1,8.7332041015625,8.7332041015625,0.0,8.7332041015625,8.7332041015625,8.7332041015625,8.7332041015625,[8.7332041015625],,kWh,4.866332461665479e-05,5.360478907909336e-06,1.9157515325995966e-05,7.31813188505601e-05,,MB,1637.552128,1889.40288,0.0,1472.200704,1356.544512,s,10,0.4490383415222168,0.044903834152221676,0.0001865241058669827,0.04487147331237793,0.045151627731323245,0.04521389446258545,0.045263707847595216,"[0.045276161193847655, 0.04501808166503906, 0.044872798919677735, 0.04464860916137695, 0.04464467239379883, 0.04513779067993164, 0.04484422302246094, 0.04487267303466797, 0.04485305786132812, 0.04487027359008789]",tokens/s,5701.072187559156,kWh,1.364298439719524e-06,1.50458461873788e-07,9.033691027569729e-07,2.4181260043502846e-06,tokens/kWh,105867105.16302623,MB,1645.367296,1889.40288,0.0,1472.200704,1409.94816,s,10,11.410097534179688,1.1410097534179688,0.005375067657347091,1.1386219482421875,1.1471005126953124,1.1494556518554688,1.1513397631835938,"[1.151810791015625, 1.1464498291015626, 1.142974609375, 1.1378150634765625, 1.1345040283203125, 1.1394288330078124, 1.1362237548828125, 1.136862548828125, 1.1465771484375, 1.137450927734375]",tokens/s,55.21425194769756,kWh,3.3191407275693e-05,3.6605711932750538e-06,1.6121588961644656e-05,5.297356743061271e-05,tokens/kWh,1189272.3683848626,,s,630,11.404658069610592,0.018102631856524755,0.00029441251087913194,0.018042112350463865,0.01831179256439209,0.018441910362243654,0.019191217498779306,"[0.01823244857788086, 0.018006912231445314, 0.01815123176574707, 0.018033887863159178, 0.01813763236999512, 0.01805766487121582, 0.018275520324707032, 0.018039615631103515, 0.01821414375305176, 0.01807846450805664, 0.018070592880249023, 0.018204992294311523, 0.02099580764770508, 0.01925622367858887, 0.018413568496704103, 0.018546688079833985, 0.01837628746032715, 0.018255712509155274, 0.01826464080810547, 0.018124799728393554, 0.018122751235961913, 0.018106367111206053, 0.018173248291015624, 0.018173791885375976, 0.018081632614135743, 0.01803980827331543, 0.018098047256469726, 0.018102144241333006, 0.018733247756958008, 0.018152576446533203, 0.018289567947387696, 0.018280479431152345, 0.018327552795410155, 0.018370559692382812, 0.01864089584350586, 0.018685951232910156, 0.018051071166992186, 0.01799782371520996, 0.018028383255004884, 0.018101503372192383, 0.018096607208251955, 0.018573087692260744, 0.01811840057373047, 0.018137535095214843, 0.018120960235595705, 0.018019968032836915, 0.018134815216064453, 0.018064159393310547, 0.018401311874389648, 0.01827756881713867, 0.018234079360961913, 0.018194143295288085, 0.018098175048828127, 0.018051456451416016, 0.018118656158447266, 0.018144960403442382, 0.01823356819152832, 0.018401376724243163, 0.018409439086914062, 0.01839660835266113, 
0.018323968887329102, 0.018317407608032226, 0.01839308738708496, 0.01897216033935547, 0.018878047943115234, 0.018514848709106444, 0.018334911346435546, 0.018508607864379883, 0.018224191665649415, 0.01813724708557129, 0.018633216857910157, 0.018255168914794923, 0.018282976150512695, 0.017977855682373048, 0.017931264877319338, 0.018121728897094725, 0.018003936767578124, 0.018836896896362306, 0.018046688079833985, 0.017971296310424805, 0.01799660873413086, 0.017962112426757812, 0.017951135635375978, 0.017910240173339843, 0.017941600799560548, 0.018711456298828123, 0.018505632400512697, 0.018071647644042968, 0.017920000076293945, 0.01802444839477539, 0.017994752883911135, 0.017861631393432616, 0.01804083251953125, 0.017979391098022462, 0.02023744010925293, 0.020236576080322265, 0.01813465690612793, 0.018363359451293946, 0.018005279541015624, 0.01796905517578125, 0.0180437126159668, 0.017949760437011717, 0.01796393585205078, 0.01795075225830078, 0.017913856506347657, 0.01803011131286621, 0.018008544921875, 0.017985376358032226, 0.01793244743347168, 0.01793142318725586, 0.017972063064575196, 0.01799504089355469, 0.01795145606994629, 0.018030208587646486, 0.01797318458557129, 0.018014656066894532, 0.017987583160400392, 0.017960351943969728, 0.018073823928833006, 0.018155839920043944, 0.018391103744506837, 0.018135040283203126, 0.01834579277038574, 0.018008256912231447, 0.01795686340332031, 0.01809187126159668, 0.018288927078247072, 0.018182144165039063, 0.018288639068603514, 0.01810348892211914, 0.018049375534057617, 0.018040895462036133, 0.018016672134399413, 0.018167808532714845, 0.01804902458190918, 0.01791139221191406, 0.01815999984741211, 0.018105951309204102, 0.018282943725585938, 0.018175743103027345, 0.018166015625, 0.018888608932495117, 0.018497087478637694, 0.01902569580078125, 0.018345951080322265, 0.018244384765625, 0.018069503784179687, 0.018296607971191405, 0.01806972885131836, 0.017952768325805665, 0.0180633602142334, 0.017962879180908203, 0.018050399780273438, 0.01803753662109375, 0.018051071166992186, 0.018136512756347655, 0.017962751388549806, 0.01801024055480957, 0.018137792587280273, 0.018028543472290038, 0.018035743713378908, 0.018052064895629882, 0.018120704650878908, 0.018014047622680666, 0.01815977668762207, 0.018076992034912108, 0.01812131118774414, 0.018011680603027345, 0.018072128295898438, 0.01792527961730957, 0.018428159713745118, 0.01838345527648926, 0.018358272552490236, 0.018331647872924805, 0.018183391571044923, 0.018170207977294923, 0.018116128921508788, 0.01801215934753418, 0.017987871170043947, 0.017926719665527342, 0.01796905517578125, 0.01799184036254883, 0.018118656158447266, 0.018116128921508788, 0.018040800094604494, 0.017892927169799806, 0.018002784729003907, 0.017993824005126953, 0.017928192138671875, 0.018114591598510744, 0.018018304824829103, 0.018142751693725586, 0.018054752349853515, 0.018004735946655272, 0.018139263153076172, 0.018255872726440428, 0.018128288269042968, 0.017940288543701173, 0.018064159393310547, 0.017946559906005858, 0.01795439910888672, 0.018020671844482423, 0.017952287673950195, 0.01791209602355957, 0.017987199783325195, 0.01807017517089844, 0.017976768493652345, 0.017997695922851564, 0.017969919204711915, 0.017977344512939454, 0.018001920700073244, 0.017988832473754882, 0.017961759567260743, 0.017985471725463865, 0.01790777587890625, 0.017936384201049805, 0.01813811111450195, 0.018250751495361327, 0.018114431381225586, 0.0180546875, 0.017983264923095703, 0.018084480285644532, 0.018134496688842774, 0.01821552085876465, 
0.01819865608215332, 0.01824563217163086, 0.018392351150512694, 0.018108800888061525, 0.018169631958007814, 0.01799331283569336, 0.017950784683227538, 0.017990560531616212, 0.01816166305541992, 0.017999296188354493, 0.018032447814941406, 0.01821772766113281, 0.018341888427734376, 0.018040416717529296, 0.018047391891479494, 0.017907680511474608, 0.01790979194641113, 0.017972448348999023, 0.017875072479248046, 0.01800668716430664, 0.018089664459228515, 0.01803500747680664, 0.01805308723449707, 0.018071584701538086, 0.01799728012084961, 0.017967647552490234, 0.017926111221313476, 0.018057247161865235, 0.01987993621826172, 0.01839849662780762, 0.01812144088745117, 0.01805267143249512, 0.017989183425903322, 0.017910047531127928, 0.017952543258666992, 0.01800409507751465, 0.017889984130859377, 0.017810432434082032, 0.01787392044067383, 0.017934335708618163, 0.01787494468688965, 0.01778278350830078, 0.01805251121520996, 0.018074207305908203, 0.017878240585327148, 0.017815359115600587, 0.01804591941833496, 0.017930112838745117, 0.017875072479248046, 0.017862655639648437, 0.01780940818786621, 0.017868799209594728, 0.017942527770996093, 0.018290687561035156, 0.018457984924316405, 0.01794316864013672, 0.01792355155944824, 0.01788368034362793, 0.017989376068115234, 0.017821952819824217, 0.01795395278930664, 0.01787376022338867, 0.017948160171508788, 0.0179368953704834, 0.018020351409912108, 0.018311168670654295, 0.018145280838012694, 0.01805244827270508, 0.017941152572631836, 0.017804447174072265, 0.017929056167602538, 0.017960800170898437, 0.017968992233276367, 0.018048608779907226, 0.0179136962890625, 0.01796108818054199, 0.017969919204711915, 0.01793395233154297, 0.01789148712158203, 0.017881311416625977, 0.017854400634765625, 0.01775334358215332, 0.01821779251098633, 0.018109792709350585, 0.018278079986572264, 0.017968095779418946, 0.017931903839111328, 0.017877119064331055, 0.017999616622924805, 0.017873056411743166, 0.017981792449951173, 0.018098943710327147, 0.018010112762451173, 0.01807360076904297, 0.01797324752807617, 0.01841152000427246, 0.01795686340332031, 0.01811020851135254, 0.01817215919494629, 0.018264064788818358, 0.018204063415527345, 0.018139743804931642, 0.018077600479125978, 0.018134271621704102, 0.018013023376464845, 0.01817804718017578, 0.018266111373901366, 0.018593952178955077, 0.018831199645996093, 0.01871798324584961, 0.01844268798828125, 0.01828803253173828, 0.0180765438079834, 0.018075647354125975, 0.017939775466918946, 0.018004671096801757, 0.017889280319213868, 0.017896480560302734, 0.01806230354309082, 0.018261983871459962, 0.017949888229370117, 0.01790652847290039, 0.017905664443969727, 0.018149055480957032, 0.01810259246826172, 0.018030208587646486, 0.018026079177856445, 0.017978143692016602, 0.018050399780273438, 0.017907392501831054, 0.017971168518066405, 0.017860960006713868, 0.01793401527404785, 0.01802454376220703, 0.01796748733520508, 0.018186176300048828, 0.018004255294799806, 0.018286880493164064, 0.01814860725402832, 0.017963167190551757, 0.01796352005004883, 0.01798873519897461, 0.01806230354309082, 0.01802649688720703, 0.017924095153808595, 0.017917951583862304, 0.017876991271972655, 0.018092031478881835, 0.017847776412963867, 0.017868799209594728, 0.01791644859313965, 0.017926143646240233, 0.01795814323425293, 0.017891359329223634, 0.018607391357421874, 0.018232032775878905, 0.018261568069458008, 0.018006464004516602, 0.017924095153808595, 0.018067455291748045, 0.018069503784179687, 0.018087551116943358, 0.018042335510253905, 0.01802332878112793, 
0.017913663864135742, 0.01790995216369629, 0.018081792831420897, 0.01790342330932617, 0.018006208419799805, 0.017888288497924804, 0.01781449508666992, 0.01787651252746582, 0.01817670440673828, 0.017915071487426756, 0.017965375900268556, 0.018000160217285156, 0.017868671417236328, 0.017946752548217773, 0.01796505546569824, 0.017963008880615236, 0.017915327072143553, 0.017927967071533202, 0.017918752670288085, 0.017976480484008787, 0.01789833641052246, 0.017849599838256836, 0.017922815322875978, 0.01785651206970215, 0.018003200531005858, 0.01789785575866699, 0.01839689636230469, 0.01811689567565918, 0.01804844856262207, 0.017990591049194336, 0.01792403221130371, 0.0179234561920166, 0.018010143280029298, 0.017846784591674804, 0.017884544372558594, 0.018024736404418946, 0.0180882568359375, 0.01790540885925293, 0.017953216552734377, 0.017887231826782226, 0.018150720596313476, 0.017902271270751953, 0.017975296020507812, 0.018102272033691406, 0.018141023635864256, 0.01813929557800293, 0.018121759414672853, 0.01809507179260254, 0.01812396812438965, 0.01797587203979492, 0.01799996757507324, 0.01809833526611328, 0.01903206443786621, 0.018186176300048828, 0.018088064193725585, 0.018100927352905274, 0.018165760040283203, 0.017997087478637694, 0.01786953544616699, 0.017923391342163086, 0.0179299201965332, 0.0181495361328125, 0.01806217575073242, 0.017917343139648437, 0.01797372817993164, 0.01805120086669922, 0.017985439300537108, 0.018130111694335937, 0.018135456085205077, 0.017967615127563476, 0.01798454475402832, 0.017947391510009767, 0.018038528442382813, 0.017967584609985352, 0.017872896194458008, 0.017931295394897462, 0.01792508888244629, 0.018061311721801757, 0.01804287910461426, 0.017954816818237306, 0.01803468894958496, 0.01786419105529785, 0.01788688087463379, 0.018180959701538085, 0.01813609504699707, 0.017966047286987304, 0.01786675262451172, 0.017936384201049805, 0.017935840606689454, 0.018024511337280273, 0.017912288665771485, 0.017963008880615236, 0.01785817527770996, 0.017869184494018555, 0.017854463577270507, 0.018230976104736327, 0.01820044708251953, 0.017895872116088868, 0.018056512832641602, 0.018028543472290038, 0.017904319763183595, 0.018059263229370116, 0.018089664459228515, 0.018077760696411132, 0.018303232192993166, 0.01812665557861328, 0.018129087448120116, 0.018226272583007814, 0.018200672149658204, 0.018234176635742186, 0.018274303436279296, 0.01818582344055176, 0.018052928924560546, 0.018159872055053712, 0.018040767669677736, 0.01808016014099121, 0.01836031913757324, 0.01803664016723633, 0.018042144775390626, 0.017981855392456055, 0.018161184310913087, 0.018013055801391602, 0.01797324752807617, 0.018110624313354494, 0.018069343566894533, 0.018116607666015624, 0.018042079925537108, 0.01792073631286621, 0.018029632568359374, 0.01804800033569336, 0.018284351348876952, 0.018288288116455078, 0.01826870346069336, 0.018404672622680664, 0.018159839630126955, 0.01817238426208496, 0.018355840682983397, 0.018436479568481445, 0.01841971206665039, 0.018272287368774415, 0.018147296905517578, 0.01814076805114746, 0.018448799133300782, 0.018226432800292968, 0.01844095993041992, 0.018101600646972655, 0.018094751358032228, 0.01803651237487793, 0.01804265594482422, 0.01800441551208496, 0.018206720352172853, 0.018010112762451173, 0.018103424072265624, 0.017927040100097658, 0.017976863861083985, 0.017943008422851563, 0.017969152450561524, 0.018257823944091797, 0.020266271591186522, 0.020783935546875, 0.01815670394897461, 0.018096256256103515, 0.01803446388244629, 0.018018720626831054, 
0.017960927963256837, 0.018489919662475585, 0.018008064270019532, 0.017901567459106444, 0.017938432693481447, 0.017969152450561524, 0.018104320526123048, 0.01798908805847168, 0.0180598087310791, 0.017954463958740233, 0.018001407623291017, 0.01804547119140625, 0.018149696350097656, 0.0179169921875, 0.017993824005126953, 0.01807155227661133, 0.018081792831420897, 0.01802364730834961, 0.018042976379394532, 0.017985408782958984, 0.01802444839477539, 0.01793516731262207, 0.01802566337585449, 0.01794108772277832, 0.017963232040405272, 0.017999391555786132, 0.018038623809814452, 0.018166400909423827, 0.01819254493713379, 0.01808131217956543, 0.018032768249511718, 0.01794272041320801, 0.017887136459350587, 0.017904735565185546, 0.01794918441772461, 0.017944223403930665, 0.018064224243164062, 0.018069503784179687, 0.01789132881164551, 0.018041887283325196, 0.01797007942199707, 0.01805523109436035, 0.01797052764892578, 0.018021055221557617, 0.017950239181518553, 0.01805561637878418, 0.01817804718017578, 0.01796505546569824, 0.01829792022705078, 0.018180416107177733, 0.01808857536315918, 0.01802649688720703, 0.018053119659423827, 0.01814668846130371, 0.018092672348022462, 0.018028543472290038, 0.017964736938476562, 0.017983808517456054, 0.0180633602142334, 0.01818623924255371, 0.01796816062927246, 0.01804591941833496, 0.017997535705566406, 0.018014495849609374, 0.018132511138916015, 0.018141311645507814, 0.017964576721191405, 0.018041664123535157, 0.01807526397705078, 0.018182527542114257, 0.018165760040283203, 0.01807360076904297, 0.0180633602142334, 0.017967103958129883, 0.018035839080810547, 0.018082687377929688, 0.01810220718383789, 0.018113920211791992]",tokens/s,55.2405864476313,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1575.747584,1553.85856,0.0,1168.113664,1154.613248,s,1,8.2784638671875,8.2784638671875,0.0,8.2784638671875,8.2784638671875,8.2784638671875,8.2784638671875,[8.2784638671875],,kWh,3.409349801663666e-05,3.753094556607497e-06,1.1678620453980937e-05,4.952521302722509e-05,,MB,1540.665344,1784.54528,0.0,1369.440256,1323.44832,s,10,0.7626886367797853,0.07626886367797851,0.0007251593590923473,0.07627260589599609,0.07685870513916016,0.07718421630859375,0.07744462524414063,"[0.07750972747802734, 0.07646415710449218, 0.07594127655029297, 0.07678636932373047, 0.07588575744628906, 0.076744384765625, 0.07595843505859375, 0.07464470672607422, 0.07667276763916016, 0.0760810546875]",tokens/s,3356.5466647160256,kWh,2.362456867203897e-06,2.6053597870083755e-07,1.5624071639036026e-06,4.185400009808337e-06,tokens/kWh,61165002.00699408,MB,1544.589312,1805.5168,0.0,1390.411776,1377.233408,s,10,11.789075561523438,1.1789075561523439,0.004333080701251483,1.1797572631835938,1.1836410034179687,1.1847605651855468,1.1856562145996092,"[1.179808837890625, 1.17135400390625, 1.185880126953125, 1.1797056884765624, 1.17297509765625, 1.1762288818359374, 
1.1770472412109374, 1.18014990234375, 1.1833922119140625, 1.1825335693359376]",tokens/s,53.43930460978303,kWh,3.4096211346121336e-05,3.760394571868619e-06,1.8331969862693308e-05,5.6188575780683255e-05,tokens/kWh,1121224.3614414304,,s,630,11.786645538330088,0.018708961171952504,0.0005007315480271485,0.01861628818511963,0.018991568183898928,0.019175852489471436,0.020027870216369634,"[0.019041376113891603, 0.019101760864257813, 0.018813791275024413, 0.019369983673095705, 0.019177120208740236, 0.018984928131103515, 0.01898124885559082, 0.018712703704833984, 0.018546112060546877, 0.018558752059936522, 0.01862928009033203, 0.018511743545532228, 0.01858572769165039, 0.018661376953125, 0.0186079044342041, 0.01865260887145996, 0.01874959945678711, 0.01900931167602539, 0.018699104309082032, 0.018663423538208008, 0.018669567108154296, 0.01848748779296875, 0.018640703201293945, 0.018780160903930664, 0.018562431335449218, 0.01862835121154785, 0.018602880477905273, 0.018647039413452148, 0.018711904525756835, 0.018741920471191408, 0.018573280334472655, 0.01870627212524414, 0.018688032150268555, 0.018939456939697265, 0.01874390411376953, 0.01865727996826172, 0.01862246322631836, 0.018814207077026367, 0.018818944931030274, 0.01860083198547363, 0.01861631965637207, 0.01859347152709961, 0.018628927230834962, 0.018703359603881836, 0.01868060874938965, 0.018560287475585937, 0.01884457588195801, 0.018912479400634764, 0.019839040756225584, 0.01967136001586914, 0.018703071594238282, 0.018601696014404298, 0.01869004821777344, 0.018495231628417968, 0.018562847137451172, 0.01843164825439453, 0.018707263946533204, 0.01858121681213379, 0.01851968002319336, 0.018408096313476563, 0.018395135879516602, 0.01835212707519531, 0.018392736434936524, 0.019082048416137695, 0.018870143890380858, 0.018788480758666994, 0.018775583267211914, 0.018626367568969727, 0.018634431838989256, 0.018553823471069337, 0.01848700714111328, 0.01846505546569824, 0.018528160095214845, 0.018538591384887695, 0.01846086311340332, 0.018447263717651367, 0.01845910453796387, 0.018423967361450196, 0.018469152450561525, 0.018474016189575195, 0.0184138240814209, 0.018461408615112303, 0.018468767166137694, 0.01853366470336914, 0.018779136657714843, 0.018527904510498048, 0.01852342414855957, 0.018408319473266602, 0.018562528610229494, 0.018579296112060547, 0.018456575393676757, 0.018346687316894532, 0.018485248565673826, 0.018410528182983398, 0.018793216705322267, 0.018555103302001955, 0.018503679275512695, 0.018458879470825196, 0.018573055267333983, 0.018525856018066406, 0.01864067268371582, 0.018532543182373046, 0.018548704147338866, 0.018630495071411134, 0.018861663818359374, 0.018596832275390624, 0.019092992782592775, 0.018617984771728515, 0.018600704193115235, 0.018642080307006835, 0.018537408828735353, 0.018658336639404298, 0.018678079605102538, 0.018649824142456056, 0.018525568008422852, 0.018645599365234376, 0.018549951553344726, 0.018690879821777345, 0.01862246322631836, 0.018669567108154296, 0.01879449653625488, 0.01863465690612793, 0.018556703567504884, 0.018571584701538087, 0.01854080009460449, 0.018577152252197266, 0.019135583877563478, 0.01917430305480957, 0.018916799545288087, 0.018772544860839843, 0.018564287185668944, 0.018765888214111327, 0.018598112106323242, 0.018772319793701173, 0.01868204879760742, 0.01864419174194336, 0.01869228744506836, 0.018829919815063476, 0.01857356834411621, 0.018697984695434572, 0.01861631965637207, 0.018373823165893553, 0.018448640823364258, 0.01841619110107422, 0.018397184371948243, 0.01846643257141113, 
0.018513792037963866, 0.018432512283325195, 0.018679712295532228, 0.019347551345825196, 0.018638240814208985, 0.018624704360961915, 0.01847542381286621, 0.018865375518798827, 0.018873119354248048, 0.018736223220825195, 0.01859881591796875, 0.018687135696411134, 0.018743392944335937, 0.019152639389038086, 0.018785375595092774, 0.018861759185791017, 0.018935007095336916, 0.018734079360961914, 0.01864908790588379, 0.01854025650024414, 0.018490848541259767, 0.01876255989074707, 0.018530303955078126, 0.01858127975463867, 0.01847318458557129, 0.018485248565673826, 0.018534400939941405, 0.018464767456054687, 0.018435935974121093, 0.01850998306274414, 0.018551904678344725, 0.01855580711364746, 0.018541664123535157, 0.01942211151123047, 0.028486719131469728, 0.019104703903198243, 0.018513120651245118, 0.01846556854248047, 0.018415615081787108, 0.018448320388793946, 0.018556320190429687, 0.01842857551574707, 0.01848320007324219, 0.019361600875854493, 0.019020288467407227, 0.01940447998046875, 0.018993152618408202, 0.018937856674194335, 0.019045759201049804, 0.019044992446899413, 0.01877587127685547, 0.01888684844970703, 0.018907136917114258, 0.018782207489013672, 0.018765823364257812, 0.018873727798461915, 0.018815168380737303, 0.018737600326538085, 0.018968576431274413, 0.018662975311279296, 0.01982099151611328, 0.018538335800170898, 0.018665504455566407, 0.01842188835144043, 0.0184586238861084, 0.01898624038696289, 0.01850419235229492, 0.018448640823364258, 0.01848320007324219, 0.01847318458557129, 0.018494527816772462, 0.01849331283569336, 0.018502399444580077, 0.018621536254882814, 0.01862112045288086, 0.018534719467163088, 0.018555904388427736, 0.01843302345275879, 0.02004991912841797, 0.018458240509033202, 0.018527839660644533, 0.01847555160522461, 0.018505983352661133, 0.01852787208557129, 0.01864716720581055, 0.018546943664550782, 0.018509727478027344, 0.01845369529724121, 0.018454431533813476, 0.018561376571655273, 0.018554655075073243, 0.018522335052490235, 0.01848182487487793, 0.018575263977050782, 0.018749536514282225, 0.018601728439331056, 0.019149055480957033, 0.018612064361572266, 0.01851817512512207, 0.018472864151000978, 0.01989846420288086, 0.018583200454711915, 0.018516319274902344, 0.01847644805908203, 0.018431583404541017, 0.018570016860961915, 0.019163999557495117, 0.018869184494018556, 0.01867366409301758, 0.01880268859863281, 0.018767551422119142, 0.018577024459838866, 0.018510528564453125, 0.018433696746826173, 0.01839958381652832, 0.018429792404174805, 0.018458175659179687, 0.018446399688720704, 0.018416288375854493, 0.018401151657104493, 0.018345344543457032, 0.018516607284545898, 0.018524160385131837, 0.01841152000427246, 0.018493408203125, 0.018470943450927733, 0.018405344009399412, 0.018630943298339843, 0.01860323143005371, 0.018466367721557617, 0.01845756721496582, 0.018306207656860352, 0.018389535903930665, 0.01839334487915039, 0.018538272857666016, 0.01854902458190918, 0.018577568054199217, 0.01843388748168945, 0.01841542434692383, 0.018453760147094725, 0.018478015899658203, 0.01844134330749512, 0.018369407653808595, 0.01840496063232422, 0.018469120025634767, 0.018591487884521484, 0.018846111297607424, 0.018820863723754883, 0.018972511291503905, 0.01892915153503418, 0.018989696502685546, 0.018749727249145507, 0.01876527976989746, 0.0188089599609375, 0.018768032073974608, 0.018774272918701172, 0.018874368667602538, 0.018692096710205077, 0.01863580894470215, 0.018653472900390624, 0.018850496292114258, 0.018910655975341795, 0.018808576583862306, 0.01896531105041504, 
0.01883900833129883, 0.018704799652099608, 0.018728672027587892, 0.018641056060791014, 0.018521760940551756, 0.018919231414794922, 0.018815200805664064, 0.018903072357177735, 0.018627456665039063, 0.018720767974853517, 0.018573535919189452, 0.018462623596191406, 0.018455327987670897, 0.01883456039428711, 0.018779008865356446, 0.019185663223266602, 0.018712575912475587, 0.019664896011352538, 0.01866547203063965, 0.018887968063354493, 0.018701183319091798, 0.018527711868286133, 0.01867011260986328, 0.01872265625, 0.018520063400268554, 0.01879363250732422, 0.018564128875732423, 0.018478912353515627, 0.018453567504882813, 0.018668479919433593, 0.01855897521972656, 0.018587583541870116, 0.018462432861328124, 0.018534303665161133, 0.018569664001464845, 0.018740991592407226, 0.018841856002807616, 0.018550527572631835, 0.018456415176391603, 0.01857571220397949, 0.018432064056396483, 0.019635456085205078, 0.01857600021362305, 0.01850774383544922, 0.018706207275390626, 0.018533023834228515, 0.018554208755493164, 0.0185380802154541, 0.01845327949523926, 0.018465984344482423, 0.018473791122436523, 0.018519519805908203, 0.01896623992919922, 0.018658111572265625, 0.01858937644958496, 0.01855062484741211, 0.01846953582763672, 0.0187509765625, 0.018677152633666993, 0.018590431213378906, 0.0186431999206543, 0.01861625671386719, 0.01869811248779297, 0.01877734375, 0.01873574447631836, 0.018516223907470705, 0.01856716728210449, 0.01860758399963379, 0.019161088943481445, 0.018932960510253907, 0.018780384063720703, 0.019165855407714844, 0.018689727783203124, 0.01867523193359375, 0.01863075256347656, 0.018684768676757814, 0.018480384826660156, 0.018514432907104493, 0.018505727767944336, 0.018689695358276366, 0.01862895965576172, 0.018499584197998048, 0.018497535705566406, 0.018421760559082033, 0.018478080749511717, 0.018512895584106445, 0.01859119987487793, 0.01854899215698242, 0.018608224868774413, 0.01848953628540039, 0.018579328536987304, 0.01864908790588379, 0.018630271911621095, 0.018541055679321287, 0.01857494354248047, 0.019333375930786132, 0.019066368103027344, 0.018905759811401367, 0.020117023468017577, 0.018998847961425782, 0.01883228874206543, 0.018745344161987306, 0.018652799606323243, 0.01862598419189453, 0.01852934455871582, 0.01845235252380371, 0.018431999206542968, 0.01842131233215332, 0.01835379219055176, 0.018309951782226563, 0.018381856918334962, 0.018471904754638672, 0.018464767456054687, 0.018615680694580077, 0.018670207977294923, 0.018472959518432617, 0.018456575393676757, 0.01899929618835449, 0.018531776428222655, 0.0185861759185791, 0.01855897521972656, 0.01837251281738281, 0.018536544799804686, 0.018517919540405273, 0.019973888397216796, 0.01867513656616211, 0.018925983428955077, 0.018469375610351564, 0.018896383285522463, 0.018573823928833007, 0.018714624404907225, 0.019019775390625, 0.01868185615539551, 0.02110054397583008, 0.018778112411499022, 0.01867068862915039, 0.018678688049316407, 0.018579456329345705, 0.01879971122741699, 0.01873366355895996, 0.01864131164550781, 0.018524160385131837, 0.018560928344726564, 0.018625696182250975, 0.018620447158813478, 0.01892230415344238, 0.019339263916015623, 0.019035871505737306, 0.018877920150756837, 0.01877484893798828, 0.018750751495361328, 0.018668479919433593, 0.01874412727355957, 0.01855177688598633, 0.01859174346923828, 0.018515968322753908, 0.018687999725341797, 0.01855392074584961, 0.01862486457824707, 0.01857583999633789, 0.018495168685913086, 0.019012031555175782, 0.01875721549987793, 0.018750080108642576, 0.01860748863220215, 
0.018743616104125976, 0.018785823822021486, 0.01857587242126465, 0.018563135147094727, 0.018540544509887694, 0.018508960723876953, 0.018576223373413085, 0.01840336036682129, 0.01868796730041504, 0.018593791961669923, 0.018761728286743166, 0.018581504821777343, 0.01856822395324707, 0.018592607498168944, 0.018624639511108397, 0.019333023071289063, 0.019001440048217775, 0.01857289505004883, 0.01862451171875, 0.018519744873046876, 0.018596576690673827, 0.018724319458007812, 0.018741792678833007, 0.018581504821777343, 0.018556032180786133, 0.018656320571899414, 0.01871648025512695, 0.018599199295043944, 0.01870921516418457, 0.01934777641296387, 0.019511295318603517, 0.018804000854492187, 0.01903696060180664, 0.01860326385498047, 0.01855753517150879, 0.01850377655029297, 0.018673023223876952, 0.018454336166381837, 0.018518688201904297, 0.018370464324951173, 0.018424064636230468, 0.019087135314941408, 0.018437536239624023, 0.0184835205078125, 0.018542272567749023, 0.01856390380859375, 0.01908121681213379, 0.018759679794311524, 0.018601247787475586, 0.018504640579223634, 0.01861404800415039, 0.018467967987060546, 0.01861311912536621, 0.01846249580383301, 0.018489728927612303, 0.01892099189758301, 0.02113158416748047, 0.019216480255126952, 0.018659231185913085, 0.01881497573852539, 0.018654815673828123, 0.01852047920227051, 0.01856716728210449, 0.018743040084838868, 0.018446592330932616, 0.01853990364074707, 0.018706111907958983, 0.018494367599487305, 0.01850992012023926, 0.01857529640197754, 0.018536447525024414, 0.018513919830322266, 0.018552831649780274, 0.01864019203186035, 0.018651679992675783, 0.02152668762207031, 0.02028544044494629, 0.018941247940063476, 0.018881216049194335, 0.019005599975585936, 0.018933599472045898, 0.018798688888549804, 0.018757055282592774, 0.018670047760009766, 0.018700288772583007, 0.018530303955078126, 0.0185262393951416, 0.018492511749267578, 0.018540992736816406, 0.018549184799194336, 0.01860966491699219, 0.01849497604370117, 0.01924892807006836, 0.018890975952148437, 0.018610176086425782, 0.018780160903930664, 0.018858272552490233, 0.01874118423461914, 0.018847007751464844, 0.018673343658447264, 0.018680639266967773, 0.018569215774536133, 0.018595327377319337, 0.018604448318481445, 0.01855523109436035, 0.018660512924194336, 0.018606271743774414, 0.018487615585327147, 0.018597984313964845, 0.018663423538208008, 0.018692096710205077, 0.018728960037231446, 0.01879225540161133, 0.018602176666259764, 0.01866089630126953, 0.018634687423706053, 0.01892745590209961, 0.0187706241607666, 0.018651136398315428, 0.018572383880615235, 0.018664384841918947, 0.018588768005371094, 0.018674560546875, 0.018953664779663086, 0.018682592391967772, 0.018637760162353516, 0.018582015991210937, 0.018686431884765625, 0.018709823608398436, 0.018585760116577147, 0.01907753562927246, 0.019185728073120117, 0.019152767181396486, 0.019488895416259765, 0.019587072372436523, 0.018925376892089844, 0.018868415832519532, 0.01868185615539551, 0.01898700714111328, 0.019318304061889648, 0.018483648300170897, 0.018542335510253905, 0.01851638412475586, 0.018583391189575197, 0.01880188751220703, 0.018541376113891603, 0.018503679275512695, 0.018499584197998048, 0.018636415481567383, 0.01881110382080078, 0.01894169616699219, 0.018622880935668946, 0.01902124786376953, 0.01898726463317871, 0.01899139213562012]",tokens/s,53.45032205738647,,, 
4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4287.905792,5792.202752,0.0,5389.68064,5000.446464,s,1,11.2608076171875,11.2608076171875,0.0,11.2608076171875,11.2608076171875,11.2608076171875,11.2608076171875,[11.2608076171875],,kWh,0.00012276578238749303,1.3534278995565779e-05,5.220309731795991e-05,0.0001885031587010187,,MB,1428.078592,5811.07712,0.0,5393.874944,4700.829696,s,10,30.44378271484375,3.044378271484375,0.0033976444752046946,3.0446483154296873,3.048840161132812,3.048877844238281,3.048907990722656,"[3.037720947265625, 3.041265380859375, 3.0423154296875, 3.0420888671875, 3.045289794921875, 3.0440068359375, 3.046708740234375, 3.046639404296875, 3.048831787109375, 3.04891552734375]",tokens/s,84.08941897853573,kWh,8.892191836791879e-05,9.808009071350247e-06,5.898618607779738e-05,0.00015771611351706643,tokens/kWh,1623169.5943502837,MB,1437.523968,5811.07712,0.0,5393.874944,4877.445632,s,10,16.412862670898438,1.6412862670898438,0.0029411297917484145,1.6408640136718748,1.645602099609375,1.6462620483398436,1.6467900073242188,"[1.6367591552734375, 1.639912353515625, 1.6393636474609374, 1.63993505859375, 1.64179296875, 1.638490234375, 1.6454554443359375, 1.6469219970703124, 1.6421558837890624, 1.642075927734375]",tokens/s,38.384528807217144,kWh,4.810366867834094e-05,5.30607144231777e-06,3.2238525790806434e-05,8.564826591146512e-05,tokens/kWh,735566.5561883447,,s,630,16.406602455139158,0.026042226119268503,0.0004061082425303648,0.02602984046936035,0.026376594734191895,0.026492055892944337,0.02751712015151978,"[0.027197439193725585, 0.02629417610168457, 0.02578646469116211, 0.02553856086730957, 0.02541779136657715, 0.025558752059936525, 0.025540576934814454, 0.025577728271484374, 0.025515647888183595, 0.025563520431518556, 0.025618431091308593, 0.02566921615600586, 0.025538976669311524, 0.025569280624389647, 0.02572208023071289, 0.025614912033081055, 0.02568191909790039, 0.025847679138183595, 0.02592323112487793, 0.025790815353393556, 0.025786720275878906, 0.02608332824707031, 0.026039552688598634, 0.02595916748046875, 0.025790464401245116, 0.02583247947692871, 0.025954368591308594, 0.025977312088012697, 0.02595680046081543, 0.025834815979003906, 0.025749439239501952, 0.025841663360595703, 0.025903871536254883, 0.0261079044342041, 0.026175487518310548, 0.026189823150634766, 0.026134143829345702, 0.02608371162414551, 0.026232831954956053, 0.026241024017333983, 0.026243072509765625, 0.02614681625366211, 0.025980928421020507, 0.026002431869506838, 0.026055295944213866, 0.026089855194091797, 0.026408960342407226, 0.026119487762451172, 0.02608777618408203, 0.026242528915405273, 0.02623695945739746, 0.026153823852539063, 0.02614672088623047, 0.02612633514404297, 0.026039520263671876, 0.026063743591308593, 0.026001407623291017, 0.02602128028869629, 0.026064544677734374, 0.02621900749206543, 0.02625779151916504, 0.02643142318725586, 
0.02648281669616699, 0.02769068717956543, 0.02670867156982422, 0.02610540771484375, 0.025905055999755858, 0.025624927520751954, 0.02557542419433594, 0.025547935485839845, 0.025471839904785156, 0.025607263565063477, 0.02559056091308594, 0.025602176666259767, 0.025597728729248048, 0.025521856307983398, 0.02592758369445801, 0.025885311126708985, 0.02575103950500488, 0.02597324752807617, 0.02609132766723633, 0.02600774383544922, 0.02593791961669922, 0.025790464401245116, 0.02571820831298828, 0.025667423248291014, 0.025675840377807617, 0.025856672286987306, 0.02577315139770508, 0.025828256607055664, 0.02593769645690918, 0.025933088302612303, 0.02584262466430664, 0.025976383209228515, 0.026140960693359375, 0.026227935791015625, 0.026024511337280273, 0.02603251266479492, 0.02624278450012207, 0.026211711883544923, 0.026123136520385743, 0.02612838363647461, 0.02613657569885254, 0.02629430389404297, 0.026405920028686525, 0.026198688507080077, 0.026071359634399414, 0.026052480697631837, 0.026089599609375, 0.02588467216491699, 0.02585795211791992, 0.02611737632751465, 0.026215072631835937, 0.026031520843505858, 0.026040576934814454, 0.026121824264526368, 0.026141120910644532, 0.026126848220825196, 0.026243072509765625, 0.02630451202392578, 0.02643561553955078, 0.026306528091430664, 0.026376192092895507, 0.026314687728881837, 0.026221696853637694, 0.026376575469970704, 0.027344863891601564, 0.026372095108032227, 0.026025983810424806, 0.025735071182250976, 0.02555913543701172, 0.025550048828125, 0.025490144729614257, 0.02557139205932617, 0.025686016082763673, 0.025742559432983397, 0.025714719772338867, 0.025797376632690428, 0.025773696899414063, 0.025690496444702147, 0.02572287940979004, 0.02571468734741211, 0.025821184158325194, 0.025745407104492187, 0.02568726348876953, 0.025824031829833984, 0.025753599166870117, 0.025903104782104492, 0.025843711853027345, 0.025903039932250977, 0.02582476806640625, 0.026004032135009767, 0.02605411148071289, 0.026069536209106445, 0.02590230369567871, 0.02574620819091797, 0.025902751922607423, 0.026016096115112304, 0.026028032302856444, 0.026050527572631835, 0.025964576721191405, 0.025955488204956054, 0.026177440643310547, 0.02623174476623535, 0.026226688385009765, 0.026371200561523436, 0.02618662452697754, 0.026078399658203126, 0.025908000946044923, 0.025933855056762694, 0.026017120361328125, 0.026116767883300782, 0.026220256805419923, 0.026698015213012696, 0.025995264053344725, 0.0261014404296875, 0.026228416442871095, 0.02616755294799805, 0.02603455924987793, 0.026004800796508788, 0.02601795196533203, 0.026038047790527343, 0.026252031326293945, 0.026306560516357422, 0.026382335662841795, 0.02635775947570801, 0.026183551788330078, 0.026349279403686525, 0.026372095108032227, 0.02756185531616211, 0.026678720474243165, 0.026004127502441406, 0.02565273666381836, 0.025563232421875, 0.025502016067504883, 0.025491552352905275, 0.025552223205566407, 0.02557200050354004, 0.02554265594482422, 0.02550579261779785, 0.025626623153686523, 0.025640352249145508, 0.02586684799194336, 0.02575155258178711, 0.02571878433227539, 0.025634143829345705, 0.02584752082824707, 0.025836256027221678, 0.02573334312438965, 0.02569215965270996, 0.026117599487304688, 0.025811487197875977, 0.02594611167907715, 0.026046464920043946, 0.026232351303100587, 0.026234848022460937, 0.026046335220336916, 0.02596486473083496, 0.025983295440673827, 0.02589481544494629, 0.025983072280883788, 0.026021888732910156, 0.02595840072631836, 0.02590924835205078, 0.026101760864257813, 0.026207424163818358, 
0.026227359771728517, 0.027144287109375, 0.026167360305786133, 0.02612633514404297, 0.02614681625366211, 0.026058752059936522, 0.02626918411254883, 0.02625315284729004, 0.026476863861083985, 0.02617788887023926, 0.026009599685668947, 0.025951391220092775, 0.02599203109741211, 0.02597887992858887, 0.02612633514404297, 0.026117984771728515, 0.026074880599975585, 0.025940383911132812, 0.026163200378417968, 0.026126176834106445, 0.026093727111816407, 0.02609766387939453, 0.026100799560546874, 0.026059711456298828, 0.026462080001831055, 0.026744512557983397, 0.027185440063476562, 0.026334367752075195, 0.02580339241027832, 0.025577695846557617, 0.025591583251953126, 0.02552239990234375, 0.025464832305908205, 0.02546073532104492, 0.02550169563293457, 0.02571660804748535, 0.025583744049072266, 0.025644800186157227, 0.025761983871459962, 0.025753664016723632, 0.025847648620605467, 0.02602979278564453, 0.02607961654663086, 0.02614236831665039, 0.025992927551269532, 0.02599135971069336, 0.025975008010864258, 0.025947456359863282, 0.02577916717529297, 0.025790304183959962, 0.025792671203613282, 0.02570240020751953, 0.025819135665893556, 0.025812992095947264, 0.025886720657348632, 0.026030080795288086, 0.026222591400146485, 0.0260928955078125, 0.02597657585144043, 0.026135456085205077, 0.026220544815063477, 0.026212064743041993, 0.02622697639465332, 0.02626937675476074, 0.026380096435546875, 0.026259967803955078, 0.02608742332458496, 0.025955711364746094, 0.025891456604003906, 0.025939968109130858, 0.026093439102172853, 0.026171520233154298, 0.02617158317565918, 0.02607244873046875, 0.025993600845336914, 0.025894720077514647, 0.026112255096435548, 0.026245119094848633, 0.026222591400146485, 0.026256576538085937, 0.026312992095947264, 0.026485279083251954, 0.026568479537963867, 0.026516799926757813, 0.02652662467956543, 0.02661123275756836, 0.02665519905090332, 0.026474496841430665, 0.026164512634277343, 0.02746188735961914, 0.026330783843994142, 0.025821535110473633, 0.02559779167175293, 0.025536672592163086, 0.02543177604675293, 0.025633056640625, 0.025671680450439452, 0.02555084800720215, 0.02548905563354492, 0.025540960311889647, 0.02563043212890625, 0.025636415481567382, 0.025671808242797852, 0.02579871940612793, 0.025734912872314452, 0.0256878719329834, 0.025714975357055664, 0.025831872940063477, 0.025848064422607422, 0.0257126407623291, 0.02583328056335449, 0.02584921646118164, 0.025777055740356446, 0.025855903625488282, 0.02586969566345215, 0.0258156795501709, 0.025792512893676758, 0.02595587158203125, 0.025950687408447266, 0.02589286422729492, 0.025944063186645508, 0.02615500831604004, 0.026169343948364256, 0.026096639633178712, 0.026192895889282225, 0.02618284797668457, 0.025983808517456054, 0.025990367889404297, 0.02610652732849121, 0.02607663917541504, 0.02592630386352539, 0.02606879997253418, 0.02622073554992676, 0.02611155128479004, 0.02609811210632324, 0.0259583683013916, 0.02594000053405762, 0.026158975601196288, 0.02602988815307617, 0.026146656036376954, 0.02612633514404297, 0.02612681579589844, 0.026066591262817383, 0.025936223983764647, 0.02612019157409668, 0.026163200378417968, 0.02618320083618164, 0.026161151885986327, 0.026476640701293946, 0.02644620704650879, 0.02675916862487793, 0.02753324890136719, 0.027477632522583006, 0.026433855056762694, 0.026006656646728514, 0.02573321533203125, 0.02557219123840332, 0.02556470489501953, 0.025594335556030273, 0.02549295997619629, 0.025704639434814453, 0.025751903533935548, 0.025793792724609375, 0.025775999069213868, 0.025756479263305664, 
0.025812639236450195, 0.02570863914489746, 0.025791872024536134, 0.025985984802246093, 0.02923520088195801, 0.025638912200927736, 0.02554265594482422, 0.025741119384765625, 0.025733312606811522, 0.02576896095275879, 0.02571571159362793, 0.025647104263305662, 0.026019840240478515, 0.026062847137451172, 0.026015743255615235, 0.02594758415222168, 0.026148448944091796, 0.02622972869873047, 0.02609766387939453, 0.026034175872802736, 0.026050559997558592, 0.026040319442749024, 0.025955615997314455, 0.02649776077270508, 0.02654617691040039, 0.026310144424438478, 0.02606540870666504, 0.026028032302856444, 0.025919551849365233, 0.025990720748901366, 0.026210687637329103, 0.02613248062133789, 0.026048511505126954, 0.02606208038330078, 0.026071807861328126, 0.026001407623291017, 0.026058752059936522, 0.026251264572143555, 0.026183679580688478, 0.026119199752807618, 0.026376768112182616, 0.02639094352722168, 0.026265151977539064, 0.026890687942504883, 0.02778726387023926, 0.026179584503173828, 0.026167295455932618, 0.02612620735168457, 0.026126304626464845, 0.02620841598510742, 0.027289600372314454, 0.02615052795410156, 0.025575328826904296, 0.02548784065246582, 0.025421823501586914, 0.026464256286621093, 0.029837312698364257, 0.02533580780029297, 0.02548940849304199, 0.025769760131835937, 0.025684192657470704, 0.025591808319091795, 0.025640960693359374, 0.0257392635345459, 0.02568191909790039, 0.025649152755737304, 0.02571673583984375, 0.025976831436157227, 0.025887840270996092, 0.025822111129760742, 0.025854976654052734, 0.025861120223999022, 0.025786367416381836, 0.02589004707336426, 0.026053375244140625, 0.02612428855895996, 0.026103391647338867, 0.02603868865966797, 0.026124191284179688, 0.026294368743896485, 0.026113632202148438, 0.0259172477722168, 0.025902719497680665, 0.0261375675201416, 0.026315967559814454, 0.02908857536315918, 0.025999359130859375, 0.025984512329101563, 0.026116544723510743, 0.026095680236816406, 0.02610902404785156, 0.026065824508666992, 0.026060800552368164, 0.026023584365844725, 0.026028383255004884, 0.026003456115722655, 0.02624835205078125, 0.026291040420532225, 0.02615603256225586, 0.026024959564208985, 0.026125440597534178, 0.026139520645141603, 0.026265600204467773, 0.026359424591064454, 0.026263423919677734, 0.026302335739135742, 0.026282047271728514, 0.026268224716186523, 0.026171392440795898, 0.026211360931396484, 0.026461151123046874, 0.026294048309326173, 0.026451711654663087, 0.02730918312072754, 0.026170015335083008, 0.02582963180541992, 0.02570649528503418, 0.025550559997558595, 0.02547920036315918, 0.02565760040283203, 0.025628671646118165, 0.02555084800720215, 0.025525888442993163, 0.02563315200805664, 0.025792512893676758, 0.025771039962768555, 0.025879520416259766, 0.025872255325317384, 0.025723007202148436, 0.025838720321655274, 0.025901439666748047, 0.025880863189697265, 0.025907360076904296, 0.025988672256469728, 0.025985536575317384, 0.025876352310180664, 0.025849727630615233, 0.02605286407470703, 0.026267648696899414, 0.02613248062133789, 0.025992704391479493, 0.025976415634155273, 0.025981855392456055, 0.025957727432250978, 0.026128704071044923, 0.026178943634033204, 0.026200511932373046, 0.02620675277709961, 0.026089471817016603, 0.02611199951171875, 0.026077184677124023, 0.026204063415527345, 0.026525632858276367, 0.026386304855346678, 0.026333471298217774, 0.026413055419921876, 0.026182912826538087, 0.026040191650390624, 0.026080127716064452, 0.02608527946472168, 0.026017887115478516, 0.026213951110839843, 0.026274240493774415, 
0.02632499122619629, 0.026230783462524415, 0.026169151306152345, 0.026152416229248045, 0.02642812728881836, 0.026404863357543946, 0.026406911849975585, 0.02625939178466797, 0.02607315254211426, 0.026089471817016603, 0.02609971237182617, 0.02624211120605469, 0.0263887996673584, 0.02729167938232422, 0.026483840942382812, 0.02591811180114746, 0.025702720642089845, 0.025608064651489258, 0.02551206398010254, 0.025378591537475587, 0.025575008392333985, 0.025655935287475586, 0.02571468734741211, 0.025563072204589844, 0.02565331268310547, 0.025687679290771485, 0.025628543853759764, 0.025557024002075195, 0.025807199478149415, 0.02593721580505371, 0.02579948806762695, 0.025886463165283202, 0.026004959106445312, 0.02597763252258301, 0.026042367935180662, 0.025887840270996092, 0.02589993667602539, 0.025800703048706054, 0.026077184677124023, 0.02609286308288574, 0.02611404800415039, 0.026118207931518554, 0.026136896133422852, 0.026277984619140625, 0.026097888946533202, 0.025968639373779297, 0.02599081611633301, 0.026022239685058592, 0.02632499122619629, 0.026396095275878908, 0.02649760055541992, 0.026433439254760743, 0.02658108711242676, 0.026467519760131834, 0.026311487197875977, 0.025997312545776367, 0.025945280075073244, 0.0259400634765625, 0.026141408920288087, 0.0262073917388916, 0.02617430305480957, 0.025989120483398437, 0.02600048065185547, 0.02605558395385742, 0.026050464630126953, 0.026117471694946288, 0.02628223991394043, 0.026056543350219726, 0.026063520431518553, 0.026197919845581053, 0.02633907127380371, 0.0264718074798584, 0.026311199188232423, 0.026429887771606445, 0.026253311157226563, 0.02620128059387207]",tokens/s,38.399175071293364,,, 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,2202.902528,2199.781376,0.0,1814.03648,1724.34432,s,1,8.8815517578125,8.8815517578125,0.0,8.8815517578125,8.8815517578125,8.8815517578125,8.8815517578125,[8.8815517578125],,kWh,5.000285015834531e-05,5.5082650203559995e-06,1.723168045197898e-05,7.274279563068028e-05,,MB,2290.577408,2344.484864,0.0,1929.37984,1887.2832,s,10,1.7273880157470707,0.17273880157470703,0.0009003003879789801,0.17271497344970704,0.17398826599121095,0.1739950637817383,0.17400050201416017,"[0.1730663604736328, 0.17141909790039062, 0.1713570556640625, 0.17201394653320312, 0.17269465637207032, 0.17353884887695312, 0.17273529052734374, 0.17400186157226563, 0.17257414245605468, 0.17398675537109376]",tokens/s,1482.0063452233903,kWh,5.223488936457935e-06,5.760622240817699e-07,3.4859898919640294e-06,9.285541052503734e-06,tokens/kWh,27569745.10720328,MB,2297.675776,2493.382656,0.0,2076.18048,1946.880512,s,10,21.76394873046875,2.176394873046875,0.005036307455233611,2.1768383789062504,2.1819935302734375,2.1836013793945312,2.184887658691406,"[2.168451171875, 2.1702099609375, 2.174705810546875, 2.171670166015625, 2.1775966796875, 2.180792724609375, 2.185209228515625, 
2.17621826171875, 2.17745849609375, 2.18163623046875]",tokens/s,28.946952954268934,kWh,6.266338690646099e-05,6.9116231360311785e-06,3.217264974603639e-05,0.00010174765978852855,tokens/kWh,619178.8600439426,,s,630,21.76030527877807,0.03454016710917155,0.0005898961425316224,0.03442820739746094,0.03486401138305664,0.03515938243865967,0.036909745674133305,"[0.034625537872314455, 0.03440367889404297, 0.03438998413085938, 0.03461190414428711, 0.03440412902832031, 0.03456227111816406, 0.03486310577392578, 0.03448012924194336, 0.034783199310302736, 0.03438595199584961, 0.03454729461669922, 0.03447849655151367, 0.03449798583984375, 0.03424275207519531, 0.03438230514526367, 0.03423017501831055, 0.03419919967651367, 0.034748767852783205, 0.034320384979248046, 0.03477641677856445, 0.034406848907470707, 0.034255073547363284, 0.034099391937255856, 0.03418502426147461, 0.034449665069580075, 0.034229854583740234, 0.0342193603515625, 0.03435561752319336, 0.03435561752319336, 0.03456607818603516, 0.03431635284423828, 0.03408281707763672, 0.034146305084228515, 0.03434905624389648, 0.03435756683349609, 0.034479808807373044, 0.03429983901977539, 0.03457427215576172, 0.034242111206054686, 0.03441094589233398, 0.03446563339233399, 0.034576576232910154, 0.03445542526245117, 0.034300159454345704, 0.03411964797973633, 0.03416454315185547, 0.03435939025878906, 0.03425494384765625, 0.03503094482421875, 0.0343197135925293, 0.03418348693847656, 0.034388065338134766, 0.03418483352661133, 0.034296703338623044, 0.035733184814453124, 0.0343287353515625, 0.03438729476928711, 0.03428752136230469, 0.03412249755859375, 0.0343383674621582, 0.03471200180053711, 0.034310081481933596, 0.034510814666748046, 0.034503231048583986, 0.03463372802734375, 0.034469375610351564, 0.03527503967285156, 0.03429532623291016, 0.03422019195556641, 0.03418576049804688, 0.034827392578125, 0.03457839965820313, 0.03416361618041992, 0.034178241729736325, 0.0341401596069336, 0.03431302261352539, 0.034086719512939456, 0.03448767852783203, 0.03693856048583984, 0.034532638549804685, 0.03433785629272461, 0.034560928344726564, 0.03419622421264648, 0.034416065216064454, 0.0342042236328125, 0.034055328369140624, 0.03422499084472656, 0.03431808090209961, 0.034199806213378904, 0.03459008026123047, 0.03487590408325195, 0.03429983901977539, 0.03422022247314453, 0.03453952026367187, 0.034320480346679685, 0.034273086547851564, 0.03410528182983399, 0.03420703887939453, 0.03413708877563477, 0.034498401641845707, 0.03424371337890625, 0.03451327896118164, 0.03769375991821289, 0.03425900650024414, 0.03430003356933594, 0.03437100982666016, 0.0342042236328125, 0.03431983947753906, 0.034179294586181644, 0.03406217575073242, 0.03427990341186524, 0.034326560974121095, 0.03406230545043945, 0.03442892837524414, 0.03417292785644531, 0.03408406448364258, 0.034321182250976565, 0.034179073333740234, 0.03496499252319336, 0.034771457672119144, 0.03426713562011719, 0.03409920120239258, 0.03437955093383789, 0.03448342514038086, 0.03431913757324219, 0.034189407348632815, 0.035573760986328126, 0.03456063842773437, 0.03457440185546875, 0.034369537353515625, 0.03422822570800781, 0.03435520172119141, 0.03446169662475586, 0.034148353576660156, 0.03423766326904297, 0.03420240020751953, 0.03412924957275391, 0.03397903823852539, 0.034118942260742184, 0.03405692672729492, 0.03562496185302735, 0.03449651336669922, 0.03438387298583984, 0.03487881469726563, 0.034732288360595706, 0.03480614471435547, 0.034504161834716794, 0.03439059066772461, 0.03432243347167969, 0.03446278381347656, 
0.0342327995300293, 0.034173343658447264, 0.03433478546142578, 0.03431443023681641, 0.034872161865234376, 0.03457942581176758, 0.03438908767700195, 0.03410217666625977, 0.034141536712646484, 0.03419321441650391, 0.03420783996582031, 0.03441337585449219, 0.034480159759521484, 0.034483486175537106, 0.034449440002441406, 0.03432860946655274, 0.03448070526123047, 0.03437158584594727, 0.03444736099243164, 0.034359294891357424, 0.03508838272094727, 0.034920448303222655, 0.03480575942993164, 0.034609153747558595, 0.03469107055664063, 0.03451651382446289, 0.03444793701171875, 0.03450815963745117, 0.034510593414306644, 0.03443996810913086, 0.03461939239501953, 0.03450873565673828, 0.034480064392089844, 0.03487551879882812, 0.03425689697265625, 0.03505279922485351, 0.03546780776977539, 0.03447190475463867, 0.03477529525756836, 0.0344725456237793, 0.03434700775146484, 0.03442665481567383, 0.03432265472412109, 0.03422412872314453, 0.03432447814941406, 0.03440614318847656, 0.03450505447387695, 0.03434451293945313, 0.03426339340209961, 0.03459628677368164, 0.03456908798217773, 0.034438846588134765, 0.03441049575805664, 0.03438172912597656, 0.03417712020874023, 0.03438358306884766, 0.034218273162841796, 0.03476601409912109, 0.03404864120483399, 0.03427552032470703, 0.03421830368041992, 0.03419472122192383, 0.034201759338378906, 0.03478144073486328, 0.034252799987792966, 0.034384128570556644, 0.034369281768798825, 0.0341954231262207, 0.03424361419677734, 0.034348033905029295, 0.03428470230102539, 0.03419846343994141, 0.034281185150146484, 0.034144161224365234, 0.03478883361816406, 0.03508102416992188, 0.034328575134277346, 0.03449161529541016, 0.0349007682800293, 0.03461529541015625, 0.03455920028686523, 0.034533409118652346, 0.03427171325683594, 0.034352928161621096, 0.034292224884033204, 0.034329761505126954, 0.034936702728271485, 0.034597854614257816, 0.034303966522216796, 0.03446112060546875, 0.03439471817016602, 0.03465001678466797, 0.034240062713623044, 0.034539295196533204, 0.034267200469970706, 0.034452159881591796, 0.03463388824462891, 0.0343355827331543, 0.035722145080566405, 0.03635516738891602, 0.03447891235351563, 0.03443040084838867, 0.03453952026367187, 0.03487334442138672, 0.036018177032470705, 0.03462758255004883, 0.0344637451171875, 0.03445673751831055, 0.03451580810546875, 0.03453747177124023, 0.03484572982788086, 0.03547132873535156, 0.034202625274658206, 0.03450265502929688, 0.03460847854614258, 0.034405025482177734, 0.0345371208190918, 0.03430332946777344, 0.0341739501953125, 0.034334720611572264, 0.035108863830566404, 0.03449856185913086, 0.03459625625610351, 0.03442748641967774, 0.034420352935791015, 0.034597248077392576, 0.03482230377197266, 0.03464585494995117, 0.034781185150146485, 0.034602558135986325, 0.03460140609741211, 0.03455491256713867, 0.03434972763061524, 0.0344089584350586, 0.03470761489868164, 0.03459775924682617, 0.034382625579833986, 0.03441788864135742, 0.03453734588623047, 0.034740798950195315, 0.03434121704101562, 0.034369537353515625, 0.03401036834716797, 0.034510593414306644, 0.034368095397949217, 0.034285919189453125, 0.03459686279296875, 0.03452083206176758, 0.03447145462036133, 0.03449628829956055, 0.03450479888916016, 0.0342902717590332, 0.03442131042480469, 0.034402046203613285, 0.03425680160522461, 0.034525630950927734, 0.03484627151489258, 0.03490435028076172, 0.03472115325927734, 0.034544063568115235, 0.03458854293823242, 0.03451264190673828, 0.03441907119750977, 0.03439616012573242, 0.03476287841796875, 0.03466896057128906, 0.03440639877319336, 
0.03433430480957031, 0.03421014404296875, 0.03554707336425781, 0.03432419204711914, 0.03448873519897461, 0.034162784576416014, 0.03433052825927734, 0.03432236862182617, 0.0342119026184082, 0.03448627090454102, 0.03432447814941406, 0.0342171516418457, 0.03514041519165039, 0.03584534454345703, 0.03470006561279297, 0.034456672668457033, 0.03515177536010742, 0.03440137481689453, 0.034522430419921875, 0.03437836837768555, 0.03426095962524414, 0.03461324691772461, 0.03464191818237305, 0.03425436782836914, 0.03439440155029297, 0.0346049919128418, 0.034274784088134766, 0.034291648864746095, 0.03439276885986328, 0.034166942596435546, 0.03433881759643555, 0.03556137466430664, 0.03506595230102539, 0.034493598937988285, 0.034902305603027345, 0.034320960998535155, 0.034541568756103515, 0.03473612976074219, 0.03711372756958008, 0.03452851104736328, 0.034546558380126956, 0.034119327545166014, 0.034681182861328125, 0.03462473678588867, 0.0343375358581543, 0.03465219116210937, 0.034524223327636716, 0.0343513298034668, 0.034352928161621096, 0.03434182357788086, 0.03459481430053711, 0.03590553665161133, 0.03482828903198242, 0.03444736099243164, 0.03451084899902344, 0.034688190460205076, 0.034491104125976564, 0.034611297607421876, 0.03444326400756836, 0.0342891845703125, 0.03500064086914063, 0.03465529632568359, 0.03463180923461914, 0.034691871643066405, 0.03445139312744141, 0.034739486694335936, 0.035467041015625, 0.034872318267822264, 0.03439606475830078, 0.03445907211303711, 0.034390785217285155, 0.034324382781982424, 0.034463550567626955, 0.03431852722167969, 0.03443711853027344, 0.03467468643188477, 0.03470870590209961, 0.03683919906616211, 0.043655776977539064, 0.03481753540039063, 0.034890655517578126, 0.035159713745117185, 0.03427110290527344, 0.0342061767578125, 0.03427328109741211, 0.034315296173095707, 0.03431244659423828, 0.03427401733398437, 0.03421993637084961, 0.034221824645996095, 0.03421430587768555, 0.03425830459594727, 0.034834369659423825, 0.03438451385498047, 0.03427532958984375, 0.03438617706298828, 0.03463296127319336, 0.03461171340942383, 0.03436502456665039, 0.03447465515136719, 0.034393856048583984, 0.03419340896606445, 0.03429894256591797, 0.03465516662597656, 0.03450262451171875, 0.034635807037353514, 0.0348034553527832, 0.03427353668212891, 0.0343408317565918, 0.03444022369384766, 0.03428195190429688, 0.03436102294921875, 0.034256832122802734, 0.03448665618896484, 0.03433526229858398, 0.03462076950073242, 0.034372257232666015, 0.03442393493652344, 0.034380542755126954, 0.034328575134277346, 0.03449459075927734, 0.03477913665771484, 0.034525184631347655, 0.03508428955078125, 0.034450016021728515, 0.03434064102172851, 0.034310367584228514, 0.0347586555480957, 0.034334495544433595, 0.03472771072387695, 0.03428192138671875, 0.03458867263793945, 0.03527212905883789, 0.037517055511474606, 0.0345032958984375, 0.03457964706420898, 0.034423774719238284, 0.03440438461303711, 0.03429171371459961, 0.03412966537475586, 0.03422233581542969, 0.03432838439941406, 0.03432467269897461, 0.03461324691772461, 0.03437807846069336, 0.03429750442504883, 0.034377727508544925, 0.03437929534912109, 0.0345272331237793, 0.03454819107055664, 0.0344453125, 0.03450912094116211, 0.03426249694824219, 0.03440054321289063, 0.035573951721191405, 0.034303264617919924, 0.034613632202148435, 0.034705631256103514, 0.034318111419677735, 0.03425436782836914, 0.03502774429321289, 0.034667678833007816, 0.03451334381103516, 0.034353343963623044, 0.034480289459228514, 0.03443228912353516, 0.03444009780883789, 
0.034284641265869144, 0.034339393615722656, 0.034367488861083983, 0.0343851203918457, 0.03446681594848633, 0.03468783950805664, 0.034193534851074216, 0.03464064025878906, 0.034439231872558596, 0.03471155166625976, 0.03463577651977539, 0.03473779296875, 0.034251136779785155, 0.03448188781738281, 0.034283519744873044, 0.034234657287597656, 0.03429100799560547, 0.03445366287231445, 0.03519071960449219, 0.03462649536132813, 0.03498025512695312, 0.03783814239501953, 0.03452572631835937, 0.03446121597290039, 0.03421446228027344, 0.03434735870361328, 0.034694625854492185, 0.034266910552978515, 0.0393449592590332, 0.034429214477539063, 0.03445657730102539, 0.034444225311279296, 0.03439791870117188, 0.03431747055053711, 0.03449532699584961, 0.03432473754882812, 0.034315391540527346, 0.035709697723388674, 0.03436876678466797, 0.034156383514404295, 0.03430281448364258, 0.03421177673339844, 0.034136062622070314, 0.034459648132324217, 0.03436544036865234, 0.03421120071411133, 0.03413283157348633, 0.034195232391357425, 0.03485257720947266, 0.034439456939697265, 0.03437321472167969, 0.03443689727783203, 0.03438655853271484, 0.03417679977416992, 0.034130142211914065, 0.034285087585449216, 0.03426886367797852, 0.034379615783691406, 0.034352062225341796, 0.034375679016113284, 0.03452687835693359, 0.0345450553894043, 0.03491881561279297, 0.03444575881958008, 0.03457443237304687, 0.03437126541137695, 0.0342305908203125, 0.0343633918762207, 0.034420127868652346, 0.034258625030517575, 0.034447425842285155, 0.034358112335205075, 0.03454771041870117, 0.03437884902954102, 0.03434998321533203, 0.034233345031738284, 0.034313377380371095, 0.03433046340942383, 0.0344002571105957, 0.034598464965820315, 0.03495328140258789, 0.034406784057617185, 0.034587905883789065, 0.0345456657409668, 0.03448831939697266, 0.034592479705810544, 0.03456367874145508, 0.0344780158996582, 0.034990848541259764, 0.03459040069580078, 0.03437369537353516, 0.034346847534179686, 0.03445596694946289, 0.034440990447998046, 0.03462985610961914, 0.03454297637939453, 0.034679424285888674, 0.03455385589599609, 0.034848865509033204, 0.03439187240600586, 0.034318302154541014, 0.0345109748840332, 0.0343900146484375, 0.0344648323059082, 0.036576190948486326, 0.034334720611572264, 0.03461324691772461, 0.03458035278320312, 0.03445900726318359, 0.03414006423950195, 0.03461235046386719, 0.03419894409179688, 0.03421625518798828, 0.03451107025146485, 0.03456905746459961, 0.03515897750854492, 0.03529292678833008, 0.03466476821899414, 0.03440428924560547, 0.03458867263793945, 0.03429580688476563, 0.03476070404052734, 0.03443513488769531, 0.03433430480957031, 0.03447196960449219, 0.034603328704833985, 0.03451289749145508, 0.03453251266479492, 0.03427004623413086, 0.03422201538085937, 0.034383201599121095, 0.034607841491699216, 0.034523136138916014, 0.03533824157714844, 0.03657727813720703, 0.034969600677490234, 0.03494831848144531, 0.03464611053466797, 0.034319038391113284, 0.03463379287719726, 0.034275264739990235, 0.03422163009643555, 0.03455228805541992, 0.0346910400390625, 0.03470745468139649, 0.03533824157714844]",tokens/s,28.951799707259294,,, 
4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,881.569792,566.099968,0.0,163.577856,154.631168,s,1,7.6678095703125,7.6678095703125,0.0,7.6678095703125,7.6678095703125,7.6678095703125,7.6678095703125,[7.6678095703125],,kWh,1.527519982917814e-05,1.673568278789665e-06,4.761392697949507e-06,2.1710160805917312e-05,,MB,1407.0784,616.431616,0.0,199.22944,187.147776,s,25,0.20059964752197265,0.008023985900878905,7.840909448713876e-05,0.008007583618164062,0.008090898895263673,0.008122873306274413,0.008286211738586425,"[0.008054495811462403, 0.008124799728393554, 0.008039679527282715, 0.008115167617797852, 0.008034208297729491, 0.008041760444641114, 0.008037088394165039, 0.007990623950958252, 0.007955584049224853, 0.008034015655517578, 0.007959584236145019, 0.007987040042877197, 0.00802387237548828, 0.007985375881195068, 0.00799567985534668, 0.007961472034454346, 0.007992224216461181, 0.00794704008102417, 0.008026752471923828, 0.007926623821258545, 0.008007583618164062, 0.008337183952331543, 0.00801961612701416, 0.008000191688537597, 0.008001983642578125]",tokens/s,31904.34319830486,kWh,2.3543523528225843e-07,2.5964113169395376e-08,1.1232983896770417e-07,3.7372918741935793e-07,tokens/kWh,684987976.9030318,MB,1448.415232,618.528768,0.0,201.326592,187.150336,s,25,10.206649902343752,0.4082659960937501,0.0024047117052303823,0.40733929443359373,0.41147852783203126,0.41238172607421875,0.41396415771484374,"[0.41217059326171873, 0.4104404296875, 0.41444720458984374, 0.4091543273925781, 0.40965985107421876, 0.4081994323730469, 0.4097860717773438, 0.4097771301269531, 0.4063571472167969, 0.4064138793945313, 0.40695339965820315, 0.405554443359375, 0.4063354187011719, 0.41243450927734376, 0.40663848876953124, 0.40467529296875, 0.41038739013671877, 0.40558087158203127, 0.4067589111328125, 0.40682455444335935, 0.40591265869140625, 0.40675146484375, 0.40733929443359373, 0.4086716613769531, 0.40942547607421875]",tokens/s,154.31116135749232,kWh,1.175169886538695e-05,1.2959917527276692e-06,4.3302737151124e-06,1.7377964333227017e-05,tokens/kWh,3625280.7746614334,,s,1575,10.195003649711603,0.006473018190293085,0.00015417789668712757,0.006447360038757324,0.006530022430419922,0.0065962017059326165,0.006981290979385375,"[0.006505919933319092, 0.006544064044952393, 0.006686592102050781, 0.00671347188949585, 0.006696832180023193, 0.007092383861541748, 0.006759615898132324, 0.006568607807159424, 0.006544991970062256, 0.0065701441764831545, 0.006531551837921142, 0.006606624126434326, 0.006567584037780762, 0.006637152194976807, 0.006515456199645996, 0.006486303806304931, 0.006463263988494873, 0.006520224094390869, 0.006463103771209717, 0.00646454381942749, 0.006587903976440429, 0.006449535846710205, 0.006511616230010986, 0.006469888210296631, 0.00650108814239502, 0.006594816207885742, 0.00649513578414917, 0.006531968116760254, 0.006533247947692871, 0.006463295936584472, 
0.006504096031188965, 0.006478240013122558, 0.006488287925720215, 0.0065103678703308104, 0.0064776320457458495, 0.006498144149780273, 0.006499807834625244, 0.006484064102172851, 0.006531167984008789, 0.006451968193054199, 0.006490047931671143, 0.006510591983795166, 0.006491871833801269, 0.00648198413848877, 0.006490335941314697, 0.00647276782989502, 0.006458303928375244, 0.006475776195526123, 0.006498303890228272, 0.006499551773071289, 0.006480192184448242, 0.006484447956085205, 0.00656390380859375, 0.006483903884887695, 0.00655785608291626, 0.0064917120933532715, 0.0065435199737548825, 0.006522687911987304, 0.006613120079040528, 0.006505919933319092, 0.006494880199432373, 0.006563776016235352, 0.006541183948516846, 0.006458879947662354, 0.006486720085144043, 0.006487103939056396, 0.006531136035919189, 0.006450047969818115, 0.006496255874633789, 0.006473567962646485, 0.00650051212310791, 0.006533120155334473, 0.006475423812866211, 0.006483712196350098, 0.006475999832153321, 0.006447487831115722, 0.0064839677810668945, 0.006460671901702881, 0.006495999813079834, 0.006495232105255127, 0.006490335941314697, 0.00650435209274292, 0.006479008197784424, 0.00648095989227295, 0.0065, 0.006492032051086426, 0.006506624221801757, 0.006522208213806152, 0.006470304012298584, 0.006457344055175781, 0.006459392070770263, 0.006463263988494873, 0.006536416053771973, 0.006560351848602295, 0.006502816200256348, 0.006518784046173095, 0.006604800224304199, 0.0068211197853088375, 0.006574304103851319, 0.006531167984008789, 0.006559199810028076, 0.006658048152923584, 0.006505375862121582, 0.006502208232879639, 0.006495999813079834, 0.006496767997741699, 0.006522016048431396, 0.006496863842010498, 0.0064655041694641115, 0.006510879993438721, 0.006519999980926514, 0.006546239852905273, 0.0064572482109069825, 0.0064672322273254396, 0.006491871833801269, 0.00649289608001709, 0.006467584133148193, 0.00645468807220459, 0.0064306240081787105, 0.006584959983825684, 0.006443071842193604, 0.006486015796661377, 0.006641759872436524, 0.006489247798919678, 0.006472447872161865, 0.0066146559715271, 0.006400000095367431, 0.006464928150177002, 0.006531680107116699, 0.006469632148742676, 0.006493760108947754, 0.006457791805267334, 0.006492159843444824, 0.006495999813079834, 0.006477920055389405, 0.006493535995483399, 0.006454080104827881, 0.006457344055175781, 0.006490303993225098, 0.006436960220336914, 0.006460256099700927, 0.006514560222625732, 0.006488639831542969, 0.006490816116333007, 0.00654252815246582, 0.006496479988098144, 0.006529376029968261, 0.006477791786193848, 0.006508575916290283, 0.006543488025665283, 0.006478847980499268, 0.006452223777770996, 0.006584256172180176, 0.006851744174957275, 0.006887872219085694, 0.0072197761535644535, 0.007321375846862793, 0.0070371518135070804, 0.007351168155670166, 0.006782080173492432, 0.006858528137207031, 0.0066212158203125, 0.006547008037567139, 0.006799776077270508, 0.006836415767669678, 0.006526303768157959, 0.006511072158813477, 0.006453248023986816, 0.00653436803817749, 0.006464255809783936, 0.006468863964080811, 0.006548255920410156, 0.006477407932281494, 0.006474143981933593, 0.0064471039772033695, 0.0064778242111206055, 0.006478879928588867, 0.006455647945404053, 0.006521471977233887, 0.0064913277626037595, 0.006458367824554443, 0.006465343952178955, 0.006524928092956543, 0.006469632148742676, 0.006499616146087647, 0.006470592021942139, 0.006461152076721191, 0.006505663871765137, 0.0065032958984375, 0.006386975765228272, 0.006488831996917724, 0.006459392070770263, 
0.006510272026062012, 0.006473696231842041, 0.00659219217300415, 0.006529695987701416, 0.006474944114685058, 0.006480447769165039, 0.006525184154510498, 0.006481919765472412, 0.006500351905822754, 0.006481919765472412, 0.006496064186096192, 0.006504127979278565, 0.006451168060302734, 0.006503136157989502, 0.006498112201690674, 0.006426976203918457, 0.006514336109161377, 0.0064737281799316405, 0.006516736030578613, 0.006516511917114258, 0.006547743797302246, 0.006506144046783447, 0.006512928009033203, 0.0064542717933654785, 0.0065495362281799314, 0.006464064121246338, 0.006480512142181397, 0.006518303871154785, 0.006456607818603515, 0.006486303806304931, 0.006496960163116455, 0.006460671901702881, 0.006490880012512207, 0.006454304218292236, 0.006634463787078857, 0.006522111892700196, 0.006420959949493408, 0.006465439796447754, 0.006434304237365723, 0.006442080020904541, 0.006486144065856934, 0.0064512319564819336, 0.006453951835632324, 0.0065049281120300294, 0.0064592638015747075, 0.006464096069335937, 0.006475200176239013, 0.006492735862731934, 0.006586495876312256, 0.0064898238182067875, 0.0064759359359741215, 0.006520832061767578, 0.006440127849578857, 0.00647219181060791, 0.0064572482109069825, 0.0064618558883666995, 0.006500351905822754, 0.006451200008392334, 0.006500351905822754, 0.006465536117553711, 0.006355391979217529, 0.006455103874206543, 0.006424960136413574, 0.006452576160430908, 0.006450719833374023, 0.006443071842193604, 0.006478591918945313, 0.006481632232666016, 0.006426943778991699, 0.006493887901306152, 0.006454080104827881, 0.006477407932281494, 0.006469759941101074, 0.006611936092376709, 0.006485951900482178, 0.006457888126373291, 0.006486464023590088, 0.006551455974578857, 0.006447199821472168, 0.006463424205780029, 0.006682752132415771, 0.006475456237792969, 0.006478079795837402, 0.00648528003692627, 0.0064430079460144046, 0.006478367805480957, 0.006449344158172607, 0.006517983913421631, 0.00648035192489624, 0.0064617600440979, 0.006469632148742676, 0.006454432010650634, 0.006472544193267822, 0.00648422384262085, 0.0064652800559997555, 0.006467807769775391, 0.0064694080352783205, 0.0064182720184326175, 0.006467743873596192, 0.006448192119598389, 0.006443488121032715, 0.006466015815734864, 0.006467775821685791, 0.006495200157165528, 0.00646230411529541, 0.006490111827850342, 0.006523007869720459, 0.006479328155517578, 0.006504992008209229, 0.006531968116760254, 0.006480832099914551, 0.006481279850006103, 0.006932991981506348, 0.0065129599571228025, 0.006514143943786621, 0.006523359775543213, 0.006585408210754395, 0.00669817590713501, 0.006645088195800781, 0.006510303974151611, 0.006520480155944824, 0.006492640018463135, 0.006502912044525146, 0.0064232640266418456, 0.006476863861083985, 0.00648473596572876, 0.006543200016021728, 0.006430528163909912, 0.0065129919052124025, 0.006480192184448242, 0.006503456115722657, 0.00650716781616211, 0.006465536117553711, 0.006497632026672364, 0.006464159965515136, 0.006537087917327881, 0.006489247798919678, 0.006431712150573731, 0.006467584133148193, 0.006477503776550293, 0.0064412798881530765, 0.006449183940887451, 0.006471295833587646, 0.006451871871948242, 0.00659935998916626, 0.006493184089660644, 0.00653926420211792, 0.006516736030578613, 0.006469632148742676, 0.006492159843444824, 0.006442912101745605, 0.006457568168640137, 0.0064707517623901365, 0.006445536136627197, 0.0064513921737670895, 0.0064943361282348636, 0.006413440227508545, 0.0064850239753723145, 0.0065185918807983394, 0.006473440170288086, 0.0064650559425354, 
0.006449952125549317, 0.006458816051483155, 0.006467520236968994, 0.006459807872772217, 0.00648419189453125, 0.0064535999298095705, 0.006470304012298584, 0.006460192203521728, 0.0064327998161315915, 0.0064757437705993655, 0.006459551811218262, 0.006506591796875, 0.0064650559425354, 0.006436384201049804, 0.0064759039878845214, 0.006455967903137207, 0.00644927978515625, 0.006456960201263428, 0.006433152198791504, 0.006446112155914307, 0.006462175846099854, 0.006477215766906738, 0.006472544193267822, 0.0064204797744750975, 0.0064488320350646975, 0.006408192157745361, 0.006434048175811767, 0.006442848205566406, 0.006498752117156982, 0.0064308161735534665, 0.006449535846710205, 0.006457344055175781, 0.00646665620803833, 0.006457952022552491, 0.006451007843017578, 0.006463071823120117, 0.006429599761962891, 0.0064757437705993655, 0.006488351821899414, 0.006463232040405273, 0.006513951778411865, 0.006521632194519043, 0.00653436803817749, 0.006513472080230713, 0.0065913920402526855, 0.006614240169525146, 0.006553376197814941, 0.006500319957733154, 0.006791232109069824, 0.0069213762283325195, 0.00652780818939209, 0.006615039825439453, 0.006635519981384277, 0.006479872226715088, 0.006467584133148193, 0.006560991764068604, 0.006474080085754394, 0.006510560035705566, 0.006490592002868652, 0.006494207859039307, 0.006551551818847656, 0.006473184108734131, 0.006485536098480225, 0.006476319789886474, 0.006458111763000488, 0.006442719936370849, 0.006579967975616455, 0.006406400203704834, 0.006433119773864746, 0.006432415962219239, 0.0065948481559753415, 0.006613823890686035, 0.006393951892852783, 0.006439743995666504, 0.006430111885070801, 0.006426464080810547, 0.00649619197845459, 0.00641926383972168, 0.006418432235717773, 0.006474016189575195, 0.006442719936370849, 0.006488383769989014, 0.006497439861297608, 0.006451744079589844, 0.006469823837280273, 0.006438720226287842, 0.006450943946838379, 0.006471648216247559, 0.006352928161621094, 0.006451935768127442, 0.006911200046539306, 0.006429503917694092, 0.0064225921630859375, 0.006418367862701416, 0.006445055961608887, 0.00643071985244751, 0.006557695865631104, 0.0064245758056640625, 0.0064327998161315915, 0.00643068790435791, 0.006500063896179199, 0.006502175807952881, 0.006506559848785401, 0.0065185918807983394, 0.00644159984588623, 0.006524703979492188, 0.006425888061523438, 0.006468095779418945, 0.006461440086364746, 0.006566336154937744, 0.006477344036102295, 0.006517375946044922, 0.006483551979064942, 0.006476287841796875, 0.006436607837677002, 0.006463488101959228, 0.006436160087585449, 0.006498752117156982, 0.006461567878723144, 0.006418560028076172, 0.006864863872528076, 0.006505663871765137, 0.007390048027038574, 0.006939712047576904, 0.006490943908691406, 0.006529151916503906, 0.006471072196960449, 0.006443615913391113, 0.00644220781326294, 0.006459167957305908, 0.006456287860870361, 0.0064282240867614745, 0.00647321605682373, 0.006442272186279297, 0.006438591957092285, 0.006606847763061524, 0.0064471039772033695, 0.0064453759193420414, 0.006475584030151367, 0.006438176155090332, 0.006431327819824219, 0.006402368068695068, 0.0064336638450622555, 0.006456128120422363, 0.006397408008575439, 0.0064702401161193844, 0.006451136112213135, 0.0064264960289001465, 0.0064691839218139644, 0.00644268798828125, 0.0064412798881530765, 0.006372416019439697, 0.00643782377243042, 0.006434624195098877, 0.0063910079002380375, 0.0063946561813354496, 0.006444608211517334, 0.006416863918304443, 0.0064551677703857425, 0.00641871976852417, 0.0065372161865234375, 
0.006565824031829834, 0.0067738242149353025, 0.0064414401054382325, 0.006417984008789062, 0.006429408073425293, 0.006463168144226074, 0.00643881607055664, 0.006419199943542481, 0.0064674878120422365, 0.00642252779006958, 0.00642252779006958, 0.006464704036712647, 0.0064043841361999514, 0.006529568195343017, 0.006401408195495606, 0.006431359767913819, 0.006451263904571534, 0.006395840167999268, 0.006436863899230957, 0.006416287899017334, 0.006414207935333252, 0.006453472137451172, 0.006439104080200196, 0.006429696083068847, 0.006397791862487793, 0.006448095798492432, 0.006468671798706055, 0.006458303928375244, 0.006445055961608887, 0.006418432235717773, 0.006415775775909424, 0.006431519985198975, 0.00641209602355957, 0.006444384098052978, 0.006410912036895752, 0.006408192157745361, 0.0064468798637390134, 0.006391903877258301, 0.0064323520660400394, 0.0064802241325378415, 0.006428864002227783, 0.006443359851837158, 0.00645027208328247, 0.006473855972290039, 0.0064386558532714844, 0.006426400184631348, 0.006445280075073242, 0.006415103912353516, 0.00643884801864624, 0.006461056232452392, 0.006424960136413574, 0.006439231872558594, 0.006442912101745605, 0.0064245758056640625, 0.006561791896820069, 0.0064349441528320315, 0.006453440189361572, 0.006432096004486084, 0.006410592079162598, 0.006453248023986816, 0.006530240058898926, 0.006410048007965088, 0.006446080207824707, 0.006418432235717773, 0.006409920215606689, 0.006434271812438965, 0.006408448219299316, 0.006408768177032471, 0.006404032230377197, 0.006411935806274414, 0.006422815799713134, 0.006410208225250244, 0.006448863983154297, 0.006410367965698242, 0.0064280638694763186, 0.006478784084320069, 0.006404096126556396, 0.006426655769348145, 0.006405344009399414, 0.006433536052703857, 0.006468671798706055, 0.0064050559997558595, 0.006428671836853027, 0.006463488101959228, 0.006427648067474365, 0.006464096069335937, 0.006449567794799805, 0.0064629120826721195, 0.006480095863342285, 0.006439263820648193, 0.006506080150604248, 0.006427103996276855, 0.006415328025817871, 0.006440991878509521, 0.006419583797454834, 0.006438047885894776, 0.006435488224029541, 0.006458367824554443, 0.006472703933715821, 0.006539455890655518, 0.006489503860473633, 0.006488480091094971, 0.00642848014831543, 0.006469888210296631, 0.006420415878295898, 0.0064430079460144046, 0.006447296142578125, 0.006430528163909912, 0.006446176052093506, 0.0064503679275512696, 0.006419519901275635, 0.00644982385635376, 0.006460832118988037, 0.006437280178070068, 0.006473919868469238, 0.0064301438331604, 0.00636521577835083, 0.006420415878295898, 0.006440735816955566, 0.00644371223449707, 0.006396736145019531, 0.006401023864746094, 0.006426623821258545, 0.006442944049835205, 0.0064635519981384275, 0.0064102401733398436, 0.006452864170074463, 0.006424448013305664, 0.00640883207321167, 0.0064633598327636715, 0.006420447826385498, 0.006416768074035645, 0.00664572811126709, 0.00654691219329834, 0.006455615997314453, 0.006398015975952148, 0.006432032108306885, 0.006457376003265381, 0.00640451192855835, 0.0064347519874572755, 0.006508480072021485, 0.006430975914001465, 0.006459743976593018, 0.006434624195098877, 0.006419680118560791, 0.006451007843017578, 0.006415167808532715, 0.0064584641456604, 0.006439839839935303, 0.006559743881225586, 0.006641536235809326, 0.006660223960876465, 0.0064245758056640625, 0.006459392070770263, 0.006397952079772949, 0.0066641921997070315, 0.006428671836853027, 0.006436160087585449, 0.006458047866821289, 0.006418432235717773, 0.006444447994232177, 
0.006439455986022949, 0.006419648170471191, 0.006454080104827881, 0.0064141759872436525, 0.006461984157562256, 0.0064856958389282226, 0.006413824081420898, 0.0064496641159057615, 0.006405280113220215, 0.006431072235107422, 0.006451712131500244, 0.006428671836853027, 0.006415808200836181, 0.006517312049865723, 0.006446432113647461, 0.006437535762786865, 0.00641161584854126, 0.006420735836029053, 0.006344704151153564, 0.006405151844024658, 0.006440224170684815, 0.006421472072601318, 0.006412799835205078, 0.006435391902923584, 0.006422175884246826, 0.00662937593460083, 0.006451200008392334, 0.006443359851837158, 0.006452960014343262, 0.006420415878295898, 0.006703104019165039, 0.006473696231842041, 0.00639961576461792, 0.006447519779205322, 0.006429791927337647, 0.0064085121154785155, 0.006402976036071777, 0.006416063785552978, 0.006417568206787109, 0.006419167995452881, 0.0064349441528320315, 0.006428768157958984, 0.006422431945800782, 0.006413824081420898, 0.006424287796020508, 0.006398623943328858, 0.006432896137237549, 0.006414432048797608, 0.006432864189147949, 0.00641161584854126, 0.006435296058654785, 0.00643891191482544, 0.0066007041931152345, 0.0064551677703857425, 0.006467135906219482, 0.006406752109527588, 0.006440896034240723, 0.0064043841361999514, 0.006434432029724121, 0.006428192138671875, 0.006474624156951905, 0.006415999889373779, 0.0064020161628723146, 0.006393983840942383, 0.00643225622177124, 0.006380032062530518, 0.006410079956054688, 0.006394015789031982, 0.0064245758056640625, 0.006406144142150879, 0.006398079872131347, 0.006410111904144287, 0.006393856048583985, 0.006402048110961914, 0.006406047821044922, 0.006409759998321533, 0.006400800228118896, 0.006393663883209229, 0.006395455837249756, 0.006433472156524658, 0.006401887893676758, 0.006338560104370118, 0.006565887928009034, 0.0064386558532714844, 0.006464928150177002, 0.006406847953796387, 0.006449312210083008, 0.006450623989105225, 0.0064308161735534665, 0.0064429759979248045, 0.006420991897583008, 0.006430528163909912, 0.006424767971038818, 0.006460415840148926, 0.00652185583114624, 0.0065409598350524905, 0.006451551914215088, 0.006467167854309082, 0.006449567794799805, 0.0064626879692077635, 0.006453951835632324, 0.006459487915039063, 0.006560895919799805, 0.006516767978668213, 0.006499231815338135, 0.006518080234527588, 0.006500415802001953, 0.006445055961608887, 0.006451776027679443, 0.0064102401733398436, 0.006419680118560791, 0.006417183876037597, 0.006416384220123291, 0.006451007843017578, 0.006424767971038818, 0.006448703765869141, 0.006430208206176758, 0.006437215805053711, 0.006429440021514893, 0.006434656143188476, 0.006445184230804444, 0.0064449281692504886, 0.006406015872955322, 0.0064349441528320315, 0.006452864170074463, 0.006418816089630127, 0.00643891191482544, 0.006412064075469971, 0.006412064075469971, 0.006432608127593994, 0.006428991794586182, 0.006422080039978027, 0.006424672126770019, 0.006410880088806152, 0.006418591976165772, 0.0064345598220825195, 0.006418303966522217, 0.006426208019256592, 0.0064069762229919435, 0.006416192054748535, 0.0064102401733398436, 0.006430496215820312, 0.006386911869049073, 0.006406688213348389, 0.006354047775268555, 0.0063702077865600585, 0.006421919822692871, 0.006423359870910645, 0.006407328128814697, 0.006437471866607666, 0.00642406415939331, 0.00643123197555542, 0.006748159885406494, 0.006710464000701904, 0.006549727916717529, 0.006726240158081055, 0.006925824165344238, 0.010168831825256347, 0.007569183826446533, 0.006467936038970948, 0.0064627199172973635, 
0.006466464042663574, 0.006467296123504639, 0.006481919765472412, 0.006422912120819092, 0.006528863906860352, 0.006485311985015869, 0.006400479793548584, 0.006434815883636475, 0.006419680118560791, 0.006429471969604492, 0.006434207916259766, 0.006407904148101807, 0.006429567813873291, 0.006429791927337647, 0.006452191829681397, 0.0064718079566955565, 0.006424032211303711, 0.006460031986236572, 0.006440671920776367, 0.006463615894317627, 0.006471551895141601, 0.006466911792755127, 0.006435488224029541, 0.006494207859039307, 0.0064982080459594724, 0.006442751884460449, 0.006431424140930176, 0.00644371223449707, 0.006452191829681397, 0.006419680118560791, 0.006433568000793457, 0.006426623821258545, 0.0064102401733398436, 0.006444640159606933, 0.006418591976165772, 0.006422783851623535, 0.006412288188934326, 0.006436511993408203, 0.006434591770172119, 0.0063946561813354496, 0.006426112174987793, 0.006420383930206299, 0.006403647899627686, 0.00657040023803711, 0.006402368068695068, 0.006416704177856445, 0.006337376117706299, 0.006414112091064453, 0.006410592079162598, 0.006412288188934326, 0.006387263774871826, 0.006465087890625, 0.0063949117660522465, 0.0064223999977111815, 0.00640828800201416, 0.006413375854492188, 0.006418848037719726, 0.00640227222442627, 0.006504640102386474, 0.00644652795791626, 0.006416959762573242, 0.0064471039772033695, 0.006411808013916016, 0.006439616203308105, 0.006426752090454102, 0.006408160209655762, 0.006414303779602051, 0.006408095836639404, 0.006403552055358887, 0.006429376125335694, 0.006380447864532471, 0.006443776130676269, 0.00641164779663086, 0.0066332159042358394, 0.006461599826812744, 0.006410975933074951, 0.006410592079162598, 0.0064180798530578615, 0.006425824165344238, 0.006961664199829101, 0.0064412479400634765, 0.006444575786590576, 0.006473599910736084, 0.006433375835418701, 0.006434815883636475, 0.0064839677810668945, 0.006426623821258545, 0.006471712112426758, 0.006432735919952392, 0.006475200176239013, 0.00646611213684082, 0.006448416233062744, 0.006437600135803223, 0.006418464183807373, 0.006426080226898194, 0.006455808162689209, 0.006430111885070801, 0.006609504222869873, 0.006694911956787109, 0.006412288188934326, 0.006450975894927978, 0.006423808097839355, 0.006427616119384766, 0.006418432235717773, 0.006389855861663818, 0.006417312145233154, 0.006403071880340576, 0.006502399921417237, 0.0064304318428039554, 0.006348159790039063, 0.0064067840576171875, 0.006422495841979981, 0.006389791965484619, 0.006404096126556396, 0.006373280048370361, 0.006395999908447266, 0.006421567916870118, 0.006368256092071533, 0.006424511909484863, 0.006387712001800537, 0.006402048110961914, 0.006425792217254638, 0.006377503871917724, 0.006405983924865723, 0.006388671875, 0.006393856048583985, 0.00643071985244751, 0.0063868799209594726, 0.006458176136016846, 0.006380735874176025, 0.006412576198577881, 0.006439455986022949, 0.006370495796203613, 0.006415167808532715, 0.006383296012878418, 0.006387296199798584, 0.006435647964477539, 0.006339583873748779, 0.006488992214202881, 0.006428671836853027, 0.006440735816955566, 0.006442272186279297, 0.006384928226470948, 0.006432415962219239, 0.006417759895324707, 0.006435488224029541, 0.006529088020324707, 0.006477759838104248, 0.006424767971038818, 0.00640012788772583, 0.006421567916870118, 0.0064170241355896, 0.006395391941070557, 0.006420447826385498, 0.006394368171691894, 0.006404128074645996, 0.006452352046966553, 0.006380383968353272, 0.006461184024810791, 0.006387712001800537, 0.006420735836029053, 
0.006465568065643311, 0.006391136169433593, 0.006398623943328858, 0.006416255950927734, 0.006385791778564453, 0.0064102401733398436, 0.00638915205001831, 0.0063961601257324216, 0.006717919826507568, 0.006401919841766358, 0.006414400100708008, 0.006502783775329589, 0.006402304172515869, 0.006426432132720947, 0.006415679931640625, 0.006402239799499511, 0.0064273600578308105, 0.006374527931213379, 0.006449183940887451, 0.006387648105621338, 0.006400896072387695, 0.006434239864349365, 0.006386240005493164, 0.006410016059875488, 0.006433055877685547, 0.006420703887939453, 0.006415520191192627, 0.0063853759765625, 0.00641315221786499, 0.0063928961753845215, 0.00650870418548584, 0.006483935832977295, 0.0064215359687805175, 0.00640825605392456, 0.006509632110595703, 0.006419392108917237, 0.006446815967559814, 0.00640172815322876, 0.006435135841369629, 0.006428671836853027, 0.006505695819854737, 0.006435103893280029, 0.006441088199615479, 0.007557504177093506, 0.007896639823913575, 0.00764896011352539, 0.007238624095916748, 0.006563615798950195, 0.006395584106445313, 0.006465824127197266, 0.006459199905395508, 0.006426047801971435, 0.0064356160163879395, 0.006391392230987549, 0.006426239967346191, 0.006402847766876221, 0.006436384201049804, 0.006439008235931396, 0.00641596794128418, 0.006406015872955322, 0.006416512012481689, 0.0063946242332458495, 0.006541183948516846, 0.006437024116516113, 0.0065372161865234375, 0.00646668815612793, 0.006394144058227539, 0.0064661440849304195, 0.006395904064178467, 0.006430560111999511, 0.0064431681632995605, 0.006408127784729004, 0.006408607959747315, 0.006411935806274414, 0.0063526082038879396, 0.006426943778991699, 0.006393087863922119, 0.006544095993041992, 0.006440959930419922, 0.006378687858581543, 0.006463488101959228, 0.006392127990722656, 0.0064139518737792965, 0.0064561920166015625, 0.006428671836853027, 0.00644649600982666, 0.006451680183410645, 0.0064366722106933595, 0.00650822401046753, 0.006494847774505616, 0.0064490242004394535, 0.006440800189971924, 0.00652243185043335, 0.0064642238616943355, 0.006412000179290772, 0.006490399837493896, 0.006453248023986816, 0.006397952079772949, 0.006491583824157715, 0.006396736145019531, 0.00644598388671875, 0.006425536155700683, 0.006385568141937256, 0.00641644811630249, 0.006408127784729004, 0.006402048110961914, 0.006434976100921631, 0.006399839878082275, 0.006563839912414551, 0.006432767868041992, 0.006423808097839355, 0.006447872161865234, 0.006479519844055176, 0.0064208321571350094, 0.006453248023986816, 0.006397952079772949, 0.006416319847106933, 0.006397088050842285, 0.006421055793762207, 0.006421855926513672, 0.006406655788421631, 0.0064266881942749024, 0.006388160228729248, 0.006395071983337402, 0.006558432102203369, 0.006387551784515381, 0.006442463874816894, 0.006379327774047852, 0.006380512237548828, 0.006401408195495606, 0.0063658242225646975, 0.006413792133331299, 0.006375807762145996, 0.006408639907836914, 0.006446815967559814, 0.006412288188934326, 0.006399936199188232, 0.006313983917236328, 0.006526976108551025, 0.006454751968383789, 0.006435135841369629, 0.006389984130859375, 0.0064471039772033695, 0.006371327877044678, 0.006420576095581055, 0.0064050559997558595, 0.006433760166168213, 0.006432767868041992, 0.006391424179077149, 0.00644649600982666, 0.006404128074645996, 0.0064355840682983395, 0.006424960136413574, 0.006414048194885254, 0.006537504196166992, 0.006399744033813477, 0.006415999889373779, 0.006439328193664551, 0.006401472091674805, 0.0064234561920166015, 0.006450111865997314, 
0.00641926383972168, 0.006425856113433838, 0.006435359954833984, 0.006433279991149903, 0.006387104034423828, 0.006432672023773193, 0.006473919868469238, 0.006471839904785157, 0.00667190408706665, 0.007098847866058349, 0.006541408061981201, 0.0064633917808532716, 0.006442048072814941, 0.006447135925292969, 0.00642252779006958, 0.0064041919708251955, 0.006433311939239502, 0.0064512319564819336, 0.006372831821441651, 0.006421279907226562, 0.006419968128204345, 0.006420447826385498, 0.00653542423248291, 0.006453536033630371, 0.006446815967559814, 0.006479328155517578, 0.0064126081466674804, 0.006467520236968994, 0.006393663883209229, 0.006450975894927978, 0.006473951816558838, 0.006421408176422119, 0.0064316802024841305, 0.006433568000793457, 0.0065168957710266115, 0.006414303779602051, 0.006404096126556396, 0.006442399978637695, 0.006386271953582764, 0.006342400074005127, 0.006423776149749756, 0.00641539192199707, 0.0064273920059204105, 0.006445055961608887, 0.006403200149536133, 0.006412960052490234, 0.00640556812286377, 0.006425375938415527, 0.006418432235717773, 0.006432511806488037, 0.006448927879333496, 0.006442560195922852, 0.006433792114257812, 0.006446368217468261, 0.006437376022338868, 0.006455423831939697, 0.006451551914215088, 0.006413536071777344, 0.006492800235748291, 0.006427968025207519, 0.006457856178283691, 0.006479104042053222, 0.006398719787597656, 0.00647760009765625, 0.006458911895751953, 0.006421088218688965, 0.006456511974334717, 0.006435743808746338, 0.006440671920776367, 0.006408576011657715, 0.00647379207611084, 0.006604864120483399, 0.006428415775299072, 0.006506944179534912, 0.006556287765502929, 0.006423520088195801, 0.006469632148742676, 0.006454751968383789, 0.006480127811431885, 0.006521120071411133, 0.006464799880981445, 0.00649894380569458, 0.006473504066467285, 0.006447423934936523, 0.006516736030578613, 0.006457183837890625, 0.006442848205566406, 0.006492063999176025, 0.006433184146881103, 0.006453248023986816, 0.006497888088226318, 0.006466047763824463, 0.006459455966949463, 0.006410079956054688, 0.0064471039772033695, 0.006471680164337158, 0.00638976001739502, 0.006469632148742676, 0.00642195177078247, 0.006412384033203125, 0.006461696147918701, 0.006428832054138183, 0.006332064151763916, 0.00641593599319458, 0.006423327922821045, 0.0064430079460144046, 0.006388063907623291, 0.006440608024597168, 0.006438496112823486, 0.006403711795806885, 0.006451456069946289, 0.006416800022125244, 0.0064124159812927245, 0.006416384220123291, 0.0064102401733398436, 0.006436863899230957, 0.00636956787109375, 0.006432479858398438, 0.006488287925720215, 0.00637724781036377, 0.006416704177856445, 0.006399680137634278, 0.006424352169036866, 0.006430367946624756, 0.006435391902923584, 0.006463488101959228, 0.0064245758056640625, 0.006443359851837158, 0.006452767848968506, 0.0064225921630859375, 0.006424287796020508, 0.0064160962104797365, 0.006414976119995117, 0.0064143362045288085, 0.006422111988067627, 0.0064618558883666995, 0.006416672229766845, 0.006456352233886718, 0.0064785280227661135, 0.006416384220123291, 0.006407519817352295, 0.00646617603302002, 0.0063695359230041505, 0.006444287776947021, 0.006433248043060303, 0.006430399894714355, 0.006421887874603271, 0.006400832176208496, 0.006436223983764648, 0.006399040222167969, 0.0067454719543457034, 0.00648748779296875, 0.006437888145446777, 0.00645849609375, 0.006478816032409668, 0.006498271942138672, 0.006502272129058838, 0.006420127868652344, 0.0064495038986206055, 0.006467584133148193, 0.006437056064605713, 
0.006454112052917481, 0.0064462399482727055, 0.006416224002838135, 0.006422495841979981, 0.006370272159576416, 0.0064839677810668945, 0.006461503982543945, 0.006469312191009521, 0.006479616165161133, 0.006475168228149414, 0.00644163179397583, 0.006471871852874756, 0.00643609619140625, 0.006434624195098877, 0.006464767932891846, 0.00646457576751709, 0.006620927810668946, 0.0064308161735534665, 0.006449312210083008, 0.006457376003265381, 0.006424736022949218, 0.006464992046356201, 0.006476895809173584, 0.006409855842590332, 0.006447360038757324, 0.0064471039772033695, 0.006433055877685547, 0.006450911998748779, 0.006442944049835205, 0.006441088199615479, 0.006419904232025146, 0.006441472053527832, 0.006434815883636475, 0.006387423992156982, 0.006435008049011231, 0.006437983989715576, 0.006445856094360352, 0.006474336147308349, 0.006456607818603515, 0.006453472137451172, 0.006459392070770263, 0.006401567935943603, 0.006443039894104004, 0.006382143974304199, 0.006432767868041992, 0.00652288007736206, 0.006490111827850342, 0.006455296039581298, 0.006440000057220459, 0.0064316802024841305, 0.006448192119598389, 0.006435264110565186, 0.006456992149353027, 0.006449215888977051, 0.0064412479400634765, 0.0064570879936218266, 0.0064498882293701175, 0.006406176090240479, 0.0064793601036071775, 0.0064143362045288085, 0.006468095779418945, 0.006438240051269532, 0.006445856094360352, 0.006462656021118164, 0.006427135944366455, 0.006459584236145019, 0.00643071985244751, 0.006412288188934326, 0.006486015796661377, 0.0064430079460144046, 0.006471327781677246, 0.006479455947875977, 0.006445824146270752, 0.006485119819641113, 0.006646527767181397, 0.006422719955444336, 0.006474944114685058, 0.0064349441528320315, 0.006468224048614502, 0.006586368083953857, 0.006487264156341553, 0.006461887836456299, 0.006437215805053711, 0.006454944133758545, 0.006455808162689209, 0.006459231853485107, 0.006561791896820069, 0.006469632148742676, 0.006406239986419678, 0.006438303947448731, 0.006425087928771973, 0.006471360206604004, 0.006555488109588623, 0.006465695858001709, 0.00648419189453125, 0.006461535930633545, 0.006430272102355957, 0.006459584236145019, 0.006438720226287842, 0.006448736190795898, 0.006480288028717041, 0.006416831970214844, 0.006481887817382813, 0.006434144020080567, 0.006445759773254394, 0.0064410557746887205, 0.006440864086151123, 0.00642416000366211, 0.006407711982727051, 0.006500736236572265, 0.0064739837646484374, 0.006412479877471924, 0.006438975811004639, 0.006436863899230957, 0.0064143362045288085, 0.00642252779006958, 0.006437119960784912, 0.006440320014953613, 0.006470016002655029, 0.0064126081466674804, 0.006567615985870362, 0.006450496196746826, 0.006429728031158447, 0.006458047866821289, 0.006431712150573731, 0.006408192157745361, 0.00643071985244751, 0.006445055961608887, 0.006453536033630371, 0.006442719936370849, 0.006373280048370361, 0.006464992046356201, 0.006404736042022705, 0.0064430079460144046, 0.0064245758056640625, 0.006414207935333252, 0.006518911838531494, 0.006463488101959228, 0.006448287963867188, 0.006468448162078857, 0.0064585919380187986, 0.006478623867034912, 0.006549503803253174, 0.0064544639587402345, 0.006478655815124512, 0.006452415943145752, 0.006762752056121826, 0.0065133762359619145, 0.00642252779006958, 0.006496096134185791, 0.0064237117767333984, 0.006521728038787842, 0.006481887817382813, 0.00644649600982666, 0.00645801591873169, 0.006435008049011231, 0.006435647964477539, 0.006472224235534668, 0.007629151821136475, 0.0066232957839965825, 0.006453120231628418, 
0.00643452787399292, 0.006482336044311523, 0.006448544025421142, 0.006458144187927246, 0.006467391967773437, 0.006463488101959228, 0.006447423934936523, 0.006655871868133545, 0.00643887996673584, 0.00646947193145752, 0.00645849609375, 0.006410848140716552, 0.006449344158172607, 0.0064617919921875, 0.006452223777770996, 0.006449183940887451, 0.006388448238372803, 0.0064200000762939455, 0.006464096069335937, 0.006399871826171875, 0.00643891191482544, 0.006426144123077393, 0.006420959949493408, 0.0064340801239013675, 0.006385663986206055, 0.00644374418258667, 0.006418655872344971, 0.006448287963867188, 0.006445695877075195, 0.006453536033630371, 0.006444767951965332, 0.006436863899230957, 0.006334368228912353, 0.006416255950927734, 0.006432960033416748, 0.006426464080810547, 0.006432735919952392, 0.0064329919815063474, 0.006420735836029053, 0.0064225921630859375, 0.006422368049621582, 0.006466495990753174, 0.0064234561920166015, 0.006447231769561768, 0.006411263942718506, 0.006494912147521972, 0.006516928195953369, 0.006645152091979981, 0.006421408176422119, 0.006450240135192871, 0.006402688026428223, 0.006434815883636475, 0.006440896034240723, 0.006426911830902099, 0.006632448196411133, 0.007833983898162842, 0.008087167739868164, 0.006544320106506347, 0.006483903884887695, 0.006508351802825928, 0.006417535781860352, 0.006513279914855957, 0.006426368236541748, 0.006455904006958008, 0.006464863777160644, 0.006427648067474365, 0.00651251220703125, 0.006425824165344238, 0.006435135841369629, 0.006472256183624268, 0.006407040119171143, 0.006429823875427246, 0.006403295993804931, 0.006431136131286621, 0.00642464017868042, 0.006432223796844483, 0.006404640197753907, 0.006455296039581298, 0.006391808032989502, 0.006429887771606445, 0.006472511768341065, 0.006446976184844971, 0.006447328090667725, 0.006385568141937256, 0.0064266881942749024, 0.006416319847106933, 0.006450784206390381, 0.0064345598220825195, 0.006395711898803711, 0.006417247772216797, 0.006398111820220947, 0.006421823978424072, 0.006437407970428467, 0.006409247875213623, 0.006437856197357178]",tokens/s,154.48743856453183,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,11063.41888,12232.556544,0.0,11846.811648,11814.785024,s,1,13.955064453125,13.955064453125,0.0,13.955064453125,13.955064453125,13.955064453125,13.955064453125,[13.955064453125],,kWh,0.00019985668940415355,2.2033449359598787e-05,6.798838772401061e-05,0.0002898785264877629,,MB,2123.046912,13165.789184,0.0,12750.68416,12632.68864,s,10,11.351165161132814,1.1351165161132815,0.0009599016006308202,1.1351495361328126,1.136421240234375,1.1366567871093751,1.136845224609375,"[1.134089599609375, 1.135121826171875, 1.1347489013671874, 1.133389404296875, 1.13517724609375, 1.135467041015625, 1.134689697265625, 1.13522021484375, 1.136368896484375, 
1.136892333984375]",tokens/s,225.52750873237397,kWh,3.3180238714586114e-05,3.6592622429510743e-06,2.205135097440025e-05,5.889085193193744e-05,tokens/kWh,4347024.9045788925,MB,2131.570688,13438.418944,0.0,13023.31392,12936.608256,s,10,48.979035644531244,4.897903564453125,0.010035874220725688,4.89965966796875,4.90893935546875,4.9091212890625,4.9092668359375,"[4.8790654296875, 4.8823984375, 4.8935400390625, 4.89629443359375, 4.89593603515625, 4.90490576171875, 4.90566845703125, 4.90302490234375, 4.90930322265625, 4.90889892578125]",tokens/s,12.862646062945558,kWh,0.00014357564593457865,1.5834817575470372e-05,9.552535419800056e-05,0.00025493581770804957,tokens/kWh,247121.0227201071,,s,630,48.94920770263676,0.07769715508355035,0.0007933281626137319,0.0776789093017578,0.07870827178955078,0.07899014625549317,0.07974043884277343,"[0.07879679870605469, 0.0766402587890625, 0.07648258972167969, 0.07732630157470703, 0.07665869140625, 0.07662172698974609, 0.07679548645019531, 0.07691222381591797, 0.07733065795898438, 0.07687987518310548, 0.07747174072265625, 0.07694989013671875, 0.07710342407226563, 0.07713504028320313, 0.07679468536376953, 0.07748607635498046, 0.07634944152832031, 0.07625897979736328, 0.07762073516845704, 0.07680060577392578, 0.07694767761230469, 0.07713385772705078, 0.07818828582763672, 0.07827645111083985, 0.07722025299072266, 0.07687577819824219, 0.07700383758544922, 0.0791048355102539, 0.0769411163330078, 0.07698668670654298, 0.07751593780517578, 0.07652620697021484, 0.07746377563476563, 0.0769617919921875, 0.07714943695068359, 0.07696256256103516, 0.07749807739257812, 0.07812944030761719, 0.07769087982177734, 0.07801446533203125, 0.0779714584350586, 0.07735295867919922, 0.07675084686279297, 0.07750188446044921, 0.07781433868408204, 0.07875689697265625, 0.07691567993164063, 0.0771492156982422, 0.07772783660888671, 0.07766515350341797, 0.07799619293212891, 0.07794774627685547, 0.07839631652832031, 0.07761888122558594, 0.07810041809082031, 0.07767472076416015, 0.0777976303100586, 0.07816729736328125, 0.07821695709228516, 0.07808921813964843, 0.07830323028564454, 0.07786905670166015, 0.07787932586669923, 0.07984722900390626, 0.07658598327636719, 0.07778562927246094, 0.076612060546875, 0.07610559844970703, 0.07619779205322266, 0.07701526641845703, 0.07631827545166016, 0.07798009490966797, 0.0771328353881836, 0.07613951873779297, 0.07707030487060547, 0.0775692138671875, 0.0770252456665039, 0.07692390441894531, 0.07724797058105469, 0.07673689270019532, 0.07657283020019531, 0.07764575958251953, 0.07712322998046875, 0.07729587554931641, 0.07697200012207031, 0.07659318542480469, 0.07620198059082031, 0.07756594848632813, 0.07753097534179687, 0.07819468688964844, 0.07758659362792969, 0.07742060852050782, 0.0768729248046875, 0.07715081787109375, 0.07697551727294921, 0.07767926025390624, 0.07783961486816406, 0.07781849670410156, 0.0765687713623047, 0.07705804443359375, 0.07695359802246093, 0.07771926116943359, 0.07812274932861328, 0.078170654296875, 0.07785881805419922, 0.07791929626464844, 0.07688899230957032, 0.07740386962890625, 0.07675526428222657, 0.07747993469238282, 0.07793385314941406, 0.07783497619628907, 0.07845699310302734, 0.07805423736572266, 0.07792947387695312, 0.07823686218261719, 0.07853753662109375, 0.07829913330078125, 0.07846604919433593, 0.07872364807128907, 0.0774249267578125, 0.07813507080078125, 0.07768080139160156, 0.07799791717529297, 0.07734105682373046, 0.07802595520019531, 0.07974614715576171, 0.07608537292480469, 0.07660009765625, 0.07719535827636718, 
0.07676723480224609, 0.07720489501953125, 0.07715491485595703, 0.07705190277099609, 0.07716659545898437, 0.07737859344482421, 0.07687999725341797, 0.07705276489257812, 0.07691059112548829, 0.07704691314697265, 0.0769874267578125, 0.07720124816894532, 0.07773551940917969, 0.07683523559570313, 0.07636172485351563, 0.07871282958984376, 0.07712083435058593, 0.07744992065429687, 0.07845862579345703, 0.07711564636230468, 0.07725465393066407, 0.07705929565429688, 0.0766902084350586, 0.07724646759033203, 0.0774471664428711, 0.07757968139648437, 0.07676988983154297, 0.07778656005859375, 0.07753376007080078, 0.07666015625, 0.07765254211425782, 0.07661567687988281, 0.0779327392578125, 0.07811468505859374, 0.07763926696777344, 0.0769148178100586, 0.07981078338623047, 0.07702630615234375, 0.07747686767578126, 0.0775393295288086, 0.07843782043457032, 0.07777110290527343, 0.0780986557006836, 0.07862067413330077, 0.07771942138671875, 0.07897865295410156, 0.07794687652587891, 0.0780064926147461, 0.0792845458984375, 0.078129150390625, 0.07777382659912109, 0.07853568267822265, 0.07845683288574219, 0.07778832244873046, 0.07873766326904297, 0.07804710388183594, 0.07817494201660156, 0.07833757019042968, 0.07856790161132812, 0.0796115493774414, 0.07601747131347657, 0.07666300964355469, 0.07672054290771485, 0.07776223754882812, 0.07631075286865234, 0.07738349151611328, 0.07713343811035156, 0.0764544677734375, 0.07988019561767579, 0.07691468811035156, 0.07656448364257812, 0.0778274917602539, 0.0771396484375, 0.07683766174316406, 0.07698445129394531, 0.07737958526611328, 0.07697203063964844, 0.0769485092163086, 0.07725769805908203, 0.07705734252929687, 0.07782592010498048, 0.07746028900146484, 0.07727101135253907, 0.07751017761230469, 0.07669168090820312, 0.07705980682373047, 0.07694188690185547, 0.07745753479003906, 0.07750643157958985, 0.07774134063720703, 0.07843196868896485, 0.07669023895263671, 0.07830547332763672, 0.07812924957275391, 0.07757366180419922, 0.07721385955810547, 0.07664854431152343, 0.07761094665527343, 0.07803641510009765, 0.07827327728271484, 0.07836876678466796, 0.07800547027587891, 0.07834294128417969, 0.07788483428955079, 0.078246337890625, 0.07881894683837891, 0.07800447845458984, 0.07774031829833984, 0.07852851104736328, 0.07765529632568359, 0.0767352294921875, 0.07798095703125, 0.07833987426757813, 0.07866057586669922, 0.07886166381835938, 0.07870118713378907, 0.07851385498046876, 0.07726105499267578, 0.07817632293701172, 0.07821721649169922, 0.07852047729492187, 0.0792760009765625, 0.07930710601806641, 0.07606041717529297, 0.07722211456298828, 0.07642313385009766, 0.07620162963867187, 0.07750035095214844, 0.07663046264648438, 0.07728720092773438, 0.07709104156494141, 0.07784857940673828, 0.07668441772460938, 0.07752384185791016, 0.07767855834960938, 0.07766223907470703, 0.07780092620849609, 0.07712723541259765, 0.07696681976318359, 0.07674838256835938, 0.07719574737548827, 0.07696383666992188, 0.07785472106933594, 0.07717795562744141, 0.07734979248046875, 0.07661788940429687, 0.07751203155517578, 0.07736179351806641, 0.0777293472290039, 0.07774966430664063, 0.07680502319335937, 0.07664387512207031, 0.07730019378662109, 0.0775492172241211, 0.07837942504882812, 0.07743686676025391, 0.07820697784423829, 0.07767388916015625, 0.07723990631103515, 0.07863314819335937, 0.07796316528320313, 0.0778941421508789, 0.07808860778808593, 0.07800422668457031, 0.07712767791748047, 0.0770109405517578, 0.07772764587402343, 0.07865065765380859, 0.07821759796142579, 0.07795977783203124, 0.07806937408447266, 
0.07823763275146485, 0.0783216323852539, 0.0779749755859375, 0.0783020782470703, 0.07826022338867188, 0.07884595489501953, 0.07796717071533203, 0.07839148712158203, 0.07866572570800781, 0.07834623718261718, 0.07851423645019531, 0.07816793823242188, 0.07733663940429687, 0.07948278045654297, 0.07963648223876953, 0.07662550354003907, 0.07584329223632813, 0.07717100524902344, 0.0770679702758789, 0.07737149047851563, 0.07722454071044922, 0.0768511962890625, 0.07695974731445313, 0.07682816314697266, 0.07737920379638671, 0.07792729949951172, 0.07787696075439453, 0.07708905792236329, 0.07638630676269531, 0.07670931243896484, 0.07691117095947266, 0.07740825653076172, 0.07707644653320313, 0.07708879852294923, 0.07707177734375, 0.07765849304199218, 0.0778584976196289, 0.07819728088378906, 0.07785997009277344, 0.07800418853759766, 0.07750953674316406, 0.0777359390258789, 0.07744102478027344, 0.07877632141113282, 0.07706829071044922, 0.07706623840332032, 0.07704370880126953, 0.07833821105957031, 0.0780574722290039, 0.07802864074707032, 0.07860224151611328, 0.07783833312988281, 0.07798985290527344, 0.0789708480834961, 0.07765408325195312, 0.07792009735107422, 0.07815388488769531, 0.0769269790649414, 0.07775027465820313, 0.07792025756835938, 0.0783851547241211, 0.07800438690185547, 0.07882940673828125, 0.07858175659179688, 0.0787327651977539, 0.07902877044677735, 0.07842377471923828, 0.078012451171875, 0.07838671875, 0.07819686126708984, 0.07783395385742188, 0.07817718505859375, 0.07854454040527344, 0.07937433624267579, 0.07886998748779298, 0.07910237121582031, 0.0783568344116211, 0.08064915466308593, 0.07619174194335937, 0.07664998626708984, 0.07613827514648437, 0.07667375946044921, 0.07736115264892578, 0.07811065673828126, 0.07719328308105469, 0.07739545440673828, 0.07670220947265625, 0.07686732482910157, 0.07670825958251953, 0.07972646331787109, 0.07741436767578125, 0.07748387145996094, 0.07636605072021484, 0.07748095703125, 0.07692179107666015, 0.07716623687744141, 0.07684950256347656, 0.07771049499511719, 0.07764435577392578, 0.07722118377685547, 0.0776427230834961, 0.07760691070556641, 0.078276611328125, 0.07687789154052735, 0.07755712127685546, 0.07770905303955078, 0.07668614196777344, 0.0773939208984375, 0.07805734252929687, 0.07873977661132812, 0.07766793823242188, 0.07667520141601562, 0.07826393890380859, 0.07784015655517579, 0.07819657897949218, 0.07852864074707032, 0.07840841674804687, 0.07803903961181641, 0.0768677749633789, 0.07774771118164063, 0.07889110565185548, 0.07892940521240234, 0.07694537353515625, 0.07788211059570313, 0.07792230224609376, 0.07792822265625, 0.07799215698242187, 0.07899954986572266, 0.07805235290527343, 0.07878131103515625, 0.07839699554443359, 0.078412353515625, 0.07873699188232422, 0.07940956878662109, 0.07838925170898438, 0.07908163452148438, 0.07828463745117188, 0.07795906829833985, 0.07881497955322266, 0.07919241333007812, 0.07953113555908203, 0.07610457611083984, 0.07715020751953125, 0.07692098999023438, 0.07667491149902343, 0.0775946273803711, 0.07733033752441407, 0.07769407653808594, 0.07698713684082031, 0.07649712371826171, 0.07666483306884765, 0.07721475219726562, 0.07711782073974609, 0.07711190032958984, 0.07702323150634766, 0.07696588897705078, 0.07699842834472656, 0.07723235321044922, 0.07711862182617188, 0.07795094299316406, 0.07771196746826171, 0.07707584381103516, 0.07702751922607422, 0.07717961883544922, 0.07692899322509765, 0.07778307342529298, 0.07745878601074219, 0.07817488098144532, 0.07760281372070313, 0.07771250915527343, 0.0765633316040039, 
0.07774598693847656, 0.07869817352294922, 0.07837747192382813, 0.07779721832275391, 0.07764189147949219, 0.07760237121582031, 0.0766951675415039, 0.07764665222167969, 0.07827251434326171, 0.07869987487792969, 0.07777961730957031, 0.07803494262695312, 0.07796018981933593, 0.0783472671508789, 0.07885740661621093, 0.07879682922363282, 0.07827740478515625, 0.07843772888183594, 0.0781974105834961, 0.07696080017089844, 0.07767686462402344, 0.0786391372680664, 0.07857011413574219, 0.07868825531005859, 0.07870259094238281, 0.0794170913696289, 0.07863104248046875, 0.07900787353515625, 0.07834220886230468, 0.07790995025634766, 0.07955609893798828, 0.07864985656738281, 0.08123238372802734, 0.07621807861328125, 0.07596399688720704, 0.07702352142333985, 0.07714653015136719, 0.07717411041259765, 0.07660825347900391, 0.0772828140258789, 0.07727254486083984, 0.07680099487304687, 0.07698223876953125, 0.07759423828125, 0.07851046752929687, 0.07715020751953125, 0.07661759948730469, 0.07724864196777344, 0.07762124633789062, 0.07763967895507813, 0.07778083038330078, 0.07650685119628907, 0.07642361450195312, 0.07714224243164063, 0.07699842834472656, 0.07785881805419922, 0.07812236785888672, 0.07801248168945313, 0.07788556671142578, 0.07738803100585938, 0.07773744201660156, 0.07794502258300781, 0.07832527923583985, 0.07821209716796874, 0.07656243133544922, 0.07752899169921874, 0.07795692443847656, 0.07830582427978515, 0.078691650390625, 0.07871126556396485, 0.07825430297851563, 0.07669939422607422, 0.07761714935302734, 0.07804927825927735, 0.07861043548583985, 0.07864911651611328, 0.07831356811523438, 0.07710038757324218, 0.07766915130615235, 0.07846092987060548, 0.07866572570800781, 0.07881728363037109, 0.07910195159912109, 0.0791756820678711, 0.07834835052490234, 0.07834003448486328, 0.07837286376953125, 0.07871282958984376, 0.07860368347167969, 0.0785351333618164, 0.07816719818115235, 0.07836940765380859, 0.07904700469970703, 0.07890332794189453, 0.07904367828369141, 0.07981053161621093, 0.0770341796875, 0.07700479888916016, 0.07667462158203125, 0.07640338897705078, 0.0772638702392578, 0.0771502685546875, 0.0764730224609375, 0.0776108169555664, 0.07698576354980469, 0.07709366607666016, 0.07705599975585938, 0.07808150482177735, 0.0778157730102539, 0.07787987518310546, 0.07709490966796875, 0.07691776275634765, 0.07780268859863282, 0.07744290924072265, 0.07754953765869141, 0.07696543884277343, 0.0772899169921875, 0.07705193328857422, 0.07718019104003906, 0.07810527801513673, 0.07785266876220703, 0.07849779510498046, 0.07794908905029296, 0.07675273895263672, 0.07720550537109375, 0.07813843536376953, 0.07783679962158203, 0.0778773422241211, 0.07838345336914063, 0.07799603271484375, 0.07780966186523437, 0.07767424011230468, 0.07791436767578125, 0.07847318267822266, 0.07845276641845703, 0.07822319793701171, 0.07847747039794922, 0.07805955505371094, 0.07787254333496094, 0.07709964752197265, 0.07686742401123046, 0.07838114929199219, 0.07837286376953125, 0.07875379180908203, 0.07855919647216797, 0.0788148193359375, 0.07876242828369141, 0.07828221130371094, 0.07796585845947265, 0.07870793914794921, 0.07934038543701172, 0.07873324584960938, 0.07845683288574219, 0.07818156433105469, 0.07845152282714844, 0.07876131439208985, 0.07920297241210937, 0.07858585357666016]",tokens/s,12.87048411135088,,, 
4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,11156.86912,12232.556544,0.0,11846.811648,11814.785024,s,1,14.019837890625,14.019837890625,0.0,14.019837890625,14.019837890625,14.019837890625,14.019837890625,[14.019837890625],,kWh,0.00020087790467501537,2.215075427018636e-05,6.570116367199397e-05,0.0002887298226171957,,MB,2112.598016,13167.886336,0.0,12750.68416,12641.86368,s,10,13.489798217773437,1.3489798217773437,0.0034900716128019927,1.3496145629882812,1.35192373046875,1.3535305908203126,1.3548160791015624,"[1.3416568603515624, 1.3471947021484374, 1.3464073486328125, 1.350921630859375, 1.348687744140625, 1.351566650390625, 1.3505413818359375, 1.3469140625, 1.3507703857421876, 1.355137451171875]",tokens/s,189.7730387565828,kWh,3.948247184291707e-05,4.353192369482259e-06,2.621657652880083e-05,7.005224074120015e-05,tokens/kWh,3654415.580306163,MB,2118.045696,13461.487616,0.0,13044.28544,12933.698048,s,10,52.2772783203125,5.22772783203125,0.00510274340855043,5.22782861328125,5.233380712890625,5.233863452148438,5.234249643554688,"[5.216259765625, 5.22671923828125, 5.2299931640625, 5.22800244140625, 5.2228984375, 5.2258115234375, 5.2323193359375, 5.2332734375, 5.22765478515625, 5.23434619140625]",tokens/s,12.051124699718951,kWh,0.000152965358242083,1.687452344597683e-05,0.00010179449810220065,0.0002716343797902605,tokens/kWh,231929.40469702237,,s,630,52.27056412506103,0.08296914940485879,0.0007771541549944669,0.08296018981933594,0.08368835525512695,0.08398417205810547,0.0861938885498047,"[0.0854740447998047, 0.081512451171875, 0.08137081909179687, 0.08145951843261719, 0.08185558319091797, 0.0822955551147461, 0.08252841949462891, 0.0822149429321289, 0.0819343032836914, 0.08233510589599609, 0.08250637054443359, 0.08323916625976563, 0.08295398712158203, 0.0820674591064453, 0.08200576019287109, 0.08196860504150391, 0.0819760971069336, 0.08199958038330078, 0.0822786865234375, 0.08237200164794922, 0.08253020477294921, 0.08214189147949219, 0.08285593414306641, 0.08329222106933594, 0.08295827484130859, 0.08268800354003907, 0.08216742706298828, 0.08244445037841797, 0.08304383850097656, 0.08213177490234375, 0.08251110076904297, 0.08253011322021485, 0.08275030517578125, 0.08298700714111328, 0.08329625701904297, 0.08313654327392578, 0.08321810913085938, 0.08262281799316407, 0.08450377655029297, 0.08273379516601563, 0.08253437042236328, 0.08283344268798828, 0.08269414520263672, 0.08305632019042969, 0.083314208984375, 0.08279334259033203, 0.08335334777832032, 0.08338448333740234, 0.08325491333007813, 0.08291974639892578, 0.08319318389892578, 0.08297545623779297, 0.08293990325927734, 0.08318128204345702, 0.08314019012451172, 0.0840379867553711, 0.0835321273803711, 0.08302534484863282, 0.08334502410888672, 0.08307952117919921, 0.08323951721191407, 0.0833966064453125, 0.08370381164550782, 0.08694169616699218, 0.08201385498046875, 0.08140835571289062, 
0.08197853088378906, 0.08244918060302735, 0.08181561279296876, 0.08143059539794922, 0.0840150375366211, 0.08201216125488281, 0.08227021026611328, 0.08207974243164062, 0.08310169219970703, 0.08319407653808594, 0.08254188537597656, 0.08219468688964844, 0.08193251037597657, 0.08222467041015626, 0.08199727630615235, 0.08218121337890626, 0.08198748779296874, 0.08309964752197266, 0.08237177276611328, 0.08267657470703126, 0.083246337890625, 0.08342537689208984, 0.08302044677734376, 0.08284143829345703, 0.082884765625, 0.08279004669189453, 0.08217772674560547, 0.08220950317382812, 0.08234291076660157, 0.08376348876953126, 0.08259852600097656, 0.0827658233642578, 0.08330873870849609, 0.08335529327392578, 0.08353977966308594, 0.08285971069335937, 0.08256784057617188, 0.08281613159179688, 0.08248204803466797, 0.08288777923583984, 0.0826866226196289, 0.08368358612060547, 0.08316722869873047, 0.08349187469482422, 0.08324400329589844, 0.08337586975097656, 0.08454940795898437, 0.08286255645751953, 0.08380976104736328, 0.08332857513427734, 0.08322681427001953, 0.0831506576538086, 0.08297062683105469, 0.08337852478027344, 0.08513552093505859, 0.08322108459472656, 0.08376306915283203, 0.08366242980957031, 0.0838470687866211, 0.08356076812744141, 0.08742246246337891, 0.08196761322021484, 0.08256307220458985, 0.08262860870361328, 0.08151193237304688, 0.08191343688964844, 0.08218707275390626, 0.08198271942138671, 0.08196115112304687, 0.08211917114257812, 0.08227037048339844, 0.0829780502319336, 0.08338713836669921, 0.08232345581054687, 0.08244406127929688, 0.08383715057373047, 0.08196505737304688, 0.08207564544677734, 0.08213104248046875, 0.08184611511230469, 0.08204841613769531, 0.08222707366943359, 0.0827476806640625, 0.083693603515625, 0.0834730224609375, 0.08290850830078125, 0.08228710174560547, 0.08428300476074219, 0.08272115325927734, 0.08237232208251953, 0.08260982513427734, 0.08252025604248046, 0.08276831817626953, 0.08276316833496093, 0.08321202850341797, 0.08314060974121094, 0.08302678680419921, 0.08315452575683593, 0.08272476959228516, 0.0832784652709961, 0.0846662368774414, 0.08263827514648438, 0.08311046600341797, 0.08295350646972656, 0.08238768005371094, 0.08355564880371094, 0.08351404571533202, 0.08317542266845702, 0.08351500701904296, 0.08313484954833984, 0.08293389129638672, 0.08338829040527344, 0.0848111343383789, 0.08247360229492187, 0.08353382110595703, 0.08340275573730468, 0.08326348876953125, 0.08338976287841797, 0.08351404571533202, 0.08338227081298828, 0.0860979232788086, 0.08337935638427735, 0.08376771545410157, 0.08623308563232422, 0.0819240951538086, 0.08135475158691406, 0.0813150405883789, 0.08138864135742188, 0.0814691162109375, 0.08220877075195313, 0.08232550048828124, 0.08257331085205079, 0.08218377685546875, 0.08434508514404297, 0.08281465911865235, 0.08297856140136718, 0.08354275512695312, 0.08210006713867188, 0.08156326293945312, 0.08213558197021484, 0.0819441909790039, 0.08226000213623047, 0.08247299194335937, 0.08237907409667969, 0.0826798095703125, 0.08301773071289062, 0.08461228942871094, 0.08326432037353515, 0.08298076629638672, 0.08229283142089844, 0.08222720336914062, 0.0825440673828125, 0.08220460510253906, 0.08253094482421874, 0.08250537872314453, 0.08289315032958984, 0.08320409393310547, 0.08335346984863282, 0.08490611267089844, 0.08293968200683594, 0.08343574523925781, 0.08290096282958985, 0.08316038513183593, 0.08298697662353516, 0.08377830505371094, 0.08255612945556641, 0.08307382202148438, 0.08325939178466797, 0.08347853088378906, 0.08342726135253906, 
0.08325856018066406, 0.08500109100341798, 0.08350450897216796, 0.08289055633544921, 0.08325590515136719, 0.08333958435058594, 0.08310979461669922, 0.08286412811279296, 0.08344134521484375, 0.08306310272216796, 0.08372838592529297, 0.08355430603027343, 0.08385327911376952, 0.08369548797607422, 0.08360771179199218, 0.08369152069091797, 0.08583372497558593, 0.08203606414794921, 0.08156972503662109, 0.08127782440185546, 0.08172499084472656, 0.08227664184570313, 0.08266854095458985, 0.08177561950683594, 0.08205059051513672, 0.08227430725097656, 0.08208422088623046, 0.08295999908447266, 0.08347817230224609, 0.08243651580810547, 0.08264518737792968, 0.08203874969482422, 0.08175027465820313, 0.08323654174804687, 0.08254598236083985, 0.08271517181396484, 0.08219696044921875, 0.08233113861083985, 0.08282281494140625, 0.0830472640991211, 0.08303129577636718, 0.08257817840576172, 0.08299849700927735, 0.08282300567626953, 0.0821278076171875, 0.0831119384765625, 0.08240316772460937, 0.08264051055908203, 0.08360809326171875, 0.08245862579345703, 0.08323648071289062, 0.08368800354003907, 0.08363619232177734, 0.0829008331298828, 0.083430908203125, 0.08295276641845703, 0.08272016143798828, 0.08266806030273438, 0.08297676849365235, 0.08343138885498047, 0.08259337615966797, 0.08280518341064454, 0.08291059112548828, 0.08333478546142578, 0.08393651580810547, 0.08333491516113281, 0.08340480041503906, 0.08330592346191407, 0.08308576202392579, 0.08333913421630859, 0.0834163818359375, 0.08291629028320313, 0.0831592025756836, 0.08344764709472656, 0.08360345458984375, 0.08333657836914063, 0.08369209289550782, 0.08373049926757813, 0.08399161529541016, 0.08631094360351563, 0.08187593841552734, 0.08171520233154297, 0.08262246704101563, 0.08190361785888672, 0.08202761840820312, 0.08195369720458984, 0.08228614044189453, 0.08225596618652344, 0.08243440246582032, 0.08258150482177734, 0.08330137634277343, 0.08321321868896485, 0.08247920227050781, 0.08222637176513672, 0.08212322998046875, 0.08240531158447266, 0.08206556701660156, 0.0824947509765625, 0.08254534149169922, 0.0826060791015625, 0.08253187561035157, 0.0829754867553711, 0.08367638397216796, 0.08311888122558594, 0.08283900451660156, 0.0822930908203125, 0.0824567642211914, 0.08264463806152343, 0.08259414672851563, 0.08247296142578125, 0.0828579864501953, 0.08254783630371093, 0.08307539367675781, 0.08340499114990234, 0.0831246109008789, 0.08280473327636718, 0.08305254364013671, 0.08340045166015625, 0.08286233520507813, 0.08295014190673829, 0.08324710083007812, 0.08318156433105468, 0.08288050842285156, 0.08336966705322266, 0.08279481506347657, 0.08369964599609375, 0.083310302734375, 0.08305878448486329, 0.08357708740234375, 0.0835252456665039, 0.08323094177246093, 0.08320159912109375, 0.0835579833984375, 0.0832252197265625, 0.08295641326904298, 0.08302822113037109, 0.08412159729003907, 0.08356658935546875, 0.0837732162475586, 0.08334873962402344, 0.08419222259521485, 0.08347209930419922, 0.0863276138305664, 0.08225532531738282, 0.08188166046142578, 0.08205379486083984, 0.08238175964355468, 0.0823419189453125, 0.0819085464477539, 0.08195481872558594, 0.08178096008300781, 0.08226179504394532, 0.0827508773803711, 0.08367164611816406, 0.08336383819580079, 0.08246272277832031, 0.08207154846191406, 0.08270636749267578, 0.08304425811767578, 0.0824768295288086, 0.08242774200439452, 0.08282579040527344, 0.08227798461914063, 0.0822908172607422, 0.08259529876708985, 0.08315491485595704, 0.08301168060302734, 0.08328656005859375, 0.08280194854736328, 0.08285481262207031, 
0.08229273223876953, 0.08242908477783203, 0.08305136108398438, 0.08300953674316407, 0.0828006362915039, 0.08261564636230469, 0.08307574462890625, 0.08344166564941406, 0.08318156433105468, 0.08311971282958984, 0.08287273406982422, 0.08451862335205078, 0.08252793884277344, 0.08307158660888672, 0.08344278717041016, 0.08323350524902344, 0.0834845428466797, 0.08265760040283203, 0.0835072021484375, 0.083525634765625, 0.08362531280517578, 0.08357545471191406, 0.08351251220703125, 0.08333599853515625, 0.08305254364013671, 0.08296038055419921, 0.083451904296875, 0.0839024658203125, 0.08361564636230469, 0.08339055633544921, 0.08353791809082031, 0.08349919891357421, 0.08356639862060547, 0.08410521697998047, 0.08350508880615234, 0.08713782501220703, 0.08264518737792968, 0.08180582427978515, 0.08184422302246094, 0.08169062042236327, 0.08241970825195312, 0.08231116485595703, 0.08222278594970703, 0.0823729248046875, 0.08238489532470702, 0.08280409240722657, 0.08298079681396485, 0.08329081726074218, 0.08285305786132813, 0.08242054748535156, 0.08234803009033204, 0.08228659057617188, 0.08272486114501953, 0.08245862579345703, 0.08247039794921875, 0.08228233337402344, 0.08309804534912109, 0.08295680236816406, 0.08347004699707031, 0.08308121490478515, 0.08312828826904296, 0.08272489929199218, 0.0827883529663086, 0.08232550048828124, 0.08262659454345703, 0.08308528137207032, 0.08292066955566406, 0.08308201599121094, 0.08341299438476563, 0.08333017730712891, 0.0834031982421875, 0.08361004638671875, 0.08340016174316406, 0.0829415054321289, 0.08315293121337891, 0.08292447662353515, 0.08324710083007812, 0.08315654754638672, 0.08311033630371094, 0.08330364990234375, 0.08302467346191406, 0.08353411102294922, 0.08341177368164063, 0.0835040283203125, 0.08347238159179687, 0.0833108139038086, 0.08347011566162109, 0.08330239868164062, 0.08311369323730469, 0.08319209289550782, 0.08336998748779297, 0.08366268920898437, 0.08369961547851562, 0.08369792175292969, 0.08351538848876953, 0.08437366485595703, 0.08369718170166016, 0.08349311828613282, 0.08560972595214844, 0.08193698883056641, 0.08161705780029296, 0.0821372833251953, 0.08231027221679688, 0.08228729248046875, 0.08255078125, 0.0820940170288086, 0.08238233947753906, 0.08247148895263672, 0.08248873901367187, 0.08328790283203125, 0.0829808349609375, 0.08278518676757812, 0.08201817321777344, 0.08223680114746093, 0.0823732452392578, 0.08256873321533204, 0.0824768295288086, 0.08238969421386719, 0.08267161560058593, 0.08243609619140625, 0.08256841278076171, 0.08329007720947265, 0.08277894592285157, 0.08325270080566406, 0.08327017974853515, 0.08285932922363282, 0.08252598571777343, 0.08278422546386718, 0.0827109146118164, 0.0825472640991211, 0.08294009399414062, 0.08288998413085938, 0.08299372863769532, 0.08364031982421875, 0.08323481750488282, 0.08300748443603516, 0.08313871765136718, 0.08271855926513672, 0.08350428771972657, 0.08295203399658203, 0.08316124725341797, 0.08322777557373047, 0.0829513931274414, 0.08330630493164062, 0.08325788879394531, 0.08311158752441407, 0.08300390625, 0.08273094177246093, 0.08410115051269532, 0.08342736053466797, 0.083525634765625, 0.08346828460693359, 0.08311746978759765, 0.08334944152832031, 0.083159423828125, 0.08373072052001954, 0.08356614685058594, 0.0838023681640625, 0.08377926635742187, 0.08376483154296875, 0.08397507476806641, 0.08650447845458985, 0.08210736083984375, 0.08181350708007812, 0.08205657958984375, 0.08244863891601563, 0.08247647857666016, 0.0822259521484375, 0.08191535949707031, 0.08232621002197266, 0.08254668426513671, 
0.08245801544189453, 0.08362044525146485, 0.08305868530273437, 0.08273065948486329, 0.08245283508300781, 0.08233721923828125, 0.08228463745117187, 0.08211414337158203, 0.08274774169921875, 0.0834683837890625, 0.08264749145507813, 0.08288665771484376, 0.08283481597900391, 0.08358156585693359, 0.08362726593017578, 0.08294477081298827, 0.08252767944335937, 0.08224985504150391, 0.08242803192138672, 0.0826180191040039, 0.08319452667236328, 0.08326322937011718, 0.08293170928955078, 0.08298684692382813, 0.08367984008789063, 0.08370976257324218, 0.08333103942871094, 0.0834845428466797, 0.08299945831298829, 0.0827883529663086, 0.08293376159667969, 0.08317906951904297, 0.08322297668457031, 0.08326143646240235, 0.083019775390625, 0.08331878662109375, 0.08311142730712891, 0.08405657958984375, 0.08492880249023438, 0.08314441680908204, 0.08287769317626953, 0.08289523315429688, 0.0833908462524414, 0.08349420928955079, 0.08347727966308593, 0.08359458923339844, 0.08342720031738281, 0.08345366668701172, 0.08396284484863281, 0.08387779235839844, 0.0836445083618164, 0.08373452758789063, 0.0835420150756836]",tokens/s,12.052672676206061,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,918.441984,622.723072,0.0,220.20096,205.438976,s,1,7.8970478515625,7.8970478515625,0.0,7.8970478515625,7.8970478515625,7.8970478515625,7.8970478515625,[7.8970478515625],,kWh,2.2174442354162236e-05,2.4385341187183537e-06,7.807784024010944e-06,3.242076049689153e-05,,MB,1400.963072,677.249024,0.0,260.046848,226.388992,s,16,0.21212710475921634,0.01325794404745102,9.033278918196807e-05,0.013232848167419434,0.013387760162353515,0.013434344053268432,0.0134547917842865,"[0.013213055610656738, 0.013248895645141602, 0.013269184112548828, 0.013459903717041016, 0.013219200134277343, 0.013166624069213867, 0.01322371196746826, 0.013214112281799317, 0.01317078399658203, 0.013304800033569335, 0.013349696159362793, 0.013425824165344238, 0.013241984367370606, 0.013157600402832032, 0.013140224456787109, 0.013321503639221192]",tokens/s,19309.177884878667,kWh,3.832853812335801e-07,4.226903379935297e-08,2.2667158699475678e-07,6.522260020276899e-07,tokens/kWh,392501984.2878507,MB,1442.562048,691.929088,0.0,274.726912,226.391552,s,16,10.0667763671875,0.6291735229492188,0.0034928218358242645,0.6284032287597656,0.6337787475585938,0.6356130065917969,0.6364722106933594,"[0.62723681640625, 0.6258632202148438, 0.630160888671875, 0.62606201171875, 0.626392578125, 0.6303976440429687, 0.6282301635742188, 0.6226085815429687, 0.6285762939453124, 0.6272710571289063, 0.6352550048828125, 0.63668701171875, 0.632302490234375, 0.6276344604492188, 0.6310910034179688, 0.6310071411132813]",tokens/s,100.13135915937897,kWh,1.8004295368768942e-05,1.985478094194924e-06,6.973525145381034e-06,2.69632986083449e-05,tokens/kWh,2336509.3757668827,,s,1008,10.05840968990327,0.009978581041570693,0.00017098170591646654,0.009944575786590576,0.010122015762329102,0.010221994972229004,0.010624712324142456,"[0.009594911575317383, 0.009933856010437012, 0.009855360031127929, 0.009916095733642579, 0.00985587215423584, 0.009894944190979004, 0.009855199813842773, 0.00987827205657959, 0.009910176277160645, 0.00986736011505127, 0.009930751800537109, 0.009872896194458008, 0.009886207580566407, 0.00993280029296875, 0.009944543838500977, 0.009922911643981933, 0.009974271774291991, 0.009907168388366699, 0.00996012783050537, 0.009967071533203125, 0.009972288131713867, 0.009873408317565918, 0.00991641616821289, 0.009788448333740235, 0.010045568466186523, 0.009892704010009766, 0.009898015975952149, 0.009999648094177246, 0.009994943618774415, 0.010012672424316407, 0.010204192161560058, 0.010054623603820801, 0.01001587200164795, 0.009933695793151856, 0.009984000205993653, 0.009964544296264649, 0.010107904434204102, 0.010065983772277833, 0.010000320434570312, 0.00993721580505371, 0.010055104255676269, 0.009918720245361327, 0.010053791999816895, 0.009956607818603516, 0.00995798397064209, 0.009898143768310547, 0.009854816436767578, 0.010115072250366211, 
0.010014623641967773, 0.009814111709594727, 0.009966591835021972, 0.009818400382995605, 0.01003593635559082, 0.009942208290100098, 0.010038080215454102, 0.010247936248779297, 0.010048895835876466, 0.00993984031677246, 0.00990608024597168, 0.009863264083862304, 0.009979904174804688, 0.009868703842163085, 0.009886303901672363, 0.009719008445739747, 0.0100032320022583, 0.009999360084533691, 0.009939455986022949, 0.009875391960144043, 0.009932352066040038, 0.009876128196716308, 0.009886303901672363, 0.009928447723388672, 0.009980256080627442, 0.01002665615081787, 0.010100128173828125, 0.009892704010009766, 0.009932864189147949, 0.009844415664672852, 0.010059807777404785, 0.009877663612365722, 0.009856831550598145, 0.009805824279785156, 0.009868576049804687, 0.009970399856567383, 0.009866944313049316, 0.009893759727478028, 0.009933247566223144, 0.009903936386108399, 0.009949503898620605, 0.009907999992370605, 0.009936991691589356, 0.009971776008605958, 0.009912447929382324, 0.009935744285583496, 0.010092896461486816, 0.009979904174804688, 0.009914976119995117, 0.009975168228149414, 0.009929344177246094, 0.009955615997314453, 0.00991590404510498, 0.00986246395111084, 0.010031840324401856, 0.009931232452392578, 0.009811424255371093, 0.009903552055358887, 0.009911104202270508, 0.009968768119812012, 0.009889823913574219, 0.009966431617736816, 0.009842975616455078, 0.009924672126770019, 0.010029760360717774, 0.009998911857604981, 0.00984921646118164, 0.009855104446411132, 0.009923520088195801, 0.00988259220123291, 0.009930208206176757, 0.00992307186126709, 0.009881631851196289, 0.010042367935180664, 0.009851903915405273, 0.009975520133972168, 0.0099269437789917, 0.009883487701416015, 0.009668288230895996, 0.010077823638916015, 0.00993727970123291, 0.009928607940673828, 0.009832544326782227, 0.00998806381225586, 0.0099399995803833, 0.010012960433959961, 0.009853599548339843, 0.010463552474975586, 0.009923328399658203, 0.009961952209472656, 0.0110250244140625, 0.009947039604187012, 0.009919967651367188, 0.009922080039978028, 0.010030079841613769, 0.009895936012268066, 0.009768159866333007, 0.009834336280822755, 0.009890399932861327, 0.009838944435119628, 0.009782272338867188, 0.009896960258483887, 0.009832032203674316, 0.009840288162231446, 0.009898367881774903, 0.009934528350830078, 0.01012224006652832, 0.009862144470214844, 0.009800383567810059, 0.009937248229980468, 0.010002016067504883, 0.010089823722839356, 0.01004207992553711, 0.009954303741455077, 0.009975104331970215, 0.00982579231262207, 0.009869824409484864, 0.009804767608642578, 0.009876192092895508, 0.009835871696472168, 0.00983516788482666, 0.009871007919311524, 0.009879648208618164, 0.009832703590393066, 0.010041472434997558, 0.010044544219970704, 0.009993056297302245, 0.009852255821228027, 0.009857312202453613, 0.009959712028503417, 0.009878623962402343, 0.009865568161010743, 0.009904512405395508, 0.009965760231018066, 0.009991840362548828, 0.009889280319213867, 0.009974623680114746, 0.011286527633666991, 0.010265888214111328, 0.01132367992401123, 0.010270463943481445, 0.00967635154724121, 0.010000927925109863, 0.010084256172180176, 0.010357888221740722, 0.010021247863769532, 0.009928832054138184, 0.009836928367614746, 0.009994400024414063, 0.00988758373260498, 0.009967616081237793, 0.009887743949890136, 0.009862848281860351, 0.010000479698181153, 0.010010848045349121, 0.009977855682373048, 0.009955327987670898, 0.010018272399902344, 0.00997001552581787, 0.009939135551452637, 0.009936896324157715, 0.009959584236145019, 
0.009836159706115722, 0.010039520263671876, 0.010026911735534667, 0.009867103576660156, 0.009916223526000977, 0.010019264221191405, 0.00988268756866455, 0.009876288414001465, 0.009873536109924317, 0.009907999992370605, 0.009912351608276367, 0.009951423645019532, 0.009852831840515136, 0.009836640357971192, 0.009814016342163086, 0.009878944396972657, 0.009945088386535645, 0.010052191734313964, 0.009963520050048828, 0.009969663619995118, 0.009930784225463868, 0.009957663536071778, 0.009983839988708497, 0.009991904258728028, 0.009970815658569335, 0.010054656028747559, 0.009885312080383301, 0.009851263999938966, 0.009815711975097657, 0.009967967987060547, 0.009871232032775878, 0.009863295555114747, 0.009977408409118652, 0.009853535652160645, 0.009856703758239747, 0.009907360076904298, 0.00985529613494873, 0.009831104278564453, 0.009861120223999023, 0.009834495544433594, 0.009934847831726074, 0.009807168006896972, 0.009912480354309082, 0.010161664009094238, 0.00996793556213379, 0.009937536239624024, 0.010038496017456055, 0.009920991897583008, 0.00988268756866455, 0.009987135887145997, 0.009875328063964843, 0.00988310432434082, 0.009900544166564941, 0.009907936096191406, 0.009883968353271485, 0.009904128074645996, 0.00991641616821289, 0.009985343933105469, 0.009958016395568847, 0.009854240417480469, 0.010248991966247558, 0.01002905559539795, 0.009947135925292968, 0.010087743759155273, 0.009943743705749512, 0.009945088386535645, 0.009871359825134277, 0.009879551887512206, 0.009973759651184083, 0.00992460823059082, 0.009932928085327149, 0.009916064262390136, 0.009930111885070802, 0.009904800415039062, 0.009951423645019532, 0.010000224113464356, 0.00992204761505127, 0.009822879791259766, 0.009846176147460937, 0.009836511611938476, 0.009808511734008788, 0.009945088386535645, 0.00987660789489746, 0.009978336334228516, 0.010060447692871094, 0.009901408195495606, 0.009873632431030274, 0.010121760368347167, 0.009911487579345703, 0.009952032089233398, 0.009936575889587402, 0.009999679565429688, 0.009930784225463868, 0.009904800415039062, 0.0098220157623291, 0.00986131191253662, 0.009906175613403321, 0.009843040466308593, 0.009855903625488282, 0.009912128448486329, 0.009995200157165528, 0.009980128288269044, 0.00981174373626709, 0.009986047744750976, 0.009928735733032226, 0.009548895835876465, 0.009905311584472656, 0.00986905574798584, 0.010045151710510253, 0.009906463623046875, 0.009969856262207031, 0.009983807563781739, 0.00988976001739502, 0.009938624382019043, 0.010066271781921387, 0.010125184059143066, 0.009936287879943847, 0.01001865577697754, 0.010018976211547851, 0.010070752143859864, 0.010157664299011231, 0.01005350399017334, 0.01030339241027832, 0.010377120018005372, 0.01022435188293457, 0.010620991706848144, 0.010231743812561036, 0.010049535751342773, 0.010076160430908204, 0.01011411190032959, 0.010073023796081543, 0.009902079582214356, 0.009877375602722168, 0.009918592453002929, 0.009961152076721191, 0.009886176109313964, 0.009945152282714843, 0.009881471633911133, 0.009895456314086915, 0.00984102439880371, 0.010082304000854492, 0.010092543601989747, 0.009918463706970216, 0.010049759864807129, 0.009942367553710937, 0.010006976127624512, 0.01009062385559082, 0.009949407577514648, 0.00994985580444336, 0.009947967529296876, 0.010039487838745116, 0.009897983551025391, 0.009869215965270996, 0.00988316822052002, 0.009877056121826171, 0.009929280281066894, 0.00985523223876953, 0.010071392059326173, 0.009924736022949219, 0.010214112281799316, 0.009984000205993653, 0.009920512199401856, 
0.009885184288024903, 0.009863295555114747, 0.01004582405090332, 0.00992255973815918, 0.01006387233734131, 0.009926303863525391, 0.009618623733520508, 0.009935680389404298, 0.00999177646636963, 0.00994547176361084, 0.010021056175231934, 0.009882528305053711, 0.009939871788024902, 0.009891872406005859, 0.009934687614440918, 0.009791616439819337, 0.009915583610534668, 0.010092927932739257, 0.009900511741638184, 0.009993280410766602, 0.010060735702514648, 0.009889792442321778, 0.009917984008789063, 0.009927295684814453, 0.00986300754547119, 0.009884767532348633, 0.00999721622467041, 0.009985376358032226, 0.009860799789428712, 0.009853535652160645, 0.009942815780639649, 0.009905792236328126, 0.009896767616271973, 0.009885087966918945, 0.01000934410095215, 0.01002086353302002, 0.009871359825134277, 0.009969951629638672, 0.009830112457275391, 0.009947104454040527, 0.00988368034362793, 0.009931903839111328, 0.010224512100219726, 0.010042943954467774, 0.010998080253601075, 0.00995081615447998, 0.009951775550842284, 0.010067008018493652, 0.010189984321594238, 0.009979680061340332, 0.00996342372894287, 0.009897055625915528, 0.010063199996948243, 0.010036191940307616, 0.01007487964630127, 0.010031200408935547, 0.009949024200439454, 0.010008543968200683, 0.009912063598632812, 0.00990441608428955, 0.00991641616821289, 0.009814240455627442, 0.009853983879089355, 0.009884415626525879, 0.009934656143188477, 0.009946304321289063, 0.010019840240478516, 0.010023200035095214, 0.009875167846679687, 0.00955196762084961, 0.009856991767883301, 0.009928704261779785, 0.00994480037689209, 0.00991641616821289, 0.00985852813720703, 0.009888575553894044, 0.009904128074645996, 0.009902079582214356, 0.009928768157958984, 0.00987769603729248, 0.010055423736572266, 0.009973759651184083, 0.009970975875854491, 0.009959391593933106, 0.009902591705322266, 0.010017024040222169, 0.009891615867614747, 0.009953887939453124, 0.009892800331115722, 0.009970335960388184, 0.009858752250671387, 0.010008735656738281, 0.009801471710205078, 0.009863967895507813, 0.009836192131042481, 0.009956671714782714, 0.009810527801513673, 0.009810303688049317, 0.009808799743652345, 0.009920991897583008, 0.009794943809509278, 0.009966431617736816, 0.009875743865966798, 0.009824095726013183, 0.00984659194946289, 0.009783488273620605, 0.009817503929138183, 0.009779808044433593, 0.009829888343811035, 0.009814528465270997, 0.009785344123840332, 0.0097642240524292, 0.009753215789794921, 0.009818431854248046, 0.009798848152160644, 0.009833087921142578, 0.009887616157531738, 0.010047295570373534, 0.009808192253112793, 0.009924480438232422, 0.009873408317565918, 0.009911808013916015, 0.009949695587158204, 0.010098879814147949, 0.009875200271606445, 0.009816287994384765, 0.009850720405578613, 0.009873408317565918, 0.009875455856323241, 0.009786432266235351, 0.009817024230957032, 0.009795136451721191, 0.009584639549255371, 0.009848832130432129, 0.009837759971618652, 0.01003603172302246, 0.009887743949890136, 0.009879839897155763, 0.00990771198272705, 0.009900544166564941, 0.009887359619140624, 0.009936991691589356, 0.009887231826782226, 0.009957759857177735, 0.009875071525573731, 0.00986953639984131, 0.009869791984558106, 0.00986911964416504, 0.009971712112426758, 0.009865216255187988, 0.009801312446594239, 0.010045856475830077, 0.010108927726745605, 0.00993791961669922, 0.009933407783508302, 0.009938559532165527, 0.009947872161865235, 0.010152064323425292, 0.010086336135864259, 0.010041407585144043, 0.010185952186584473, 0.00992739200592041, 
0.00997327995300293, 0.009843168258666992, 0.009864255905151368, 0.009993311882019042, 0.009996479988098144, 0.01023904037475586, 0.01029360008239746, 0.010151968002319336, 0.010046879768371582, 0.009894720077514648, 0.009949119567871093, 0.009895135879516601, 0.00991919994354248, 0.009883199691772461, 0.010059904098510743, 0.009907839775085449, 0.009894720077514648, 0.00990351963043213, 0.009978303909301758, 0.010088128089904785, 0.010024448394775391, 0.00994540786743164, 0.010060416221618653, 0.010182687759399414, 0.010287103652954101, 0.01002086353302002, 0.009914719581604003, 0.009813759803771973, 0.010043519973754883, 0.009881312370300294, 0.009905247688293458, 0.009962464332580566, 0.010262847900390625, 0.009703424453735352, 0.009944671630859376, 0.009974176406860352, 0.009926015853881837, 0.009878144264221192, 0.009851167678833008, 0.009812992095947265, 0.009843199729919434, 0.009934399604797363, 0.009875455856323241, 0.009931424140930175, 0.009856927871704101, 0.009848928451538086, 0.009913375854492187, 0.0098887357711792, 0.010245280265808105, 0.009939807891845703, 0.009864864349365234, 0.009916768074035645, 0.009922752380371094, 0.010125120162963868, 0.009918463706970216, 0.009891936302185059, 0.009999744415283203, 0.00986575984954834, 0.009893888473510743, 0.009893856048583984, 0.009904159545898437, 0.009928704261779785, 0.009965567588806153, 0.010115072250366211, 0.009994496345520019, 0.009881279945373534, 0.009881664276123046, 0.009885696411132813, 0.009856512069702148, 0.00990822410583496, 0.009875967979431152, 0.00986953639984131, 0.009920191764831544, 0.009994336128234863, 0.009937248229980468, 0.010020511627197266, 0.009955648422241212, 0.010083359718322754, 0.010021727561950683, 0.00993660831451416, 0.00995132827758789, 0.010004480361938477, 0.00992579174041748, 0.010021439552307129, 0.009922847747802735, 0.009955360412597656, 0.010001503944396972, 0.009994879722595214, 0.00995686435699463, 0.009990912437438965, 0.01017958354949951, 0.010118399620056152, 0.010123007774353028, 0.010076160430908204, 0.009936896324157715, 0.009976863861083984, 0.009707008361816406, 0.010106975555419923, 0.010168992042541503, 0.010080256462097169, 0.010156031608581542, 0.010043392181396485, 0.010139039993286133, 0.009996383666992188, 0.010078335762023926, 0.010033535957336425, 0.010143744468688964, 0.010239647865295411, 0.010169919967651367, 0.0100730562210083, 0.009967776298522949, 0.009986016273498535, 0.009938624382019043, 0.009938943862915038, 0.009867263793945312, 0.009871392250061036, 0.010014464378356933, 0.010016160011291504, 0.010040127754211425, 0.0101212158203125, 0.01007430362701416, 0.010022047996520997, 0.010058496475219727, 0.00991427230834961, 0.009930751800537109, 0.009934368133544922, 0.009998815536499023, 0.009920512199401856, 0.009926624298095703, 0.00999244785308838, 0.010284832000732422, 0.010016768455505372, 0.009930368423461913, 0.009904512405395508, 0.010098688125610352, 0.009979104042053223, 0.009973759651184083, 0.010068767547607422, 0.010037407875061036, 0.009971424102783203, 0.010223551750183106, 0.010173824310302735, 0.01006060791015625, 0.010090496063232422, 0.01006713581085205, 0.010085087776184082, 0.010102879524230958, 0.010065024375915527, 0.01002790355682373, 0.010014911651611329, 0.010242176055908204, 0.010261599540710448, 0.010060383796691894, 0.01101414394378662, 0.010744128227233887, 0.010219103813171386, 0.010147680282592773, 0.010099072456359864, 0.010069888114929199, 0.009771007537841797, 0.010059776306152344, 0.010031231880187988, 
0.010065792083740234, 0.010147456169128419, 0.009992032051086426, 0.009914912223815919, 0.009986144065856933, 0.01009449577331543, 0.009965567588806153, 0.009975456237792968, 0.010062175750732422, 0.010083392143249511, 0.01006220817565918, 0.011135552406311035, 0.011411616325378418, 0.010110591888427735, 0.009957792282104493, 0.01, 0.009922143936157226, 0.010169055938720703, 0.009920384407043457, 0.009822208404541016, 0.009891839981079101, 0.009860575675964356, 0.009902400016784668, 0.0098472957611084, 0.00994211196899414, 0.00996224021911621, 0.009829312324523927, 0.009912320137023926, 0.01022764778137207, 0.009984383583068848, 0.010037280082702637, 0.010329728126525878, 0.01106383991241455, 0.010124863624572754, 0.010097439765930177, 0.010116864204406738, 0.010393312454223633, 0.01013644790649414, 0.01009228801727295, 0.010127167701721191, 0.009995903968811035, 0.01039151954650879, 0.0101692476272583, 0.01018876838684082, 0.010188063621520997, 0.010189248085021972, 0.010245599746704102, 0.010102463722229003, 0.01007919979095459, 0.010051168441772462, 0.010122591972351075, 0.010001440048217774, 0.009992159843444825, 0.010000224113464356, 0.010235072135925293, 0.009992799758911132, 0.009910592079162598, 0.009893888473510743, 0.010057663917541504, 0.009803647994995118, 0.00976364803314209, 0.010094719886779785, 0.010028703689575195, 0.010029312133789062, 0.010100640296936036, 0.010001983642578125, 0.010037759780883788, 0.010087936401367188, 0.01011353588104248, 0.009998656272888184, 0.010061535835266113, 0.010092512130737304, 0.010027008056640625, 0.009986047744750976, 0.009911359786987305, 0.009935808181762696, 0.010180255889892579, 0.010099040031433106, 0.009916064262390136, 0.009924896240234375, 0.009936863899230958, 0.009963295936584472, 0.009971391677856446, 0.009951199531555176, 0.009970335960388184, 0.009866527557373047, 0.01021350383758545, 0.009988703727722169, 0.009924799919128417, 0.009993184089660644, 0.010148703575134277, 0.010255743980407715, 0.010133376121520996, 0.009918911933898926, 0.010053888320922851, 0.009916768074035645, 0.01004751968383789, 0.009971391677856446, 0.009973407745361328, 0.009974111557006836, 0.009953280448913575, 0.009889568328857422, 0.009942815780639649, 0.009941439628601074, 0.009979904174804688, 0.009936832427978515, 0.009939007759094239, 0.010055999755859375, 0.009869088172912597, 0.009873727798461913, 0.009858655929565429, 0.009836159706115722, 0.009816255569458008, 0.01060268783569336, 0.010012831687927246, 0.011384448051452637, 0.010000224113464356, 0.00996127986907959, 0.010624992370605468, 0.009922240257263184, 0.009894880294799804, 0.010055104255676269, 0.009887840270996094, 0.009678239822387696, 0.009937600135803222, 0.009858976364135743, 0.009945247650146484, 0.009912256240844727, 0.009920415878295898, 0.010240032196044921, 0.009891103744506836, 0.010048192024230957, 0.00993280029296875, 0.009891839981079101, 0.00990447998046875, 0.010092191696166992, 0.010192288398742675, 0.010002304077148437, 0.009980159759521484, 0.009943519592285156, 0.00982630443572998, 0.009932831764221191, 0.009936384201049805, 0.00998755168914795, 0.009917440414428711, 0.010000543594360351, 0.010151007652282714, 0.01007043170928955, 0.00992086410522461, 0.009981856346130372, 0.009893728256225585, 0.00987775993347168, 0.009865471839904785, 0.00981497573852539, 0.009814528465270997, 0.009861439704895019, 0.009934975624084472, 0.00988054370880127, 0.009901151657104493, 0.010047295570373534, 0.009920255661010742, 0.009974016189575195, 0.009904128074645996, 
0.010143327713012695, 0.01000649642944336, 0.009912960052490235, 0.010047455787658692, 0.009985535621643067, 0.009961824417114257, 0.009900032043457031, 0.00991436767578125, 0.009924863815307617, 0.00994438362121582, 0.009896384239196778, 0.01005686378479004, 0.010015904426574707, 0.009930432319641113, 0.009953280448913575, 0.009991423606872558, 0.009937664031982422, 0.009940320014953614, 0.00986793613433838, 0.009876959800720215, 0.009965632438659668, 0.010068448066711425, 0.010079423904418945, 0.009547904014587402, 0.009895487785339356, 0.009910880088806152, 0.009872223854064941, 0.009851103782653808, 0.009798175811767578, 0.009830880165100097, 0.00981379222869873, 0.010020288467407226, 0.009863743782043457, 0.009926655769348144, 0.009957375526428223, 0.009957375526428223, 0.00995356845855713, 0.009908191680908203, 0.009934592247009277, 0.00993836784362793, 0.009957951545715332, 0.009873472213745117, 0.009973343849182128, 0.00998639965057373, 0.010065216064453125, 0.010121919631958009, 0.010074336051940918, 0.010112447738647461, 0.010119520187377929, 0.01012940788269043, 0.009963199615478516, 0.009900351524353028, 0.009911423683166503, 0.009812864303588867, 0.009881600379943848, 0.009881600379943848, 0.00999833583831787, 0.010006431579589845, 0.010025055885314941, 0.01003929615020752, 0.010005503654479981, 0.010546143531799317, 0.010211359977722169, 0.010180095672607421, 0.010287712097167969, 0.010145695686340332, 0.010187104225158691, 0.010082112312316895, 0.010471263885498046, 0.009959103584289551, 0.010125120162963868, 0.009945599555969239, 0.010557439804077149, 0.009987648010253907, 0.009988736152648926, 0.009881407737731933, 0.009990528106689453, 0.009858624458312988, 0.009844767570495605, 0.010025312423706054, 0.009999584197998047, 0.01010912036895752, 0.010161727905273437, 0.010011167526245117, 0.010037440299987793, 0.010114879608154296, 0.009659168243408203, 0.01007590389251709, 0.01006991958618164, 0.01010870361328125, 0.009924384117126464, 0.009990943908691406, 0.009961119651794434, 0.009932448387145996, 0.010076383590698242, 0.00999443244934082, 0.010148127555847168, 0.010140992164611817, 0.009935551643371583, 0.010051199913024902, 0.00989740753173828, 0.01055635166168213, 0.010301312446594239, 0.010297663688659668, 0.010360383987426757, 0.010374624252319336, 0.009945183753967286, 0.009926655769348144, 0.009988736152648926, 0.010012736320495605, 0.009900032043457031, 0.009932512283325196, 0.009877792358398437, 0.009933024406433106, 0.009895711898803712, 0.009904447555541993, 0.009891712188720703, 0.009923904418945313, 0.010060416221618653, 0.010172287940979003, 0.009934176445007324, 0.009913056373596192, 0.009871552467346192, 0.009897727966308594, 0.009959424018859863, 0.009967488288879394, 0.010038816452026367, 0.010003071784973144, 0.009930720329284668, 0.009994239807128906, 0.009957375526428223, 0.009944607734680175, 0.009970144271850586, 0.010086400032043457, 0.009924063682556153, 0.009894495964050292, 0.009938783645629883, 0.009948639869689942, 0.009980575561523437, 0.009969823837280274, 0.009908032417297364, 0.010059935569763184, 0.010065759658813477, 0.010136704444885253, 0.01002716827392578, 0.010015359878540039, 0.009963775634765625, 0.010075648307800293, 0.009886303901672363]",tokens/s,100.21464934082387,,, 
4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1532.243968,1331.560448,0.0,945.815552,943.480832,s,1,8.4270751953125,8.4270751953125,0.0,8.4270751953125,8.4270751953125,8.4270751953125,8.4270751953125,[8.4270751953125],,kWh,3.3326626258334124e-05,3.6687491853690647e-06,1.0465008372012585e-05,4.746038381571577e-05,,MB,1631.690752,1495.138304,0.0,1080.03328,1046.519808,s,10,0.8916538009643554,0.08916538009643554,0.0009173184373064862,0.08953016281127929,0.08999176330566407,0.08999356231689452,0.0899950015258789,"[0.08999136352539062, 0.0882333755493164, 0.08714701080322265, 0.08921766662597656, 0.08929718780517579, 0.08829519653320313, 0.08992934417724609, 0.08976313781738281, 0.08978415679931641, 0.089995361328125]",tokens/s,2871.0694635420928,kWh,2.749875139252211e-06,3.0326341865696393e-07,1.830131266803804e-06,4.883269824712979e-06,tokens/kWh,52423889.973158866,MB,1631.690752,1583.218688,0.0,1166.016512,1082.823168,s,10,16.371494262695315,1.6371494262695314,0.00546598714968148,1.6382313232421875,1.6432295288085936,1.6443327209472656,1.6452152746582032,"[1.6295498046875, 1.6346856689453124, 1.6454359130859375, 1.6272725830078125, 1.642984375, 1.634318115234375, 1.6391318359375, 1.6379979248046874, 1.6384647216796875, 1.6416533203125]",tokens/s,38.48152098342917,kWh,4.7733504611161794e-05,5.264647640237536e-06,2.2114971222595324e-05,7.511312347399466e-05,tokens/kWh,838734.9252199796,,s,630,16.368950139999395,0.025982460539681573,0.0004111665621706175,0.025910736083984375,0.02633743362426758,0.026485423469543457,0.027768209915161136,"[0.026147872924804687, 0.026046464920043946, 0.026079551696777344, 0.02594428825378418, 0.02576963233947754, 0.02597763252258301, 0.026046464920043946, 0.025916799545288086, 0.02578521537780762, 0.02567523193359375, 0.02565932846069336, 0.025917760848999022, 0.026379423141479494, 0.026075551986694336, 0.02593020820617676, 0.025980928421020507, 0.025824352264404295, 0.025852832794189453, 0.025647104263305662, 0.025649152755737304, 0.025549983978271483, 0.025706783294677734, 0.025542303085327147, 0.02567875289916992, 0.025613664627075195, 0.025960735321044922, 0.025885087966918945, 0.026347103118896483, 0.02606492805480957, 0.026028383255004884, 0.0258571834564209, 0.026116960525512694, 0.02599295997619629, 0.026169248580932617, 0.026042720794677735, 0.02590105628967285, 0.025796607971191408, 0.025812992095947264, 0.02566934394836426, 0.025772159576416015, 0.025766271591186524, 0.025820255279541016, 0.025886560440063478, 0.025886848449707033, 0.02565555191040039, 0.025653087615966796, 0.02595622444152832, 0.025916000366210938, 0.02591472053527832, 0.02585990333557129, 0.025864576339721678, 0.025877119064331055, 0.02570444869995117, 0.025637920379638673, 0.0258242244720459, 0.02575155258178711, 0.02571673583984375, 0.025829023361206054, 0.025773792266845702, 0.025776767730712892, 0.025648576736450195, 
0.025925792694091798, 0.025837440490722657, 0.026333568572998046, 0.026419136047363283, 0.026183679580688478, 0.026236831665039064, 0.026293792724609376, 0.0260380802154541, 0.026071807861328126, 0.025906400680541994, 0.02592639923095703, 0.025790496826171874, 0.025769855499267576, 0.025808544158935548, 0.02595187187194824, 0.02603660774230957, 0.025967071533203125, 0.025874431610107423, 0.025855968475341797, 0.025728799819946288, 0.025702463150024415, 0.026489023208618165, 0.026791200637817383, 0.02710966491699219, 0.027044288635253905, 0.026007551193237305, 0.025913728713989257, 0.026279296875, 0.026143232345581056, 0.026031871795654297, 0.025958272933959962, 0.025984415054321287, 0.025950784683227538, 0.025691551208496095, 0.02568169593811035, 0.025753664016723632, 0.02569215965270996, 0.025561248779296875, 0.025799423217773437, 0.02570649528503418, 0.025840896606445313, 0.02605129623413086, 0.026187807083129882, 0.026445856094360353, 0.025914688110351563, 0.025684127807617186, 0.025426431655883788, 0.02538502311706543, 0.025524255752563476, 0.025335071563720703, 0.025449087142944336, 0.02545193672180176, 0.02552828788757324, 0.025485023498535157, 0.026375072479248047, 0.025772031784057618, 0.025767936706542968, 0.02577939224243164, 0.02586502456665039, 0.025877632141113282, 0.02637686347961426, 0.025833696365356446, 0.025972768783569335, 0.025913312911987306, 0.025717920303344726, 0.02600102424621582, 0.0260447998046875, 0.026077184677124023, 0.02602992057800293, 0.026075328826904297, 0.026066816329956055, 0.025878623962402345, 0.026050048828125, 0.02588070487976074, 0.02605660820007324, 0.025819936752319337, 0.025923263549804686, 0.026032127380371094, 0.026105920791625978, 0.0263535041809082, 0.026480447769165038, 0.02622719955444336, 0.02627952003479004, 0.026304704666137695, 0.026388671875, 0.026224447250366212, 0.02612998390197754, 0.026111936569213866, 0.026210880279541014, 0.02638265609741211, 0.02602969551086426, 0.02596012878417969, 0.02589743995666504, 0.02594915199279785, 0.026043264389038086, 0.02595337677001953, 0.025873184204101562, 0.025960575103759764, 0.02596409606933594, 0.026402847290039062, 0.029670911788940428, 0.02646646308898926, 0.026249984741210937, 0.026230655670166015, 0.025914688110351563, 0.025845951080322265, 0.02584025573730469, 0.025954048156738282, 0.02591974449157715, 0.026003456115722655, 0.025903104782104492, 0.025871519088745118, 0.0259531192779541, 0.02628112030029297, 0.026157920837402343, 0.02635923194885254, 0.025996896743774416, 0.026069055557250975, 0.025926559448242188, 0.026271295547485352, 0.025809343338012696, 0.025906976699829103, 0.025871648788452148, 0.025768896102905274, 0.02575263977050781, 0.02581395149230957, 0.02588057518005371, 0.02633113670349121, 0.02620070457458496, 0.026366176605224608, 0.02603536033630371, 0.026147680282592775, 0.025931776046752928, 0.025730783462524415, 0.025702688217163087, 0.02585536003112793, 0.02570083236694336, 0.025612447738647463, 0.025667583465576172, 0.02571993637084961, 0.025762687683105467, 0.02571216011047363, 0.025695775985717772, 0.025422784805297853, 0.025616287231445312, 0.025550752639770507, 0.02561248016357422, 0.02548531150817871, 0.02574336051940918, 0.02573107147216797, 0.02589286422729492, 0.025749120712280273, 0.025872671127319335, 0.0258602237701416, 0.02591119956970215, 0.026312383651733398, 0.026110336303710936, 0.02592972755432129, 0.025659103393554688, 0.0258023681640625, 0.025832351684570314, 0.02581273651123047, 0.0261529598236084, 0.025927488327026366, 0.025785728454589842, 
0.02594246482849121, 0.02588710403442383, 0.02559702491760254, 0.02571356773376465, 0.025436159133911132, 0.02549900817871094, 0.025455232620239257, 0.025655296325683592, 0.02543519973754883, 0.02556835174560547, 0.0254638729095459, 0.025436256408691408, 0.02547283172607422, 0.02570697593688965, 0.025702816009521484, 0.025801855087280272, 0.025571744918823244, 0.025688127517700197, 0.025591840744018556, 0.025678207397460937, 0.02553411293029785, 0.027172895431518556, 0.028177728652954103, 0.026199039459228517, 0.025996959686279297, 0.02593212890625, 0.02651491165161133, 0.026443391799926757, 0.02623529624938965, 0.02575564765930176, 0.025824832916259765, 0.02562553596496582, 0.026204160690307617, 0.025769216537475586, 0.02566966438293457, 0.02580143928527832, 0.025855264663696288, 0.02612224006652832, 0.02653651237487793, 0.027107711791992187, 0.029938688278198244, 0.02646735954284668, 0.026303264617919923, 0.02623711967468262, 0.026264352798461912, 0.02680179214477539, 0.026140224456787108, 0.025749824523925782, 0.02584121513366699, 0.02583033561706543, 0.02580588722229004, 0.02565011215209961, 0.025618015289306642, 0.025729440689086915, 0.02568934440612793, 0.025768575668334962, 0.025896575927734374, 0.026153600692749024, 0.02622966384887695, 0.026029024124145508, 0.02610972785949707, 0.026103872299194336, 0.02648102378845215, 0.02639849662780762, 0.026189823150634766, 0.025928768157958984, 0.026083648681640623, 0.026276479721069335, 0.02591744041442871, 0.025974720001220704, 0.0259683837890625, 0.02635603141784668, 0.026198015213012696, 0.025712032318115235, 0.02583612823486328, 0.025675775527954102, 0.026230783462524415, 0.02575974464416504, 0.025776128768920898, 0.025645055770874024, 0.025882240295410155, 0.025721216201782228, 0.02572902488708496, 0.02592767906188965, 0.026015743255615235, 0.025949344635009766, 0.02576860809326172, 0.025725183486938478, 0.025784255981445313, 0.026160512924194336, 0.0259421443939209, 0.026099679946899414, 0.026012191772460936, 0.026097280502319336, 0.026099199295043944, 0.026265823364257812, 0.026120479583740235, 0.026337663650512697, 0.02636115264892578, 0.0263374080657959, 0.026249824523925783, 0.026127904891967774, 0.025988960266113283, 0.026218559265136717, 0.026059295654296873, 0.026011327743530273, 0.025950527191162108, 0.026066944122314452, 0.02578643226623535, 0.025886240005493163, 0.02581340789794922, 0.025763391494750976, 0.025597856521606444, 0.025578048706054686, 0.025449472427368162, 0.02560508728027344, 0.02556879997253418, 0.026866111755371094, 0.025813024520874025, 0.025992895126342775, 0.02573139190673828, 0.02568191909790039, 0.02575974464416504, 0.0259102725982666, 0.025854976654052734, 0.0257126407623291, 0.02557119941711426, 0.025677503585815428, 0.02547551918029785, 0.02551603126525879, 0.02565135955810547, 0.025743200302124025, 0.025604095458984375, 0.02551398468017578, 0.025701759338378906, 0.026306751251220704, 0.026708192825317383, 0.026591136932373048, 0.026188095092773436, 0.02616524887084961, 0.02583919906616211, 0.02581510353088379, 0.025776479721069338, 0.0261529598236084, 0.02587238311767578, 0.02613657569885254, 0.02589695930480957, 0.025852928161621092, 0.025912128448486327, 0.02602774429321289, 0.025814624786376954, 0.02569094467163086, 0.02611609649658203, 0.025841663360595703, 0.027792959213256835, 0.02609811210632324, 0.026034175872802736, 0.026284032821655274, 0.0260098876953125, 0.026648000717163087, 0.025880863189697265, 0.025806175231933594, 0.025838239669799805, 0.025865631103515627, 0.026036832809448244, 
0.02590924835205078, 0.026033279418945312, 0.02578256034851074, 0.025717344284057617, 0.025772031784057618, 0.02623904037475586, 0.02623072052001953, 0.026568864822387694, 0.026109792709350585, 0.025810943603515626, 0.025734399795532225, 0.025858816146850587, 0.02596976089477539, 0.02607606315612793, 0.026246335983276366, 0.0262357120513916, 0.02589244842529297, 0.025761247634887696, 0.026226751327514647, 0.025779071807861327, 0.025808544158935548, 0.025717023849487305, 0.025911359786987304, 0.026222784042358397, 0.025798463821411134, 0.025747743606567383, 0.02588047981262207, 0.0282128963470459, 0.026126495361328127, 0.025996639251708985, 0.02581692886352539, 0.025805631637573243, 0.02578416061401367, 0.02581929588317871, 0.02578995132446289, 0.025819232940673828, 0.0257325439453125, 0.026387424468994142, 0.025823200225830078, 0.025829408645629885, 0.025956352233886718, 0.02590105628967285, 0.025892896652221678, 0.025855520248413085, 0.02597318458557129, 0.0259237117767334, 0.02579158401489258, 0.02585206413269043, 0.02577824020385742, 0.025733055114746092, 0.0265482234954834, 0.026103103637695312, 0.02631340789794922, 0.02596249580383301, 0.02598297691345215, 0.0259804801940918, 0.027046335220336913, 0.026787616729736327, 0.027707616806030275, 0.026517120361328125, 0.026345855712890626, 0.026311872482299804, 0.02625004768371582, 0.026396671295166017, 0.026382335662841795, 0.02614476776123047, 0.025954591751098634, 0.026097375869750975, 0.025957952499389647, 0.025847551345825195, 0.02593452835083008, 0.025914848327636717, 0.025940511703491213, 0.025927007675170897, 0.02595088005065918, 0.026027904510498048, 0.026058656692504883, 0.026251487731933594, 0.02622038459777832, 0.02618726348876953, 0.026217279434204103, 0.02595987129211426, 0.025909183502197265, 0.025550592422485353, 0.02558195114135742, 0.025407840728759765, 0.02558527946472168, 0.0255184326171875, 0.025604127883911133, 0.025554399490356445, 0.02583795166015625, 0.025798784255981446, 0.02589014434814453, 0.02588703918457031, 0.025919872283935545, 0.025743392944335936, 0.02577846336364746, 0.02571641540527344, 0.025804800033569338, 0.025718719482421874, 0.02578019142150879, 0.025704544067382814, 0.025807231903076173, 0.025919296264648437, 0.025917312622070313, 0.025761024475097656, 0.025873088836669923, 0.02585737609863281, 0.025834112167358397, 0.025772064208984376, 0.02572697639465332, 0.025841407775878907, 0.025921375274658202, 0.026241727828979492, 0.02692915153503418, 0.02649648094177246, 0.02620470428466797, 0.026090879440307618, 0.02579631996154785, 0.02605558395385742, 0.02584115219116211, 0.02576223945617676, 0.025645055770874024, 0.02603628730773926, 0.02582841682434082, 0.026106815338134765, 0.026341087341308595, 0.02621059226989746, 0.025812223434448243, 0.025909887313842774, 0.02587638473510742, 0.025899328231811524, 0.025723968505859375, 0.027257951736450195, 0.028678144454956055, 0.026381664276123047, 0.025935808181762696, 0.02597920036315918, 0.02569001579284668, 0.025917695999145507, 0.025806623458862303, 0.026196191787719727, 0.02573311996459961, 0.02585331153869629, 0.02562704086303711, 0.026007711410522463, 0.025656991958618165, 0.025944608688354492, 0.025775999069213868, 0.02612665557861328, 0.02569593620300293, 0.025847808837890625, 0.025712736129760744, 0.02582102394104004, 0.025985055923461915, 0.02592950439453125, 0.026109216690063476, 0.02597590446472168, 0.02583129692077637, 0.025944000244140626, 0.025753055572509766, 0.025846368789672853, 0.02573721694946289, 0.025927616119384767, 
0.025765567779541015, 0.025979263305664062, 0.026034175872802736, 0.026207935333251952, 0.025882463455200195, 0.025909887313842774, 0.02573910331726074, 0.025841503143310546, 0.025718944549560547, 0.025767839431762696, 0.02586844825744629, 0.02601363182067871, 0.026360063552856444, 0.026242048263549804, 0.026315776824951172, 0.026226848602294923, 0.025818048477172853, 0.025853055953979492, 0.025921279907226563, 0.025811328887939453, 0.025896608352661134, 0.025735328674316407, 0.025916511535644532, 0.02636057662963867, 0.02660966491699219, 0.02632208061218262, 0.026056991577148438, 0.026077888488769532, 0.025809856414794923, 0.025924543380737304, 0.025911231994628907, 0.02595020866394043, 0.025886783599853514, 0.026828800201416016, 0.026241024017333983, 0.025913312911987306, 0.02620419120788574, 0.025934879302978515, 0.02617344093322754, 0.026096607208251955, 0.025793855667114257, 0.02572972869873047, 0.025802688598632814, 0.02584377670288086, 0.025772031784057618, 0.025585664749145507, 0.025724735260009766, 0.025692352294921873, 0.026089471817016603, 0.02612393569946289, 0.026171743392944338, 0.026206207275390626, 0.02629631996154785, 0.02598297691345215, 0.02599078369140625, 0.025997888565063475, 0.028026336669921874, 0.026669408798217772, 0.026458240509033202, 0.025884544372558594, 0.025839616775512695, 0.025955648422241212, 0.026284896850585937, 0.025950048446655275, 0.025999359130859375, 0.025776128768920898, 0.026156511306762695, 0.0257541446685791, 0.025984128952026366, 0.025805471420288086, 0.025939264297485352, 0.025665599822998045, 0.0258240966796875, 0.025800512313842772, 0.026418592453002928]",tokens/s,38.487501923567066,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1577.832448,1830.682624,0.0,1428.160512,1322.516992,s,1,8.501408203125,8.501408203125,0.0,8.501408203125,8.501408203125,8.501408203125,8.501408203125,[8.501408203125],,kWh,4.2276048004138526e-05,4.655759823319818e-06,1.5504734626026195e-05,6.243654245348454e-05,,MB,1635.086336,1851.654144,0.0,1434.451968,1320.892416,s,10,5.892094238281251,0.5892094238281251,0.0023980970454494112,0.5883463745117188,0.5903819213867187,0.5932184265136718,0.5954876306152344,"[0.596054931640625, 0.5885479125976563, 0.5882093505859375, 0.5882651977539063, 0.5872356567382813, 0.5897515869140625, 0.5878108520507812, 0.5884275512695313, 0.5880696411132813, 0.5897215576171875]",tokens/s,434.4804913960037,kWh,1.7546881444850084e-05,1.935117392653855e-06,1.160456810717734e-05,3.108656694468128e-05,tokens/kWh,8235068.235600073,MB,1644.003328,1851.654144,0.0,1434.451968,1373.031936,s,10,14.25049914550781,1.4250499145507813,0.0038485226644004986,1.425344970703125,1.4288069335937499,1.4291724487304687,1.4294648608398437,"[1.4281622314453124, 1.4232542724609376, 1.4238001708984376, 1.41647216796875, 1.426866455078125, 1.423823486328125, 1.428212646484375, 1.4295379638671875, 
1.4287257080078124, 1.42164404296875]",tokens/s,44.20897777455009,kWh,4.1229922894311795e-05,4.547664990407788e-06,2.0430596409822387e-05,6.620818429454198e-05,tokens/kWh,951543.9922008789,,s,630,14.247362445831294,0.022614861025129048,0.00038278810395630993,0.022519776344299317,0.022900194549560545,0.02315372314453125,0.02416656530380249,"[0.023134944915771484, 0.022806367874145507, 0.022479007720947266, 0.022621471405029295, 0.022327520370483397, 0.022364383697509767, 0.022509855270385744, 0.022355871200561525, 0.022557823181152344, 0.022618175506591797, 0.02269481658935547, 0.022684959411621092, 0.022680383682250976, 0.022422752380371093, 0.02243244743347168, 0.02251804733276367, 0.022535776138305662, 0.022728063583374022, 0.022591615676879884, 0.022651071548461913, 0.02263311958312988, 0.02286672019958496, 0.022795263290405272, 0.023230464935302734, 0.023021568298339845, 0.023084320068359376, 0.02282569694519043, 0.022730016708374025, 0.02301366424560547, 0.022562463760375975, 0.022656896591186523, 0.02266444778442383, 0.022587167739868165, 0.02266406440734863, 0.022436159133911133, 0.02239148712158203, 0.022488832473754883, 0.022521568298339845, 0.022469152450561525, 0.02258291244506836, 0.022608255386352537, 0.022626304626464845, 0.02267305564880371, 0.022819520950317383, 0.02247203254699707, 0.022818368911743166, 0.025033472061157226, 0.022796287536621093, 0.02263039970397949, 0.022599519729614256, 0.02256233596801758, 0.02255523109436035, 0.02262828826904297, 0.022425472259521486, 0.022609760284423828, 0.022511999130249025, 0.022523967742919922, 0.022474687576293947, 0.022546560287475585, 0.02249478340148926, 0.022556224822998048, 0.022571584701538087, 0.022446271896362304, 0.023158912658691407, 0.022738815307617188, 0.022443071365356445, 0.022513824462890623, 0.022352672576904296, 0.022347776412963868, 0.022397151947021486, 0.022689472198486327, 0.022511135101318358, 0.02248147201538086, 0.022488096237182616, 0.022360128402709963, 0.022467487335205077, 0.022421503067016603, 0.0223887996673584, 0.022460319519042968, 0.022366239547729493, 0.02247657585144043, 0.022403295516967774, 0.022475103378295898, 0.022428640365600584, 0.022471359252929687, 0.022370304107666016, 0.022726335525512696, 0.022444351196289063, 0.022517759323120116, 0.02247212791442871, 0.02244256019592285, 0.02248828887939453, 0.022358816146850587, 0.02251103973388672, 0.022323776245117187, 0.022619712829589845, 0.02245180892944336, 0.022463008880615233, 0.02249558448791504, 0.02234774398803711, 0.02235830307006836, 0.02242911911010742, 0.02232089614868164, 0.02269647979736328, 0.022509056091308592, 0.02248739242553711, 0.022562976837158202, 0.022513151168823242, 0.025946624755859377, 0.023701248168945314, 0.022677759170532226, 0.022523967742919922, 0.0224006404876709, 0.022411584854125977, 0.022489023208618165, 0.022494592666625977, 0.022542303085327148, 0.022612480163574217, 0.0224006404876709, 0.022456928253173827, 0.022398399353027343, 0.02244051170349121, 0.022417343139648438, 0.022973535537719726, 0.02353455924987793, 0.022830848693847657, 0.02309529685974121, 0.022920831680297852, 0.02285775947570801, 0.02257539176940918, 0.02237177658081055, 0.022458368301391602, 0.022440576553344728, 0.022490720748901367, 0.022391199111938476, 0.02246444892883301, 0.02245408058166504, 0.022409311294555666, 0.022501535415649414, 0.022550111770629884, 0.022508256912231444, 0.02249020767211914, 0.022542303085327148, 0.022585599899291993, 0.02263488006591797, 0.022572736740112304, 0.02254060745239258, 0.022691936492919923, 
0.02301532745361328, 0.022979808807373048, 0.023198047637939454, 0.022782400131225587, 0.02268934440612793, 0.022550975799560547, 0.022520927429199217, 0.02267673683166504, 0.02239820861816406, 0.02234543991088867, 0.02265363121032715, 0.02242355155944824, 0.02246441650390625, 0.02238307189941406, 0.022611583709716797, 0.02226790428161621, 0.022257408142089843, 0.022480863571166992, 0.022537567138671874, 0.02232979202270508, 0.022317216873168944, 0.02235763168334961, 0.022338464736938478, 0.022310720443725587, 0.022269952774047853, 0.022519807815551757, 0.022573055267333983, 0.022438207626342774, 0.022363840103149416, 0.025405439376831054, 0.023152544021606446, 0.022415679931640627, 0.02239891242980957, 0.022255071640014647, 0.022346176147460938, 0.02238822364807129, 0.02253660774230957, 0.02266854476928711, 0.022815040588378906, 0.022808319091796876, 0.022696672439575197, 0.023045440673828126, 0.0226592960357666, 0.02252230453491211, 0.022423456192016602, 0.022540576934814455, 0.022439775466918947, 0.02246611213684082, 0.02249567985534668, 0.022417343139648438, 0.022450239181518554, 0.022338623046875, 0.022315040588378906, 0.022459295272827147, 0.022281631469726563, 0.022305376052856447, 0.022353759765625, 0.02243600082397461, 0.022455808639526367, 0.02252400016784668, 0.022440351486206055, 0.022401248931884766, 0.02244380760192871, 0.02270412826538086, 0.022726879119873047, 0.022543712615966795, 0.023390207290649414, 0.022346271514892577, 0.022327199935913086, 0.022428735733032227, 0.02229987144470215, 0.022372159957885742, 0.02264463996887207, 0.02238591957092285, 0.02233011245727539, 0.02247475242614746, 0.022410303115844726, 0.022282400131225587, 0.023294464111328125, 0.022466848373413086, 0.022543935775756835, 0.02244374465942383, 0.022425664901733398, 0.022373023986816405, 0.022455360412597655, 0.02239945602416992, 0.02249679946899414, 0.02239689636230469, 0.02235491180419922, 0.02242121505737305, 0.022300960540771485, 0.022503423690795898, 0.022458368301391602, 0.02249497604370117, 0.022627840042114256, 0.02257161521911621, 0.022470815658569336, 0.022421503067016603, 0.022377504348754882, 0.022462944030761718, 0.022343551635742188, 0.022354816436767577, 0.022388479232788087, 0.022377824783325194, 0.02325503921508789, 0.02292118453979492, 0.02257731246948242, 0.02244915199279785, 0.022510784149169922, 0.02239811134338379, 0.022518207550048828, 0.02256480026245117, 0.02254863929748535, 0.022478847503662108, 0.022425695419311522, 0.022445568084716795, 0.022427328109741212, 0.02241334342956543, 0.022409343719482423, 0.022432479858398437, 0.02260105514526367, 0.022464704513549805, 0.022462335586547852, 0.022568672180175782, 0.02256355285644531, 0.02255462455749512, 0.022746976852416993, 0.022632608413696288, 0.022458368301391602, 0.02249737548828125, 0.02243984031677246, 0.022439359664916992, 0.022368831634521483, 0.022337535858154296, 0.022540576934814455, 0.022698816299438478, 0.02250435256958008, 0.02264806365966797, 0.02271059226989746, 0.022583744049072266, 0.022603776931762694, 0.022533279418945312, 0.022766239166259767, 0.024281280517578125, 0.024043359756469727, 0.024093120574951173, 0.022984224319458006, 0.02280672073364258, 0.02283103942871094, 0.02267145538330078, 0.022575071334838867, 0.022411359786987304, 0.022583200454711915, 0.0225098876953125, 0.022419136047363283, 0.02254879951477051, 0.02267136001586914, 0.022697664260864257, 0.022638111114501952, 0.022538400650024413, 0.022466079711914062, 0.022526527404785158, 0.02253558349609375, 0.022397760391235352, 
0.02247270393371582, 0.02262361526489258, 0.022690496444702148, 0.023177215576171875, 0.022951263427734375, 0.022514400482177736, 0.02245804786682129, 0.022558975219726562, 0.022540288925170897, 0.022549631118774414, 0.022544416427612304, 0.022467391967773438, 0.022464544296264648, 0.02256502342224121, 0.022488927841186522, 0.022475807189941407, 0.02244812774658203, 0.022469600677490233, 0.02253183937072754, 0.022423904418945314, 0.022492639541625975, 0.02249977684020996, 0.022478015899658203, 0.023290687561035157, 0.023207935333251953, 0.02309734344482422, 0.022822271347045897, 0.022804832458496092, 0.022759296417236327, 0.022630271911621095, 0.0225633602142334, 0.022478847503662108, 0.022343423843383788, 0.02239923286437988, 0.022491039276123045, 0.02234102439880371, 0.0224652156829834, 0.02233344078063965, 0.022496608734130858, 0.02240768051147461, 0.022340063095092774, 0.022385568618774415, 0.022292512893676758, 0.022323711395263672, 0.02234956741333008, 0.02263212776184082, 0.023791711807250978, 0.02380259132385254, 0.02263814353942871, 0.022479520797729493, 0.022363872528076173, 0.0225731201171875, 0.02244963264465332, 0.022559263229370116, 0.022540128707885743, 0.022493343353271484, 0.022441888809204103, 0.022603872299194337, 0.022998367309570313, 0.022637439727783204, 0.0224355525970459, 0.022564224243164063, 0.022491840362548827, 0.02249113655090332, 0.02247065544128418, 0.022412927627563476, 0.023091007232666015, 0.022832447052001954, 0.02272287940979004, 0.022503807067871095, 0.0225133113861084, 0.022441503524780273, 0.022493919372558593, 0.022491039276123045, 0.022546047210693358, 0.022522111892700196, 0.022438207626342774, 0.022437887191772463, 0.022623647689819337, 0.022454048156738283, 0.022428031921386718, 0.022407615661621094, 0.022394880294799805, 0.022431808471679686, 0.022495168685913086, 0.0224768009185791, 0.022780160903930664, 0.022572799682617186, 0.02265292739868164, 0.02239897537231445, 0.0225218563079834, 0.02241663932800293, 0.022499456405639648, 0.02250815963745117, 0.022509824752807616, 0.022335231781005858, 0.022580320358276368, 0.022432672500610353, 0.02243404769897461, 0.02240233612060547, 0.02245590400695801, 0.02253932762145996, 0.022671167373657226, 0.02264473533630371, 0.022542335510253905, 0.022555904388427736, 0.02243440055847168, 0.022591648101806642, 0.022690080642700197, 0.022676319122314454, 0.02266540718078613, 0.02269276809692383, 0.022722143173217774, 0.022634143829345702, 0.022521535873413087, 0.0225218563079834, 0.02253536033630371, 0.022466527938842774, 0.022475584030151367, 0.02246976089477539, 0.02261801528930664, 0.022562368392944336, 0.02263091278076172, 0.023226144790649415, 0.0254116153717041, 0.02621798324584961, 0.022823392868041994, 0.022568960189819336, 0.022607648849487304, 0.02314451217651367, 0.02280646324157715, 0.022632192611694336, 0.022440191268920898, 0.022597631454467772, 0.022363967895507812, 0.022503263473510744, 0.022491487503051757, 0.022404319763183595, 0.022506271362304688, 0.02242953681945801, 0.022642463684082032, 0.022616447448730467, 0.022673408508300782, 0.02263596725463867, 0.02283577537536621, 0.022633983612060548, 0.022714879989624022, 0.022634496688842775, 0.02260086441040039, 0.022913759231567382, 0.023088607788085937, 0.022547103881835937, 0.0224136962890625, 0.022447999954223634, 0.022478271484375, 0.02257708740234375, 0.022481279373168947, 0.02249728012084961, 0.02253004837036133, 0.022898687362670898, 0.022481056213378908, 0.022427488327026367, 0.022535488128662108, 0.022519744873046876, 
0.02237254333496094, 0.02245270347595215, 0.02248303985595703, 0.02245382308959961, 0.022516159057617186, 0.02265088081359863, 0.022523136138916017, 0.022514432907104493, 0.022640287399291994, 0.022554239273071288, 0.02260860824584961, 0.022657024383544923, 0.022666528701782228, 0.02282147216796875, 0.024101024627685548, 0.024162111282348634, 0.02306496047973633, 0.022875328063964844, 0.022753728866577148, 0.02300054359436035, 0.022991487503051758, 0.022706239700317384, 0.022583072662353515, 0.02265110397338867, 0.022443231582641603, 0.02286476707458496, 0.023015615463256835, 0.02295599937438965, 0.02316499137878418, 0.02278883171081543, 0.022591455459594727, 0.022370304107666016, 0.022376447677612304, 0.02240127944946289, 0.022410207748413086, 0.02253094482421875, 0.02249616050720215, 0.022365184783935548, 0.0225218563079834, 0.022435840606689454, 0.02253446388244629, 0.022598527908325197, 0.022541120529174806, 0.022525888442993164, 0.022624319076538085, 0.022839296340942384, 0.022863807678222655, 0.02334316825866699, 0.024168384552001952, 0.023006464004516603, 0.02274940872192383, 0.022688352584838867, 0.023154687881469727, 0.02262144088745117, 0.02252672004699707, 0.022460416793823244, 0.022749183654785156, 0.022893632888793945, 0.022676128387451172, 0.02256070327758789, 0.022497631072998046, 0.022549728393554687, 0.024089376449584962, 0.022620384216308593, 0.022610944747924806, 0.02252060890197754, 0.022419456481933595, 0.022572320938110353, 0.02249372863769531, 0.02241334342956543, 0.022585535049438478, 0.022666303634643555, 0.02272787284851074, 0.022685407638549804, 0.022488479614257813, 0.02253027153015137, 0.02263078308105469, 0.022581247329711913, 0.022700031280517577, 0.02258710479736328, 0.022519519805908203, 0.022592063903808593, 0.02255072021484375, 0.022472320556640626, 0.022640832901000975, 0.02259984016418457, 0.022540191650390624, 0.022594879150390625, 0.022690431594848633, 0.02264838409423828, 0.02297929573059082, 0.023237951278686525, 0.022769344329833983, 0.022452320098876953, 0.022514591217041014, 0.02246793556213379, 0.02258211135864258, 0.022511743545532228, 0.02246214485168457, 0.022389984130859374, 0.02245849609375, 0.022500032424926757, 0.022423040390014647, 0.022513280868530272, 0.02250771141052246, 0.022436832427978514, 0.022531999588012695, 0.02244112014770508, 0.02265500831604004, 0.022677663803100587, 0.022847936630249022, 0.022611263275146485, 0.022514368057250978, 0.02245631980895996, 0.022473983764648438, 0.02249180793762207, 0.02236016082763672, 0.02238003158569336, 0.02231324768066406, 0.022380767822265626, 0.022281248092651366, 0.022402271270751953, 0.022270751953125, 0.022827680587768556, 0.02234720039367676, 0.022385536193847658, 0.022513343811035157, 0.02333523178100586, 0.022915008544921876, 0.023130176544189453, 0.022623775482177734, 0.02250595283508301, 0.022446367263793947, 0.022396095275878908, 0.02253913688659668, 0.022508384704589844, 0.022538400650024413, 0.022458080291748048, 0.022475711822509764, 0.02248294448852539, 0.022355199813842774, 0.02247756767272949, 0.02239897537231445, 0.022499008178710936, 0.022488895416259765, 0.02240800094604492, 0.022406368255615233, 0.022524192810058595, 0.02255638313293457, 0.022635103225708008, 0.022509151458740235, 0.022952192306518553, 0.02301955223083496, 0.023166496276855467]",tokens/s,44.218710824215364,,, 
4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,2220.568576,2493.382656,0.0,2107.63776,1984.899072,s,1,8.7991142578125,8.7991142578125,0.0,8.7991142578125,8.7991142578125,8.7991142578125,8.7991142578125,[8.7991142578125],,kWh,4.6938570408292435e-05,5.170452264874343e-06,1.5688068105979003e-05,6.779709077914579e-05,,MB,2276.220928,2789.081088,0.0,2373.976064,2247.84384,s,10,1.604810607910156,0.1604810607910156,0.0013822046819055736,0.1607711486816406,0.161258740234375,0.16155865325927735,0.16179858367919922,"[0.16119209289550782, 0.1609375, 0.1607592315673828, 0.15647602844238281, 0.16069577026367188, 0.16078306579589843, 0.16094857788085937, 0.16059397888183594, 0.1618585662841797, 0.1605657958984375]",tokens/s,1595.2038124509454,kWh,4.8053975245889756e-06,5.299484805977398e-07,3.2033176810489365e-06,8.538663686235652e-06,tokens/kWh,29981272.176426467,MB,2282.96704,2872.967168,0.0,2457.862144,2341.346816,s,10,15.669698120117188,1.5669698120117188,0.006042270705908201,1.5665640258789062,1.5752135498046875,1.57671669921875,1.57791921875,"[1.566098876953125, 1.5666270751953124, 1.5782198486328125, 1.5653162841796875, 1.5665009765625, 1.555477294921875, 1.5608172607421875, 1.568401611328125, 1.5748795166015626, 1.567359375]",tokens/s,40.20498641203488,kWh,4.547751118291527e-05,5.015900144899683e-06,2.8342624222350276e-05,7.883603555016522e-05,tokens/kWh,799126.9418908263,,s,630,15.66344718551636,0.024862614580184694,0.0004799840773342904,0.024755200386047366,0.025275891876220702,0.025483235740661622,0.026360424175262458,"[0.025483200073242188, 0.024910367965698243, 0.024820768356323242, 0.024890335083007812, 0.024788991928100586, 0.024890880584716796, 0.02473200035095215, 0.02864348793029785, 0.026216447830200194, 0.024982816696166994, 0.024815328598022462, 0.024897823333740233, 0.02466419219970703, 0.024768991470336912, 0.02470924758911133, 0.024637088775634766, 0.02468809509277344, 0.024597375869750977, 0.02448588752746582, 0.02467020797729492, 0.024561376571655275, 0.024721696853637697, 0.024713216781616212, 0.025651199340820312, 0.025044992446899415, 0.02473574447631836, 0.024625408172607423, 0.025454399108886718, 0.02458358383178711, 0.02457244873046875, 0.024623104095458984, 0.024606143951416016, 0.024574527740478514, 0.024680448532104493, 0.024451072692871095, 0.02446950340270996, 0.024424448013305664, 0.024630495071411133, 0.024488672256469727, 0.02474505615234375, 0.024559680938720702, 0.024603551864624023, 0.02466320037841797, 0.024949567794799805, 0.024803232192993165, 0.02486079978942871, 0.02467430305480957, 0.024651391983032227, 0.02457747268676758, 0.024665023803710936, 0.024754175186157225, 0.02471664047241211, 0.024647680282592774, 0.02475075149536133, 0.024676031112670898, 0.02601580810546875, 0.02528486442565918, 0.024662015914916992, 0.024555519104003908, 0.02463327980041504, 0.024721471786499024, 0.024579423904418946, 
0.024654495239257813, 0.025761791229248047, 0.025162975311279298, 0.02512771224975586, 0.02512076759338379, 0.024895488739013674, 0.02471232032775879, 0.02450432014465332, 0.024617792129516602, 0.02480134391784668, 0.024602624893188478, 0.02471667289733887, 0.02455411148071289, 0.024680448532104493, 0.02489097595214844, 0.024805055618286134, 0.024857311248779296, 0.024809471130371095, 0.024829568862915038, 0.024629632949829103, 0.024803071975708007, 0.024614912033081054, 0.02458380889892578, 0.02470361518859863, 0.024999935150146483, 0.02471673583984375, 0.02467487907409668, 0.024645280838012696, 0.024612991333007813, 0.02448406410217285, 0.02454732894897461, 0.024476863861083983, 0.024677183151245115, 0.024520416259765625, 0.024571680068969728, 0.024683008193969725, 0.025374080657958983, 0.02513279914855957, 0.024915903091430665, 0.024945472717285155, 0.024696735382080077, 0.024592607498168946, 0.024806432723999024, 0.024748191833496094, 0.02481398391723633, 0.02473206329345703, 0.024595712661743162, 0.024566495895385742, 0.02460470390319824, 0.024937568664550783, 0.0245600643157959, 0.024743999481201172, 0.02477097511291504, 0.025294368743896484, 0.02887222480773926, 0.024965471267700195, 0.025139583587646484, 0.025124864578247072, 0.0249202880859375, 0.02476851272583008, 0.024868864059448242, 0.02485196876525879, 0.02483046340942383, 0.024782751083374025, 0.025788095474243163, 0.02490768051147461, 0.02468118476867676, 0.02453094482421875, 0.024792448043823242, 0.024796960830688476, 0.02458505630493164, 0.024772607803344726, 0.024815616607666017, 0.024639167785644532, 0.024570144653320313, 0.02469071960449219, 0.024606719970703125, 0.02456985664367676, 0.02479913520812988, 0.024837919235229492, 0.024959039688110352, 0.024934656143188478, 0.025188352584838865, 0.02551807975769043, 0.025276351928710937, 0.025531551361083985, 0.025348127365112303, 0.02531724739074707, 0.02536140823364258, 0.02532147216796875, 0.025384960174560548, 0.025483264923095703, 0.025246976852416992, 0.02513587188720703, 0.024786943435668944, 0.02485043144226074, 0.02469068717956543, 0.024729600906372072, 0.024673887252807617, 0.02480374336242676, 0.024620704650878907, 0.024655328750610352, 0.024636127471923827, 0.024694368362426757, 0.02463801574707031, 0.024659296035766602, 0.024670879364013673, 0.024815616607666017, 0.024833120346069337, 0.025047967910766602, 0.025458688735961913, 0.02570240020751953, 0.02570240020751953, 0.025339359283447264, 0.02533363151550293, 0.025408159255981444, 0.025256959915161133, 0.025609216690063476, 0.025374719619750977, 0.025600000381469725, 0.025276351928710937, 0.025246944427490235, 0.025229248046875, 0.02549648094177246, 0.025231359481811523, 0.02538003158569336, 0.02509052848815918, 0.02601190376281738, 0.025554943084716796, 0.025115936279296876, 0.024879840850830077, 0.024827552795410157, 0.025014080047607423, 0.024646175384521483, 0.024672256469726563, 0.024688032150268553, 0.024588640213012696, 0.024617216110229493, 0.024645631790161132, 0.024539007186889648, 0.024653312683105468, 0.024599168777465822, 0.02468659210205078, 0.024807424545288087, 0.02510220718383789, 0.024804704666137694, 0.02505763244628906, 0.024959423065185546, 0.02507107162475586, 0.024756767272949218, 0.02482099151611328, 0.024754816055297852, 0.02489151954650879, 0.024612863540649413, 0.0247193603515625, 0.024551424026489257, 0.02484547233581543, 0.024836288452148438, 0.02468486404418945, 0.02456611251831055, 0.02467635154724121, 0.024532991409301756, 0.024467456817626954, 0.024550943374633788, 
0.02472598457336426, 0.024967168807983397, 0.024885248184204102, 0.024787967681884765, 0.025232383728027344, 0.02497331237792969, 0.024954399108886718, 0.02481305694580078, 0.02517296028137207, 0.024868864059448242, 0.024621055603027343, 0.024552480697631836, 0.025430944442749022, 0.024641599655151367, 0.024803327560424804, 0.02472083282470703, 0.024791616439819336, 0.024536096572875976, 0.024873952865600586, 0.024763423919677733, 0.025203680038452147, 0.025016319274902343, 0.024791040420532227, 0.024838144302368165, 0.024780704498291017, 0.024886911392211913, 0.025632768630981444, 0.02516377639770508, 0.025133056640625, 0.02488915252685547, 0.024899776458740235, 0.02470911979675293, 0.024587392807006836, 0.024674848556518556, 0.024708831787109375, 0.0247589111328125, 0.02488319969177246, 0.024791040420532227, 0.024666112899780275, 0.024766368865966795, 0.024871007919311523, 0.024852479934692383, 0.0247193603515625, 0.024691808700561525, 0.024615840911865236, 0.0246824951171875, 0.024672256469726563, 0.02533171272277832, 0.03155961608886719, 0.02492131233215332, 0.024861536026000976, 0.02476032066345215, 0.024877056121826172, 0.02486412811279297, 0.024789567947387695, 0.024838207244873046, 0.02477804756164551, 0.024736255645751954, 0.024665952682495117, 0.024596832275390626, 0.024823808670043947, 0.02476851272583008, 0.024666112899780275, 0.024760095596313477, 0.02473187255859375, 0.024662015914916992, 0.024556671142578125, 0.02468134307861328, 0.024627199172973634, 0.024680448532104493, 0.024641536712646486, 0.024563711166381837, 0.02450841522216797, 0.024542720794677734, 0.024547168731689453, 0.0245766716003418, 0.024635391235351564, 0.024865983963012695, 0.02470790481567383, 0.024774656295776368, 0.024634464263916016, 0.024564640045166015, 0.02448147201538086, 0.02454764747619629, 0.024475648880004884, 0.024521919250488283, 0.02462393569946289, 0.024755584716796876, 0.024799871444702148, 0.02550009536743164, 0.024862720489501954, 0.02465715217590332, 0.02454159927368164, 0.0244200325012207, 0.024471935272216798, 0.024676063537597655, 0.024874847412109376, 0.024607616424560545, 0.02528665542602539, 0.024444543838500976, 0.02457574462890625, 0.024723455429077147, 0.024511104583740236, 0.024393728256225586, 0.0244401912689209, 0.024461952209472657, 0.024501632690429688, 0.024404607772827148, 0.02453887939453125, 0.024537343978881836, 0.024626976013183595, 0.024686111450195312, 0.024730304718017578, 0.02448588752746582, 0.024571231842041016, 0.02452547264099121, 0.024532991409301756, 0.02442438316345215, 0.024496192932128905, 0.02448723220825195, 0.024502975463867187, 0.024410112380981445, 0.024450912475585937, 0.02435702323913574, 0.024453119277954103, 0.024612512588500977, 0.02490598487854004, 0.024942047119140626, 0.024948991775512696, 0.024931808471679688, 0.02485545539855957, 0.025135103225708007, 0.025141248703002928, 0.02464944076538086, 0.024662143707275392, 0.02461302375793457, 0.02469228744506836, 0.024664512634277345, 0.026056703567504884, 0.024837984085083007, 0.024800703048706053, 0.025141727447509764, 0.024822015762329102, 0.02470297622680664, 0.024612863540649413, 0.024641536712646486, 0.024592384338378907, 0.024532991409301756, 0.024590335845947265, 0.02451456069946289, 0.024726879119873046, 0.024687263488769533, 0.025582015991210936, 0.02501750373840332, 0.024746688842773437, 0.024770463943481445, 0.024731903076171874, 0.024732831954956055, 0.024662879943847655, 0.024825536727905273, 0.024916160583496095, 0.025018495559692384, 0.025269952774047852, 0.025239871978759765, 
0.024796159744262695, 0.024790016174316407, 0.02461404800415039, 0.024540000915527344, 0.02456985664367676, 0.024680448532104493, 0.02452479934692383, 0.02489139175415039, 0.024806880950927736, 0.02517046356201172, 0.02500809669494629, 0.024973344802856446, 0.02471244812011719, 0.024699264526367188, 0.024582527160644532, 0.024567359924316405, 0.024555967330932616, 0.024524639129638672, 0.024508512496948243, 0.02453715133666992, 0.0245166072845459, 0.02452275276184082, 0.02452627182006836, 0.02461955261230469, 0.024876863479614257, 0.024848608016967772, 0.024631296157836914, 0.0245402889251709, 0.02450681686401367, 0.025450944900512695, 0.024633344650268556, 0.02462451171875, 0.024726144790649413, 0.024573951721191405, 0.024485504150390625, 0.02460652732849121, 0.024696735382080077, 0.024789663314819337, 0.024788991928100586, 0.02491596794128418, 0.025179487228393554, 0.02490230369567871, 0.024641408920288085, 0.025151615142822267, 0.024737279891967775, 0.024676864624023437, 0.024655616760253907, 0.02468889617919922, 0.02446335983276367, 0.024594432830810548, 0.024819711685180663, 0.02539311981201172, 0.02494102478027344, 0.027723583221435547, 0.02509555244445801, 0.024918848037719727, 0.025059328079223633, 0.02507142448425293, 0.025005407333374023, 0.02480624008178711, 0.024674560546875, 0.024590080261230468, 0.024620288848876952, 0.02456243133544922, 0.024592384338378907, 0.02456483268737793, 0.024664640426635742, 0.02464188766479492, 0.024639488220214844, 0.024596479415893553, 0.02472723197937012, 0.024607040405273437, 0.025276416778564452, 0.025219072341918947, 0.024735488891601563, 0.02482815933227539, 0.024754175186157225, 0.024604671478271483, 0.02451251220703125, 0.02464064025878906, 0.024683391571044922, 0.02463488006591797, 0.02479769515991211, 0.02469068717956543, 0.024856096267700196, 0.024754655838012694, 0.025403072357177734, 0.024892799377441405, 0.02508687973022461, 0.025620288848876953, 0.025813215255737303, 0.024909568786621095, 0.024936704635620116, 0.024680063247680663, 0.02467878341674805, 0.024608768463134766, 0.0247193603515625, 0.025006080627441408, 0.024927295684814454, 0.02491606330871582, 0.024862655639648436, 0.024697216033935546, 0.02465203285217285, 0.024660255432128905, 0.02465177536010742, 0.02484783935546875, 0.024926687240600588, 0.02478291130065918, 0.024750015258789063, 0.024618303298950196, 0.02491263961791992, 0.024979455947875977, 0.02504902458190918, 0.02500819206237793, 0.025659456253051757, 0.02492313575744629, 0.024990720748901366, 0.02494054412841797, 0.024870912551879884, 0.025208799362182618, 0.024977407455444335, 0.02505936050415039, 0.02488924789428711, 0.02473916816711426, 0.02460339164733887, 0.024606719970703125, 0.024549375534057616, 0.02450841522216797, 0.02449612808227539, 0.024475616455078127, 0.024573984146118163, 0.02469068717956543, 0.024678047180175782, 0.02472380828857422, 0.02459984016418457, 0.024510656356811523, 0.02470297622680664, 0.024719327926635743, 0.024681024551391602, 0.024725759506225586, 0.02489904022216797, 0.02477449607849121, 0.024666080474853514, 0.0248602237701416, 0.02479123115539551, 0.024924896240234376, 0.0253439998626709, 0.025492799758911132, 0.02525859260559082, 0.025036895751953125, 0.02484592056274414, 0.024859039306640626, 0.02487500762939453, 0.024739744186401368, 0.02477065658569336, 0.024962207794189454, 0.025061376571655275, 0.0251595516204834, 0.02643452835083008, 0.025275840759277343, 0.02550009536743164, 0.025192384719848634, 0.025270463943481446, 0.025397247314453125, 0.02537398338317871, 
0.025081663131713866, 0.026419231414794922, 0.02537766456604004, 0.025022464752197264, 0.02495692825317383, 0.024969215393066405, 0.025233407974243165, 0.02525312042236328, 0.025066240310668945, 0.02490777587890625, 0.02490163230895996, 0.024970720291137696, 0.025775936126708983, 0.02521104049682617, 0.024879135131835938, 0.024844287872314453, 0.02475779151916504, 0.02482784080505371, 0.024670112609863282, 0.024793344497680662, 0.024680831909179687, 0.025310943603515625, 0.024744224548339844, 0.024859935760498046, 0.02463203239440918, 0.024868864059448242, 0.024680448532104493, 0.024766464233398438, 0.025151487350463866, 0.024906944274902344, 0.02465059280395508, 0.024655839920043946, 0.024911359786987306, 0.02517647933959961, 0.02473494338989258, 0.024804224014282228, 0.025114208221435546, 0.026648256301879884, 0.024899808883666993, 0.025007648468017576, 0.025002880096435545, 0.024874399185180664, 0.024593088150024416, 0.02491187286376953, 0.024825759887695312, 0.025357887268066405, 0.024508960723876955, 0.02449203109741211, 0.024641536712646486, 0.024604671478271483, 0.02454528045654297, 0.024645631790161132, 0.02457804870605469, 0.024989343643188475, 0.025268352508544922, 0.025252063751220702, 0.025196544647216795, 0.02524073600769043, 0.02487104034423828, 0.024761056900024413, 0.024589696884155274, 0.02467024040222168, 0.024694688796997072, 0.02478316879272461, 0.02483033561706543, 0.02467196846008301, 0.02445136070251465, 0.024625152587890626, 0.024696832656860353, 0.024679904937744142, 0.024480287551879882, 0.024624767303466796, 0.02457638359069824, 0.025008127212524413, 0.024954879760742187]",tokens/s,40.22103133099252,,, 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1015, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 840, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl 
return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,11155.16928,12232.556544,0.0,11846.811648,11814.785024,s,1,13.8414208984375,13.8414208984375,0.0,13.8414208984375,13.8414208984375,13.8414208984375,13.8414208984375,[13.8414208984375],,kWh,0.00019904100667495945,2.1948391714944302e-05,6.715616483599085e-05,0.0002881455632258946,,MB,2120.298496,13165.789184,0.0,12750.68416,12641.86368,s,10,11.782311645507814,1.1782311645507815,0.0006644680805306273,1.1781806030273438,1.1789724243164064,1.1791570495605468,1.1793047497558593,"[1.1773643798828124, 1.177268798828125, 1.1785863037109374, 1.1777186279296874, 1.178931396484375, 1.178248046875, 1.1793416748046874, 1.17890478515625, 1.17783447265625, 1.1781131591796874]",tokens/s,217.27485038778778,kWh,3.438735040416759e-05,3.7923358945711874e-06,2.288096274919993e-05,6.106064904793871e-05,tokens/kWh,4192552.8796625533,MB,2126.62272,13461.487616,0.0,13044.28544,12933.698048,s,10,51.07986181640625,5.107986181640625,0.0050973818980087045,5.108480224609375,5.113295556640625,5.113744458007813,5.114103579101562,"[5.09498681640625, 5.1054814453125, 5.10866162109375, 5.11319580078125, 5.1060703125, 5.10936279296875, 5.114193359375, 5.1115615234375, 5.10804931640625, 5.108298828125]",tokens/s,12.333627727192704,kWh,0.00014868601331208085,1.640047953633189e-05,9.888560688619818e-05,0.00026397209973461094,tokens/kWh,238661.58606662665,,s,630,51.05173093414307,0.08103449354625883,0.0007782904925350935,0.08098972702026366,0.08196940307617187,0.08222986488342285,0.08363603729248048,"[0.08349244689941407, 0.07999727630615235, 0.07962831878662109, 0.07978182220458985, 0.0789320297241211, 0.0791852798461914, 0.08039279937744141, 0.08120928192138673, 0.08161929321289063, 0.07957997131347656, 0.07971692657470703, 0.0798703384399414, 0.08123744201660156, 0.08063187408447266, 0.08042521667480469, 0.07996441650390625, 0.07997235107421875, 0.0798760986328125, 0.08060281372070313, 0.08075910186767578, 0.08130086517333984, 0.08016345977783203, 0.08107843017578124, 0.08063085174560547, 0.08052623748779297, 0.08210623931884765, 0.08100249481201172, 0.08075030517578125, 0.0804081268310547, 0.08047401428222656, 0.0800346908569336, 0.08101219177246094, 0.08072557067871093, 0.08123197174072265, 0.08052758026123047, 0.08171501159667968, 0.08009603118896484, 0.08153913879394531, 0.08098194885253907, 0.08047615814208985, 0.08075059509277344, 0.07985971069335937, 0.08088985443115235, 0.08091190338134766, 0.0808287353515625, 0.08094022369384765, 0.08051606750488281, 0.08040847778320312, 0.08084838104248047, 0.082100830078125, 0.08150016021728515, 0.08124416351318359, 0.0815571517944336, 0.08088201904296875, 0.08104134368896485, 0.08151660919189453, 0.08165766143798828, 0.08135084533691406, 0.08154096221923827, 0.08219868469238281, 0.08162489318847656, 0.08234786987304688, 0.08245487976074219, 0.08416275024414062, 0.07957651519775391, 
0.07957151794433594, 0.08032569885253907, 0.08065119934082031, 0.07926921844482422, 0.08080799865722656, 0.08047881317138672, 0.07996409606933594, 0.08108255767822266, 0.08065161895751953, 0.0814760971069336, 0.08168000030517578, 0.08096604919433593, 0.08044707489013672, 0.08055612945556641, 0.08015872192382813, 0.08025529479980469, 0.08031759643554688, 0.08053372955322266, 0.08032729339599609, 0.08099533081054687, 0.08021862030029298, 0.08136550140380859, 0.08253440093994141, 0.08079523468017578, 0.08067932891845703, 0.08067235565185547, 0.0809168930053711, 0.08019967651367188, 0.08116028594970703, 0.0806909408569336, 0.08007276916503907, 0.08157721710205078, 0.08124912261962891, 0.08138082885742187, 0.08181116485595703, 0.08143545532226562, 0.08090214538574218, 0.082229248046875, 0.08103628540039062, 0.08104768371582032, 0.08102591705322265, 0.08091648101806641, 0.08120524597167969, 0.08154537963867188, 0.0810013427734375, 0.08186163330078125, 0.08187820434570313, 0.0815289306640625, 0.0823070068359375, 0.08037577819824218, 0.08054876708984375, 0.08030143737792969, 0.08068147277832032, 0.08113289642333985, 0.08104962921142578, 0.08120381164550781, 0.08086748504638672, 0.0817390365600586, 0.08151235198974609, 0.08176914978027344, 0.08206495666503906, 0.08336412811279297, 0.07988489532470704, 0.07952710723876953, 0.07981958770751953, 0.08056361389160156, 0.08024329376220703, 0.08045158386230469, 0.0805459213256836, 0.08019955444335937, 0.0802995834350586, 0.08075234985351562, 0.08047280120849609, 0.08009030151367187, 0.08108493041992187, 0.0807542724609375, 0.08045231628417969, 0.08015795135498047, 0.08082694244384765, 0.08019987487792969, 0.08049612426757813, 0.08025778961181641, 0.08051814270019532, 0.08026188659667968, 0.08133837127685548, 0.08028956604003906, 0.08182150268554687, 0.08135830688476563, 0.08107923126220704, 0.08107212829589844, 0.08058806610107422, 0.08109334564208984, 0.08184627532958984, 0.080666015625, 0.08083721923828124, 0.0810059814453125, 0.08113011169433594, 0.08076898956298828, 0.08103116607666015, 0.08154080200195313, 0.08081644439697265, 0.08116838073730469, 0.08198675537109375, 0.08073075103759765, 0.081508544921875, 0.08111103820800782, 0.08049459075927734, 0.0821024627685547, 0.08106988525390625, 0.08157552337646484, 0.08222557067871093, 0.08162691497802735, 0.0809493408203125, 0.08139759826660156, 0.08185475158691406, 0.08164710235595703, 0.08174819183349609, 0.08168476867675781, 0.08155894470214843, 0.08161545562744141, 0.08215952301025391, 0.0820730209350586, 0.08208354949951172, 0.08203266906738281, 0.08373654174804687, 0.0797675552368164, 0.07952793884277344, 0.08093449401855468, 0.07988880157470703, 0.0796747817993164, 0.08065699005126953, 0.08082015991210938, 0.0807663345336914, 0.0800382080078125, 0.08075302124023437, 0.0806522216796875, 0.08123942565917969, 0.08078729248046874, 0.08052003479003907, 0.08065843200683594, 0.08030413055419922, 0.08038195037841797, 0.08086732482910156, 0.08086732482910156, 0.0801313247680664, 0.08049868774414062, 0.08143334197998046, 0.08173939514160156, 0.08137152099609375, 0.0813482894897461, 0.0814062728881836, 0.08066387176513672, 0.08057421112060546, 0.07986886596679688, 0.08125849914550781, 0.08161280059814453, 0.08062076568603516, 0.08044403076171874, 0.08084601593017578, 0.08165007781982422, 0.08160665893554687, 0.08202476501464843, 0.08099046325683594, 0.08086844635009766, 0.08119209289550781, 0.08064208221435547, 0.08136061096191406, 0.08188079833984375, 0.08115638732910156, 0.08090799713134765, 
0.08184607696533203, 0.08209865570068359, 0.0824883804321289, 0.0812076187133789, 0.08156633758544922, 0.08106147003173828, 0.08104303741455078, 0.08143341064453125, 0.08197081756591797, 0.08194310760498047, 0.08106278228759765, 0.08057952117919921, 0.08273305511474609, 0.08213241577148438, 0.08200828552246094, 0.0824236831665039, 0.08177097320556641, 0.08269171142578124, 0.07977190399169921, 0.07945353698730469, 0.08015337371826171, 0.08075043487548828, 0.08053533172607422, 0.08004656219482421, 0.08119286346435547, 0.08107417297363281, 0.07994761657714844, 0.0805664291381836, 0.0805212173461914, 0.08124006652832032, 0.07995555114746093, 0.07967366027832032, 0.08060733032226562, 0.08077324676513672, 0.08105107116699219, 0.07963619232177735, 0.07999260711669921, 0.0811192626953125, 0.0809352035522461, 0.08022723388671875, 0.08148870086669922, 0.08128505706787109, 0.08087792205810547, 0.08051366424560547, 0.07997644805908204, 0.08175635528564452, 0.08121324920654296, 0.08053462219238282, 0.0803787841796875, 0.08050406646728515, 0.0804031982421875, 0.08053555297851563, 0.08168243408203125, 0.08140358734130859, 0.08088304138183594, 0.08148271942138671, 0.08137932586669921, 0.08142594909667969, 0.08104335784912109, 0.08097039794921874, 0.08091161346435546, 0.08177267456054688, 0.08000697326660157, 0.08132476806640625, 0.08237055969238281, 0.08147510528564453, 0.08143920135498046, 0.0813358383178711, 0.0817318115234375, 0.08163353729248046, 0.08202649688720703, 0.08177446746826172, 0.08100077056884766, 0.08171721649169922, 0.0810884780883789, 0.08207552337646484, 0.08214733123779297, 0.08210550689697266, 0.08227241516113282, 0.08134883117675781, 0.08398470306396484, 0.07951065826416015, 0.08031731414794922, 0.08017305755615234, 0.08083385467529297, 0.08074259185791016, 0.07990512084960938, 0.08077667236328125, 0.08047481536865235, 0.08084185791015625, 0.08053158569335937, 0.08029414367675781, 0.08155792236328124, 0.0804019546508789, 0.08061510467529297, 0.0804315185546875, 0.08006294250488281, 0.0806478042602539, 0.08009766387939453, 0.08052272033691406, 0.08087145233154297, 0.0819163818359375, 0.08025856018066406, 0.0806219482421875, 0.08129961395263671, 0.08035327911376954, 0.08086080169677734, 0.08154729461669921, 0.08102333068847656, 0.08101174163818359, 0.08078639984130859, 0.08071552276611328, 0.08062598419189453, 0.08151853179931641, 0.08162509155273437, 0.08102092742919922, 0.08183602905273438, 0.0811827850341797, 0.08049247741699218, 0.08126220703125, 0.08140541076660156, 0.08132291412353515, 0.0810250244140625, 0.08093695831298828, 0.08194809722900391, 0.08160009765625, 0.08152518463134766, 0.08047465515136719, 0.08149811553955078, 0.08164556884765625, 0.08041433715820312, 0.08059283447265625, 0.08136032104492187, 0.08130665588378906, 0.08133849334716797, 0.08143183898925781, 0.08196924591064453, 0.08180989074707032, 0.08276525115966797, 0.08137705230712891, 0.08206854248046876, 0.08188880157470703, 0.08124604797363282, 0.08459264373779297, 0.07973273468017578, 0.07969491577148438, 0.08087443542480469, 0.08040835571289062, 0.08040265655517578, 0.08092467498779297, 0.08089126586914062, 0.08090668487548829, 0.08017641448974609, 0.08047113800048829, 0.08257917022705077, 0.08087766265869141, 0.08060108947753906, 0.08066668701171875, 0.08039852905273437, 0.0806164779663086, 0.08175692749023437, 0.08064326477050782, 0.08133916473388672, 0.08037145233154297, 0.08041702270507813, 0.0801075210571289, 0.08162287902832031, 0.0811493148803711, 0.0804298553466797, 0.08056832122802735, 
0.08081613159179687, 0.08068300628662109, 0.0813939208984375, 0.08100204467773438, 0.08089619445800782, 0.08131584167480468, 0.08100863647460937, 0.08030738830566406, 0.0809889907836914, 0.08142134094238282, 0.08138358306884766, 0.08115229034423828, 0.08093341064453125, 0.08146534729003906, 0.08132527923583985, 0.08197792053222656, 0.08112969970703125, 0.08018745422363281, 0.08063104248046875, 0.08028025817871094, 0.08085295867919921, 0.08125647735595704, 0.08231526184082032, 0.0822303695678711, 0.08139459228515625, 0.08139571380615235, 0.08238285064697265, 0.08167183685302734, 0.08187120056152344, 0.08105699157714844, 0.0815155487060547, 0.08179401397705079, 0.08152963256835938, 0.08143666839599609, 0.08319999694824219, 0.08187875366210938, 0.08417485046386719, 0.07988019561767579, 0.0802344970703125, 0.08108182525634766, 0.07988428497314454, 0.07963471984863281, 0.0794155502319336, 0.08097382354736328, 0.08099942779541015, 0.08038925170898438, 0.08062089538574219, 0.08120281219482423, 0.08237471771240235, 0.08082927703857422, 0.0802713623046875, 0.08084473419189453, 0.07992530822753906, 0.07936409759521484, 0.08084480285644531, 0.08079769897460938, 0.08066028594970703, 0.08080329895019531, 0.08126294708251953, 0.08138553619384765, 0.08159648132324218, 0.08212095642089844, 0.08087865447998047, 0.08051155090332031, 0.08030809783935547, 0.08000768280029297, 0.08104550170898438, 0.08074195098876953, 0.08153337860107422, 0.08042845153808593, 0.08063238525390624, 0.08203794860839844, 0.08170992279052734, 0.08118083190917968, 0.0807442855834961, 0.08093081665039062, 0.08086323547363282, 0.08134041595458984, 0.08032675170898437, 0.0804002914428711, 0.08173894500732422, 0.08089615631103515, 0.08148777770996093, 0.08131683349609375, 0.08166371154785156, 0.08242182159423828, 0.08183602905273438, 0.08141209411621093, 0.08095334625244141, 0.08148729705810547, 0.08129142761230469, 0.08055817413330078, 0.08189897918701172, 0.08179357147216797, 0.08174588775634765, 0.08217033386230468, 0.08208370971679688, 0.08295552062988282, 0.08174777221679687, 0.08369468688964844, 0.07941155242919921, 0.0799381103515625, 0.07984649658203125, 0.0794244155883789, 0.08072134399414063, 0.08031289672851563, 0.08072191619873047, 0.08101702117919922, 0.08069510650634766, 0.0807874526977539, 0.0813545913696289, 0.08091161346435546, 0.0806448974609375, 0.08074668884277343, 0.07985759735107421, 0.07978803253173829, 0.08052114868164062, 0.08034217834472657, 0.08061430358886719, 0.08108850860595704, 0.080500732421875, 0.08040185546875, 0.08105980682373047, 0.08108297729492188, 0.0813628158569336, 0.08074393463134766, 0.0807594223022461, 0.08074444580078124, 0.08066867065429688, 0.0808058853149414, 0.08064614105224609, 0.08197456359863281, 0.08148655700683594, 0.08065606689453125, 0.08144265747070313, 0.08145699310302734, 0.08117664337158204, 0.08129183959960938, 0.08139981079101563, 0.08142623901367188, 0.08067091369628906, 0.0807127685546875, 0.08121356964111329, 0.08103814697265625, 0.08141414642333984, 0.08173772430419922, 0.0819439697265625, 0.08131644439697265, 0.08054486083984375, 0.08155133056640625, 0.08082118225097656, 0.08134451293945312, 0.08155955505371094, 0.08187200164794922, 0.0810728988647461, 0.08218374633789062, 0.08169120025634766, 0.08210399627685547, 0.08189295959472656, 0.08150048065185547, 0.08152719879150391, 0.08192387390136718, 0.08371097564697266, 0.07947283172607422, 0.07926445007324219, 0.07992742156982421, 0.07965455627441406, 0.08065853118896485, 0.08048870086669922, 0.08076467132568359, 
0.08082867431640625, 0.08027545928955078, 0.07986809539794922, 0.08235295867919921, 0.08087859344482422, 0.08014224243164063, 0.08037590026855469, 0.08044544219970703, 0.08021186828613282, 0.08090838623046875, 0.08013187408447266, 0.08047577667236328, 0.08026710510253907, 0.0815206069946289, 0.08106851196289062, 0.08094751739501953, 0.0813570556640625, 0.0807070083618164, 0.08068656158447265, 0.08108118438720703, 0.08049254608154296, 0.08116377258300782, 0.0816211166381836, 0.08103577423095704, 0.08042483520507812, 0.08060118103027344, 0.08097372436523438, 0.08171520233154297, 0.08180326080322266, 0.08049664306640625, 0.08073625946044923, 0.08127078247070313, 0.08164892578125, 0.08066941070556641, 0.08157183837890625, 0.08115814208984375, 0.08128915405273437, 0.08092822265625, 0.08154131317138671, 0.08143004608154297, 0.08122662353515625, 0.08047974395751953, 0.08151667022705078, 0.08091276550292968, 0.08139161682128906, 0.08135475158691406, 0.0814202880859375, 0.08328601837158203, 0.081499267578125, 0.08136179351806641, 0.08237468719482421, 0.08129097747802734, 0.08262067413330078, 0.081438720703125, 0.08206569671630859]",tokens/s,12.340423889107747,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4174.72512,5932.711936,0.0,5530.189824,5138.859008,s,1,11.685021484375,11.685021484375,0.0,11.685021484375,11.685021484375,11.685021484375,11.685021484375,[11.685021484375],,kWh,0.00013353349158328455,1.4722364149102071e-05,5.848310234202181e-05,0.00020673895807440842,,MB,1447.56736,5951.586304,0.0,5534.384128,4844.878336,s,10,1.991224639892578,0.1991224639892578,0.0006377076438351236,0.19929886627197263,0.1997789993286133,0.19990297317504882,0.20000215225219728,"[0.19811541748046874, 0.19975144958496094, 0.19828326416015624, 0.20002694702148438, 0.1992388458251953, 0.19839424133300781, 0.19968832397460937, 0.19950547790527343, 0.19935888671875, 0.19886178588867187]",tokens/s,1285.640981289839,kWh,5.847094525417257e-06,6.44829296737964e-07,3.865414203439998e-06,1.0357338025595219e-05,tokens/kWh,24716775.620083917,MB,1469.677568,5951.586304,0.0,5534.384128,5015.800832,s,10,15.297438964843751,1.529743896484375,0.008922970515632808,1.5285436401367187,1.5370168945312501,1.5447652221679689,1.5509638842773439,"[1.5298094482421876, 1.52605322265625, 1.5283548583984374, 1.517797607421875, 1.528732421875, 1.5315093994140625, 1.5213272705078125, 1.526046142578125, 1.5525135498046876, 1.5352950439453126]",tokens/s,41.1833641858518,kWh,4.473506692374636e-05,4.932115279690566e-06,2.974936268835937e-05,7.94165448917963e-05,tokens/kWh,793285.5815603214,,s,630,15.294663835525515,0.02427724418337383,0.00046611104725901236,0.024174975395202636,0.024609135627746585,0.024936987495422364,0.0259331065940857,"[0.025132448196411132, 0.02447987174987793, 0.024312095642089845, 0.024305824279785157, 0.024214719772338866, 0.024115135192871093, 0.024163232803344727, 
0.0242174072265625, 0.02391468811035156, 0.025198591232299804, 0.0249036808013916, 0.025429567337036132, 0.024144319534301757, 0.02435481643676758, 0.024240127563476564, 0.02411420822143555, 0.02427788734436035, 0.02419638442993164, 0.024050016403198243, 0.024130016326904296, 0.02409881591796875, 0.024194368362426756, 0.02399286460876465, 0.02400604820251465, 0.02393369674682617, 0.024027135848999022, 0.023937023162841797, 0.023984128952026368, 0.02401286315917969, 0.024010240554809572, 0.024158655166625978, 0.024073728561401365, 0.024367584228515624, 0.024613216400146486, 0.02450399971008301, 0.024569440841674804, 0.024620447158813476, 0.025455615997314454, 0.024376928329467775, 0.024317855834960937, 0.024123903274536132, 0.024137344360351563, 0.024203647613525392, 0.024233728408813476, 0.02419705581665039, 0.024659711837768553, 0.02426870346069336, 0.024001184463500976, 0.024029184341430664, 0.024133312225341798, 0.0245600643157959, 0.024201087951660157, 0.024391679763793944, 0.024213504791259766, 0.024168352127075195, 0.024086463928222657, 0.024080448150634766, 0.024059999465942384, 0.02425651168823242, 0.024194271087646484, 0.02412828826904297, 0.02397772789001465, 0.024077920913696288, 0.02509414482116699, 0.024442623138427735, 0.024373504638671876, 0.024809471130371095, 0.02448588752746582, 0.024993791580200195, 0.024440832138061523, 0.024358911514282225, 0.024344575881958007, 0.024229888916015626, 0.024154048919677734, 0.02410809516906738, 0.02430259132385254, 0.02409984016418457, 0.02404035186767578, 0.02397398376464844, 0.024157535552978514, 0.023990367889404295, 0.023953056335449217, 0.02416716766357422, 0.024314016342163087, 0.024143871307373048, 0.024098047256469725, 0.02415283203125, 0.02481171226501465, 0.02599711990356445, 0.024381439208984376, 0.02428927993774414, 0.02425823974609375, 0.024244543075561523, 0.024204320907592773, 0.024390623092651366, 0.024164352416992187, 0.024374271392822267, 0.02410393524169922, 0.024174591064453126, 0.02408185577392578, 0.024156255722045897, 0.0240296630859375, 0.024084800720214843, 0.0240797119140625, 0.024281440734863283, 0.024163808822631836, 0.02410755157470703, 0.02420307159423828, 0.024036960601806642, 0.02401340866088867, 0.0241779842376709, 0.024267295837402343, 0.024004415512084962, 0.023972192764282228, 0.0240262393951416, 0.023927040100097656, 0.02397657585144043, 0.02412656021118164, 0.023942047119140625, 0.02390630340576172, 0.023952768325805663, 0.023927423477172853, 0.02385817527770996, 0.023946239471435548, 0.023953407287597657, 0.023949312210083007, 0.02517196846008301, 0.02431939125061035, 0.024076255798339843, 0.024314495086669923, 0.024608991622924806, 0.024653343200683592, 0.02403772735595703, 0.023958623886108397, 0.024062175750732422, 0.024017215728759766, 0.02396598434448242, 0.02402873611450195, 0.024224191665649413, 0.024154111862182616, 0.024213504791259766, 0.024505823135375977, 0.024348735809326172, 0.024279359817504884, 0.02422697639465332, 0.024155136108398437, 0.024092960357666015, 0.023962528228759765, 0.024034112930297852, 0.023989919662475587, 0.024109216690063478, 0.02400464057922363, 0.023989728927612305, 0.02387945556640625, 0.024031200408935548, 0.023911231994628905, 0.023937152862548827, 0.02394316864013672, 0.025231359481811523, 0.024252416610717774, 0.024063199996948243, 0.024011552810668944, 0.024137664794921875, 0.024082496643066408, 0.024188928604125977, 0.024059104919433593, 0.024290079116821288, 0.024061952590942383, 0.023953407287597657, 0.023851007461547852, 0.02391449546813965, 
0.0239736328125, 0.023941280364990235, 0.02410316848754883, 0.023831520080566406, 0.02385958480834961, 0.025648639678955077, 0.02895305633544922, 0.024566080093383787, 0.024788480758666992, 0.024197343826293946, 0.024078880310058594, 0.02408239936828613, 0.024182783126831055, 0.024086687088012697, 0.02415190315246582, 0.02400624084472656, 0.024101280212402345, 0.024213504791259766, 0.024939104080200194, 0.02444643211364746, 0.02437990379333496, 0.024098848342895506, 0.024403968811035157, 0.024262655258178712, 0.024234079360961915, 0.024127456665039064, 0.02403887939453125, 0.02405219268798828, 0.02411520004272461, 0.024199167251586915, 0.024014848709106446, 0.024102304458618166, 0.023904735565185548, 0.02399807929992676, 0.024029504776000975, 0.024412351608276366, 0.024082143783569335, 0.024084768295288085, 0.024352767944335937, 0.024145919799804686, 0.024008703231811524, 0.024139488220214844, 0.024482080459594727, 0.024043008804321288, 0.024365568161010744, 0.024154111862182616, 0.02410675239562988, 0.024309152603149413, 0.02396860885620117, 0.024162303924560546, 0.023995807647705078, 0.02420591926574707, 0.024118400573730468, 0.024156095504760743, 0.02384569549560547, 0.023885440826416016, 0.023863807678222656, 0.0239554557800293, 0.023777280807495117, 0.02389811134338379, 0.023952735900878906, 0.024134304046630858, 0.023842559814453126, 0.02398627281188965, 0.023926944732666017, 0.024053760528564453, 0.023948352813720705, 0.023982559204101563, 0.02425222396850586, 0.023880352020263673, 0.023960607528686523, 0.02397433662414551, 0.02388400077819824, 0.0239619197845459, 0.024193023681640623, 0.023916543960571288, 0.023889728546142578, 0.024037504196166994, 0.023939136505126954, 0.024020736694335937, 0.023937023162841797, 0.025196191787719726, 0.024606592178344728, 0.02454140853881836, 0.024554880142211914, 0.024314752578735353, 0.024415840148925783, 0.02437276840209961, 0.02402742385864258, 0.024427040100097656, 0.024045631408691405, 0.024006656646728516, 0.02405344009399414, 0.02411961555480957, 0.02409267234802246, 0.025022464752197264, 0.024235424041748048, 0.024240703582763673, 0.024184864044189455, 0.024270048141479494, 0.024080511093139648, 0.024117919921875, 0.024072288513183594, 0.024296512603759767, 0.02435568046569824, 0.02490982437133789, 0.02446950340270996, 0.024435935974121095, 0.02416099166870117, 0.0242906551361084, 0.024148704528808594, 0.02425436782836914, 0.024051807403564454, 0.024100351333618163, 0.0240064640045166, 0.024115903854370117, 0.0241212158203125, 0.024785024642944336, 0.024049280166625976, 0.0243656005859375, 0.02433612823486328, 0.024221792221069335, 0.02451046371459961, 0.02409881591796875, 0.02427199935913086, 0.024073087692260742, 0.023990272521972656, 0.02405068778991699, 0.024068416595458983, 0.024308704376220704, 0.024245983123779298, 0.024229888916015626, 0.024049663543701173, 0.02406809616088867, 0.02397590446472168, 0.02414588737487793, 0.024278976440429687, 0.024035455703735352, 0.02411529541015625, 0.024556800842285155, 0.02446143913269043, 0.024262655258178712, 0.024029727935791015, 0.024162303924560546, 0.025216928482055666, 0.024672224044799806, 0.024418880462646484, 0.02443791961669922, 0.024330591201782225, 0.02436761665344238, 0.024373247146606446, 0.02429542350769043, 0.024243263244628905, 0.025101503372192382, 0.02546214485168457, 0.024768320083618164, 0.024276992797851563, 0.024184480667114257, 0.024148160934448243, 0.02408064079284668, 0.02426697540283203, 0.024111616134643556, 0.024157920837402345, 0.02445100784301758, 
0.024256607055664063, 0.024110111236572265, 0.02417558479309082, 0.02402083206176758, 0.02416655921936035, 0.024376672744750978, 0.024373920440673828, 0.024010751724243166, 0.024162303924560546, 0.02413065528869629, 0.024378271102905275, 0.024276992797851563, 0.024280384063720704, 0.024144575119018553, 0.024201215744018553, 0.024158079147338866, 0.024237920761108398, 0.024107295989990233, 0.024266752243041992, 0.02432204818725586, 0.02430975914001465, 0.02422332763671875, 0.024039743423461914, 0.024220991134643554, 0.02423017692565918, 0.02400716781616211, 0.02415001678466797, 0.024081504821777344, 0.02437753677368164, 0.02476851272583008, 0.024730335235595702, 0.02452275276184082, 0.02493440055847168, 0.02430771255493164, 0.024319999694824217, 0.023983743667602538, 0.024121376037597658, 0.024013151168823244, 0.024171648025512697, 0.023880416870117188, 0.023988576889038087, 0.024069952011108398, 0.024238079071044923, 0.025776384353637695, 0.02457161521911621, 0.02430182456970215, 0.024302879333496095, 0.024239904403686525, 0.024267040252685546, 0.024331199645996095, 0.024284704208374024, 0.024092416763305664, 0.024201215744018553, 0.02398371124267578, 0.024017759323120117, 0.02411520004272461, 0.024044544219970702, 0.023954143524169923, 0.023936864852905273, 0.02402342414855957, 0.0244039363861084, 0.0240897274017334, 0.023953983306884766, 0.02390678405761719, 0.02395136070251465, 0.024079584121704103, 0.0241889591217041, 0.02402579116821289, 0.024059200286865236, 0.02398908805847168, 0.02402022361755371, 0.024277376174926757, 0.02390447998046875, 0.023996223449707033, 0.02398988723754883, 0.024025087356567384, 0.02421561622619629, 0.02415843200683594, 0.023954879760742186, 0.024062816619873046, 0.024034944534301758, 0.023967424392700196, 0.02390255928039551, 0.02390252876281738, 0.02400464057922363, 0.02401478385925293, 0.02393027114868164, 0.02392073631286621, 0.023849567413330077, 0.024033248901367186, 0.024004608154296874, 0.02428313636779785, 0.024088607788085938, 0.024063520431518555, 0.024043424606323242, 0.02403936004638672, 0.024161983489990234, 0.02405059242248535, 0.024131584167480468, 0.0241081600189209, 0.024140672683715822, 0.024395263671875, 0.02438604736328125, 0.02445871925354004, 0.02482774353027344, 0.024597183227539062, 0.025075712203979493, 0.024388736724853515, 0.024377344131469726, 0.02415216064453125, 0.02405251121520996, 0.0243558406829834, 0.024287712097167968, 0.024523391723632812, 0.023990175247192384, 0.024066047668457033, 0.02575155258178711, 0.024874624252319337, 0.024357248306274414, 0.024149280548095703, 0.02503548812866211, 0.023994367599487306, 0.023988224029541014, 0.023977983474731446, 0.024149503707885742, 0.024046079635620117, 0.024174240112304686, 0.024102975845336914, 0.02400079917907715, 0.024006656646728516, 0.02407219123840332, 0.024033023834228517, 0.023953664779663087, 0.02415011215209961, 0.023997568130493165, 0.024214303970336915, 0.024300575256347656, 0.024310720443725585, 0.024387615203857422, 0.024444927215576173, 0.02433228874206543, 0.024258560180664062, 0.02413750457763672, 0.023978208541870116, 0.02393052864074707, 0.02426464080810547, 0.023820480346679686, 0.02386092758178711, 0.02394576072692871, 0.024424448013305664, 0.024235712051391602, 0.024097087860107422, 0.024020992279052734, 0.024065759658813475, 0.025335071563720703, 0.02407846450805664, 0.02404751968383789, 0.023886816024780273, 0.024193023681640623, 0.024069536209106446, 0.02417535972595215, 0.02402902412414551, 0.02413363265991211, 0.024080223083496093, 
0.024041631698608398, 0.024057855606079103, 0.02411929512023926, 0.024215551376342775, 0.024190048217773437, 0.02520921516418457, 0.025106496810913086, 0.024487167358398437, 0.02450876808166504, 0.024359136581420898, 0.024460447311401366, 0.02432057571411133, 0.024309856414794922, 0.024346431732177733, 0.024326528549194336, 0.02433856010437012, 0.024200544357299805, 0.024258495330810547, 0.024318687438964842, 0.02429747200012207, 0.02442255973815918, 0.024688032150268553, 0.024344415664672853, 0.02447420883178711, 0.024748031616210937, 0.024672447204589845, 0.026105535507202147, 0.02728153610229492, 0.024778751373291014, 0.024489984512329102, 0.024391647338867187, 0.02424825668334961, 0.024436704635620116, 0.024303199768066407, 0.024539392471313478, 0.024286687850952147, 0.02420512008666992, 0.02446335983276367, 0.024132095336914062, 0.02460518455505371, 0.02612224006652832, 0.029525983810424806, 0.027832256317138673, 0.02467647933959961, 0.024830944061279298, 0.02445801544189453, 0.024273120880126953, 0.024352767944335937, 0.024449024200439453, 0.024303615570068358, 0.024127071380615234, 0.02414633560180664, 0.024004575729370117, 0.024126527786254882, 0.024167232513427735, 0.02419113540649414, 0.02411110305786133, 0.024270431518554687, 0.02415862464904785, 0.024208383560180666, 0.02413260841369629, 0.024155712127685545, 0.02421993637084961, 0.025552896499633788, 0.02432534408569336, 0.024218143463134764, 0.02463171195983887, 0.024199167251586915, 0.02503708839416504, 0.02447145652770996, 0.02431804847717285, 0.024223520278930665, 0.024234207153320312, 0.0245534725189209, 0.02434662437438965, 0.024473600387573242, 0.024294784545898437, 0.024849023818969727, 0.024790143966674803, 0.024775232315063477, 0.02492624092102051, 0.02478927993774414, 0.02455084800720215, 0.02490220832824707, 0.024257984161376953, 0.02417635154724121, 0.02419183921813965, 0.024225791931152343, 0.024147968292236328, 0.024090560913085937, 0.024096832275390626, 0.02411235237121582, 0.02407913589477539, 0.024061952590942383, 0.02445254325866699, 0.02450899124145508, 0.02498387145996094, 0.024376800537109375, 0.024223552703857423, 0.0242873592376709, 0.024610431671142578, 0.024361631393432618, 0.024281024932861328, 0.024440895080566405, 0.024442880630493165, 0.024348127365112306, 0.024187423706054687, 0.024225791931152343, 0.02424415969848633, 0.02417795181274414, 0.024178495407104494, 0.024056095123291016, 0.023915199279785155, 0.024094720840454102, 0.024311296463012694, 0.024442975997924804, 0.024174463272094725, 0.024332063674926758, 0.024287424087524413, 0.02451718330383301, 0.024338144302368164, 0.024440576553344726, 0.024345119476318358, 0.024567743301391602, 0.02443881607055664, 0.024547264099121092, 0.024123008728027345, 0.02424630355834961, 0.024299968719482423, 0.024190847396850585, 0.024036544799804688]",tokens/s,41.19083667185116,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1577.934848,1830.682624,0.0,1428.160512,1322.516992,s,1,8.4893681640625,8.4893681640625,0.0,8.4893681640625,8.4893681640625,8.4893681640625,8.4893681640625,[8.4893681640625],,kWh,4.261665718333158e-05,4.693066557231918e-06,1.544417902199502e-05,6.275390276255852e-05,,MB,1404.882944,1851.654144,0.0,1434.451968,1322.072064,s,10,5.821099060058595,0.5821099060058594,0.003052036463480814,0.5821749267578125,0.5834613891601563,0.5860191162109375,0.5880652978515625,"[0.5885768432617188, 0.582186279296875, 0.5828930053710938, 0.5816031494140625, 0.5822227783203126, 0.582510986328125, 0.5750269775390625, 0.581782470703125, 0.5821329956054687, 0.58216357421875]",tokens/s,439.7794941449135,kWh,1.7382077610536904e-05,1.9169354052489373e-06,1.1568195529058014e-05,3.086720854484385e-05,tokens/kWh,8293590.903371889,MB,1424.850944,1851.654144,0.0,1434.451968,1374.923264,s,10,11.045701049804688,1.1045701049804688,0.0032780045076496096,1.1056564331054688,1.1084706787109375,1.1085881225585938,1.1086820776367188,"[1.108444580078125, 1.10619384765625, 1.10870556640625, 1.10000341796875, 1.106984619140625, 1.106602783203125, 1.0991595458984376, 1.1051190185546875, 1.1031064453125, 1.1013812255859374]",tokens/s,57.035764154701596,kWh,3.185534633321501e-05,3.51359809087334e-06,1.7329299811144125e-05,5.269824423523247e-05,tokens/kWh,1195485.7493692376,,s,630,11.0433478164673,0.01752912351820204,0.00042757420868205033,0.01743928050994873,0.01772744312286377,0.018004723739624025,0.019618373584747315,"[0.020017248153686523, 0.01784649658203125, 0.017644992828369142, 0.01753926467895508, 0.01753494453430176, 0.01761734390258789, 0.017670495986938477, 0.01758438491821289, 0.017522432327270507, 0.017454399108886718, 0.017509056091308595, 0.0174036808013916, 0.01737321662902832, 0.017706304550170898, 0.017680864334106445, 0.017871103286743163, 0.01793244743347168, 0.01794047927856445, 0.01795686340332031, 0.017633119583129884, 0.017753856658935547, 0.0175250244140625, 0.017539360046386718, 0.017612287521362305, 0.01737772750854492, 0.017459327697753907, 0.01764124870300293, 0.01745075225830078, 0.017659488677978515, 0.01743235206604004, 0.017663007736206056, 0.017577823638916017, 0.017473535537719728, 0.017532928466796875, 0.017483776092529296, 0.017544960021972655, 0.017522560119628907, 0.017658239364624025, 0.01746352005004883, 0.01749737548828125, 0.017570016860961914, 0.01756716728210449, 0.01744691276550293, 0.017445728302001952, 0.017563583374023438, 0.01753843116760254, 0.017638080596923827, 0.017549312591552735, 0.01746329689025879, 0.017473535537719728, 0.017500160217285156, 0.017534303665161132, 0.01732809638977051, 0.01746566390991211, 0.017365663528442384, 0.017335903167724608, 0.01744499206542969, 0.017383424758911133, 0.017358848571777344, 0.017595712661743163, 0.017383712768554688, 0.017396127700805664, 0.017558784484863282, 0.0185118408203125, 0.01804102325439453, 0.017693119049072267, 0.017538047790527343, 0.017632255554199217, 0.017865823745727538, 0.01962691116333008, 0.01773577690124512, 0.017605728149414062, 0.017433408737182618, 0.017591552734375, 0.017367744445800783, 0.0173875846862793, 0.01737014389038086, 0.017605600357055665, 0.017383424758911133, 0.01741823959350586, 0.017332223892211913, 0.017426431655883787, 0.017375232696533204, 0.01730352020263672, 0.017326112747192382, 0.01733830451965332, 0.017563711166381835, 0.0174465274810791, 0.017471071243286132, 0.0174716796875, 0.017410688400268555, 0.017592096328735353, 
0.017466848373413085, 0.017609439849853515, 0.017549312591552735, 0.017555456161499023, 0.017462303161621093, 0.01745724868774414, 0.01741827201843262, 0.0173919677734375, 0.017448991775512696, 0.017615327835083006, 0.017455295562744142, 0.01741529655456543, 0.017466047286987304, 0.017344512939453126, 0.01735878372192383, 0.017414207458496093, 0.01740390396118164, 0.017472576141357422, 0.017512800216674805, 0.017490079879760742, 0.017405759811401366, 0.0174718074798584, 0.01748204803466797, 0.017491872787475587, 0.01748124885559082, 0.01750454330444336, 0.017794975280761717, 0.017585599899291992, 0.01758099174499512, 0.017555360794067384, 0.017641504287719725, 0.01764463996887207, 0.01750934410095215, 0.017633279800415038, 0.01856585693359375, 0.01813478469848633, 0.017571903228759764, 0.01745734405517578, 0.01742585563659668, 0.01739833641052246, 0.017535104751586914, 0.017403776168823243, 0.017438880920410155, 0.017505887985229493, 0.017508735656738283, 0.017288448333740235, 0.017312383651733397, 0.017291263580322267, 0.017383424758911133, 0.017385471343994142, 0.017537023544311522, 0.017358848571777344, 0.017426591873168945, 0.01745110321044922, 0.017581760406494142, 0.017448896408081054, 0.01745318412780762, 0.017459199905395507, 0.017407327651977538, 0.017365663528442384, 0.017391616821289063, 0.01734764862060547, 0.01734137535095215, 0.01738051223754883, 0.017273696899414062, 0.01726406478881836, 0.017263423919677733, 0.017259456634521483, 0.017361759185791015, 0.017330144882202147, 0.017418336868286134, 0.017325983047485352, 0.0174653434753418, 0.01723401641845703, 0.017272960662841796, 0.01729420852661133, 0.018234399795532225, 0.020801408767700195, 0.023414783477783203, 0.01768876838684082, 0.0175402889251709, 0.01735744094848633, 0.017432096481323243, 0.017347360610961916, 0.017411775588989258, 0.017454687118530272, 0.017435039520263672, 0.017331455230712892, 0.01745395278930664, 0.017294944763183592, 0.017412160873413084, 0.017334175109863282, 0.01737468719482422, 0.017439104080200194, 0.017536640167236328, 0.01745587158203125, 0.017393760681152344, 0.018739103317260742, 0.018063167572021484, 0.017864736557006836, 0.017604320526123048, 0.017543743133544922, 0.01737718391418457, 0.017287168502807617, 0.017358144760131835, 0.017305631637573243, 0.01746601676940918, 0.017448703765869142, 0.01734681510925293, 0.017696767807006835, 0.01740390396118164, 0.017555456161499023, 0.017487680435180664, 0.017332416534423828, 0.01736012840270996, 0.017374176025390625, 0.017348384857177733, 0.01741312026977539, 0.017427616119384766, 0.01743062400817871, 0.01752038383483887, 0.017391616821289063, 0.01742848014831543, 0.01739776039123535, 0.017548927307128905, 0.017455072402954103, 0.01731011199951172, 0.017328128814697266, 0.017317888259887695, 0.01735481643676758, 0.01732192039489746, 0.017481664657592773, 0.017420000076293945, 0.017305952072143554, 0.017309024810791017, 0.017450784683227537, 0.017339263916015625, 0.01738956832885742, 0.017354751586914064, 0.017362943649291994, 0.017407039642333984, 0.01743788719177246, 0.017381120681762695, 0.01740083122253418, 0.017430976867675783, 0.01740652847290039, 0.017475584030151366, 0.01745088005065918, 0.017410175323486328, 0.017524063110351564, 0.017457599639892577, 0.01741152000427246, 0.017445663452148437, 0.017431615829467773, 0.017500864028930665, 0.01734067153930664, 0.01761894416809082, 0.017332191467285155, 0.01746303939819336, 0.017426719665527345, 0.018692319869995117, 0.01804243278503418, 0.01763523292541504, 0.01738159942626953, 
0.017528928756713868, 0.017500383377075195, 0.017504159927368163, 0.017270111083984376, 0.017353471755981446, 0.01730463981628418, 0.017204416275024413, 0.017442176818847657, 0.017307680130004884, 0.01730726432800293, 0.017332447052001952, 0.019597471237182616, 0.01841391944885254, 0.0174202880859375, 0.017375200271606446, 0.01732352066040039, 0.01763145637512207, 0.01740348815917969, 0.01737392044067383, 0.01737932777404785, 0.01728102493286133, 0.01739776039123535, 0.0172728328704834, 0.01726464080810547, 0.017299264907836915, 0.01725651168823242, 0.017741376876831055, 0.017455776214599608, 0.017656063079833983, 0.017553056716918945, 0.01744486427307129, 0.01742848014831543, 0.017442304611206053, 0.017384992599487305, 0.017855264663696288, 0.017494335174560546, 0.01734841537475586, 0.017285280227661133, 0.017440671920776366, 0.017438720703125, 0.01777382469177246, 0.018000640869140626, 0.01761689567565918, 0.017516000747680664, 0.01745359992980957, 0.017411584854125976, 0.01742255973815918, 0.017401632308959962, 0.019899967193603516, 0.017447071075439455, 0.01746950340270996, 0.017439456939697267, 0.01751865577697754, 0.017477792739868166, 0.017694496154785157, 0.01741414451599121, 0.01743667221069336, 0.017432479858398436, 0.01746745681762695, 0.018593856811523438, 0.017975200653076173, 0.01768592071533203, 0.017460031509399412, 0.017542943954467774, 0.01768057632446289, 0.01759382438659668, 0.017731935501098632, 0.01764963150024414, 0.017632959365844726, 0.01774627113342285, 0.01782374382019043, 0.018069503784179687, 0.017794975280761717, 0.017764448165893554, 0.017630975723266603, 0.01762656021118164, 0.017630016326904297, 0.017633087158203126, 0.01785260772705078, 0.017717248916625978, 0.01763942337036133, 0.017479679107666016, 0.01745894432067871, 0.01735308837890625, 0.017291135787963867, 0.01745075225830078, 0.017367584228515625, 0.01738313674926758, 0.017340576171875, 0.017381216049194338, 0.017360895156860352, 0.017317472457885744, 0.017388095855712892, 0.017370464324951172, 0.01729974365234375, 0.017307872772216796, 0.01725644874572754, 0.017385440826416014, 0.01725472068786621, 0.017490911483764648, 0.017351327896118166, 0.01729955291748047, 0.017326143264770506, 0.017286880493164063, 0.01753094482421875, 0.01755766487121582, 0.01805721664428711, 0.01794047927856445, 0.01770844841003418, 0.017940448760986327, 0.017681024551391603, 0.01772694396972656, 0.017561792373657226, 0.017505983352661132, 0.017334943771362306, 0.01745510482788086, 0.01739571189880371, 0.01743177604675293, 0.01742313575744629, 0.017504255294799806, 0.017439807891845703, 0.01749087905883789, 0.018495744705200195, 0.018132991790771484, 0.017594079971313476, 0.017512447357177736, 0.017471744537353517, 0.017516672134399416, 0.01783184051513672, 0.017580032348632812, 0.017511648178100588, 0.017449440002441405, 0.017409824371337892, 0.017500703811645507, 0.01755465507507324, 0.01748796844482422, 0.017399904251098632, 0.017432256698608397, 0.0174355525970459, 0.01741209602355957, 0.017385471343994142, 0.01739776039123535, 0.017348608016967772, 0.017309696197509765, 0.017334495544433594, 0.017325855255126952, 0.01744076728820801, 0.017450143814086914, 0.01742915153503418, 0.01742051124572754, 0.01746339225769043, 0.017405824661254884, 0.01744198417663574, 0.017367136001586913, 0.017257183074951173, 0.017358848571777344, 0.01741347122192383, 0.017334144592285158, 0.017281408309936522, 0.01735628890991211, 0.01732636833190918, 0.017338623046875, 0.017493728637695313, 0.01734543991088867, 0.01742336082458496, 
0.017396480560302734, 0.017336320877075196, 0.017404991149902342, 0.01731065559387207, 0.017920000076293945, 0.017380704879760744, 0.01733286476135254, 0.01732521629333496, 0.01730614471435547, 0.017326431274414064, 0.017372512817382814, 0.017282751083374022, 0.01734115219116211, 0.017254175186157225, 0.0174617919921875, 0.01752262306213379, 0.01744486427307129, 0.017246208190917968, 0.017309696197509765, 0.017456672668457032, 0.020726207733154298, 0.018008064270019532, 0.017768447875976562, 0.017632543563842775, 0.017521055221557617, 0.01745088005065918, 0.017492031097412108, 0.017337760925292968, 0.017445087432861328, 0.017408767700195314, 0.017435680389404296, 0.01734124755859375, 0.01733180809020996, 0.017334304809570312, 0.01741263961791992, 0.017339967727661134, 0.017280576705932617, 0.017326656341552733, 0.017718944549560547, 0.017555456161499023, 0.017380096435546874, 0.01735260772705078, 0.017337791442871092, 0.017387327194213868, 0.017541887283325196, 0.017622304916381837, 0.01755187225341797, 0.017541343688964844, 0.01758323287963867, 0.017609695434570312, 0.017680288314819336, 0.01750592041015625, 0.017498720169067384, 0.017481727600097655, 0.017514272689819334, 0.017430816650390625, 0.0174814395904541, 0.017425888061523436, 0.017378847122192384, 0.017497087478637697, 0.01741004753112793, 0.017369087219238282, 0.017809215545654296, 0.017674272537231445, 0.017583295822143553, 0.017605855941772462, 0.01745075225830078, 0.0173832950592041, 0.017501663208007813, 0.017594079971313476, 0.017429248809814453, 0.017534975051879884, 0.01750752067565918, 0.0175830078125, 0.01749964714050293, 0.017555423736572266, 0.017423999786376952, 0.017463424682617187, 0.017371328353881835, 0.017380319595336913, 0.017348320007324218, 0.01737513542175293, 0.017356895446777345, 0.01864089584350586, 0.017889152526855467, 0.017552608489990233, 0.01756048011779785, 0.01755731201171875, 0.017612800598144532, 0.017561792373657226, 0.017354751586914064, 0.017499296188354493, 0.01738252830505371, 0.01739276885986328, 0.017425216674804688, 0.017403968811035155, 0.017606367111206056, 0.017584127426147463, 0.01736662483215332, 0.017504287719726563, 0.018209344863891603, 0.018063167572021484, 0.017537023544311522, 0.01741414451599121, 0.01737343978881836, 0.017309440612792968, 0.017291263580322267, 0.017227615356445312, 0.01733990478515625, 0.017325759887695313, 0.017310688018798828, 0.01731328010559082, 0.017342624664306642, 0.01733363151550293, 0.017363712310791014, 0.017344736099243165, 0.017346559524536134, 0.017369087219238282, 0.017319936752319336, 0.018558752059936522, 0.018519807815551757, 0.017529184341430665, 0.017444416046142577, 0.0173819522857666, 0.017323904037475586, 0.017531007766723634, 0.0173090877532959, 0.017488351821899416, 0.017475711822509767, 0.017547264099121093, 0.01744419288635254, 0.01763974380493164, 0.017303903579711913, 0.01740595245361328, 0.01754252815246582, 0.017402496337890625, 0.017299455642700197, 0.01734822463989258, 0.017246847152709962, 0.017782272338867186, 0.01753932762145996, 0.017332096099853516, 0.01721766471862793, 0.01738096046447754, 0.01730137634277344, 0.017553695678710936, 0.018605728149414063, 0.017819263458251952, 0.020245023727416992, 0.017614912033081055, 0.017715328216552733, 0.01765328025817871, 0.017959392547607422, 0.017489280700683594, 0.018025087356567382, 0.017710176467895508, 0.0174355525970459, 0.017541088104248048, 0.01740188789367676, 0.017378719329833984, 0.01733468818664551, 0.01739731216430664, 0.017375871658325194, 0.01736729621887207, 
0.0173055362701416, 0.017273887634277344, 0.01733033561706543, 0.017387392044067383, 0.01728998374938965, 0.017350656509399414, 0.017456256866455078, 0.017422815322875977, 0.017375648498535155, 0.01729955291748047, 0.017423328399658204, 0.01737619209289551, 0.017336320877075196, 0.017372415542602538, 0.017283840179443358, 0.017264095306396485, 0.01737740707397461, 0.017392032623291014, 0.017510528564453124, 0.017454368591308594, 0.017401824951171874, 0.017373823165893556, 0.01740595245361328, 0.01731295967102051, 0.017275711059570313, 0.017266143798828126, 0.01720783996582031, 0.017201152801513672, 0.017278528213500975, 0.01728761672973633, 0.017331455230712892, 0.01741081619262695, 0.017252479553222656, 0.017229408264160157, 0.017244224548339845, 0.01721776008605957, 0.017274751663208007, 0.01734876823425293, 0.017285087585449218, 0.0172728328704834, 0.0173855037689209, 0.017362688064575197, 0.017349920272827148, 0.01823753547668457, 0.017562463760375978]",tokens/s,57.04791793848742,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1068.35968,874.381312,0.0,488.636416,482.553856,s,1,7.9416318359375,7.9416318359375,0.0,7.9416318359375,7.9416318359375,7.9416318359375,7.9416318359375,[7.9416318359375],,kWh,2.282997047915766e-05,2.5110728185080824e-06,7.735283966026074e-06,3.307632726369182e-05,,MB,1352.2944,1033.764864,0.0,616.562688,581.925888,s,10,0.3426809310913086,0.03426809310913086,0.00021561512630907155,0.034208703994750976,0.03449870796203613,0.0346219144821167,0.03472047969818115,"[0.03444438552856445, 0.03403363037109375, 0.034286174774169925, 0.034162689208984375, 0.03474512100219727, 0.03424275207519531, 0.03417465591430664, 0.03447132873535156, 0.034023681640625, 0.03409651184082031]",tokens/s,7470.506140646265,kWh,1.0528663027577382e-06,1.1611232301006529e-07,6.96580293474813e-07,1.8655589192426166e-06,tokens/kWh,137224290.99367785,MB,1391.403008,1048.444928,0.0,631.242752,597.192192,s,10,14.854388427734374,1.4854388427734375,0.0072443893363181204,1.4881287231445313,1.4925449707031249,1.4929033447265625,1.4931900439453125,"[1.4794061279296875, 1.488220947265625, 1.49326171875, 1.4912325439453125, 1.4911302490234375, 1.49246533203125, 1.4880364990234376, 1.4814072265625, 1.469777099609375, 1.47945068359375]",tokens/s,42.411709042409164,kWh,4.2769091471833244e-05,4.7168268310236025e-06,1.6959322720122172e-05,6.444524102297902e-05,tokens/kWh,977574.1233947173,,s,630,14.848834779739377,0.023569579015459335,0.0003443773790665014,0.023508495330810546,0.023902095603942873,0.024109512424468994,0.024928350620269775,"[0.02309350395202637, 0.02347987174987793, 0.0234968318939209, 0.02353990364074707, 0.023349472045898437, 0.023486175537109376, 0.023398496627807616, 0.023414304733276367, 0.023412799835205077, 0.02363475227355957, 0.023658336639404295, 0.02360963249206543, 0.02342268753051758, 0.023315744400024416, 0.023251935958862303, 
0.023320320129394532, 0.023167999267578124, 0.02341177558898926, 0.023291839599609374, 0.023352384567260742, 0.023284671783447265, 0.023438720703125, 0.023240896224975587, 0.023204063415527342, 0.023134431838989257, 0.023283199310302736, 0.023223136901855467, 0.02328495979309082, 0.023340576171875, 0.02329897689819336, 0.02333286476135254, 0.023355392456054686, 0.02337900733947754, 0.02344646453857422, 0.02373955154418945, 0.024010944366455077, 0.024384159088134766, 0.024912128448486327, 0.024061695098876953, 0.023795711517333985, 0.023823360443115234, 0.02395404815673828, 0.023857536315917968, 0.023594560623168944, 0.02351686477661133, 0.023779199600219725, 0.023466079711914063, 0.023360288619995118, 0.02344550323486328, 0.02329804801940918, 0.02327337646484375, 0.023297279357910157, 0.023398656845092774, 0.023386720657348634, 0.023510976791381834, 0.02356435203552246, 0.023363744735717774, 0.023223199844360352, 0.023344064712524416, 0.023240703582763672, 0.02332819175720215, 0.023296575546264648, 0.023285343170166017, 0.02287843132019043, 0.023250623703002928, 0.02325993537902832, 0.023205215454101563, 0.02333942413330078, 0.02358812713623047, 0.023468767166137695, 0.024325567245483397, 0.02589958381652832, 0.02392268753051758, 0.023541759490966797, 0.02400454330444336, 0.023797311782836915, 0.02397808074951172, 0.023959968566894533, 0.024014848709106446, 0.02412723159790039, 0.02380326461791992, 0.02385804748535156, 0.02371347236633301, 0.023472448348999024, 0.02350284767150879, 0.023427072525024413, 0.023771135330200196, 0.02375913619995117, 0.023606399536132812, 0.023695743560791016, 0.023419296264648438, 0.023479360580444336, 0.023364351272583007, 0.02349875259399414, 0.023705408096313475, 0.023426496505737304, 0.023323135375976564, 0.02355449676513672, 0.02340380859375, 0.023253824234008787, 0.02343846321105957, 0.023310943603515624, 0.023390207290649414, 0.023397695541381835, 0.023421951293945312, 0.023340896606445314, 0.02357052803039551, 0.023549312591552733, 0.02344988822937012, 0.023421024322509764, 0.023523584365844726, 0.023408031463623045, 0.023666559219360353, 0.023719743728637697, 0.023921600341796877, 0.02367487907409668, 0.023516895294189454, 0.023644159317016602, 0.023521631240844727, 0.02388547134399414, 0.023592960357666014, 0.02377302360534668, 0.0234968318939209, 0.023369760513305665, 0.023608320236206053, 0.02345062446594238, 0.023121503829956053, 0.023541536331176758, 0.023790048599243163, 0.023543968200683593, 0.023631872177124022, 0.02354787254333496, 0.02348793601989746, 0.023626527786254882, 0.023692415237426757, 0.023736671447753908, 0.023780799865722655, 0.023737247467041016, 0.023751712799072264, 0.023931488037109375, 0.023918272018432617, 0.023928768157958986, 0.023530431747436523, 0.02355414390563965, 0.024236896514892577, 0.0235732479095459, 0.023569952011108397, 0.023470687866210937, 0.0235828800201416, 0.023475231170654295, 0.023468544006347656, 0.02359071922302246, 0.023648767471313475, 0.02361257553100586, 0.023627679824829103, 0.023495616912841796, 0.02367487907409668, 0.023582719802856447, 0.023715967178344725, 0.023733856201171875, 0.02377244758605957, 0.023880767822265624, 0.024061887741088868, 0.02390425682067871, 0.02435465621948242, 0.02382975959777832, 0.023622528076171875, 0.023676544189453124, 0.02367967987060547, 0.023729888916015626, 0.024262399673461915, 0.023853311538696288, 0.023820287704467775, 0.02363369560241699, 0.023799840927124023, 0.023505088806152343, 0.023560192108154295, 0.023495807647705078, 0.023641216278076173, 
0.023507999420166015, 0.023497568130493165, 0.023585792541503905, 0.023636768341064453, 0.02390185546875, 0.024309600830078125, 0.023578784942626954, 0.023566783905029295, 0.023639808654785155, 0.023478015899658203, 0.023549951553344727, 0.02364825630187988, 0.023627775192260742, 0.02360870361328125, 0.023671072006225587, 0.023803680419921876, 0.023648832321166994, 0.02347417640686035, 0.023498111724853516, 0.023674816131591798, 0.023511199951171874, 0.023798303604125978, 0.024450111389160156, 0.025185216903686525, 0.024029184341430664, 0.02377507209777832, 0.02364204788208008, 0.023554271697998046, 0.023405759811401368, 0.02337260818481445, 0.02344153594970703, 0.02341075134277344, 0.023704959869384767, 0.023556543350219727, 0.023711456298828124, 0.02345097541809082, 0.023432479858398438, 0.02344105529785156, 0.02364556884765625, 0.023519872665405273, 0.023490400314331056, 0.023654560089111328, 0.023464223861694337, 0.02342268753051758, 0.023440607070922853, 0.023434015274047853, 0.023617279052734374, 0.02362393569946289, 0.02353152084350586, 0.02353971290588379, 0.023394399642944336, 0.023506847381591797, 0.023547903060913086, 0.02354351997375488, 0.023437599182128906, 0.02355955123901367, 0.023399328231811522, 0.025486112594604492, 0.02379257583618164, 0.023692991256713865, 0.023746879577636718, 0.023537343978881835, 0.02366640090942383, 0.02381622314453125, 0.023705888748168945, 0.02379599952697754, 0.024167839050292968, 0.023759456634521486, 0.02362883186340332, 0.02353887939453125, 0.023541696548461916, 0.023507904052734375, 0.02342006492614746, 0.023518943786621095, 0.023767744064331055, 0.02394976043701172, 0.023680864334106447, 0.023597888946533203, 0.02359334373474121, 0.02357948875427246, 0.023529184341430663, 0.023682559967041016, 0.026663488388061523, 0.023674367904663086, 0.02354038429260254, 0.024189952850341798, 0.023648096084594727, 0.023692319869995117, 0.023763967514038087, 0.023831520080566406, 0.023554048538208007, 0.023375743865966796, 0.023386240005493164, 0.023603456497192383, 0.02337686347961426, 0.02360758399963379, 0.02361961555480957, 0.023362016677856444, 0.023605247497558594, 0.023508991241455078, 0.023465248107910158, 0.023476959228515625, 0.023533056259155274, 0.02349667167663574, 0.023365760803222658, 0.023638015747070314, 0.02367657661437988, 0.02365635108947754, 0.023413600921630858, 0.0237260799407959, 0.0234967041015625, 0.023476224899291992, 0.023425024032592775, 0.023457664489746094, 0.02354787254333496, 0.023571680068969727, 0.02358348846435547, 0.023627967834472657, 0.02348575973510742, 0.023345632553100584, 0.023377727508544922, 0.023494144439697266, 0.023660703659057616, 0.023851871490478516, 0.02500704002380371, 0.023890911102294923, 0.023512319564819337, 0.023634559631347658, 0.02366476821899414, 0.023478271484375, 0.023490463256835938, 0.023500768661499024, 0.023634048461914064, 0.023828479766845705, 0.023764991760253908, 0.02343503952026367, 0.02328348731994629, 0.023570592880249024, 0.02346224021911621, 0.02353286361694336, 0.023794368743896486, 0.023768831253051757, 0.024117504119873047, 0.024037439346313475, 0.024187007904052735, 0.024235584259033202, 0.024125696182250977, 0.024129695892333984, 0.02433417510986328, 0.02415542411804199, 0.02412544059753418, 0.02375881576538086, 0.02356915283203125, 0.023481536865234375, 0.02349465560913086, 0.023505727767944337, 0.02396348762512207, 0.023660512924194337, 0.023697439193725585, 0.02346403121948242, 0.023406591415405274, 0.02351862335205078, 0.023582752227783204, 0.023496320724487305, 
0.02346905517578125, 0.023662591934204103, 0.023512704849243164, 0.023466367721557618, 0.023472127914428712, 0.02364735984802246, 0.023442144393920897, 0.02365167999267578, 0.023491392135620116, 0.023523296356201172, 0.023530847549438478, 0.024050111770629882, 0.023797567367553712, 0.02365683174133301, 0.02352511978149414, 0.023638336181640626, 0.023358783721923827, 0.023607328414916993, 0.02371014404296875, 0.02350022315979004, 0.023463872909545897, 0.023630207061767577, 0.023565919876098632, 0.023558975219726563, 0.02391993522644043, 0.02409974479675293, 0.023881343841552733, 0.023685407638549805, 0.02353318405151367, 0.023505216598510743, 0.02355401611328125, 0.02344553565979004, 0.023554048538208007, 0.02350492858886719, 0.02384588813781738, 0.023396352767944335, 0.023541759490966797, 0.023549951553344727, 0.023666688919067383, 0.023513088226318358, 0.02367283248901367, 0.02346188735961914, 0.023451648712158202, 0.023451072692871094, 0.02493497657775879, 0.02413145637512207, 0.023727487564086915, 0.023636735916137696, 0.02356947135925293, 0.023311296463012696, 0.023468032836914062, 0.02332057571411133, 0.023605247497558594, 0.024004608154296874, 0.023818239212036133, 0.02365452766418457, 0.023744255065917968, 0.023668863296508788, 0.023649311065673827, 0.02362876892089844, 0.02365785598754883, 0.023575040817260744, 0.023686656951904295, 0.023579008102416994, 0.02385536003112793, 0.023611391067504883, 0.023601152420043944, 0.023744543075561522, 0.023671903610229493, 0.02359164810180664, 0.02354191970825195, 0.023451648712158202, 0.02354915237426758, 0.02352204895019531, 0.023466016769409178, 0.02382044792175293, 0.02375859260559082, 0.023702880859375, 0.02394393539428711, 0.02350611114501953, 0.023490655899047853, 0.023733983993530272, 0.023409215927124024, 0.023436800003051757, 0.023463935852050782, 0.023411520004272462, 0.023462015151977537, 0.023457344055175782, 0.023564735412597657, 0.023390207290649414, 0.023316320419311524, 0.023351104736328124, 0.02355753517150879, 0.02332972717285156, 0.023373823165893554, 0.02338764762878418, 0.023908895492553713, 0.02397737693786621, 0.02323219108581543, 0.023464799880981446, 0.024365087509155274, 0.023367328643798826, 0.023293664932250976, 0.023310688018798827, 0.023422847747802733, 0.023537824630737305, 0.024018239974975587, 0.024132095336914062, 0.023954656600952147, 0.023821247100830077, 0.023543039321899415, 0.023431167602539063, 0.023351072311401367, 0.023434207916259765, 0.023425119400024414, 0.023429023742675782, 0.023629823684692384, 0.02342911911010742, 0.02329395294189453, 0.02325481605529785, 0.02355129623413086, 0.023310976028442384, 0.023779199600219725, 0.02338150405883789, 0.02346486473083496, 0.02337718391418457, 0.023470815658569337, 0.023401664733886718, 0.023501567840576172, 0.023299455642700195, 0.02328646469116211, 0.02320150375366211, 0.023230752944946288, 0.02327756881713867, 0.023219520568847657, 0.02327008056640625, 0.02327142333984375, 0.023150592803955077, 0.023357440948486328, 0.02357062339782715, 0.023626815795898436, 0.023946048736572266, 0.02660960006713867, 0.024164703369140624, 0.02370886421203613, 0.02344598388671875, 0.02369740867614746, 0.023508991241455078, 0.0233123836517334, 0.02326870346069336, 0.023288000106811525, 0.023312416076660156, 0.023369855880737304, 0.02334547233581543, 0.02340656089782715, 0.02318329620361328, 0.023234655380249023, 0.023224319458007812, 0.023173120498657225, 0.0232704963684082, 0.023261503219604494, 0.022988832473754883, 0.023558143615722657, 0.023875423431396484, 
0.023296031951904297, 0.02315817642211914, 0.023095136642456056, 0.023266048431396485, 0.023100704193115235, 0.02332963180541992, 0.0231824951171875, 0.02318182373046875, 0.02326540756225586, 0.023287647247314452, 0.023071104049682618, 0.023125120162963867, 0.023276416778564454, 0.02326348876953125, 0.023182527542114258, 0.023162752151489257, 0.023658624649047853, 0.02330691146850586, 0.023684000015258787, 0.023583744049072267, 0.023267040252685545, 0.023326143264770508, 0.023391071319580077, 0.023142400741577147, 0.02342073631286621, 0.023238847732543946, 0.023728288650512696, 0.023347200393676756, 0.023453535079956053, 0.023330816268920897, 0.023400447845458985, 0.023287616729736327, 0.023197887420654296, 0.023171072006225587, 0.0232857608795166, 0.02333616065979004, 0.023345632553100584, 0.02329631996154785, 0.02319708824157715, 0.023265888214111328, 0.023301279067993164, 0.02335011291503906, 0.023457664489746094, 0.023500928878784178, 0.02335247993469238, 0.02327801513671875, 0.023475967407226562, 0.023394975662231445, 0.023443456649780273, 0.02341593551635742, 0.02333785629272461, 0.023348928451538086, 0.0231713924407959, 0.023228416442871092, 0.023265024185180665, 0.02332316780090332, 0.023293727874755858, 0.023306047439575196, 0.02327564811706543, 0.02336319923400879, 0.02319561576843262, 0.023599039077758788, 0.023603967666625977, 0.023566335678100587, 0.023834815979003908, 0.0238590087890625, 0.024184032440185545, 0.023993120193481446, 0.0239554557800293, 0.02381523132324219, 0.02389311981201172, 0.02449542427062988, 0.024070655822753906, 0.023868928909301756, 0.023730688095092774, 0.023504896163940428, 0.02336502456665039, 0.023315135955810546, 0.023256256103515626, 0.02331945610046387, 0.02335628890991211, 0.02334777641296387, 0.023434816360473634, 0.023298688888549805, 0.02323695945739746, 0.02339958381652832, 0.023202495574951174, 0.02333286476135254, 0.02366454315185547, 0.023470176696777343, 0.023378944396972655, 0.023333343505859375, 0.02333497619628906, 0.023271808624267577, 0.023256511688232423, 0.023181983947753906, 0.023165088653564453, 0.02343049621582031, 0.023334495544433592, 0.023380895614624024, 0.023247072219848633, 0.0232589111328125, 0.023222272872924804, 0.02325267219543457, 0.02322012710571289, 0.023493024826049806, 0.023379520416259767, 0.02328166389465332, 0.02340902328491211, 0.023259199142456055, 0.023658496856689453, 0.02356831932067871, 0.023289920806884766, 0.02345683288574219, 0.023278528213500977, 0.023439359664916993, 0.023436351776123045, 0.023352256774902345, 0.02343343925476074, 0.023506336212158203, 0.0235830078125, 0.023316415786743164, 0.023312543869018553]",tokens/s,42.427571546530295,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1575.989248,1553.85856,0.0,1168.113664,1154.613248,s,1,8.3609306640625,8.3609306640625,0.0,8.3609306640625,8.3609306640625,8.3609306640625,8.3609306640625,[8.3609306640625],,kWh,3.452151820829386e-05,3.8006526669543884e-06,1.1646953761990098e-05,4.9969124637238344e-05,,MB,1540.83328,1784.54528,0.0,1369.440256,1323.44832,s,10,0.778748001098633,0.07787480010986329,0.0013949311070111992,0.07796204757690431,0.07929342575073242,0.0794167781829834,0.07951546012878419,"[0.07954013061523438, 0.07850444793701172, 0.07858013153076172, 0.07734806060791016, 0.07798838043212891, 0.07793571472167969, 0.07421536254882813, 0.07790611267089843, 0.0792660140991211, 0.0774636459350586]",tokens/s,3287.3278600888007,kWh,2.53256366612844e-06,2.792932919426841e-07,1.6722115868103091e-06,4.484068544881434e-06,tokens/kWh,57091009.523532845,MB,1547.014144,1805.5168,0.0,1390.411776,1377.26208,s,10,14.509163696289063,1.4509163696289062,0.00425365713752737,1.4513443603515626,1.4567287841796874,1.4568821533203125,1.4570048486328124,"[1.4514412841796875, 1.4512474365234376, 1.446343017578125, 1.4519345703125, 1.4478619384765625, 1.4570355224609375, 1.4510238037109375, 1.453216796875, 1.4566947021484375, 1.4423646240234376]",tokens/s,43.42083480394752,kWh,4.194423758971311e-05,4.62609602382244e-06,2.0782806377188427e-05,6.735313999072399e-05,tokens/kWh,935368.4179932292,,s,630,14.503784038543696,0.023021879426259842,0.0005322910303474362,0.022890800476074218,0.023390700149536134,0.023716588497161864,0.025567808361053476,"[0.023088672637939452, 0.02297536087036133, 0.02306662368774414, 0.023059776306152344, 0.022973375320434572, 0.022986303329467772, 0.02287424087524414, 0.022744192123413085, 0.023147455215454103, 0.02267523193359375, 0.022649055480957032, 0.022568960189819336, 0.022841344833374022, 0.02275916862487793, 0.02271776008605957, 0.022979520797729493, 0.023401695251464842, 0.023513311386108397, 0.023372352600097655, 0.02328371238708496, 0.02296124839782715, 0.022934335708618164, 0.022796607971191405, 0.022837024688720703, 0.0227740478515625, 0.02283900833129883, 0.022789791107177736, 0.022795679092407227, 0.022924224853515626, 0.022837247848510742, 0.02294169616699219, 0.022844959259033203, 0.02271232032775879, 0.02305039978027344, 0.023445823669433593, 0.024557567596435546, 0.024649728775024415, 0.023610624313354492, 0.023306304931640626, 0.02352179145812988, 0.023286144256591798, 0.02344278335571289, 0.02343574333190918, 0.023267328262329103, 0.022804479598999023, 0.02276777648925781, 0.022830944061279296, 0.022790143966674805, 0.02264451217651367, 0.022788320541381836, 0.022931455612182617, 0.02287401580810547, 0.022956127166748046, 0.02292633628845215, 0.023047136306762694, 0.0230031681060791, 0.022982048034667968, 0.022812543869018556, 0.022948095321655274, 0.022935039520263673, 0.023031936645507813, 0.02300809669494629, 0.02288640022277832, 0.023117504119873046, 0.023185247421264647, 0.02289302444458008, 0.022922271728515624, 0.022778432846069337, 0.022739007949829103, 0.022733152389526366, 0.02283942413330078, 0.022808448791503906, 0.022677183151245117, 0.022825183868408202, 0.022618207931518555, 0.022607135772705077, 0.02281545639038086, 0.02266316795349121, 0.022757375717163086, 0.02289664077758789, 0.022855680465698244, 0.022871936798095703, 0.02279772758483887, 0.022811199188232423, 0.02295724868774414, 0.022842336654663085, 0.023009279251098632, 0.023129568099975586, 0.023009824752807616, 0.022921215057373046, 0.023078176498413087, 
0.023227104187011717, 0.023324256896972657, 0.02317763137817383, 0.022988800048828126, 0.022988639831542968, 0.022954143524169923, 0.023169023513793945, 0.02312396812438965, 0.023212032318115236, 0.023459680557250978, 0.023746719360351564, 0.02358460807800293, 0.023311775207519533, 0.023278335571289062, 0.023375871658325196, 0.022986175537109375, 0.022970687866210936, 0.023013631820678712, 0.02314035224914551, 0.023110719680786134, 0.02302457618713379, 0.02293337631225586, 0.023107711791992187, 0.023152639389038086, 0.02305843162536621, 0.023127679824829103, 0.02297648048400879, 0.02288604736328125, 0.022870784759521486, 0.022737056732177734, 0.022998079299926758, 0.02508448028564453, 0.023068895339965822, 0.022858848571777345, 0.02282383918762207, 0.02327142333984375, 0.023244672775268555, 0.023148672103881836, 0.023006784439086915, 0.022902624130249023, 0.022860383987426756, 0.023017471313476562, 0.02286367988586426, 0.02293110466003418, 0.02278006362915039, 0.02286630439758301, 0.022757375717163086, 0.022883680343627928, 0.023220352172851563, 0.023106016159057618, 0.02324038314819336, 0.023372159957885743, 0.02328313636779785, 0.023058399200439453, 0.023124576568603516, 0.02306800079345703, 0.02293827247619629, 0.02292736053466797, 0.022904224395751953, 0.02273859214782715, 0.022731103897094727, 0.022837024688720703, 0.022802751541137697, 0.02316339111328125, 0.022881439208984375, 0.022880319595336915, 0.022756128311157225, 0.022820863723754883, 0.02270412826538086, 0.022825088500976563, 0.022834720611572264, 0.022943647384643554, 0.02298681640625, 0.022919551849365234, 0.022779199600219728, 0.02269657516479492, 0.02266636848449707, 0.02279929542541504, 0.022757375717163086, 0.022869375228881834, 0.02299087905883789, 0.022932159423828126, 0.02276278305053711, 0.022725248336791994, 0.022726655960083008, 0.02285977554321289, 0.022758752822875976, 0.023034528732299806, 0.022790143966674805, 0.02276348876953125, 0.02271843147277832, 0.022833215713500978, 0.022871551513671876, 0.02326579284667969, 0.023209535598754882, 0.023445951461791993, 0.023424959182739256, 0.023328832626342774, 0.02403296089172363, 0.023578943252563475, 0.02310495948791504, 0.022923423767089845, 0.022858144760131836, 0.02285158348083496, 0.022951839447021484, 0.02300124740600586, 0.02299283218383789, 0.02286796760559082, 0.02288435173034668, 0.02515558433532715, 0.024395776748657227, 0.023037952423095705, 0.02307593536376953, 0.022964544296264648, 0.022950496673583985, 0.023007232666015624, 0.02287843132019043, 0.02271824073791504, 0.023055551528930664, 0.022923839569091796, 0.023017728805541992, 0.02291302490234375, 0.02292531204223633, 0.02285158348083496, 0.02301535987854004, 0.02319308853149414, 0.02453971290588379, 0.023158496856689453, 0.023331104278564455, 0.02312556838989258, 0.022998720169067382, 0.02290505599975586, 0.02286422348022461, 0.022792127609252928, 0.022773920059204103, 0.02275472068786621, 0.022706880569458007, 0.022878175735473634, 0.02283318328857422, 0.022805887222290037, 0.02277030372619629, 0.022734848022460938, 0.022711551666259766, 0.022857919692993164, 0.022803007125854494, 0.02291859245300293, 0.022913600921630858, 0.022935104370117188, 0.02282659149169922, 0.02296713638305664, 0.022960128784179686, 0.02319705581665039, 0.023159423828125, 0.023117824554443358, 0.02294166374206543, 0.022966304779052735, 0.022916128158569336, 0.022772512435913085, 0.022804672241210938, 0.022748544692993165, 0.022843936920166015, 0.024041696548461913, 0.023023168563842775, 0.022837791442871094, 
0.02292736053466797, 0.022937023162841796, 0.022726207733154296, 0.02274764823913574, 0.022917631149291993, 0.022929023742675782, 0.023347583770751953, 0.02308073616027832, 0.02337321662902832, 0.023673120498657226, 0.02375529670715332, 0.02315673637390137, 0.023068864822387694, 0.022965471267700197, 0.022959840774536132, 0.022939712524414062, 0.02294816017150879, 0.023046655654907225, 0.022956127166748046, 0.0228822078704834, 0.022736000061035155, 0.022698207855224608, 0.022733055114746093, 0.022593952178955077, 0.022824256896972657, 0.023007936477661133, 0.02297590446472168, 0.022892480850219728, 0.022921279907226564, 0.022780448913574218, 0.022939712524414062, 0.023037952423095705, 0.022718719482421875, 0.022589183807373046, 0.022763519287109374, 0.022642047882080078, 0.022717216491699218, 0.022740831375122072, 0.022767616271972657, 0.022761472702026365, 0.02271455955505371, 0.02279609680175781, 0.0228185920715332, 0.022726879119873047, 0.022820863723754883, 0.02280563163757324, 0.022776800155639647, 0.022742368698120116, 0.02332320022583008, 0.022970048904418946, 0.02293280029296875, 0.02325935935974121, 0.023154783248901366, 0.023521984100341797, 0.02316041564941406, 0.02318182373046875, 0.02307472038269043, 0.02288640022277832, 0.0237172794342041, 0.022760032653808594, 0.02396406364440918, 0.023405120849609374, 0.023390207290649414, 0.02324505615234375, 0.02331644821166992, 0.023416608810424806, 0.023080575942993165, 0.02308857536315918, 0.0230031681060791, 0.022893632888793945, 0.022978143692016603, 0.02318880081176758, 0.023652639389038086, 0.02288912010192871, 0.022865055084228515, 0.022813535690307616, 0.022898687362670898, 0.022839040756225587, 0.022747488021850587, 0.022908832550048826, 0.022730752944946288, 0.022850719451904297, 0.022844255447387694, 0.022740575790405275, 0.022775360107421875, 0.022803104400634766, 0.02279251289367676, 0.022819807052612304, 0.022823808670043945, 0.022842880249023437, 0.02286345672607422, 0.02279110336303711, 0.02679507255554199, 0.023818399429321287, 0.023630624771118165, 0.02409267234802246, 0.022781951904296875, 0.022603776931762694, 0.02309087944030762, 0.022815135955810546, 0.022656927108764647, 0.022720703125, 0.022714271545410156, 0.02284329605102539, 0.02274675178527832, 0.0227270393371582, 0.022718015670776366, 0.02312031936645508, 0.024291391372680663, 0.022695871353149415, 0.022634496688842775, 0.02291302490234375, 0.022781951904296875, 0.023040000915527343, 0.023197696685791015, 0.0235536003112793, 0.02334339141845703, 0.023322784423828125, 0.023627647399902342, 0.023146591186523437, 0.02352729606628418, 0.023373983383178712, 0.023177215576171875, 0.02403721618652344, 0.023778879165649414, 0.022866527557373048, 0.023009279251098632, 0.02290483283996582, 0.02278758430480957, 0.022618528366088866, 0.022784095764160156, 0.023107200622558593, 0.02296460723876953, 0.025650239944458007, 0.0256746883392334, 0.023316511154174803, 0.02293462371826172, 0.022846336364746093, 0.022899967193603515, 0.022612159729003906, 0.022697664260864257, 0.022619007110595703, 0.022676799774169924, 0.022681503295898436, 0.022620704650878905, 0.022563072204589845, 0.022589439392089843, 0.02271232032775879, 0.022726015090942384, 0.02275328063964844, 0.022685632705688477, 0.022788799285888672, 0.022761472702026365, 0.022689695358276366, 0.022687936782836916, 0.022738847732543945, 0.022629728317260744, 0.02269455909729004, 0.022690975189208984, 0.022640832901000975, 0.022635168075561523, 0.022658239364624022, 0.022705215454101563, 0.02261916732788086, 
0.022585376739501953, 0.02363257598876953, 0.023715744018554686, 0.022617984771728515, 0.0227043514251709, 0.022822303771972655, 0.02265353584289551, 0.022666400909423828, 0.02283401679992676, 0.022775264739990236, 0.023007648468017578, 0.022997119903564452, 0.023576576232910155, 0.023615327835083008, 0.02319276809692383, 0.027462879180908204, 0.023021312713623048, 0.022777536392211913, 0.022662656784057617, 0.022636991500854492, 0.022605344772338866, 0.02275823974609375, 0.022923263549804687, 0.02282921600341797, 0.023172351837158205, 0.025632768630981444, 0.02449622344970703, 0.02381654357910156, 0.023711200714111327, 0.023144287109375, 0.022941919326782228, 0.023054975509643555, 0.022761760711669923, 0.0228853759765625, 0.02271433639526367, 0.022639360427856445, 0.02276323127746582, 0.022628223419189453, 0.022862239837646483, 0.022925056457519532, 0.023154144287109376, 0.023544607162475587, 0.02333830451965332, 0.023153343200683595, 0.02323865509033203, 0.023084928512573242, 0.023051584243774414, 0.023126848220825197, 0.02294169616699219, 0.02290278434753418, 0.022912031173706056, 0.022764511108398437, 0.022748767852783205, 0.02273731231689453, 0.02288649559020996, 0.02304400062561035, 0.027314144134521483, 0.023066591262817383, 0.022929471969604494, 0.02269753646850586, 0.022774208068847657, 0.022880352020263672, 0.022908672332763672, 0.022927520751953125, 0.022685279846191408, 0.022718879699707033, 0.022721599578857422, 0.022856639862060546, 0.02269913673400879, 0.02282159996032715, 0.022927263259887695, 0.022892799377441406, 0.022773759841918945, 0.022845439910888672, 0.02322163200378418, 0.02285539245605469, 0.022780832290649415, 0.02275328063964844, 0.02273427200317383, 0.02269856071472168, 0.02274508857727051, 0.022760896682739257, 0.02264931106567383, 0.022671680450439453, 0.02264249610900879, 0.023347200393676756, 0.02320355224609375, 0.02299728012084961, 0.022841344833374022, 0.022805952072143556, 0.023149120330810548, 0.023629823684692384, 0.023095008850097656, 0.02356630325317383, 0.023123455047607423, 0.02292572784423828, 0.02311609649658203, 0.025408767700195314, 0.025119743347167968, 0.023047807693481446, 0.022876672744750977, 0.022879936218261718, 0.022917152404785156, 0.022864992141723633, 0.022724512100219727, 0.022674432754516603, 0.0227523193359375, 0.022893503189086915, 0.02277779197692871, 0.02258675193786621, 0.02259187126159668, 0.02277180862426758, 0.022818784713745117, 0.02525004768371582, 0.02409676742553711, 0.02293065643310547, 0.02307766342163086, 0.022847488403320314, 0.022777759552001953, 0.02265507125854492, 0.023326719284057617, 0.022816768646240236, 0.02299452781677246, 0.022665088653564452, 0.022600223541259765, 0.02259324836730957, 0.02269152069091797, 0.022737503051757812, 0.022724607467651366, 0.022788095474243163, 0.02274051284790039, 0.022663072586059572, 0.0229051513671875, 0.0228702392578125, 0.022849056243896486, 0.02279680061340332, 0.02277516746520996, 0.02292799949645996, 0.024067104339599608, 0.022834367752075195, 0.022712095260620117, 0.022964223861694336, 0.027473344802856445, 0.023081535339355468, 0.022791231155395508, 0.022834112167358398, 0.022837247848510742, 0.022765567779541016, 0.023517248153686523, 0.023057119369506836, 0.023236928939819337, 0.022901664733886717, 0.023012319564819337, 0.022799680709838867, 0.02277475166320801, 0.02285968017578125, 0.023098304748535158, 0.022719039916992187, 0.02294816017150879, 0.02303171157836914, 0.02290287971496582, 0.022780191421508788, 0.022823968887329103, 0.022770368576049804, 
0.022826143264770508, 0.022737760543823243, 0.022742080688476562, 0.02280339241027832, 0.02281785583496094, 0.02301024055480957, 0.023127647399902345, 0.02281923294067383, 0.022784000396728517, 0.022939647674560547, 0.022923263549804687, 0.02307209587097168, 0.02308777618408203, 0.02309939193725586, 0.022904928207397462, 0.022651968002319337, 0.02274390411376953, 0.024063167572021486, 0.023395135879516603, 0.02308915138244629, 0.023070880889892578, 0.023121599197387696, 0.023052448272705077, 0.022858976364135742, 0.022776479721069335, 0.022888511657714845, 0.02277791976928711, 0.022896255493164062, 0.022791711807250977, 0.022702560424804688, 0.02283353614807129, 0.022683231353759766, 0.022860063552856445, 0.022722688674926758, 0.022650367736816408, 0.022673919677734376, 0.02264473533630371, 0.022673088073730467, 0.022690399169921875, 0.022666976928710936, 0.022722560882568358, 0.022674751281738282, 0.022608512878417968, 0.02260793685913086, 0.022681407928466797, 0.02276131248474121, 0.02268707275390625]",tokens/s,43.43694020303802,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 
564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = 
self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,6757.380096,9179.103232,0.0,8776.58112,8188.314112,s,1,13.911306640625,13.911306640625,0.0,13.911306640625,13.911306640625,13.911306640625,13.911306640625,[13.911306640625],,kWh,0.0001931991353250396,2.1279811507146977e-05,8.704229185602097e-05,0.00030152123868820756,,MB,1668.1984,9193.783296,0.0,8776.58112,7654.291456,s,10,54.00182861328125,5.400182861328124,0.007725065581188414,5.3994658203125,5.408142236328125,5.410999340820313,5.413285024414062,"[5.387064453125, 5.3932001953125, 5.39271728515625, 5.39710009765625, 5.39768603515625, 5.40124560546875, 5.40493310546875, 5.40651806640625, 5.40750732421875, 5.4138564453125]",tokens/s,47.405802094827436,kWh,0.0001577664852837491,1.740210587005296e-05,0.00010483516720139751,0.00028000375835519957,tokens/kWh,914273.4422701943,MB,1671.94624,9193.783296,0.0,8776.58112,7908.775424,s,10,25.630825683593752,2.563082568359375,0.0028335166914363764,2.5629377441406254,2.566312670898437,2.567445031738281,2.5683509204101562,"[2.559715087890625, 2.562753662109375, 2.558194091796875, 2.562730224609375, 2.563903564453125, 2.56130322265625, 2.56606103515625, 2.563121826171875, 
2.564465576171875, 2.568577392578125]",tokens/s,24.5797777948005,kWh,7.456537864999972e-05,8.225113199060636e-06,4.971440088260282e-05,0.0001325048927316632,tokens/kWh,475454.14136202383,,s,630,25.62733416366578,0.04067830819629487,0.00044013133849250797,0.04064996719360352,0.04130856857299805,0.04150696716308594,0.04173847675323487,"[0.04150067138671875, 0.04055036926269531, 0.039970848083496095, 0.04044972610473633, 0.0395984001159668, 0.03997219085693359, 0.03995427322387695, 0.03994297790527344, 0.0401954231262207, 0.0399571533203125, 0.04025062561035156, 0.04045286560058594, 0.04014284896850586, 0.040030208587646485, 0.0401769905090332, 0.04046611022949219, 0.04029315185546875, 0.04022905731201172, 0.04036345672607422, 0.04048543930053711, 0.040697856903076174, 0.04078979110717774, 0.04060496139526367, 0.04054521560668945, 0.040476673126220705, 0.040647838592529295, 0.04054512023925781, 0.04044793701171875, 0.04041036987304687, 0.040334144592285154, 0.04033087921142578, 0.04033779144287109, 0.04042099380493164, 0.04054857635498047, 0.040487071990966794, 0.04056607818603516, 0.04076739120483398, 0.04069046401977539, 0.04047836685180664, 0.040456001281738284, 0.04042934417724609, 0.04053449630737305, 0.04123881530761719, 0.0414266242980957, 0.04112403106689453, 0.04059695816040039, 0.040724224090576175, 0.040788894653320314, 0.04110521697998047, 0.041396446228027343, 0.04160713577270508, 0.041193473815917966, 0.040736766815185545, 0.04071148681640625, 0.04097708892822265, 0.04096803283691406, 0.04100425720214844, 0.04110636901855469, 0.04099071884155273, 0.0417894401550293, 0.04155187225341797, 0.041139904022216796, 0.04068998336791992, 0.0415852165222168, 0.040225982666015625, 0.03990131378173828, 0.03990959930419922, 0.03988729476928711, 0.039937343597412106, 0.039924030303955076, 0.040175998687744144, 0.04046031951904297, 0.04074697494506836, 0.04058272171020508, 0.040231361389160156, 0.040065025329589846, 0.040357887268066404, 0.04024518585205078, 0.04040892791748047, 0.04043535995483399, 0.040316574096679686, 0.0404420166015625, 0.04043635177612305, 0.04032729721069336, 0.04048076629638672, 0.04069171142578125, 0.041442783355712894, 0.04118172836303711, 0.04041436767578125, 0.04031942367553711, 0.0404607048034668, 0.04031488037109375, 0.04031068801879883, 0.04011404800415039, 0.04082304000854492, 0.04127856063842773, 0.041544544219970704, 0.04092316818237305, 0.04063846588134765, 0.04051968002319336, 0.04061347198486328, 0.0407371826171875, 0.04074627304077148, 0.04058287811279297, 0.04067020797729492, 0.04065075302124024, 0.04087398529052735, 0.040714241027832034, 0.040822784423828126, 0.040871936798095705, 0.04130815887451172, 0.041033153533935544, 0.04058988952636719, 0.0406789436340332, 0.04068102264404297, 0.040744895935058596, 0.04072323226928711, 0.041474239349365234, 0.04086374282836914, 0.04078540802001953, 0.04087187194824219, 0.04128825759887695, 0.041799678802490234, 0.041545726776123046, 0.041508766174316404, 0.04115465545654297, 0.04147017669677734, 0.040548351287841795, 0.03995590209960938, 0.03986489486694336, 0.04007724761962891, 0.040032318115234375, 0.039907329559326174, 0.0397737922668457, 0.039926174163818356, 0.0398267822265625, 0.04009801483154297, 0.04025126266479492, 0.04020873641967773, 0.04035606384277344, 0.04015513610839844, 0.040114177703857425, 0.04018380737304687, 0.040216255187988284, 0.0402250862121582, 0.04046847915649414, 0.040574462890625, 0.04054230499267578, 0.04033686447143554, 0.04086249542236328, 0.04097244644165039, 0.04058265686035156, 
0.04023961639404297, 0.04025139236450195, 0.040389694213867185, 0.0403540153503418, 0.04034633636474609, 0.04040499114990234, 0.04055039978027344, 0.04055436706542969, 0.0406385612487793, 0.04068560028076172, 0.04080640029907227, 0.040927230834960936, 0.04077363204956055, 0.04066243362426758, 0.040712799072265625, 0.041093120574951174, 0.04079795074462891, 0.040638721466064454, 0.040758975982666014, 0.04062239837646484, 0.04072230529785156, 0.04128575897216797, 0.04117715072631836, 0.04072582244873047, 0.04081059265136719, 0.041156158447265626, 0.041828704833984376, 0.04146172714233398, 0.0411060791015625, 0.040766815185546874, 0.040653472900390626, 0.04079206466674805, 0.040822528839111326, 0.0408408317565918, 0.04119308853149414, 0.04126617431640625, 0.04153548812866211, 0.04150476837158203, 0.040574977874755856, 0.040292350769042966, 0.04000483322143555, 0.03992160034179688, 0.03992828750610351, 0.03994867324829102, 0.039948287963867186, 0.040261470794677734, 0.04013891220092773, 0.04042083358764648, 0.04036457443237305, 0.04023091125488281, 0.040114177703857425, 0.04036022567749024, 0.0402388801574707, 0.040435199737548826, 0.04077203369140625, 0.04072447967529297, 0.04054771041870117, 0.04082956695556641, 0.04043366241455078, 0.040292350769042966, 0.040597023010253905, 0.04080883026123047, 0.04104777526855469, 0.04060383987426758, 0.04024496078491211, 0.04029391860961914, 0.040428062438964844, 0.0403600959777832, 0.040484161376953126, 0.040481727600097654, 0.04035379028320313, 0.040421089172363284, 0.04071657562255859, 0.04095584106445312, 0.04062009429931641, 0.04072243118286133, 0.040771583557128906, 0.04076688003540039, 0.041330593109130856, 0.04130271911621094, 0.041232383728027344, 0.04096614456176758, 0.040973888397216794, 0.040780223846435544, 0.04078790283203125, 0.0408289909362793, 0.0409620475769043, 0.041312255859375, 0.04120576095581055, 0.04106140899658203, 0.041167713165283205, 0.04081881713867187, 0.041164798736572264, 0.04170735931396485, 0.04097859191894531, 0.04101283264160156, 0.04091088104248047, 0.04092144012451172, 0.04108214569091797, 0.040896255493164065, 0.041431488037109374, 0.04015161514282226, 0.040030208587646485, 0.03992083358764648, 0.04003923034667969, 0.04004988861083984, 0.04007539367675781, 0.04000966262817383, 0.04015996932983398, 0.04043503952026367, 0.040204288482666016, 0.04041107177734375, 0.04046499252319336, 0.040622207641601564, 0.04034115219116211, 0.04008176040649414, 0.04009535980224609, 0.04013235092163086, 0.04015987014770508, 0.04019305419921875, 0.04039369583129883, 0.04025958251953125, 0.04054185485839844, 0.040806751251220706, 0.04086111831665039, 0.04066566467285156, 0.04058313751220703, 0.040415264129638674, 0.04039606475830078, 0.040350433349609374, 0.04056419372558594, 0.040743457794189454, 0.04054748916625977, 0.04054246520996094, 0.04089622497558594, 0.04085644912719726, 0.040837120056152344, 0.04064448165893555, 0.040796287536621095, 0.041166847229003906, 0.04108083343505859, 0.04065894317626953, 0.040543838500976564, 0.04067334365844726, 0.04090678405761719, 0.04081286239624023, 0.04075724792480469, 0.04105011367797851, 0.041291553497314455, 0.041285694122314455, 0.04096966552734375, 0.04155583953857422, 0.04096259307861328, 0.041029953002929685, 0.04098457717895508, 0.04103782272338867, 0.04103168106079102, 0.04150447845458984, 0.04140675354003906, 0.04181926345825195, 0.041294719696044924, 0.04162966537475586, 0.04135935974121094, 0.041738559722900394, 0.0409620475769043, 0.040323070526123043, 0.04019353485107422, 
0.04022118377685547, 0.040457374572753904, 0.040285022735595706, 0.03993142318725586, 0.04030828857421875, 0.04040739059448242, 0.040016448974609375, 0.04005475234985351, 0.040417312622070316, 0.04041020965576172, 0.04014153671264648, 0.040032447814941405, 0.0401874885559082, 0.04067164611816406, 0.0403592643737793, 0.04030940628051758, 0.04028006362915039, 0.040494720458984376, 0.04062246322631836, 0.040775230407714844, 0.04057952117919922, 0.04053606414794922, 0.04033078384399414, 0.04047100830078125, 0.04073267364501953, 0.040925182342529294, 0.04044950485229492, 0.04035433578491211, 0.040497150421142575, 0.04046755218505859, 0.040401214599609374, 0.040419742584228514, 0.04068985748291016, 0.040578529357910155, 0.04052409744262695, 0.040517856597900394, 0.04053401565551758, 0.040372161865234374, 0.04055043029785156, 0.04084739303588867, 0.040699905395507815, 0.040775390625, 0.04099440002441406, 0.041210174560546875, 0.04168947219848633, 0.04154687881469726, 0.04137664031982422, 0.04087807846069336, 0.04103535842895508, 0.04076927947998047, 0.0408009262084961, 0.0413487663269043, 0.04095564651489258, 0.041118305206298826, 0.041541473388671875, 0.04106399917602539, 0.04073241424560547, 0.040943614959716795, 0.04107478332519531, 0.041537406921386716, 0.040609375, 0.04013929748535156, 0.04007526397705078, 0.04030668640136719, 0.040262687683105466, 0.04029334259033203, 0.04081795120239258, 0.04057891082763672, 0.040524383544921876, 0.040236671447753905, 0.04036188888549805, 0.040067840576171875, 0.04036524963378906, 0.04037001419067383, 0.04090972900390625, 0.04072639846801758, 0.04044819259643555, 0.0401448974609375, 0.04040697479248047, 0.04047264099121094, 0.04037958526611328, 0.04088915252685547, 0.04109104156494141, 0.04067536163330078, 0.04025548934936524, 0.04056038284301758, 0.0404134407043457, 0.040318977355957034, 0.04058844757080078, 0.040667552947998044, 0.041226593017578125, 0.041252960205078126, 0.041027584075927735, 0.0407347183227539, 0.04065075302124024, 0.0407256965637207, 0.04073555374145508, 0.04095558547973633, 0.041236801147460936, 0.04135116958618164, 0.04096553421020508, 0.04061040115356445, 0.04077568054199219, 0.04067942428588867, 0.0407691535949707, 0.041304256439208986, 0.04070377731323242, 0.04090512084960937, 0.04090454483032226, 0.04064422225952148, 0.04044240188598633, 0.040519615173339844, 0.040708160400390624, 0.04081868743896484, 0.040820735931396485, 0.04135321426391601, 0.041695232391357424, 0.0413573112487793, 0.040953025817871094, 0.04100787353515625, 0.04091910552978516, 0.04142694473266602, 0.04149452972412109, 0.04026572799682617, 0.03971686553955078, 0.03973324966430664, 0.03979616165161133, 0.039914047241210934, 0.039989246368408206, 0.04033683013916016, 0.0403419189453125, 0.0400316162109375, 0.04006991958618164, 0.040304641723632816, 0.04031283187866211, 0.04016073608398438, 0.040083999633789065, 0.040412193298339845, 0.040334304809570315, 0.04032281494140625, 0.04031103897094727, 0.040286209106445314, 0.04049081420898438, 0.04072876739501953, 0.04063625717163086, 0.04044780731201172, 0.04059580612182617, 0.04055244827270508, 0.04041523361206055, 0.04063759994506836, 0.040675262451171874, 0.04041616058349609, 0.04079363250732422, 0.04055292892456055, 0.04074905776977539, 0.0405684814453125, 0.040482944488525394, 0.040441471099853514, 0.04041993713378906, 0.04064665603637695, 0.04066019058227539, 0.040661792755126956, 0.040486785888671876, 0.040752960205078126, 0.040779998779296875, 0.04085958480834961, 0.04073283386230469, 0.04114636611938476, 
0.04170342254638672, 0.04155392074584961, 0.041299232482910155, 0.04114505767822266, 0.04132406234741211, 0.041818592071533205, 0.041442783355712894, 0.041049793243408204, 0.04137660980224609, 0.0413757438659668, 0.04125286483764649, 0.04163948822021484, 0.041202110290527345, 0.04080559921264648, 0.04131510543823242, 0.04117452621459961, 0.04074265670776367, 0.04154985427856445, 0.04075212860107422, 0.040344417572021486, 0.040040576934814456, 0.04003430557250977, 0.04009481430053711, 0.04028713607788086, 0.0405340461730957, 0.04036796951293945, 0.04011635208129883, 0.04030054473876953, 0.040320510864257815, 0.040650718688964846, 0.04066355133056641, 0.04046803283691406, 0.04014947128295898, 0.040091617584228516, 0.04041321563720703, 0.040243198394775394, 0.04016742324829101, 0.04017881774902344, 0.04020054244995117, 0.04038623809814453, 0.04074991989135742, 0.04109481430053711, 0.04066339111328125, 0.04060160064697266, 0.04067532730102539, 0.04035772705078125, 0.04080246353149414, 0.04106159973144531, 0.04105849456787109, 0.0404752311706543, 0.04038230514526367, 0.04046659088134766, 0.04077500915527344, 0.040807071685791015, 0.0407347183227539, 0.04114604949951172, 0.04130438232421875, 0.04129584121704102, 0.04075302505493164, 0.04061199951171875, 0.04057702255249023, 0.04059312057495117, 0.04069609451293945, 0.04072857666015625, 0.04093718338012695, 0.04082057571411133, 0.04096585464477539, 0.04087881469726563, 0.040921089172363284, 0.040928321838378905, 0.04126998519897461, 0.041672927856445316, 0.041637889862060545, 0.04127126312255859, 0.040769569396972655, 0.040959423065185546, 0.04083055877685547, 0.041290721893310546, 0.04102540969848633, 0.04118745422363281, 0.04173827362060547, 0.04102115249633789, 0.04054451370239258, 0.040542209625244144, 0.04021430587768555, 0.04013427352905274, 0.039989856719970705, 0.04014668655395508, 0.04015539169311524, 0.04016128158569336, 0.040321025848388675, 0.04045004653930664, 0.04031283187866211, 0.04039193725585938, 0.040729183197021485, 0.04082624053955078, 0.040620800018310546, 0.0404859504699707, 0.04030972671508789, 0.0401797103881836, 0.04032921600341797, 0.04049075317382812, 0.04081638336181641, 0.04064921569824219, 0.04092102432250976, 0.04092115020751953, 0.04078182220458984, 0.0410885124206543, 0.040727039337158204, 0.04044595336914063, 0.040357887268066404, 0.040359935760498046, 0.04046137619018555, 0.040549312591552734, 0.04056051254272461, 0.04088025665283203, 0.040871936798095705, 0.040793567657470706, 0.040744991302490235, 0.04137420654296875, 0.041422496795654296, 0.04140457534790039, 0.04035094451904297, 0.0403647346496582, 0.04066847991943359, 0.040702686309814454, 0.0412756462097168, 0.041371646881103515, 0.040828929901123044, 0.04086700820922851, 0.04081894302368164, 0.04112134552001953, 0.0415877456665039, 0.04186928176879883, 0.0413757438659668, 0.04102143859863281, 0.04100067138671875, 0.040841503143310545, 0.04079539108276367, 0.04080511856079101, 0.041178592681884764, 0.04157904052734375, 0.04158464050292969]",tokens/s,24.583126593526416,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) 
Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,920.45312,645.791744,0.0,260.046848,253.520896,s,1,8.4575537109375,8.4575537109375,0.0,8.4575537109375,8.4575537109375,8.4575537109375,8.4575537109375,[8.4575537109375],,kWh,1.525743499590438e-05,1.6758218937423899e-06,4.7905593879571384e-06,2.172381627760391e-05,,MB,1276.715008,756.9408,0.0,341.835776,312.39168,s,13,0.1669797782897949,0.012844598329984223,8.659386592104476e-05,0.012831456184387207,0.012957267379760742,0.01297356185913086,0.01298541976928711,"[0.012963680267333985, 0.012921728134155274, 0.012988384246826173, 0.012931615829467773, 0.012828927993774414, 0.012776384353637696, 0.012809184074401856, 0.012831456184387207, 0.012686112403869628, 0.012864800453186035, 0.01286297607421875, 0.012728447914123536, 0.012786080360412597]",tokens/s,19930.55706556411,kWh,3.758816441441505e-07,4.1453022669044214e-08,2.48772900290877e-07,6.661075671040716e-07,tokens/kWh,384322311.65451235,MB,1289.895936,784.203776,0.0,369.098752,313.0496,s,13,9.842195861816405,0.7570919893704927,0.0025163987288099802,0.757154541015625,0.7601525268554687,0.7606012695312501,0.760902822265625,"[0.7593627319335937, 0.7609782104492188, 0.7589215698242188, 0.7571211547851563, 0.7603499755859375, 0.757154541015625, 0.7557616577148437, 0.7543430786132812, 0.75779345703125, 0.7528635864257812, 0.752891357421875, 0.7561783447265625, 0.7584761962890625]",tokens/s,83.21313774880022,kWh,2.1758115899604137e-05,2.39956066737411e-06,8.474964237421268e-06,3.2632640804399515e-05,tokens/kWh,1930582.3386351978,,s,819,9.835783355712897,0.012009503486828927,0.000204401259050258,0.011974656105041503,0.012125823783874511,0.012223737621307374,0.012880195636749265,"[0.01183743953704834, 0.01207699203491211, 0.0120382080078125, 0.01205020809173584, 0.012039392471313477, 0.012089983940124512, 0.012038528442382812, 0.012009023666381836, 0.012002943992614746, 0.01199084758758545, 0.011953439712524414, 0.01202883243560791, 0.01196127986907959, 0.01196019172668457, 0.011921407699584961, 0.011978976249694824, 0.012011296272277832, 0.011970560073852539, 0.011937791824340821, 0.01193769645690918, 0.012144736289978027, 0.011960384368896484, 0.01198192024230957, 0.011969375610351562, 0.011960320472717285, 0.011904447555541993, 0.011983136177062988, 0.0120316162109375, 0.011987839698791504, 0.011951904296875, 0.01197276782989502, 0.011969504356384277, 0.011957119941711426, 0.012023807525634766, 0.011941887855529786, 0.011986656188964844, 0.011997056007385255, 0.01195468807220459, 0.012294048309326172, 0.012218367576599122, 0.012040191650390625, 0.012002880096435548, 0.012517631530761719, 0.012074943542480468, 0.01200111961364746, 0.012024224281311035, 0.012232704162597656, 0.012197664260864258, 0.012171551704406739, 0.012437631607055664, 0.012402496337890624, 0.012156703948974609, 0.012234975814819336, 0.012074751853942871, 0.012069120407104492, 0.011978048324584961, 0.01199174404144287, 0.01196348762512207, 0.012162112236022949, 0.012054368019104004, 0.012005439758300781, 0.012031935691833496, 0.012027456283569335, 0.011882495880126954, 0.012056639671325683, 0.012042176246643067, 0.012126208305358887, 0.012062175750732422, 0.012087231636047363, 0.01200915241241455, 0.011963295936584472, 0.011974783897399902, 0.011955488204956054, 0.011974656105041503, 0.012016063690185546, 0.011962400436401368, 0.011972031593322753, 
0.011971263885498047, 0.011943936347961426, 0.012025888442993165, 0.012034015655517578, 0.012038496017456055, 0.012018495559692383, 0.012116288185119629, 0.012083744049072266, 0.012052000045776367, 0.012052032470703125, 0.012022624015808106, 0.01220576000213623, 0.012065088272094727, 0.01205408000946045, 0.012122624397277832, 0.01200870418548584, 0.012043007850646973, 0.012048383712768555, 0.012011648178100586, 0.01199084758758545, 0.01202387237548828, 0.011950079917907714, 0.011995231628417969, 0.01209455966949463, 0.012102399826049805, 0.012388416290283203, 0.012037407875061036, 0.012048576354980469, 0.012052351951599122, 0.012114208221435547, 0.012023263931274415, 0.011995167732238769, 0.012180031776428223, 0.012012895584106445, 0.011977696418762207, 0.011997344017028809, 0.012027744293212891, 0.011962271690368653, 0.011989312171936035, 0.011970335960388184, 0.011968511581420899, 0.01201516819000244, 0.011992832183837891, 0.0119999361038208, 0.012316191673278808, 0.012236767768859864, 0.01205504035949707, 0.01204633617401123, 0.013901760101318359, 0.011819007873535157, 0.012072735786437989, 0.012085472106933594, 0.012194879531860351, 0.012061856269836425, 0.012092608451843262, 0.012135007858276366, 0.01204633617401123, 0.012090847969055176, 0.012053215980529785, 0.012058431625366212, 0.012121631622314452, 0.012058079719543457, 0.01201193618774414, 0.012061311721801757, 0.012067935943603515, 0.01207817554473877, 0.0120481595993042, 0.011976703643798828, 0.012076064109802246, 0.01198147201538086, 0.012041855812072753, 0.011944864273071289, 0.011933183670043946, 0.01192784023284912, 0.01202995204925537, 0.011932991981506347, 0.011891679763793945, 0.011984607696533204, 0.011966560363769531, 0.011966367721557618, 0.011961440086364745, 0.011908063888549805, 0.011935680389404298, 0.012093440055847168, 0.01194598388671875, 0.012072959899902343, 0.0119585599899292, 0.011951680183410645, 0.012011232376098633, 0.01206227207183838, 0.012055007934570313, 0.012144895553588867, 0.012077343940734864, 0.012070783615112305, 0.012148736000061035, 0.012058527946472167, 0.01288742446899414, 0.012004063606262208, 0.012017120361328125, 0.012008095741271973, 0.012019200325012207, 0.012003999710083008, 0.012033696174621583, 0.011978655815124513, 0.011995295524597167, 0.011982272148132325, 0.011931360244750977, 0.01240278434753418, 0.011967231750488281, 0.011962368011474609, 0.012015263557434082, 0.011958687782287598, 0.01169382381439209, 0.012039648056030274, 0.011977343559265137, 0.011989248275756837, 0.011956128120422363, 0.011968799591064453, 0.011990528106689453, 0.011903008460998536, 0.011933343887329102, 0.011903231620788574, 0.01190499210357666, 0.011970751762390137, 0.011862144470214844, 0.012093440055847168, 0.012382207870483398, 0.012099424362182616, 0.012036255836486816, 0.012031167984008788, 0.01196063995361328, 0.011966015815734863, 0.011969023704528809, 0.01195257568359375, 0.012119232177734375, 0.012417856216430665, 0.012031999588012696, 0.012275712013244629, 0.0120315523147583, 0.012208415985107421, 0.012367648124694824, 0.012211647987365723, 0.012041152000427246, 0.011923295974731445, 0.011987104415893554, 0.011962688446044922, 0.011957280158996582, 0.011940447807312012, 0.011984064102172851, 0.011968735694885255, 0.011952799797058105, 0.01203600025177002, 0.011993184089660644, 0.011984895706176758, 0.011959839820861816, 0.012003680229187012, 0.011961503982543946, 0.011942527770996093, 0.011940192222595215, 0.012025856018066406, 0.011936896324157715, 0.011993984222412109, 0.011940128326416015, 
0.012048095703125, 0.011931167602539063, 0.01209596824645996, 0.011923456192016601, 0.011993087768554688, 0.012068096160888673, 0.011996095657348632, 0.011974464416503907, 0.011966464042663574, 0.011941887855529786, 0.012029600143432618, 0.011988415718078613, 0.011854399681091308, 0.012064607620239257, 0.012034303665161132, 0.011972000122070312, 0.012018272399902344, 0.012031999588012696, 0.011949919700622558, 0.011985247611999511, 0.012052191734313964, 0.012001343727111816, 0.011973695755004883, 0.012064767837524413, 0.01231935977935791, 0.012845215797424316, 0.01203014373779297, 0.01200972843170166, 0.01197439956665039, 0.01215078353881836, 0.012101696014404298, 0.011999103546142578, 0.012091551780700684, 0.012004287719726562, 0.012014752388000488, 0.012072192192077637, 0.011993535995483398, 0.012157055854797363, 0.01204793643951416, 0.012054400444030762, 0.011947872161865235, 0.01202672004699707, 0.011920639991760254, 0.011952768325805664, 0.011978752136230468, 0.012275712013244629, 0.01217740821838379, 0.012088640213012695, 0.012067520141601562, 0.01206067180633545, 0.012236800193786621, 0.012092576026916504, 0.01196735954284668, 0.01198896026611328, 0.012226880073547363, 0.012033727645874024, 0.012042240142822265, 0.012306591987609864, 0.012130080223083496, 0.012103872299194336, 0.012036992073059081, 0.012061951637268066, 0.012148672103881836, 0.012086432456970215, 0.01203439998626709, 0.011897151947021484, 0.012042240142822265, 0.012023200035095214, 0.012017727851867676, 0.012052607536315919, 0.01211638355255127, 0.012018912315368653, 0.011945055961608888, 0.011929471969604493, 0.011966208457946777, 0.011646047592163086, 0.011957152366638184, 0.011968511581420899, 0.01202995204925537, 0.011973983764648437, 0.01190550422668457, 0.011944128036499023, 0.01197987174987793, 0.011905311584472656, 0.011966879844665528, 0.011966208457946777, 0.011915743827819824, 0.011935615539550781, 0.012044575691223145, 0.012135519981384277, 0.012255840301513672, 0.01217296028137207, 0.012059136390686035, 0.012056415557861327, 0.012075167655944824, 0.012078495979309082, 0.01206112003326416, 0.012091360092163085, 0.012063167572021484, 0.012055871963500976, 0.011966976165771484, 0.012003071784973144, 0.01200761604309082, 0.012087295532226563, 0.012056575775146485, 0.012223648071289063, 0.012058719635009766, 0.011963135719299316, 0.011984895706176758, 0.012003328323364258, 0.011929471969604493, 0.011954303741455077, 0.011978752136230468, 0.011966464042663574, 0.011920543670654296, 0.011998271942138672, 0.012117600440979004, 0.012025983810424804, 0.012052191734313964, 0.011956576347351075, 0.011915167808532716, 0.011931743621826172, 0.011937824249267578, 0.012017631530761718, 0.01207043170928955, 0.012042304039001465, 0.0120283203125, 0.011890912055969238, 0.01220905590057373, 0.012139391899108887, 0.011990559577941895, 0.011993247985839844, 0.011967071533203125, 0.012098719596862793, 0.011885024070739747, 0.011946080207824707, 0.01219315242767334, 0.011886431694030761, 0.011860256195068359, 0.011946271896362305, 0.01194598388671875, 0.011927071571350098, 0.011923263549804688, 0.011953824043273926, 0.01192147159576416, 0.011959232330322266, 0.01195622444152832, 0.011888640403747559, 0.011886303901672363, 0.011935903549194335, 0.01194611167907715, 0.011954175949096679, 0.011944128036499023, 0.011967904090881347, 0.01189900779724121, 0.011851455688476563, 0.01187820816040039, 0.011881248474121094, 0.011895968437194824, 0.011893600463867187, 0.011921407699584961, 0.011892288208007813, 0.011861536026000976, 
0.011862431526184082, 0.011921855926513672, 0.011819071769714355, 0.012017663955688476, 0.011920415878295898, 0.012101632118225097, 0.012139488220214843, 0.013294943809509277, 0.012448448181152344, 0.012715871810913087, 0.012079232215881348, 0.012087295532226563, 0.012013216018676758, 0.011956576347351075, 0.011942015647888184, 0.011884575843811035, 0.011912511825561524, 0.011899423599243165, 0.011964415550231934, 0.012031935691833496, 0.011998559951782227, 0.011916192054748535, 0.011987808227539063, 0.01192240047454834, 0.011935775756835937, 0.011931808471679687, 0.01213599967956543, 0.011976960182189942, 0.0119072322845459, 0.012009440422058105, 0.011877280235290527, 0.012092448234558106, 0.01188425636291504, 0.011911392211914062, 0.011932767868041993, 0.01201683235168457, 0.011929344177246094, 0.011999232292175293, 0.011675647735595703, 0.011967776298522949, 0.011940447807312012, 0.011982784271240235, 0.011968704223632812, 0.01204582405090332, 0.012007935523986817, 0.011986975669860839, 0.012001248359680175, 0.012090527534484863, 0.012251999855041504, 0.012046208381652832, 0.01212758445739746, 0.012118816375732422, 0.012017663955688476, 0.012101920127868652, 0.011986528396606445, 0.01190511989593506, 0.011970591545104981, 0.01194217586517334, 0.011986240386962891, 0.01196067237854004, 0.01210374355316162, 0.01195139217376709, 0.011916000366210938, 0.011941887855529786, 0.011927167892456054, 0.011902912139892578, 0.01191750431060791, 0.011922911643981933, 0.011926303863525391, 0.012097760200500489, 0.012054304122924804, 0.011949664115905761, 0.012032095909118651, 0.011936063766479493, 0.011919360160827636, 0.011913215637207031, 0.011931008338928222, 0.011950431823730468, 0.011885151863098145, 0.012052032470703125, 0.01194611167907715, 0.01190505599975586, 0.011888256072998047, 0.011980704307556152, 0.011906784057617187, 0.012024703979492188, 0.011930751800537109, 0.011846272468566895, 0.011869824409484864, 0.01190345573425293, 0.011890687942504884, 0.011919360160827636, 0.012012831687927246, 0.011985631942749023, 0.011907135963439942, 0.011896384239196778, 0.011903552055358887, 0.011958080291748047, 0.01195622444152832, 0.011896639823913575, 0.011993568420410156, 0.011663935661315917, 0.01202175998687744, 0.012041728019714355, 0.012109919548034668, 0.01198681640625, 0.011958656311035156, 0.0120732479095459, 0.012107647895812989, 0.012025856018066406, 0.011943903923034668, 0.011913087844848633, 0.011950240135192872, 0.011920607566833497, 0.011865152359008788, 0.011982080459594726, 0.011915743827819824, 0.011907072067260742, 0.01196623992919922, 0.011947615623474121, 0.011936384201049805, 0.011908896446228027, 0.01186956787109375, 0.011932512283325196, 0.01190937614440918, 0.011953920364379882, 0.011919487953186035, 0.011939552307128906, 0.011908512115478515, 0.011898880004882812, 0.011944704055786133, 0.012238944053649902, 0.012170528411865235, 0.012036864280700683, 0.012289312362670899, 0.012123744010925292, 0.012190719604492188, 0.012168191909790039, 0.012159008026123047, 0.012512224197387695, 0.012292096138000488, 0.012160544395446777, 0.012126943588256837, 0.012257023811340332, 0.012238016128540039, 0.012133184432983399, 0.012137824058532715, 0.012092063903808594, 0.01212825584411621, 0.011985919952392577, 0.01191641616821289, 0.011929311752319337, 0.011886752128601075, 0.011933535575866698, 0.011906559944152833, 0.01198966407775879, 0.01199833583831787, 0.011950976371765136, 0.011937791824340821, 0.011920576095581054, 0.011973440170288086, 0.012056575775146485, 0.012003071784973144, 
0.012020992279052735, 0.011737215995788574, 0.011972479820251465, 0.01195967960357666, 0.011898752212524413, 0.011905792236328126, 0.01188803195953369, 0.01193660831451416, 0.011934847831726074, 0.011899135589599609, 0.011894495964050292, 0.01194870376586914, 0.0118538236618042, 0.011890687942504884, 0.01205247974395752, 0.013078368186950684, 0.012847264289855958, 0.011981151580810546, 0.011961824417114257, 0.011971776008605956, 0.011925600051879882, 0.011889599800109863, 0.01184937572479248, 0.01189408016204834, 0.01190511989593506, 0.011951007843017579, 0.011896063804626465, 0.01208191967010498, 0.01183462429046631, 0.011866944313049316, 0.011843520164489746, 0.011876192092895508, 0.011915295600891114, 0.011916799545288086, 0.011898816108703613, 0.011942591667175293, 0.011999423980712891, 0.01185977554321289, 0.011871520042419434, 0.01195900821685791, 0.011887840270996094, 0.011874719619750977, 0.011855775833129883, 0.011839200019836425, 0.01185644817352295, 0.011880831718444824, 0.011861215591430664, 0.011868927955627441, 0.011849535942077636, 0.011923359870910645, 0.01195356845855713, 0.011901663780212403, 0.011907072067260742, 0.011884544372558594, 0.011872063636779786, 0.01189292812347412, 0.011965984344482422, 0.011923839569091797, 0.01193395233154297, 0.011992480278015137, 0.011925951957702637, 0.011935423851013184, 0.01189411163330078, 0.011881152153015136, 0.01164303970336914, 0.011986559867858886, 0.011980480194091796, 0.012056927680969238, 0.012020000457763672, 0.012040351867675781, 0.01204371166229248, 0.012042207717895508, 0.012015968322753907, 0.012024991989135743, 0.012016672134399414, 0.012029760360717774, 0.011965439796447755, 0.01196134376525879, 0.011988096237182617, 0.012039104461669921, 0.012125727653503417, 0.012022175788879395, 0.011962368011474609, 0.011886943817138671, 0.011898271560668945, 0.011849696159362794, 0.011890624046325684, 0.011999744415283203, 0.011970399856567383, 0.011898912429809571, 0.011954208374023438, 0.011964351654052734, 0.011919360160827636, 0.0120381441116333, 0.011904735565185547, 0.011929887771606446, 0.011884672164916992, 0.011982175827026367, 0.01195577621459961, 0.01187119960784912, 0.01184768009185791, 0.011846912384033204, 0.011854592323303223, 0.011857919692993164, 0.011888640403747559, 0.011918656349182129, 0.01190163230895996, 0.0120032958984375, 0.01194819164276123, 0.011876223564147949, 0.012124159812927245, 0.01193558406829834, 0.011885984420776367, 0.011930368423461913, 0.011912480354309082, 0.01195622444152832, 0.011919615745544433, 0.011859552383422851, 0.01186911964416504, 0.011894720077514648, 0.011863360404968262, 0.01183788776397705, 0.011878656387329102, 0.011917311668395996, 0.011986944198608398, 0.012025535583496094, 0.01190345573425293, 0.011591263771057129, 0.011903264045715333, 0.011941535949707031, 0.011907423973083496, 0.01197488021850586, 0.011960607528686523, 0.011909119606018067, 0.011974783897399902, 0.011982463836669921, 0.011940095901489258, 0.01216528034210205, 0.011949919700622558, 0.011896479606628417, 0.012224543571472167, 0.012005696296691895, 0.011992287635803223, 0.01206771183013916, 0.011847552299499511, 0.011873760223388673, 0.011893312454223632, 0.011870047569274903, 0.011880191802978515, 0.011942303657531739, 0.011973759651184083, 0.011944831848144532, 0.01195359992980957, 0.011917119979858398, 0.01188326358795166, 0.011879584312438966, 0.011872960090637206, 0.012080575942993164, 0.012018400192260742, 0.012002623558044434, 0.011936448097229003, 0.011895808219909668, 0.011898048400878906, 
0.011896639823913575, 0.011913215637207031, 0.01188150405883789, 0.011891679763793945, 0.011923232078552247, 0.01192739200592041, 0.011968255996704102, 0.011903615951538086, 0.011937631607055663, 0.011905088424682617, 0.011860223770141601, 0.011857760429382325, 0.011898688316345215, 0.011810336112976074, 0.01186246395111084, 0.011905632019042968, 0.011865632057189941, 0.011984576225280761, 0.01198089599609375, 0.012044095993041992, 0.013054783821105958, 0.01426598358154297, 0.012208191871643067, 0.012058624267578125, 0.01202291202545166, 0.01195248031616211, 0.011954719543457032, 0.01158291244506836, 0.012014143943786621, 0.011976896286010742, 0.01200928020477295, 0.011992480278015137, 0.01208995246887207, 0.012042304039001465, 0.012008959770202637, 0.011935968399047852, 0.011929471969604493, 0.011997535705566406, 0.011952128410339356, 0.011959327697753906, 0.011992032051086426, 0.011952383995056153, 0.011941632270812989, 0.014386816024780274, 0.013428832054138183, 0.0133571195602417, 0.011962335586547851, 0.011942144393920898, 0.011925312042236328, 0.011974944114685058, 0.01193785572052002, 0.011990880012512207, 0.011964415550231934, 0.01205571174621582, 0.012071328163146973, 0.01203228759765625, 0.012011679649353027, 0.012050432205200195, 0.012011520385742188, 0.012033568382263184, 0.01213814353942871, 0.012088352203369141, 0.011962400436401368, 0.011927359580993653, 0.011925632476806641, 0.011896736145019531, 0.011861791610717773, 0.011872320175170899, 0.01181004810333252, 0.011893024444580078, 0.011872832298278808, 0.01187606430053711, 0.011868096351623536, 0.012022080421447754, 0.011882719993591308, 0.011876383781433105, 0.011869376182556152, 0.01186464023590088, 0.011910688400268555, 0.01190163230895996, 0.011842944145202637, 0.011914752006530761, 0.011981535911560058, 0.011982720375061035, 0.01189510440826416, 0.01190511989593506, 0.011884448051452636, 0.011882495880126954, 0.011890463829040528, 0.01198310375213623]",tokens/s,83.26738912201665,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,2176.999424,2499.674112,0.0,2097.152,1986.693632,s,1,9.32875,9.32875,0.0,9.32875,9.32875,9.32875,9.32875,[9.32875],,kWh,6.556545032919227e-05,7.215800666686287e-06,2.591363184198392e-05,9.869488283786248e-05,,MB,2013.425664,2514.354176,0.0,2097.152,1862.37952,s,10,12.138623291015627,1.2138623291015624,0.0024066979826023933,1.213941467285156,1.216209094238281,1.217078057861328,1.2177732287597656,"[1.2100533447265625, 1.211544677734375, 1.211374755859375, 1.2128619384765624, 1.2125426025390624, 1.2153228759765624, 1.21502099609375, 1.2160159912109374, 1.2159390869140625, 
1.217947021484375]",tokens/s,210.89706292267743,kWh,3.5608593294163407e-05,3.9256650709155334e-06,2.3627074457199804e-05,6.316133282227874e-05,tokens/kWh,4053112.6966608553,MB,2017.558528,2514.354176,0.0,2097.152,1946.953216,s,10,15.408370361328126,1.5408370361328125,0.005133509008044592,1.5411513061523436,1.5466705932617186,1.5468938903808593,1.547072528076172,"[1.54582958984375, 1.5466209716796875, 1.54179931640625, 1.5454635009765625, 1.5471171875, 1.5355103759765625, 1.536437744140625, 1.531685791015625, 1.5405032958984375, 1.537402587890625]",tokens/s,40.88686767168914,kWh,4.527883967541737e-05,4.996107159983837e-06,2.7035188294802227e-05,7.731013513020339e-05,tokens/kWh,814899.6233662935,,s,630,15.405564287185662,0.024453276646326457,0.00039441685526148834,0.02437129592895508,0.02476141471862793,0.02510407600402832,0.02578503967285157,"[0.025188032150268554, 0.024631616592407226, 0.02451817512512207, 0.024689056396484374, 0.024393791198730468, 0.025069568634033205, 0.024298688888549805, 0.024477760314941407, 0.024402399063110352, 0.024510112762451172, 0.024445087432861327, 0.02444540786743164, 0.024268800735473633, 0.02430521583557129, 0.024188640594482422, 0.024394464492797852, 0.02432592010498047, 0.024580127716064454, 0.024656063079833986, 0.024604671478271483, 0.02465996742248535, 0.024592384338378907, 0.024811519622802734, 0.02549760055541992, 0.02471116828918457, 0.02476032066345215, 0.0245166072845459, 0.02468659210205078, 0.024493663787841798, 0.024566495895385742, 0.024610496520996093, 0.024735488891601563, 0.024699039459228515, 0.02479964828491211, 0.02469852828979492, 0.02470710372924805, 0.02472879981994629, 0.024603424072265626, 0.024645631790161132, 0.024627199172973634, 0.024641408920288085, 0.024356992721557617, 0.024381439208984376, 0.02431795120239258, 0.02422969627380371, 0.024196832656860352, 0.026177248001098632, 0.02441868782043457, 0.024086143493652342, 0.02411392021179199, 0.02405894470214844, 0.024265663146972656, 0.024203264236450195, 0.024268800735473633, 0.024186880111694335, 0.02445516777038574, 0.02425651168823242, 0.02407219123840332, 0.024245471954345704, 0.024260576248168946, 0.02487775993347168, 0.02445120048522949, 0.0244715518951416, 0.025397216796875, 0.024651391983032227, 0.02422412872314453, 0.02429737663269043, 0.024596479415893553, 0.024449472427368165, 0.024278688430786132, 0.02425356864929199, 0.02432089614868164, 0.02441152000427246, 0.024531200408935548, 0.028815488815307617, 0.024668415069580077, 0.02473779106140137, 0.024406015396118166, 0.025130048751831054, 0.024366016387939452, 0.024534624099731447, 0.024242591857910157, 0.024238079071044923, 0.02416819190979004, 0.024981760025024415, 0.02417980766296387, 0.024303743362426758, 0.024373279571533204, 0.02426540756225586, 0.02421766471862793, 0.024152000427246093, 0.02421766471862793, 0.024664064407348633, 0.024489984512329102, 0.024297407150268555, 0.024172191619873048, 0.024437152862548828, 0.02430748748779297, 0.024639711380004883, 0.024417823791503906, 0.02487548828125, 0.024401920318603516, 0.024627199172973634, 0.024395103454589843, 0.024239776611328125, 0.02426892852783203, 0.02427372741699219, 0.024477760314941407, 0.02433228874206543, 0.02550783920288086, 0.025488672256469728, 0.024576736450195313, 0.024680448532104493, 0.024670112609863282, 0.024514848709106446, 0.0244467830657959, 0.02436457633972168, 0.024433120727539063, 0.024352575302124025, 0.02437139129638672, 0.024369152069091796, 0.02445516777038574, 0.024364128112792968, 0.02428816032409668, 0.024303295135498046, 
0.024424032211303712, 0.025563583374023438, 0.02492857551574707, 0.024590015411376953, 0.02444476890563965, 0.024312288284301757, 0.024221696853637696, 0.024231136322021483, 0.02446790313720703, 0.024543584823608397, 0.02439369583129883, 0.024418336868286133, 0.02445292854309082, 0.02433990478515625, 0.02433296012878418, 0.02428099250793457, 0.024627071380615234, 0.024417984008789063, 0.024388223648071288, 0.024369152069091796, 0.02615648078918457, 0.025854528427124022, 0.024655872344970704, 0.024510080337524415, 0.02448441505432129, 0.02446710395812988, 0.0243734073638916, 0.02432204818725586, 0.024518655776977538, 0.025294399261474608, 0.024293760299682617, 0.024498239517211914, 0.024412160873413087, 0.02431795120239258, 0.024180736541748047, 0.024365055084228517, 0.024408063888549804, 0.024333728790283202, 0.024416000366210937, 0.024224607467651368, 0.02431385612487793, 0.024424192428588867, 0.024297311782836915, 0.02436124801635742, 0.02430758476257324, 0.024299776077270508, 0.024319999694824217, 0.02416556739807129, 0.024173376083374023, 0.02439276885986328, 0.02428758430480957, 0.024504671096801756, 0.024426752090454102, 0.02433148765563965, 0.024312320709228515, 0.024313343048095702, 0.024311647415161133, 0.02437004852294922, 0.024205375671386718, 0.02428860855102539, 0.024199615478515624, 0.02440985679626465, 0.024760799407958986, 0.024336383819580077, 0.02522300720214844, 0.025266336441040038, 0.02753936004638672, 0.024852895736694337, 0.024935232162475587, 0.024947584152221678, 0.024717023849487305, 0.024551904678344727, 0.02445088005065918, 0.02448601531982422, 0.024539007186889648, 0.024371200561523438, 0.024321823120117186, 0.02432022476196289, 0.024394975662231446, 0.02431875228881836, 0.024442880630493165, 0.02444198417663574, 0.02450726318359375, 0.02428463935852051, 0.024468000411987306, 0.02437084770202637, 0.024779104232788087, 0.024467456817626954, 0.02450432014465332, 0.02450841522216797, 0.02441526412963867, 0.024302560806274413, 0.02426470375061035, 0.02435481643676758, 0.024285184860229493, 0.024299871444702147, 0.024460287094116212, 0.024382112503051757, 0.024290912628173827, 0.024308128356933592, 0.024254463195800782, 0.024428096771240235, 0.02426310348510742, 0.024299648284912108, 0.024632768630981447, 0.024426944732666017, 0.024365055084228517, 0.02453875160217285, 0.02431599998474121, 0.02451263999938965, 0.024413856506347656, 0.024484352111816408, 0.024424415588378906, 0.02474991989135742, 0.024578239440917967, 0.024399456024169923, 0.024269216537475585, 0.024625152587890626, 0.02467430305480957, 0.0244836483001709, 0.024420352935791017, 0.02443878364562988, 0.024508607864379882, 0.024450496673583986, 0.024334623336791993, 0.024312095642089845, 0.024219104766845703, 0.025213119506835937, 0.02487071990966797, 0.024743648529052736, 0.02485443115234375, 0.024625728607177735, 0.02532966423034668, 0.02452275276184082, 0.024487648010253906, 0.02442678451538086, 0.024375295639038085, 0.024426496505737305, 0.02451046371459961, 0.024606719970703125, 0.024445280075073243, 0.02443199920654297, 0.024594112396240233, 0.024451679229736328, 0.024477472305297853, 0.024692672729492188, 0.024891679763793945, 0.02465078353881836, 0.024314847946166993, 0.02427903938293457, 0.02473369598388672, 0.02446950340270996, 0.02432204818725586, 0.024369152069091796, 0.024416576385498046, 0.024364736557006834, 0.02431795120239258, 0.02431577682495117, 0.024348800659179687, 0.024328191757202147, 0.024541183471679686, 0.0243790397644043, 0.02429939270019531, 0.024864927291870117, 
0.024546911239624023, 0.024240863800048827, 0.0243507194519043, 0.024389631271362306, 0.024418304443359375, 0.024380607604980467, 0.024549823760986328, 0.024620960235595703, 0.0245863037109375, 0.024407487869262695, 0.024626144409179686, 0.02442412757873535, 0.024385440826416017, 0.02435103988647461, 0.024298847198486326, 0.024250335693359375, 0.02505107116699219, 0.0244150390625, 0.024385568618774413, 0.02447267150878906, 0.024791967391967772, 0.025083904266357423, 0.024813568115234375, 0.025118303298950196, 0.024860256195068358, 0.0246997127532959, 0.02514496040344238, 0.02463577651977539, 0.024854528427124024, 0.024604671478271483, 0.02553593635559082, 0.025614912033081055, 0.024766368865966795, 0.02469868850708008, 0.02471555137634277, 0.024544992446899415, 0.024563232421875, 0.02428179168701172, 0.024364160537719726, 0.024337343215942383, 0.024409151077270506, 0.024318912506103515, 0.024254304885864258, 0.02418694305419922, 0.024164064407348633, 0.024164735794067384, 0.0242872314453125, 0.02404761505126953, 0.02417020797729492, 0.024126943588256837, 0.024121664047241212, 0.024128000259399415, 0.02464899253845215, 0.024312543869018554, 0.024207040786743163, 0.024199487686157227, 0.024156160354614258, 0.024442367553710938, 0.02416896057128906, 0.024395776748657227, 0.024373247146606446, 0.02431385612487793, 0.024233983993530273, 0.024231935501098634, 0.02422889518737793, 0.024302560806274413, 0.02435686492919922, 0.024184831619262694, 0.024186880111694335, 0.024434688568115235, 0.02424563217163086, 0.02418451118469238, 0.02440278434753418, 0.02410095977783203, 0.02407756805419922, 0.02422208023071289, 0.024354400634765624, 0.02425657653808594, 0.025288480758666992, 0.024185056686401366, 0.024263391494750975, 0.024225791931152343, 0.02416761589050293, 0.024200000762939454, 0.02399795150756836, 0.024379135131835938, 0.024031103134155272, 0.024118143081665037, 0.024110431671142577, 0.025342016220092772, 0.0247523193359375, 0.024522560119628906, 0.024571903228759767, 0.024608768463134766, 0.02444697570800781, 0.024380767822265625, 0.0243022403717041, 0.02429542350769043, 0.024417503356933594, 0.024419071197509766, 0.02435910415649414, 0.02431497573852539, 0.024318399429321288, 0.024197439193725585, 0.02426470375061035, 0.024199136734008787, 0.024255584716796875, 0.024206272125244142, 0.024250368118286132, 0.024233983993530273, 0.024139776229858398, 0.024538784027099608, 0.02457744026184082, 0.024457504272460937, 0.02432476806640625, 0.024473600387573242, 0.024444927215576173, 0.02437104034423828, 0.024328351974487305, 0.024282175064086912, 0.024285728454589844, 0.02422825622558594, 0.024161632537841798, 0.024222368240356444, 0.024273984909057616, 0.024255424499511718, 0.024191999435424806, 0.0245296630859375, 0.024553728103637696, 0.02435686492919922, 0.024180671691894532, 0.024291391372680663, 0.024147968292236328, 0.024104032516479492, 0.024120223999023437, 0.024250303268432617, 0.02406425666809082, 0.02409199905395508, 0.024068063735961914, 0.02415977668762207, 0.024253023147583007, 0.024211839675903322, 0.024186880111694335, 0.02425551986694336, 0.024110048294067384, 0.024271936416625978, 0.027251264572143555, 0.024699264526367188, 0.024414207458496092, 0.02447279930114746, 0.02448873519897461, 0.024388671875, 0.02517193603515625, 0.02475174331665039, 0.024510879516601563, 0.024594207763671876, 0.024568031311035157, 0.02449843215942383, 0.024493824005126952, 0.025071807861328125, 0.024952287673950194, 0.024719615936279297, 0.024602720260620117, 0.02434659194946289, 0.02448508834838867, 
0.024537887573242188, 0.024309791564941407, 0.02425004768371582, 0.02411756706237793, 0.024147552490234377, 0.024048032760620116, 0.024008703231811524, 0.023996416091918944, 0.02395955276489258, 0.024039424896240235, 0.024020992279052734, 0.024123392105102538, 0.024293664932250977, 0.024059616088867187, 0.024123392105102538, 0.02411110305786133, 0.024118656158447264, 0.02411564826965332, 0.024215744018554686, 0.024176639556884767, 0.024229471206665038, 0.02419731140136719, 0.02471548843383789, 0.024870912551879884, 0.024600576400756836, 0.024408063888549804, 0.024233983993530273, 0.02411724853515625, 0.024057855606079103, 0.02417647933959961, 0.024293535232543944, 0.024102912902832032, 0.02428486442565918, 0.024377664566040038, 0.024373247146606446, 0.024293376922607423, 0.024453119277954103, 0.024329919815063477, 0.024333887100219727, 0.02430438423156738, 0.024211456298828125, 0.02414182472229004, 0.02407609558105469, 0.02410825538635254, 0.024026079177856444, 0.024083616256713868, 0.024009567260742187, 0.024241279602050782, 0.02410585594177246, 0.02412067222595215, 0.02522883224487305, 0.024676671981811525, 0.02443734359741211, 0.024610143661499023, 0.024390111923217772, 0.02424777603149414, 0.024308448791503907, 0.024474943161010742, 0.024471519470214843, 0.02441494369506836, 0.02433433532714844, 0.024412160873413087, 0.024638784408569335, 0.024729759216308593, 0.0247608642578125, 0.024729183197021484, 0.024778272628784178, 0.024644479751586915, 0.024874559402465822, 0.02539155197143555, 0.024696832656860353, 0.02450227165222168, 0.0244715518951416, 0.024376768112182617, 0.02427471923828125, 0.024236543655395508, 0.02420479965209961, 0.024174623489379883, 0.024112991333007813, 0.024128032684326173, 0.024134016036987303, 0.026871807098388673, 0.024451072692871095, 0.024366847991943358, 0.024299776077270508, 0.024319072723388672, 0.024533920288085938, 0.024193023681640623, 0.024266368865966798, 0.02420345687866211, 0.024148160934448243, 0.024086240768432618, 0.024041248321533204, 0.024175104141235353, 0.024204736709594728, 0.024180864334106444, 0.024162752151489258, 0.024223552703857423, 0.024410303115844727, 0.024472927093505858, 0.024428415298461913, 0.024535839080810546, 0.024518655776977538, 0.02439891242980957, 0.024279712677001953, 0.024287424087524413, 0.024240224838256837, 0.02427836799621582, 0.024191423416137694, 0.024225088119506837, 0.025086687088012694, 0.02420550346374512, 0.0240762882232666, 0.0251246395111084, 0.024679136276245118, 0.024858591079711913, 0.024606719970703125, 0.02452070426940918, 0.024799232482910157, 0.024541183471679686, 0.024442880630493165, 0.024573951721191405, 0.024408063888549804, 0.02435481643676758, 0.02425369644165039, 0.02435148811340332, 0.0241943359375, 0.02415279960632324, 0.024151744842529296, 0.02423948860168457, 0.024299999237060547, 0.02512643241882324, 0.024395904541015624, 0.024291999816894533, 0.024190303802490234, 0.02426144027709961, 0.02433228874206543, 0.024266752243041992, 0.02427494430541992, 0.024479232788085937, 0.024713727951049806, 0.02471731185913086, 0.02492403221130371, 0.02469081687927246, 0.0245164794921875, 0.024531072616577148, 0.024567808151245117, 0.024276992797851563, 0.024071935653686524, 0.024109312057495117, 0.024154111862182616, 0.02405171203613281, 0.023992319107055664, 0.023984128952026368, 0.024126976013183594, 0.023978496551513673, 0.02426873588562012, 0.024190624237060546, 0.02414204788208008, 0.024307903289794923, 0.02428108787536621, 0.025195711135864256, 0.025459392547607422, 0.02450649642944336, 
0.0243507194519043, 0.024245439529418947, 0.024402624130249025, 0.024317279815673828, 0.024251199722290038, 0.024204383850097655, 0.024279935836791992, 0.024236032485961914, 0.024153375625610353, 0.024300128936767577, 0.024110815048217774, 0.02433679962158203]",tokens/s,40.89431508354636,,, 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return 
forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3843.485696,5236.457472,0.0,4833.93536,4546.659328,s,1,10.957439453125,10.957439453125,0.0,10.957439453125,10.957439453125,10.957439453125,10.957439453125,[10.957439453125],,kWh,0.00011120032802083794,1.22588058528432e-05,4.815253852200052e-05,0.00017161167239568168,,MB,2071.01952,5270.011904,0.0,4852.809728,4095.21408,s,10,26.707928955078124,2.6707928955078124,0.006245286641557769,2.6711156005859373,2.6772308105468747,2.67802666015625,2.6786633398437503,"[2.656837646484375, 2.664251220703125, 2.66907080078125, 2.67033935546875, 2.669186767578125, 2.671891845703125, 2.678822509765625, 2.674153076171875, 2.677053955078125, 2.67632177734375]",tokens/s,95.85168525443652,kWh,7.79918718425021e-05,8.602322753788288e-06,5.178176364760007e-05,0.00013837595824389048,tokens/kWh,1850032.355684177,MB,2081.497088,5270.011904,0.0,4852.809728,4197.764096,s,10,22.094751464843753,2.209475146484375,0.008217439253106406,2.2111572265625,2.219243505859375,2.221573046875,2.2234366796875,"[2.223902587890625, 2.218725830078125, 2.21336962890625, 2.213949462890625, 2.209154296875, 2.200388916015625, 2.204223876953125, 2.21316015625, 2.1994306640625, 
2.198446044921875]",tokens/s,28.513559023392038,kWh,6.434142826416896e-05,7.097402333022428e-06,4.2580422953200636e-05,0.00011401925355039203,tokens/kWh,552538.260322468,,s,630,22.091589752197287,0.0350660154796782,0.00040114852744904564,0.035012815475463865,0.03538098487854004,0.03571183776855469,0.03656088031768799,"[0.03589564895629883, 0.03524985504150391, 0.03512944030761719, 0.03516057586669922, 0.0350143051147461, 0.03499401473999023, 0.03519068908691406, 0.03522364807128906, 0.035317726135253906, 0.037200511932373045, 0.035569343566894535, 0.03563961410522461, 0.036089855194091795, 0.03555942535400391, 0.03510681533813476, 0.03541785430908203, 0.03533849716186523, 0.0351960334777832, 0.034966304779052736, 0.03487343978881836, 0.034915359497070315, 0.034976734161376956, 0.03573907089233398, 0.03510489654541016, 0.035228096008300784, 0.03541376113891601, 0.038967552185058596, 0.0353702392578125, 0.03497788619995117, 0.034980510711669924, 0.0350904312133789, 0.0349881591796875, 0.03499814224243164, 0.034942752838134764, 0.03488380813598633, 0.03506175994873047, 0.03508569717407226, 0.035060352325439456, 0.03512934494018555, 0.03534579086303711, 0.03546585464477539, 0.0369315185546875, 0.03536624145507813, 0.03513971328735352, 0.035201633453369144, 0.03502489471435547, 0.03526041412353516, 0.035631103515625, 0.03520307159423828, 0.03511705780029297, 0.03561824035644531, 0.03511475372314453, 0.03490899276733399, 0.03473612976074219, 0.03503923034667969, 0.034902015686035154, 0.0348581428527832, 0.03481686401367187, 0.03489583969116211, 0.03514998245239258, 0.03500336074829102, 0.03481078338623047, 0.03495449447631836, 0.035862529754638675, 0.03510067367553711, 0.03520716857910156, 0.03539763259887695, 0.03501670455932617, 0.03526652908325195, 0.03533004760742187, 0.03507401657104492, 0.03491231918334961, 0.035138687133789065, 0.03516249465942383, 0.03525609588623047, 0.035305633544921874, 0.03515427017211914, 0.03533846282958984, 0.035189983367919925, 0.035295040130615234, 0.03578160095214844, 0.03515718460083008, 0.03521862411499024, 0.03518425750732422, 0.0348873291015625, 0.03530905532836914, 0.03518960189819336, 0.035108863830566404, 0.035676158905029294, 0.03519862365722656, 0.03622537612915039, 0.03516371154785156, 0.03510047912597656, 0.03508623886108399, 0.035090625762939455, 0.03507033538818359, 0.035463329315185546, 0.035307422637939456, 0.03504556655883789, 0.03477443313598633, 0.03506012725830078, 0.03488111877441406, 0.0347509765625, 0.03518259048461914, 0.03472588729858399, 0.035235809326171874, 0.03542428970336914, 0.035245792388916015, 0.03658540725708008, 0.03525462341308594, 0.0349409294128418, 0.035993343353271486, 0.03520889663696289, 0.03508915328979492, 0.03532128143310547, 0.03517660903930664, 0.03497347259521484, 0.035310016632080075, 0.03504457473754883, 0.03500022506713867, 0.034849662780761716, 0.03502899169921875, 0.03493008041381836, 0.03489548873901367, 0.034960224151611326, 0.03529331207275391, 0.03628236770629883, 0.03535871887207031, 0.035294815063476564, 0.035023265838623044, 0.035356670379638674, 0.034841598510742186, 0.03484569549560547, 0.03493273544311523, 0.03501465606689453, 0.03483647918701172, 0.0349224967956543, 0.035024833679199216, 0.03518265533447266, 0.03518259048461914, 0.03504323196411133, 0.03487311935424805, 0.035028865814208984, 0.03511763381958008, 0.035870113372802735, 0.03588761520385742, 0.035348129272460935, 0.035324222564697264, 0.035522560119628906, 0.03500646209716797, 0.035942401885986325, 0.035356670379638674, 
0.03523376083374023, 0.03517443084716797, 0.035139167785644534, 0.035418529510498044, 0.03509408187866211, 0.03498831939697265, 0.035180702209472656, 0.03635935974121094, 0.03491648101806641, 0.03464876937866211, 0.03474668884277344, 0.034740062713623045, 0.03507388687133789, 0.03503292846679688, 0.03522272109985351, 0.035361759185791014, 0.03508969497680664, 0.03497024154663086, 0.034869407653808596, 0.03492019271850586, 0.034944671630859375, 0.035058208465576175, 0.03491382217407227, 0.034802337646484376, 0.0352210578918457, 0.03527091217041016, 0.03495452880859375, 0.034902751922607424, 0.03464191818237305, 0.034924385070800784, 0.03491775894165039, 0.035203487396240234, 0.03499456024169922, 0.035046527862548825, 0.034876640319824216, 0.03502153778076172, 0.03478192138671875, 0.035952606201171876, 0.035068321228027347, 0.035143680572509765, 0.035272384643554686, 0.0350129280090332, 0.034887680053710936, 0.034917598724365236, 0.0350272331237793, 0.03488787078857422, 0.034810142517089845, 0.034832225799560544, 0.03520940780639648, 0.03526860809326172, 0.03497574234008789, 0.03551599884033203, 0.03513695907592773, 0.034925537109375, 0.03481516647338867, 0.03554387283325195, 0.03507814407348633, 0.03537913513183594, 0.03516831970214844, 0.03547331237792969, 0.03564134216308594, 0.035012702941894534, 0.035059711456298825, 0.03523350524902344, 0.03510300827026367, 0.03495872116088867, 0.03516204833984375, 0.0350557746887207, 0.035156513214111326, 0.03509657669067383, 0.03489791870117188, 0.036688159942626954, 0.03484572982788086, 0.03477779388427735, 0.03484864044189453, 0.03476083374023437, 0.034764801025390625, 0.03493276977539062, 0.03534435272216797, 0.03491443252563477, 0.03482406234741211, 0.034810142517089845, 0.035036319732666014, 0.03488355255126953, 0.03499273681640625, 0.035031105041503904, 0.035583934783935546, 0.03564361572265625, 0.035471134185791016, 0.035299327850341795, 0.035108863830566404, 0.035143680572509765, 0.03497071838378906, 0.03505039978027344, 0.03528041458129883, 0.035283073425292966, 0.03528316879272461, 0.0350615348815918, 0.035246368408203124, 0.035065921783447265, 0.03616153717041016, 0.035102718353271486, 0.03500851058959961, 0.035253726959228515, 0.03506595230102539, 0.034906558990478516, 0.03489996719360351, 0.03537715148925781, 0.03537100982666016, 0.035059711456298825, 0.03501260757446289, 0.034877376556396486, 0.03475836944580078, 0.03450505447387695, 0.03482352066040039, 0.03463212966918945, 0.034652385711669925, 0.03457180786132812, 0.0351341438293457, 0.03486697769165039, 0.03520419311523437, 0.0350483512878418, 0.03506975936889648, 0.0348551025390625, 0.0351539192199707, 0.03521868896484375, 0.035420928955078125, 0.03527475357055664, 0.03500646209716797, 0.03485068893432617, 0.03502707290649414, 0.035043327331542966, 0.03521535873413086, 0.035124320983886716, 0.03496223831176758, 0.03535424041748047, 0.03527276611328125, 0.03501305770874023, 0.034764766693115234, 0.0372490234375, 0.03521331024169922, 0.03529523086547852, 0.034980159759521484, 0.03492761611938477, 0.03479759979248047, 0.03482281494140625, 0.03493273544311523, 0.03483417510986328, 0.03471366500854492, 0.03509433746337891, 0.034859424591064454, 0.03521523284912109, 0.035020896911621094, 0.03490569686889648, 0.03499663925170898, 0.0348807373046875, 0.03501955032348633, 0.0352902717590332, 0.034945537567138675, 0.03482681655883789, 0.034883201599121096, 0.035033248901367185, 0.035194881439208986, 0.03624127960205078, 0.035211135864257816, 0.034907745361328124, 0.035132289886474606, 
0.034786529541015625, 0.034914878845214846, 0.03501798248291016, 0.03496831893920899, 0.03515801620483398, 0.03498188781738281, 0.03509683227539063, 0.035378273010253904, 0.03488127899169922, 0.0345814094543457, 0.034678462982177735, 0.03500419235229492, 0.03499244689941406, 0.03471164703369141, 0.03486492919921875, 0.034689376831054684, 0.03464396667480469, 0.034995262145996096, 0.035027904510498045, 0.03461110305786133, 0.03474678421020508, 0.03484844970703125, 0.03491849517822266, 0.035018657684326174, 0.035149822235107424, 0.03529449462890625, 0.03526057434082031, 0.035754558563232425, 0.03560655975341797, 0.03492758560180664, 0.034915103912353515, 0.03485424041748047, 0.035113761901855466, 0.03492873764038086, 0.03520841598510742, 0.03488438415527344, 0.03500566482543945, 0.035927902221679686, 0.03475347137451172, 0.03471676635742187, 0.03466307067871094, 0.03440851211547852, 0.03442860794067383, 0.03432294464111328, 0.03436912155151367, 0.034501121520996096, 0.03444284820556641, 0.03483884811401367, 0.03457228851318359, 0.034612705230712894, 0.034735679626464844, 0.03474121475219726, 0.03467984008789062, 0.03529180908203125, 0.03503696060180664, 0.03478992080688476, 0.034748416900634765, 0.03509657669067383, 0.0345272331237793, 0.03564972686767578, 0.03493241500854492, 0.03517427062988281, 0.0348740463256836, 0.03520723342895508, 0.03470940780639648, 0.03499580764770508, 0.034893310546875, 0.03476950454711914, 0.03495977783203125, 0.0348037109375, 0.03493801498413086, 0.034988033294677735, 0.035019615173339846, 0.03526448059082031, 0.03505487823486328, 0.03485507202148438, 0.03509308624267578, 0.034988033294677735, 0.034872352600097654, 0.03492281723022461, 0.03466854476928711, 0.03453961563110351, 0.03484064102172851, 0.03504383850097656, 0.034799137115478516, 0.03491193771362305, 0.03498448181152344, 0.03484659194946289, 0.03480928039550781, 0.03516511917114258, 0.034907520294189455, 0.03568703842163086, 0.0352174072265625, 0.034831520080566405, 0.0349639663696289, 0.03485971069335937, 0.03465084838867188, 0.03510323333740235, 0.034902271270751954, 0.03532806396484375, 0.03511840057373047, 0.03524691009521484, 0.03485465621948242, 0.03563372802734375, 0.034987201690673826, 0.03737615966796875, 0.03529545593261719, 0.034861087799072266, 0.03467478561401367, 0.034737281799316407, 0.03467910385131836, 0.03471318435668945, 0.03472003173828125, 0.03473398590087891, 0.03470608139038086, 0.03453555297851563, 0.03456409454345703, 0.03463782501220703, 0.03514572906494141, 0.03482758331298828, 0.03502985763549805, 0.03480355072021484, 0.03585433578491211, 0.03517427062988281, 0.03519910430908203, 0.03498105621337891, 0.0348782730102539, 0.03522281646728516, 0.03554582214355469, 0.03531158447265625, 0.03535657501220703, 0.03529238510131836, 0.03513232040405274, 0.03509657669067383, 0.035011775970458986, 0.03490012741088867, 0.03483907318115234, 0.0347751693725586, 0.03482767868041992, 0.03518729782104492, 0.035103775024414065, 0.03532191848754883, 0.03507465744018555, 0.03476425552368164, 0.03547564697265625, 0.03525699234008789, 0.03510409545898437, 0.035811199188232425, 0.035033889770507816, 0.034914272308349606, 0.034799903869628904, 0.0346580810546875, 0.034643936157226565, 0.03467264175415039, 0.03479449462890625, 0.034753536224365236, 0.03484025573730469, 0.035053054809570314, 0.03504825592041016, 0.035133438110351564, 0.03496550369262695, 0.03495427322387695, 0.03498428726196289, 0.03494976043701172, 0.035198974609375, 0.03503472137451172, 0.035000736236572266, 0.03501465606689453, 
0.03505766296386719, 0.03507814407348633, 0.03650083160400391, 0.035076351165771485, 0.03487171173095703, 0.03503036880493164, 0.0352314567565918, 0.03541609573364258, 0.03525929641723633, 0.03517030334472656, 0.03526646423339844, 0.03542639923095703, 0.03545292663574219, 0.035323902130126955, 0.03527824020385742, 0.03519548797607422, 0.03523174285888672, 0.036074081420898435, 0.03526857757568359, 0.035123710632324216, 0.03529663848876953, 0.035142017364501954, 0.03504537582397461, 0.03505766296386719, 0.03493817520141602, 0.03528291320800781, 0.03533526229858398, 0.03534150314331055, 0.03493119812011719, 0.0353135986328125, 0.03474835205078125, 0.03470307159423828, 0.03461916732788086, 0.03477766418457031, 0.0346861457824707, 0.03482876968383789, 0.034479679107666014, 0.034669345855712894, 0.034350975036621094, 0.03519612884521484, 0.03495209503173828, 0.03495695877075195, 0.03453286361694336, 0.03461206436157226, 0.03469311904907227, 0.03480985641479492, 0.034510078430175783, 0.03470003128051758, 0.034823326110839846, 0.03459718322753906, 0.03445779037475586, 0.03465865707397461, 0.03470745468139649, 0.03456819152832031, 0.03479555130004883, 0.03530543899536133, 0.03508992004394531, 0.03506227111816406, 0.035119102478027346, 0.03504742431640625, 0.03501670455932617, 0.03476275253295898, 0.03477638244628906, 0.03457500839233398, 0.03482422256469726, 0.03489382553100586, 0.03522355270385742, 0.03466239929199219, 0.03487859344482422, 0.034861953735351565, 0.03648716735839844, 0.034852863311767575, 0.034740222930908206, 0.034665504455566404, 0.03465526580810547, 0.03479545593261719, 0.034746368408203124, 0.03460300827026367, 0.035178497314453126, 0.03468492889404297, 0.035732128143310546, 0.034695201873779294, 0.034786750793457034, 0.03527529525756836, 0.0346399040222168, 0.034778526306152344, 0.03473798370361328, 0.03502774429321289, 0.03491366577148437, 0.03484915161132812, 0.03504272079467773, 0.034683807373046875, 0.0349035530090332, 0.03525471878051758, 0.03605487823486328, 0.03477046585083008, 0.034611358642578124, 0.0346847038269043, 0.034706111907958984, 0.03476860809326172, 0.03481401443481445, 0.034640094757080075, 0.03462963104248047, 0.034484222412109376, 0.034761791229248044, 0.0347982063293457, 0.034901790618896485, 0.034848384857177735, 0.03497385787963867, 0.034711326599121094, 0.034812767028808596, 0.03486080169677734, 0.03469964981079102, 0.034770942687988284, 0.034823806762695315, 0.03481145477294922, 0.03481375885009766, 0.034597248077392576, 0.034793537139892576, 0.03487315368652344, 0.03503139114379883, 0.03495158386230469, 0.03475251388549805, 0.0348076171875, 0.035124927520751956, 0.0351646728515625, 0.03476697540283203, 0.03490911865234375, 0.03487430572509766, 0.03493478393554687, 0.03491430282592774, 0.03486310577392578, 0.03457401657104492, 0.034788734436035154, 0.0348656005859375, 0.036289024353027347, 0.034729984283447264, 0.034995361328125, 0.034855777740478516, 0.035124225616455076, 0.034861759185791014, 0.034691390991210935, 0.034928638458251955]",tokens/s,28.517639837908867,,, 
4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1542.254592,1539.178496,0.0,1136.656384,1111.384576,s,1,8.7102216796875,8.7102216796875,0.0,8.7102216796875,8.7102216796875,8.7102216796875,8.7102216796875,[8.7102216796875],,kWh,4.449880866669294e-05,4.901120047946633e-06,1.658973549399645e-05,6.598966420863603e-05,,MB,1619.910656,1610.481664,0.0,1193.279488,1029.128704,s,10,5.868300048828125,0.5868300048828125,0.001677313251022654,0.5864563293457031,0.5878336242675781,0.5896507904052735,0.5911045233154297,"[0.5914679565429688, 0.5857305297851563, 0.5862147216796875, 0.586959228515625, 0.5854459228515625, 0.5860387573242187, 0.5853843383789062, 0.5866979370117188, 0.5869308471679687, 0.5874298095703125]",tokens/s,436.2421789443472,kWh,1.7426049505393524e-05,1.921716549575675e-06,1.1578244556705602e-05,3.09260106116748e-05,tokens/kWh,8277821.643874043,MB,1627.082752,1612.578816,0.0,1195.37664,1083.494912,s,10,11.813073486328124,1.1813073486328123,0.004050209822046776,1.180841552734375,1.1878831787109376,1.1881446533203126,1.1883538330078125,"[1.1805814208984375, 1.175162109375, 1.1835008544921874, 1.1811016845703124, 1.177036376953125, 1.1878250732421876, 1.1787449951171876, 1.17951708984375, 1.18119775390625, 1.1884061279296876]",tokens/s,53.33074417332046,kWh,3.406485709460756e-05,3.757387801121807e-06,1.790944570009499e-05,5.5731690595824374e-05,tokens/kWh,1130416.0940834656,,s,630,11.810528398513801,0.018746870473831417,0.0003472329252708096,0.01867523193359375,0.0190575813293457,0.019283316993713377,0.020061819190979012,"[0.019347936630249023, 0.019526847839355467, 0.01880556869506836, 0.018716255187988282, 0.018663583755493166, 0.018665727615356446, 0.018951168060302736, 0.018956352233886718, 0.018670143127441405, 0.018493824005126953, 0.018642623901367186, 0.01868012809753418, 0.01860403251647949, 0.018700288772583007, 0.018753536224365236, 0.01863043212890625, 0.018681407928466797, 0.018721439361572265, 0.018601984024047852, 0.018722688674926758, 0.01858121681213379, 0.018586015701293944, 0.01871183967590332, 0.018569183349609376, 0.01869264030456543, 0.018729183197021486, 0.01867510414123535, 0.018702367782592773, 0.019058271408081053, 0.019010175704956056, 0.018708831787109376, 0.018571264266967775, 0.01861631965637207, 0.01842790412902832, 0.018542591094970702, 0.01844374465942383, 0.01849398422241211, 0.018456064224243163, 0.018528160095214845, 0.0186082878112793, 0.018633152008056642, 0.018540128707885743, 0.01853481674194336, 0.018492799758911133, 0.018448928833007812, 0.018561119079589843, 0.018494911193847656, 0.01860870361328125, 0.018589696884155273, 0.018692096710205077, 0.0186279354095459, 0.01867024040222168, 0.01875494384765625, 0.01906988716125488, 0.01868345642089844, 0.01864716720581055, 0.018744512557983397, 0.01914963150024414, 0.018941856384277343, 0.01946428871154785, 0.019037567138671874, 0.01939683151245117, 
0.019202239990234377, 0.01959587287902832, 0.019136512756347656, 0.018683488845825196, 0.018462303161621094, 0.018496320724487304, 0.018421663284301757, 0.01849331283569336, 0.018542816162109375, 0.018588863372802734, 0.01866352081298828, 0.018531040191650392, 0.018638784408569337, 0.01856108856201172, 0.018655231475830078, 0.018778112411499022, 0.018589408874511718, 0.01853059196472168, 0.018513055801391603, 0.018432479858398437, 0.018602367401123046, 0.018950143814086915, 0.018675455093383787, 0.018753791809082033, 0.01880303955078125, 0.01869379234313965, 0.01861359977722168, 0.01850022315979004, 0.01869318389892578, 0.018797536849975587, 0.01864499282836914, 0.018577375411987306, 0.01870031929016113, 0.018523616790771483, 0.018592287063598632, 0.01859110450744629, 0.018655040740966796, 0.018852447509765623, 0.018681888580322267, 0.018607872009277344, 0.018655519485473632, 0.018548831939697266, 0.018554655075073243, 0.018663711547851562, 0.01861337661743164, 0.018570079803466796, 0.01882307243347168, 0.01850124740600586, 0.018498048782348633, 0.018508928298950195, 0.0185086727142334, 0.01859993553161621, 0.018597888946533202, 0.018658496856689452, 0.018666303634643555, 0.018644128799438477, 0.018625280380249024, 0.01863279914855957, 0.019332607269287108, 0.01867622375488281, 0.01859584045410156, 0.01860812759399414, 0.018540544509887694, 0.01847091293334961, 0.019228160858154295, 0.01883407974243164, 0.0186562557220459, 0.01882761573791504, 0.019003904342651368, 0.01882963180541992, 0.018683584213256835, 0.018647039413452148, 0.018441568374633788, 0.018552831649780274, 0.018593856811523438, 0.018514528274536132, 0.018755584716796874, 0.0187291202545166, 0.01874723243713379, 0.01878819274902344, 0.018634912490844726, 0.018528160095214845, 0.01848531150817871, 0.018661407470703124, 0.020207616806030275, 0.02044927978515625, 0.018933727264404298, 0.01879209518432617, 0.018694656372070313, 0.01855251121520996, 0.018864320755004882, 0.018708480834960937, 0.01890108871459961, 0.018655391693115236, 0.018840864181518556, 0.01879292869567871, 0.018667520523071288, 0.020152320861816408, 0.018693536758422852, 0.018567487716674803, 0.01878790473937988, 0.018518367767333985, 0.01849996757507324, 0.018941951751708985, 0.018963552474975585, 0.018834463119506838, 0.019023744583129883, 0.018932895660400392, 0.01876259231567383, 0.018726911544799805, 0.01853385543823242, 0.018683712005615236, 0.018639423370361327, 0.018742752075195313, 0.01858176040649414, 0.01868435287475586, 0.018601152420043947, 0.01871955108642578, 0.018524160385131837, 0.018525375366210937, 0.018676544189453127, 0.018499584197998048, 0.019093055725097657, 0.018581247329711913, 0.018510528564453125, 0.018520063400268554, 0.018540063858032228, 0.019349504470825195, 0.019074432373046873, 0.01881920051574707, 0.018675872802734375, 0.018581855773925782, 0.01868704032897949, 0.01854150390625, 0.018529855728149414, 0.01883795166015625, 0.01865727996826172, 0.01866703987121582, 0.018651456832885743, 0.018798368453979492, 0.018579839706420898, 0.01860848045349121, 0.018675359725952148, 0.018621984481811522, 0.01881324768066406, 0.01854070472717285, 0.01867263984680176, 0.018700351715087892, 0.018695104598999025, 0.018722816467285155, 0.018708320617675783, 0.018712064743041993, 0.018762271881103517, 0.01884377670288086, 0.01869824028015137, 0.018489343643188477, 0.01896556854248047, 0.018627040863037108, 0.01852463912963867, 0.01846220779418945, 0.01856358337402344, 0.018636064529418947, 0.01862112045288086, 0.01861801528930664, 
0.01881126403808594, 0.018961952209472655, 0.018894847869873048, 0.018919712066650392, 0.018788543701171875, 0.018777727127075195, 0.018681472778320312, 0.019061504364013673, 0.0188723201751709, 0.019326976776123047, 0.018898944854736328, 0.018834880828857422, 0.018797119140625, 0.018585439682006835, 0.01853251266479492, 0.018655231475830078, 0.018592832565307617, 0.018676671981811523, 0.0188372802734375, 0.019175647735595703, 0.01887788772583008, 0.018639423370361327, 0.018697727203369142, 0.018661951065063475, 0.018897024154663086, 0.018660768508911133, 0.019694080352783205, 0.019318784713745117, 0.01906412887573242, 0.018848031997680665, 0.01913488006591797, 0.018710527420043945, 0.018743295669555664, 0.01873846435546875, 0.018796287536621093, 0.018835840225219728, 0.018942495346069337, 0.018821184158325194, 0.01864908790588379, 0.01851955223083496, 0.018712831497192384, 0.018832799911499023, 0.01884214401245117, 0.018620576858520508, 0.018654592514038087, 0.018496288299560546, 0.018774015426635742, 0.018511871337890624, 0.01844633674621582, 0.018536447525024414, 0.01840742492675781, 0.018429088592529296, 0.018385248184204103, 0.018471263885498048, 0.018440223693847655, 0.01841756820678711, 0.01841142463684082, 0.01834956741333008, 0.01832953643798828, 0.01845952033996582, 0.018579519271850586, 0.01860166358947754, 0.01863500785827637, 0.018836511611938476, 0.018772863388061525, 0.018686048507690428, 0.018630527496337892, 0.01858367919921875, 0.018556800842285157, 0.018473087310791017, 0.018569215774536133, 0.018513408660888672, 0.018465280532836914, 0.018513471603393554, 0.018539039611816407, 0.018447999954223634, 0.018516319274902344, 0.018638015747070313, 0.018748159408569335, 0.01873427200317383, 0.018831520080566405, 0.018756256103515626, 0.019120128631591796, 0.018720767974853517, 0.019092927932739256, 0.018672191619873046, 0.018898944854736328, 0.01872233581542969, 0.018589920043945312, 0.019594144821166993, 0.019057504653930663, 0.01882111930847168, 0.018998912811279297, 0.018823551177978515, 0.018860031127929687, 0.01899929618835449, 0.01908460807800293, 0.018997951507568358, 0.0190316162109375, 0.018992576599121094, 0.018971647262573242, 0.01902592086791992, 0.01902387237548828, 0.019163135528564454, 0.019041919708251955, 0.0189116153717041, 0.019314687728881837, 0.018769920349121092, 0.018786016464233397, 0.0188767032623291, 0.0186428165435791, 0.018639135360717773, 0.01867487907409668, 0.01862518310546875, 0.018597888946533202, 0.018714624404907225, 0.01861222457885742, 0.018540544509887694, 0.018514272689819335, 0.018523807525634765, 0.018593791961669923, 0.01897881507873535, 0.01891302490234375, 0.019091264724731445, 0.02202668762207031, 0.019900415420532228, 0.01894326400756836, 0.018695903778076173, 0.018586624145507814, 0.01863580894470215, 0.018529247283935547, 0.018532352447509767, 0.01860748863220215, 0.018642656326293944, 0.018631071090698243, 0.018612735748291014, 0.018679040908813477, 0.01866828727722168, 0.018581504821777343, 0.018497535705566406, 0.01846272087097168, 0.01862860870361328, 0.019578176498413084, 0.018604736328125, 0.01859174346923828, 0.018700096130371095, 0.018560447692871095, 0.018428672790527345, 0.018466272354125977, 0.018530847549438477, 0.018712575912475587, 0.01872643280029297, 0.01931724739074707, 0.018900991439819336, 0.018620384216308593, 0.018653024673461915, 0.018688192367553712, 0.018683679580688478, 0.01887651252746582, 0.01879257583618164, 0.018737152099609376, 0.018693504333496095, 0.018723264694213867, 0.018733247756958008, 
0.01877564811706543, 0.01871708869934082, 0.018513919830322266, 0.0184434871673584, 0.01861459159851074, 0.018399839401245118, 0.01839910316467285, 0.018372608184814454, 0.018327775955200194, 0.01845180892944336, 0.018409919738769532, 0.018495487213134765, 0.018542591094970702, 0.018927616119384767, 0.018595327377319337, 0.01856972885131836, 0.019098751068115233, 0.01864793586730957, 0.018679840087890625, 0.018595199584960937, 0.018600223541259765, 0.018757951736450194, 0.01866761589050293, 0.018521568298339845, 0.01849798393249512, 0.01846067237854004, 0.018478944778442384, 0.01843388748168945, 0.018477088928222658, 0.018420000076293946, 0.01838489532470703, 0.018391040802001952, 0.01842521667480469, 0.018747264862060548, 0.018591840744018553, 0.01854252815246582, 0.01870921516418457, 0.01869004821777344, 0.01847443199157715, 0.018709056854248046, 0.022248607635498047, 0.019848031997680662, 0.019001344680786132, 0.019097375869750976, 0.01877769660949707, 0.01866841506958008, 0.018585695266723632, 0.018549535751342775, 0.018594688415527343, 0.01848476791381836, 0.018667999267578124, 0.019313919067382813, 0.018973567962646484, 0.018835519790649412, 0.018855648040771486, 0.018970399856567382, 0.01879532814025879, 0.01874652862548828, 0.01876617622375488, 0.018814495086669922, 0.01877654457092285, 0.018800735473632812, 0.01876527976989746, 0.018854528427124023, 0.018740671157836914, 0.018747360229492187, 0.018584159851074217, 0.018542591094970702, 0.018765823364257812, 0.018698047637939454, 0.018616447448730467, 0.018571327209472657, 0.018536224365234374, 0.018559200286865234, 0.01884774398803711, 0.01874662399291992, 0.018692863464355468, 0.019775199890136718, 0.018753631591796875, 0.018812416076660156, 0.018711231231689454, 0.018774015426635742, 0.018589696884155273, 0.018520063400268554, 0.018520063400268554, 0.018540544509887694, 0.018599552154541017, 0.018710912704467772, 0.019099647521972657, 0.018908512115478514, 0.019021823883056642, 0.019335391998291016, 0.018776031494140626, 0.018741151809692384, 0.018610111236572267, 0.018614912033081056, 0.01861974334716797, 0.01869276809692383, 0.018702335357666015, 0.01862006378173828, 0.018497888565063476, 0.01842585563659668, 0.018364288330078124, 0.01838092803955078, 0.01840947151184082, 0.018509824752807616, 0.018591648101806642, 0.018614368438720705, 0.018782207489013672, 0.01862860870361328, 0.018573312759399413, 0.018576799392700197, 0.01849964714050293, 0.01845452880859375, 0.019468160629272462, 0.01918694305419922, 0.019051456451416017, 0.019009183883666993, 0.01877334403991699, 0.018742271423339844, 0.01867884826660156, 0.018672576904296877, 0.01856716728210449, 0.01855401611328125, 0.01851024055480957, 0.01846928024291992, 0.018579488754272462, 0.018449663162231445, 0.01847987174987793, 0.0185894718170166, 0.018591968536376954, 0.018522111892700196, 0.01905039978027344, 0.018548831939697266, 0.018909183502197266, 0.019577920913696287, 0.01856812858581543, 0.018548736572265623, 0.018681503295898436, 0.0187989444732666, 0.018745567321777342, 0.01876083183288574, 0.018573984146118164, 0.018554304122924806, 0.018526784896850584, 0.01849888038635254, 0.01847158432006836, 0.01848838424682617, 0.018650079727172853, 0.018493440628051756, 0.01852115249633789, 0.018553247451782228, 0.018602527618408204, 0.018733055114746093, 0.018771968841552734, 0.018599231719970702, 0.018700992584228516, 0.018618368148803712, 0.018579456329345705, 0.01885919952392578, 0.018699071884155274, 0.0191016960144043, 0.018691072463989256, 0.018568191528320312, 
0.019385887145996095, 0.021754112243652344, 0.01883977508544922, 0.018746463775634766, 0.018690208435058593, 0.018658048629760744, 0.018595008850097655, 0.018455360412597658, 0.018522111892700196, 0.018435232162475584, 0.01872572708129883, 0.018731008529663085, 0.018474048614501953, 0.019447423934936522, 0.018960351943969726, 0.018786624908447267, 0.01881248092651367, 0.018979007720947266, 0.018941984176635743, 0.01879315185546875, 0.01900748825073242, 0.018939647674560547, 0.018980480194091796, 0.01908390426635742, 0.0190784969329834, 0.019134271621704103, 0.019301216125488282, 0.019337215423583985, 0.019062080383300782, 0.019100095748901365, 0.01887820816040039, 0.018866687774658202, 0.01926144027709961, 0.019079168319702147, 0.018769695281982423, 0.01857967948913574, 0.018716672897338867, 0.018692096710205077, 0.018661376953125, 0.018644575119018555, 0.018601503372192383, 0.018616447448730467, 0.018627328872680662, 0.01907711982727051, 0.018947776794433595, 0.01866374397277832, 0.018687999725341797, 0.018663232803344726, 0.018641088485717775, 0.01883135986328125, 0.02012774467468262, 0.01887027168273926, 0.018925407409667968, 0.018889984130859374, 0.01889961624145508, 0.01874355125427246, 0.01862384033203125, 0.018590368270874024, 0.018544639587402344, 0.018523935317993165, 0.018594015121459962, 0.018572927474975586, 0.018641279220581054, 0.01874492835998535, 0.018815231323242188, 0.018649248123168944, 0.018707584381103516, 0.01883020782470703, 0.018783872604370117, 0.0187491512298584, 0.018887327194213866, 0.018755168914794923, 0.018790464401245117, 0.018731359481811524, 0.018662591934204102, 0.01924790382385254]",tokens/s,53.3422365826814,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1068.539904,874.381312,0.0,488.636416,482.553856,s,1,7.91195166015625,7.91195166015625,0.0,7.91195166015625,7.91195166015625,7.91195166015625,7.91195166015625,[7.91195166015625],,kWh,2.322903196242502e-05,2.5550845255023405e-06,7.732228407997255e-06,3.3516344895924614e-05,,MB,1334.120448,1031.667712,0.0,616.562688,582.974464,s,10,0.27800300598144534,0.027800300598144534,0.0004450032035215409,0.02772118377685547,0.02799483470916748,0.02852311315536499,0.028945735912323,"[0.0290513916015625, 0.02755731201171875, 0.027536415100097657, 0.02739436721801758, 0.027819936752319335, 0.027838016510009767, 0.02748575973510742, 0.027755647659301757, 0.02787743949890137, 0.02768671989440918]",tokens/s,9208.533522730546,kWh,9.369306699787483e-07,1.0328879820881823e-07,6.248500369165626e-07,1.6650695051041291e-06,tokens/kWh,153747335.60085854,MB,1347.301376,1044.250624,0.0,629.1456,597.192192,s,10,14.602696411132813,1.4602696411132814,0.006644689093043162,1.4599320678710939,1.467748876953125,1.4705623657226563,1.4728131567382814,"[1.4621405029296874, 1.4603460693359376, 1.459333740234375, 1.4544063720703124, 1.4733758544921876, 1.4499307861328126, 1.4671236572265625, 
1.45951806640625, 1.4641121826171875, 1.4524091796875]",tokens/s,43.14271708885902,kWh,4.167851425919107e-05,4.596676815572489e-06,1.6535747024888386e-05,6.281093809965194e-05,tokens/kWh,1003010.0155493316,,s,630,14.596821525573725,0.02316955797710116,0.0007745700390872891,0.022991679191589356,0.023425770378112793,0.023833027076721188,0.027664256324768068,"[0.02296188735961914, 0.023120351791381836, 0.023152448654174804, 0.02309529685974121, 0.02287615966796875, 0.022938655853271483, 0.023088096618652344, 0.0228822078704834, 0.022910112380981444, 0.022969280242919922, 0.022790143966674805, 0.02302137565612793, 0.02294822311401367, 0.023027519226074218, 0.022900192260742188, 0.022948383331298828, 0.02282899284362793, 0.022906015396118164, 0.022792991638183595, 0.022919296264648437, 0.022932767868041992, 0.022743743896484377, 0.022900768280029297, 0.02304217529296875, 0.023062015533447267, 0.02322060775756836, 0.0229289608001709, 0.022827455520629883, 0.02309119987487793, 0.02299625587463379, 0.02291974449157715, 0.023337247848510743, 0.02312384033203125, 0.022800031661987304, 0.022993343353271484, 0.02302288055419922, 0.022907360076904297, 0.023142335891723632, 0.023379871368408203, 0.02309718322753906, 0.023589344024658204, 0.027996095657348632, 0.02359856033325195, 0.023255487442016602, 0.02319580841064453, 0.023160928726196288, 0.02303990364074707, 0.023015424728393553, 0.022968320846557616, 0.02308710479736328, 0.02287820816040039, 0.023080511093139647, 0.022992544174194336, 0.023034656524658203, 0.02315894317626953, 0.023100927352905275, 0.026765663146972655, 0.024057855606079103, 0.023293312072753907, 0.02312460708618164, 0.023201791763305665, 0.023451648712158202, 0.023021568298339845, 0.022619232177734375, 0.023066272735595705, 0.022931583404541017, 0.023414047241210937, 0.022804479598999023, 0.022859647750854493, 0.022813535690307616, 0.02290073585510254, 0.022794240951538085, 0.02276911926269531, 0.023939136505126954, 0.02385763168334961, 0.023209407806396486, 0.02306719970703125, 0.022970367431640625, 0.02418502426147461, 0.023080768585205077, 0.023072256088256835, 0.02319539260864258, 0.022854047775268553, 0.023086944580078126, 0.027603647232055665, 0.023160640716552734, 0.02302060890197754, 0.023023935317993165, 0.02320454406738281, 0.02296006393432617, 0.022958080291748048, 0.02288844871520996, 0.023082015991210937, 0.023256032943725587, 0.023134271621704103, 0.023424800872802735, 0.023609504699707032, 0.02329190444946289, 0.023287744522094728, 0.02318547248840332, 0.023121728897094726, 0.023091392517089845, 0.023066335678100586, 0.02369673538208008, 0.02301228713989258, 0.022949472427368164, 0.02294416046142578, 0.022947839736938477, 0.022967584609985353, 0.023062368392944337, 0.02287824058532715, 0.022885215759277343, 0.022863391876220704, 0.022927839279174803, 0.022937440872192384, 0.02299305534362793, 0.02289664077758789, 0.022732799530029296, 0.022814815521240234, 0.022769344329833983, 0.022687488555908204, 0.023011743545532228, 0.023115583419799805, 0.023284095764160156, 0.02332044792175293, 0.024157472610473633, 0.023242752075195314, 0.02778425598144531, 0.02350111961364746, 0.023101343154907226, 0.02283798408508301, 0.022912704467773437, 0.023001407623291014, 0.022812671661376953, 0.02309529685974121, 0.022939647674560547, 0.022945215225219726, 0.022960672378540038, 0.023126047134399415, 0.022931455612182617, 0.0228351993560791, 0.02297001647949219, 0.023103103637695313, 0.023069408416748045, 0.022798303604125977, 0.022679679870605467, 0.022787551879882812, 
0.022712480545043944, 0.02275766372680664, 0.022818431854248047, 0.022978431701660158, 0.02315110397338867, 0.022978656768798827, 0.022902687072753905, 0.022880096435546875, 0.022916608810424805, 0.02277187156677246, 0.022839744567871093, 0.022820928573608398, 0.022919008255004883, 0.02275049591064453, 0.022862367630004883, 0.022952096939086914, 0.022999231338500976, 0.023142400741577147, 0.02305536079406738, 0.02307788848876953, 0.02331398391723633, 0.023167232513427734, 0.02336992073059082, 0.023041471481323243, 0.023261247634887697, 0.027889631271362306, 0.023218496322631836, 0.02308937644958496, 0.023254240036010742, 0.02308572769165039, 0.023091583251953124, 0.023207679748535156, 0.02297760009765625, 0.02292857551574707, 0.02314998435974121, 0.02295180892944336, 0.02282566452026367, 0.02286569595336914, 0.02308608055114746, 0.02314556884765625, 0.023119775772094727, 0.023021568298339845, 0.022583295822143554, 0.023023616790771483, 0.023045791625976562, 0.023031967163085938, 0.023240608215332033, 0.02287558364868164, 0.022854496002197265, 0.02326460838317871, 0.02291324806213379, 0.022950336456298827, 0.022814720153808594, 0.02292038345336914, 0.022987199783325196, 0.022905216217041016, 0.022868127822875978, 0.022717504501342772, 0.022727455139160156, 0.022915071487426757, 0.023294111251831055, 0.023276512145996093, 0.023124576568603516, 0.024316192626953125, 0.023187456130981447, 0.02300921630859375, 0.022936800003051757, 0.02315126419067383, 0.027037887573242186, 0.023373760223388673, 0.022888671875, 0.023002975463867186, 0.022896223068237305, 0.02294816017150879, 0.022918272018432616, 0.022827423095703125, 0.02299347114562988, 0.022761472702026365, 0.022905920028686525, 0.022774688720703123, 0.022997024536132813, 0.022911136627197265, 0.022931232452392578, 0.02316703987121582, 0.02285977554321289, 0.022712127685546875, 0.02277961540222168, 0.022795743942260742, 0.02298703956604004, 0.022831840515136717, 0.02323865509033203, 0.022929407119750975, 0.022976703643798828, 0.022939456939697265, 0.02286796760559082, 0.022961952209472655, 0.0228702392578125, 0.02433433532714844, 0.022873952865600587, 0.022961439132690428, 0.02298876762390137, 0.02297100830078125, 0.023284095764160156, 0.022976415634155273, 0.023334495544433592, 0.02336844825744629, 0.02352921676635742, 0.02343449592590332, 0.023274112701416015, 0.02336195182800293, 0.02342246437072754, 0.025268192291259765, 0.0259399356842041, 0.02336105537414551, 0.022870784759521486, 0.022916255950927736, 0.02280953598022461, 0.023306400299072265, 0.023131807327270507, 0.02296544075012207, 0.022993824005126954, 0.023154687881469727, 0.02305843162536621, 0.022988319396972656, 0.023097824096679688, 0.02324239921569824, 0.02323286437988281, 0.022916799545288087, 0.02288467216491699, 0.022912544250488283, 0.022921695709228515, 0.022936864852905272, 0.023261920928955078, 0.02297804832458496, 0.02299545669555664, 0.022953983306884765, 0.022861824035644532, 0.02284339141845703, 0.022812671661376953, 0.022935552597045897, 0.022882303237915038, 0.02284339141845703, 0.022749183654785156, 0.022824960708618162, 0.022798208236694335, 0.02550592041015625, 0.02832793617248535, 0.023267328262329103, 0.023233760833740236, 0.023374624252319336, 0.02315020751953125, 0.02318339157104492, 0.023093248367309572, 0.023032159805297853, 0.022959680557250978, 0.023153087615966798, 0.027646080017089843, 0.02332383918762207, 0.02288915252685547, 0.022732799530029296, 0.02281488037109375, 0.02303984069824219, 0.022990848541259764, 0.023613439559936524, 
0.024780576705932617, 0.023324895858764648, 0.02325721549987793, 0.02309926414489746, 0.023029760360717775, 0.022931455612182617, 0.022890592575073244, 0.022950815200805663, 0.022925567626953126, 0.022877216339111328, 0.0227357120513916, 0.022728607177734374, 0.02288355255126953, 0.023025407791137695, 0.022675424575805663, 0.02266281509399414, 0.022741376876831056, 0.022976512908935546, 0.023009279251098632, 0.022835071563720704, 0.022968448638916016, 0.023193376541137695, 0.023016735076904295, 0.02285196876525879, 0.022868288040161132, 0.022790399551391602, 0.022800384521484376, 0.0227061767578125, 0.0227061767578125, 0.022917215347290038, 0.02302560043334961, 0.0227838077545166, 0.022669279098510742, 0.022805728912353516, 0.022967264175415038, 0.02532352066040039, 0.02510438346862793, 0.022937599182128905, 0.02281657600402832, 0.022991039276123046, 0.02310553550720215, 0.02283075141906738, 0.02317344093322754, 0.022752351760864258, 0.022913984298706055, 0.023033536911010743, 0.02286390495300293, 0.022800703048706055, 0.022840320587158205, 0.022841535568237304, 0.022766048431396485, 0.02272051239013672, 0.022976831436157228, 0.02329190444946289, 0.023040000915527343, 0.022999040603637694, 0.023115327835083008, 0.02291142463684082, 0.022724607467651366, 0.02289459228515625, 0.023078176498413087, 0.023025728225708007, 0.02320867156982422, 0.02322412872314453, 0.023136383056640626, 0.023367712020874024, 0.0235316162109375, 0.023213888168334963, 0.02372403144836426, 0.023594175338745117, 0.023474143981933593, 0.023249759674072265, 0.023339168548583984, 0.02317091178894043, 0.023197696685791015, 0.022992319107055664, 0.02313478469848633, 0.02300124740600586, 0.023409856796264648, 0.02818729591369629, 0.023574655532836913, 0.023457664489746094, 0.02306051254272461, 0.022826976776123047, 0.022836832046508788, 0.02265670394897461, 0.023311071395874024, 0.026482431411743165, 0.023523584365844726, 0.023257087707519532, 0.02311916732788086, 0.022925823211669923, 0.022898815155029298, 0.02278201675415039, 0.02285296058654785, 0.022913312911987303, 0.022792064666748046, 0.022998847961425782, 0.02302227210998535, 0.022924863815307617, 0.022802112579345703, 0.02269615936279297, 0.022667808532714842, 0.022929248809814454, 0.02275129508972168, 0.022814815521240234, 0.022724639892578124, 0.022605791091918945, 0.02269388771057129, 0.02272649574279785, 0.022720447540283205, 0.022745311737060545, 0.022763519287109374, 0.02274508857727051, 0.022732288360595702, 0.022839935302734374, 0.022951616287231445, 0.022976831436157228, 0.02293734359741211, 0.023137439727783204, 0.023176000595092772, 0.023361696243286132, 0.023447328567504883, 0.025258207321166994, 0.02671836853027344, 0.023494495391845702, 0.023525375366210938, 0.023443328857421876, 0.02308927917480469, 0.023178911209106444, 0.02296294403076172, 0.02304991912841797, 0.023006624221801757, 0.02300374412536621, 0.02306988716125488, 0.022823360443115233, 0.02293209648132324, 0.023150335311889647, 0.022951936721801756, 0.02289459228515625, 0.022720640182495117, 0.022790016174316405, 0.02288751983642578, 0.022948768615722655, 0.022906879425048828, 0.023029760360717775, 0.02300035285949707, 0.02282979202270508, 0.022978559494018554, 0.02312182426452637, 0.023391456604003907, 0.023274368286132812, 0.023114816665649414, 0.023158912658691407, 0.023173952102661134, 0.02292531204223633, 0.022971647262573242, 0.02365292739868164, 0.023236799240112304, 0.023115743637084962, 0.023049312591552733, 0.023045312881469725, 0.023193344116210938, 0.022922847747802736, 
0.02297078323364258, 0.022971935272216797, 0.023388639450073242, 0.027726112365722658, 0.023338560104370118, 0.022936767578125, 0.022999391555786133, 0.022948415756225585, 0.023042112350463866, 0.023250112533569334, 0.023253824234008787, 0.02316009521484375, 0.022880640029907227, 0.02294822311401367, 0.02295743942260742, 0.02268614387512207, 0.022929567337036133, 0.022740991592407226, 0.022709375381469728, 0.02290777587890625, 0.022691455841064453, 0.022732639312744142, 0.02591798400878906, 0.0242238712310791, 0.023279487609863283, 0.023129280090332032, 0.022995071411132814, 0.02290928077697754, 0.023024063110351562, 0.023350847244262694, 0.0238350715637207, 0.023716064453125, 0.02368716812133789, 0.023390207290649414, 0.023369152069091795, 0.02344607925415039, 0.023513248443603516, 0.023370847702026368, 0.02328028869628906, 0.023248992919921874, 0.02306252861022949, 0.022921215057373046, 0.02286591911315918, 0.02296419143676758, 0.023012895584106446, 0.02302207946777344, 0.027340799331665038, 0.02319561576843262, 0.02285775947570801, 0.022874111175537108, 0.022869792938232422, 0.022896928787231444, 0.022933439254760744, 0.023605215072631837, 0.022918752670288086, 0.02282499122619629, 0.02296873664855957, 0.02299660873413086, 0.023294336318969728, 0.02292870330810547, 0.023047903060913085, 0.02302457618713379, 0.02296620750427246, 0.022992448806762697, 0.02284976005554199, 0.023034175872802733, 0.02286591911315918, 0.022822336196899416, 0.022804704666137696, 0.022989152908325195, 0.022907072067260743, 0.023538944244384765, 0.023380544662475584, 0.02307823944091797, 0.02310767936706543, 0.02290540885925293, 0.022853343963623048, 0.022864160537719728, 0.02280463981628418, 0.02304819107055664, 0.023004032135009764, 0.023069631576538085, 0.022871583938598634, 0.02279270362854004, 0.023357440948486328, 0.022935264587402342, 0.022806304931640625, 0.022933055877685547, 0.023022527694702147, 0.023478271484375, 0.02327961540222168, 0.02750364875793457, 0.022706815719604492, 0.02316230392456055, 0.02305081558227539, 0.023062080383300782, 0.02291756820678711, 0.022743167877197264, 0.022839168548583984, 0.022899776458740233, 0.023179391860961913, 0.02279097557067871, 0.0228287353515625, 0.022745407104492188, 0.022986751556396484, 0.022890495300292968, 0.022870208740234373, 0.02278995132446289, 0.022953983306884765, 0.023026880264282228, 0.02302617645263672, 0.02330246353149414, 0.02327110481262207, 0.023382335662841796, 0.023370912551879883, 0.023462751388549804, 0.023830528259277343, 0.02326425552368164, 0.023151615142822265, 0.023090431213378906, 0.023106496810913087, 0.02332009506225586, 0.023545696258544923, 0.023226816177368163, 0.023213375091552736, 0.022970527648925782, 0.02292380714416504, 0.022951936721801756, 0.022806304931640625, 0.02292755126953125, 0.02315881538391113, 0.022871936798095703, 0.023001216888427736, 0.022926368713378907, 0.027671680450439454, 0.023309823989868163, 0.023019872665405273, 0.023061792373657228, 0.022661855697631836, 0.02261756706237793, 0.02264956855773926, 0.02259929656982422, 0.022757408142089843, 0.022974111557006835, 0.02269004821777344, 0.0228353271484375, 0.02282019233703613, 0.022788896560668945, 0.02283635139465332, 0.022660255432128906, 0.022767135620117188, 0.022611167907714842, 0.022639583587646485, 0.022585344314575196, 0.022720800399780274]",tokens/s,43.160081042043004,,, 
4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1076.96128,972.947456,0.0,570.425344,536.326656,s,1,8.14050341796875,8.14050341796875,0.0,8.14050341796875,8.14050341796875,8.14050341796875,8.14050341796875,[8.14050341796875],,kWh,3.176123900827102e-05,3.4956088478348264e-06,1.1024731042030833e-05,4.628157889813668e-05,,MB,1388.130304,1042.153472,0.0,624.951296,594.377728,s,10,0.20732541084289552,0.02073254108428955,0.0001038056183619794,0.02071609687805176,0.020886621284484864,0.02090950288772583,0.020927808170318602,"[0.020736928939819335, 0.020932384490966797, 0.02088153648376465, 0.02069305610656738, 0.020582815170288087, 0.020605567932128907, 0.02069526481628418, 0.020683296203613283, 0.02077248001098633, 0.020742080688476563]",tokens/s,12347.738705024853,kWh,6.055918875865326e-07,6.678099712314494e-08,3.904123685767654e-07,1.0627852532864428e-06,tokens/kWh,240876507.4678757,MB,1403.392,1056.833536,0.0,639.63136,607.71072,s,10,11.260330810546876,1.1260330810546875,0.004649675580085116,1.1262554321289064,1.1307368774414062,1.1320752868652344,1.1331460144042969,"[1.1261185302734376, 1.1334136962890624, 1.1289896240234376, 1.1290093994140624, 1.130439453125, 1.1221141357421875, 1.125031005859375, 1.126392333984375, 1.12261376953125, 1.1162088623046875]",tokens/s,55.94862270031329,kWh,3.254569958407361e-05,3.5893301914078337e-06,1.2870181573228357e-05,4.900521134870979e-05,tokens/kWh,1285577.5593274462,,s,630,11.254827167510998,0.017864805027795215,0.0003378283318796366,0.01779513645172119,0.01818455638885498,0.018382318019866943,0.019244218101501464,"[0.017334112167358397, 0.017895584106445313, 0.017821695327758787, 0.017847455978393555, 0.017791296005249025, 0.017690176010131835, 0.017920576095581054, 0.017857023239135742, 0.017747936248779298, 0.017647552490234374, 0.017680383682250975, 0.017717248916625978, 0.017757951736450197, 0.017810752868652344, 0.018061504364013672, 0.01807753562927246, 0.018138015747070312, 0.018061311721801757, 0.018037824630737304, 0.018115520477294922, 0.018231296539306642, 0.018769920349121092, 0.01820057678222656, 0.01794047927856445, 0.017778688430786133, 0.017938432693481447, 0.01787494468688965, 0.017757823944091797, 0.01770944023132324, 0.017960800170898437, 0.017911968231201173, 0.017745920181274414, 0.017917951583862304, 0.017854175567626952, 0.017852703094482423, 0.017948511123657227, 0.017782880783081056, 0.01769664001464844, 0.017717056274414063, 0.017690240859985353, 0.01783065605163574, 0.017816959381103517, 0.01761664009094238, 0.017685375213623046, 0.01794223976135254, 0.017862112045288085, 0.01785536003112793, 0.01787487983703613, 0.017895423889160156, 0.01791494369506836, 0.017804224014282225, 0.017837600708007814, 0.017715679168701173, 0.01777663993835449, 0.017745023727416993, 0.01782668876647949, 0.017819679260253907, 0.017899488449096678, 0.017803264617919923, 0.01799577522277832, 
0.017981472015380858, 0.017860832214355468, 0.01790540885925293, 0.017547264099121093, 0.01784217643737793, 0.017596416473388672, 0.01777859115600586, 0.017845375061035156, 0.01779520034790039, 0.017859424591064453, 0.01804697608947754, 0.01778483200073242, 0.018089984893798827, 0.018671327590942383, 0.01898019218444824, 0.018438432693481447, 0.018475679397583007, 0.018208831787109376, 0.01849951934814453, 0.018097631454467772, 0.018002496719360352, 0.01811452865600586, 0.018096384048461915, 0.01813699150085449, 0.018315103530883788, 0.018243135452270506, 0.01806175994873047, 0.017950719833374023, 0.01792518424987793, 0.01797737693786621, 0.017892255783081054, 0.01786675262451172, 0.017913055419921876, 0.017867551803588868, 0.017888511657714844, 0.018248031616210938, 0.01790176010131836, 0.017961183547973634, 0.017819648742675782, 0.0178055362701416, 0.017940511703491212, 0.01791974449157715, 0.018005504608154296, 0.017895872116088868, 0.01810006332397461, 0.017862464904785155, 0.017752479553222657, 0.017815263748168945, 0.01770832061767578, 0.017884159088134767, 0.01777663993835449, 0.017717248916625978, 0.01779916763305664, 0.017695903778076172, 0.01767030334472656, 0.017654624938964844, 0.017994943618774413, 0.017777151107788085, 0.017764127731323243, 0.017965375900268556, 0.01914681625366211, 0.017788896560668944, 0.01776345634460449, 0.01783305549621582, 0.01797622489929199, 0.018044864654541016, 0.018032447814941406, 0.018264223098754882, 0.01818838310241699, 0.018184671401977538, 0.018111743927001954, 0.018033407211303712, 0.01816582489013672, 0.018137056350708006, 0.017797119140625, 0.017798912048339843, 0.017846399307250977, 0.017821792602539063, 0.017625087738037108, 0.017644607543945312, 0.01765043258666992, 0.01749830436706543, 0.01764761543273926, 0.017530879974365234, 0.01761043167114258, 0.017750335693359376, 0.017813119888305664, 0.01768828773498535, 0.01764137649536133, 0.017730207443237306, 0.017646944046020508, 0.01755401611328125, 0.017764448165893554, 0.017580095291137694, 0.01762099266052246, 0.017680383682250975, 0.01762918472290039, 0.01769468879699707, 0.017674272537231445, 0.01767740821838379, 0.017825983047485353, 0.01814192008972168, 0.017952768325805665, 0.017845504760742186, 0.01784284782409668, 0.017738975524902344, 0.017731903076171875, 0.017871423721313475, 0.017762304306030274, 0.017847295761108398, 0.017900543212890627, 0.01797920036315918, 0.018094272613525392, 0.01818454360961914, 0.01815920066833496, 0.01817350387573242, 0.018092351913452147, 0.01808198356628418, 0.01824358367919922, 0.018075872421264648, 0.018568992614746094, 0.01845043182373047, 0.019326143264770508, 0.018405887603759767, 0.01786911964416504, 0.017786880493164063, 0.018089984893798827, 0.017704959869384765, 0.01797475242614746, 0.017280704498291017, 0.01783683204650879, 0.017563232421875, 0.01756159973144531, 0.01759212875366211, 0.01784275245666504, 0.017730527877807618, 0.01762384033203125, 0.017731584548950196, 0.018058496475219725, 0.017830623626708984, 0.017933919906616212, 0.01771564865112305, 0.018091264724731444, 0.02020947265625, 0.018911487579345704, 0.018074304580688476, 0.0177923526763916, 0.017690719604492186, 0.01765238380432129, 0.01767523193359375, 0.017681215286254885, 0.017678464889526367, 0.017760255813598632, 0.017709056854248048, 0.017665760040283203, 0.017878847122192384, 0.017731327056884766, 0.017869152069091798, 0.01774166488647461, 0.018018848419189454, 0.01777164840698242, 0.020443103790283204, 0.019243104934692383, 0.018211648941040038, 
0.017905664443969727, 0.01780940818786621, 0.017981088638305665, 0.01780291175842285, 0.01774457550048828, 0.017676383972167968, 0.017778175354003906, 0.01776880073547363, 0.018618431091308594, 0.017924064636230468, 0.017962112426757812, 0.01769503974914551, 0.017697376251220705, 0.017705120086669923, 0.017877983093261718, 0.01787788772583008, 0.01759846305847168, 0.017933599472045897, 0.017750751495361327, 0.01769209671020508, 0.01761894416809082, 0.01769043159484863, 0.017824512481689453, 0.017841728210449218, 0.017676191329956056, 0.01774236869812012, 0.017721343994140625, 0.017741823196411134, 0.01731769561767578, 0.017754207611083983, 0.017810272216796874, 0.017741247177124022, 0.01762883186340332, 0.017652639389038084, 0.017512447357177736, 0.017903615951538086, 0.017622175216674803, 0.017587039947509767, 0.01760665512084961, 0.017698816299438477, 0.017889408111572264, 0.01778188705444336, 0.017689184188842775, 0.017614240646362304, 0.01763804817199707, 0.01764361572265625, 0.01764352035522461, 0.01773155212402344, 0.017682464599609374, 0.01777663993835449, 0.017847423553466798, 0.017808416366577148, 0.017612640380859374, 0.017823360443115235, 0.017664384841918946, 0.018265151977539064, 0.017910144805908204, 0.017866943359375, 0.017897056579589843, 0.017724191665649414, 0.017811456680297853, 0.017797279357910156, 0.017939584732055664, 0.017928672790527345, 0.018419103622436525, 0.018431999206542968, 0.01883782386779785, 0.018788896560668945, 0.019187231063842774, 0.018307552337646485, 0.01833087921142578, 0.01827507209777832, 0.01861756706237793, 0.018354143142700195, 0.018266271591186524, 0.01844207954406738, 0.018254655838012696, 0.018054943084716796, 0.017974943161010743, 0.017946624755859376, 0.017868959426879882, 0.017766208648681642, 0.01774835205078125, 0.01780963134765625, 0.01787494468688965, 0.018085887908935547, 0.01809552001953125, 0.01796566390991211, 0.017792287826538085, 0.017734367370605467, 0.0178637752532959, 0.017187519073486326, 0.017757535934448242, 0.02026873588562012, 0.017748512268066407, 0.01771881675720215, 0.017681312561035157, 0.017688575744628905, 0.017597503662109375, 0.017673152923583985, 0.01765376091003418, 0.017709056854248048, 0.017681631088256836, 0.01763817596435547, 0.017714176177978515, 0.017757183074951173, 0.019244672775268555, 0.01795622444152832, 0.01784115219116211, 0.017796384811401368, 0.0183221435546875, 0.01781990432739258, 0.01791155242919922, 0.018104320526123048, 0.01763532829284668, 0.01758131217956543, 0.017803167343139647, 0.017664863586425782, 0.017682432174682617, 0.017747968673706056, 0.01770297622680664, 0.01774995231628418, 0.01760870361328125, 0.017638784408569336, 0.017630943298339842, 0.017785760879516603, 0.01790287971496582, 0.017689151763916014, 0.017756032943725585, 0.017656095504760744, 0.018231296539306642, 0.017780736923217775, 0.017763711929321288, 0.017713792800903322, 0.01837161636352539, 0.017734527587890625, 0.01765900802612305, 0.01766499137878418, 0.017721343994140625, 0.017671808242797852, 0.017660287857055663, 0.017856576919555663, 0.017645503997802733, 0.01764521598815918, 0.017636800765991213, 0.017640031814575196, 0.017590591430664063, 0.017663999557495116, 0.01762099266052246, 0.01771891212463379, 0.01777449607849121, 0.017660032272338866, 0.01757801628112793, 0.01785683250427246, 0.01738252830505371, 0.017826656341552734, 0.01761187171936035, 0.01757049560546875, 0.01756729507446289, 0.017610624313354493, 0.017560384750366212, 0.01766099166870117, 0.01799817657470703, 0.017825920104980467, 
0.017862783432006837, 0.017916255950927735, 0.018136415481567383, 0.018156351089477538, 0.01831100845336914, 0.018259967803955078, 0.01822287940979004, 0.018131359100341797, 0.018374591827392577, 0.017985631942749023, 0.01802422332763672, 0.0182488956451416, 0.018082752227783203, 0.018773887634277345, 0.018896352767944335, 0.01794291114807129, 0.017851680755615235, 0.017908319473266602, 0.017807647705078124, 0.017795072555541993, 0.017683616638183595, 0.017777503967285155, 0.017723392486572266, 0.017620319366455077, 0.017726112365722656, 0.017711103439331053, 0.01760428810119629, 0.017723104476928712, 0.017680479049682618, 0.017644031524658203, 0.01780636787414551, 0.017750944137573242, 0.01788115119934082, 0.017780736923217775, 0.0176125431060791, 0.01772323226928711, 0.017744287490844727, 0.017612800598144532, 0.01765990447998047, 0.017688575744628905, 0.017800800323486327, 0.01766032028198242, 0.01770086479187012, 0.017602367401123045, 0.01779756736755371, 0.017721088409423828, 0.017667295455932618, 0.017668895721435547, 0.017713184356689452, 0.018443872451782226, 0.017707071304321288, 0.01776985549926758, 0.017790943145751952, 0.01731545639038086, 0.01784623908996582, 0.01786070442199707, 0.017981760025024413, 0.01911187171936035, 0.020095071792602538, 0.01820262336730957, 0.01797324752807617, 0.017858335494995117, 0.017905887603759767, 0.01785977554321289, 0.017793983459472657, 0.017900543212890627, 0.017943296432495117, 0.018229087829589843, 0.01811689567565918, 0.017938720703125, 0.017825727462768556, 0.01800707244873047, 0.01801206398010254, 0.017777503967285155, 0.017723392486572266, 0.017716320037841796, 0.01772774314880371, 0.017729503631591797, 0.017662368774414062, 0.017700992584228515, 0.017872064590454102, 0.017840768814086912, 0.017893728256225587, 0.017819648742675782, 0.017747968673706056, 0.017657567977905273, 0.017647903442382814, 0.017775808334350586, 0.0175665283203125, 0.017665632247924806, 0.017750175476074218, 0.019268896102905272, 0.0177523193359375, 0.017625823974609375, 0.017711231231689453, 0.017587615966796876, 0.01764566421508789, 0.017602943420410157, 0.01760051155090332, 0.01761894416809082, 0.017635040283203125, 0.0178304328918457, 0.017709888458251954, 0.017830848693847656, 0.01765068817138672, 0.017556480407714844, 0.01769267272949219, 0.017637504577636718, 0.01759539222717285, 0.017600831985473634, 0.017739456176757814, 0.01787900733947754, 0.017815584182739257, 0.01783247947692871, 0.01838863945007324, 0.01798624038696289, 0.017376928329467772, 0.01779542350769043, 0.017909759521484374, 0.017846271514892577, 0.017876991271972655, 0.018257055282592773, 0.017865440368652345, 0.018100351333618165, 0.018283712387084962, 0.01774620819091797, 0.017697311401367186, 0.01788515281677246, 0.017663616180419922, 0.0177545280456543, 0.017675968170166017, 0.01780496025085449, 0.01766671943664551, 0.01764556884765625, 0.01763532829284668, 0.017537023544311522, 0.01757814407348633, 0.017640575408935547, 0.01753571128845215, 0.017663520812988283, 0.017732063293457032, 0.017522655487060546, 0.017714656829833985, 0.017631168365478515, 0.017568159103393554, 0.01764352035522461, 0.01803081512451172, 0.017874591827392577, 0.01802275276184082, 0.018018304824829103, 0.017934080123901366, 0.017892608642578123, 0.01784934425354004, 0.017794559478759766, 0.01809459114074707, 0.018314624786376955, 0.018071327209472656, 0.017893247604370117, 0.017873247146606444, 0.017715839385986327, 0.017674175262451172, 0.01777199935913086, 0.017643295288085937, 0.017603391647338866, 
0.017587808609008788, 0.01755340766906738, 0.01760665512084961, 0.017655231475830077, 0.01769161605834961, 0.01803468894958496, 0.01811667251586914, 0.018001888275146486, 0.018046304702758788, 0.017928192138671875, 0.017984128952026366, 0.01812665557861328, 0.017799360275268555, 0.01774380874633789, 0.017844383239746093, 0.01744691276550293, 0.017829887390136717, 0.01791328048706055, 0.017832511901855468, 0.01760256004333496, 0.017613824844360353, 0.01762611198425293, 0.01761484718322754, 0.01760665512084961, 0.017565696716308594, 0.017548479080200196, 0.017699487686157228, 0.017837472915649414, 0.017634143829345705, 0.017553312301635742, 0.017663999557495116, 0.017587295532226564, 0.017567903518676757, 0.017642240524291992, 0.017842111587524415, 0.01753001594543457, 0.017509279251098634, 0.018018304824829103, 0.017558687210083006, 0.017488319396972655, 0.017491584777832032, 0.017566495895385743, 0.017588224411010742, 0.017622304916381837, 0.017586624145507813, 0.017647647857666017, 0.0177576961517334, 0.017590848922729493, 0.017586368560791016, 0.017584127426147463, 0.017518592834472657, 0.017720607757568358, 0.017560287475585936, 0.017532928466796875, 0.017555456161499023, 0.01785036849975586, 0.01771244812011719, 0.017754816055297853, 0.017760255813598632, 0.017681760787963866, 0.01780803108215332, 0.01798963165283203, 0.017844032287597657, 0.0176495361328125, 0.017848384857177733, 0.017971712112426756, 0.017804864883422852, 0.017839359283447265, 0.018111423492431642, 0.017824928283691407, 0.018084703445434572, 0.017886335372924805, 0.017931135177612304, 0.01774515151977539, 0.017753984451293944, 0.017984031677246094, 0.017819040298461913, 0.017764543533325194]",tokens/s,55.97598173862718,,, 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1015, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 840, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1068.302336,972.947456,0.0,570.425344,525.840896,s,1,8.3147880859375,8.3147880859375,0.0,8.3147880859375,8.3147880859375,8.3147880859375,8.3147880859375,[8.3147880859375],,kWh,2.666408144167083e-05,2.9340606053506265e-06,8.985840522002242e-06,3.8583982569023696e-05,,MB,1371.611136,1014.890496,0.0,597.68832,584.940544,s,10,1.606397933959961,0.16063979339599613,0.0005527566183424234,0.16048513793945313,0.16120621337890625,0.16149971313476563,0.16173451293945312,"[0.161793212890625, 0.16056591796875, 0.1611409912109375, 0.16084243774414062, 0.16024032592773438, 0.16038710021972657, 0.16040435791015625, 0.16109808349609375, 0.16006678771972657, 0.15985871887207032]",tokens/s,1593.6275476209662,kWh,4.913539487499747e-06,5.418632405369581e-07,3.2778498445000063e-06,8.733252572536712e-06,tokens/kWh,29313248.17113823,MB,1410.82624,1029.57056,0.0,612.368384,597.290496,s,10,14.481121337890626,1.4481121337890626,0.002765557196265512,1.448029541015625,1.4513597534179687,1.452547430419922,1.4534975720214844,"[1.4483218994140625, 1.453735107421875, 1.4465081787109375, 1.44409912109375, 1.44796533203125, 1.4500780029296876, 1.4449583740234375, 1.4462657470703124, 1.4510958251953125, 1.44809375]",tokens/s,43.504918251846384,kWh,4.1576940329161965e-05,4.5856402142860356e-06,1.6513860433300018e-05,6.267644097674802e-05,tokens/kWh,1005162.3707123386,,s,630,14.475470323562622,0.02297693702152797,0.0003288888919338714,0.022890815734863283,0.023292008781433105,0.023451161575317382,0.024743578910827645,"[0.024799583435058593, 0.023143936157226562, 0.023081695556640625, 0.022918624877929686, 0.022831615447998048, 0.022720672607421874, 0.023443487167358397, 0.023326528549194335, 0.023569728851318358, 0.023857120513916016, 0.023032543182373046, 0.022878368377685546, 0.022898080825805665, 0.023106143951416015, 0.022838752746582033, 0.022864255905151367, 0.022849536895751952, 0.02280828857421875, 0.022921728134155273, 0.023035072326660157, 0.02310643196105957, 0.022876096725463868, 0.023007104873657227, 0.023051584243774414, 0.02318339157104492, 0.02289516830444336, 0.02291324806213379, 0.023057952880859375, 0.02294108772277832, 0.022782527923583984, 0.022829343795776367, 0.022863872528076173, 0.022940000534057616, 0.022898687362670898, 0.022780672073364257, 0.022889375686645508, 0.022736320495605467, 0.022794815063476564, 0.022804479598999023, 0.02284886360168457, 0.023008991241455078, 0.02284351921081543, 0.022731584548950197, 0.02305449676513672, 0.022768991470336914, 0.022917055130004884, 0.022818687438964844, 0.022777952194213868, 0.02279484748840332, 0.023045183181762696, 0.022948095321655274, 0.022762399673461914, 0.022781728744506836, 0.02290483283996582, 0.022956031799316406, 0.022822912216186524, 0.022800512313842773, 0.023125247955322267, 0.023193504333496092, 0.022936288833618163, 0.022951040267944336, 
0.022868608474731444, 0.022785696029663086, 0.023044095993041993, 0.023319744110107423, 0.023050336837768554, 0.02314035224914551, 0.022989536285400392, 0.023189407348632812, 0.023340608596801756, 0.023577119827270506, 0.023547935485839843, 0.02347158432006836, 0.0232608642578125, 0.023210079193115234, 0.02334726333618164, 0.0232906551361084, 0.023392127990722655, 0.0230743350982666, 0.023042848587036133, 0.022761152267456054, 0.022824960708618162, 0.02290889549255371, 0.022863840103149412, 0.023044063568115235, 0.023025888442993164, 0.023534719467163085, 0.022964479446411133, 0.022784223556518556, 0.02273094367980957, 0.022883968353271486, 0.02344806480407715, 0.024606464385986328, 0.023082624435424803, 0.022933631896972655, 0.023062847137451173, 0.022841535568237304, 0.022798303604125977, 0.02274508857727051, 0.022898687362670898, 0.022852800369262696, 0.022879135131835936, 0.022820768356323243, 0.02309049606323242, 0.022858047485351564, 0.022816160202026366, 0.022706239700317384, 0.022795167922973633, 0.02279449653625488, 0.022859615325927736, 0.022916448593139647, 0.0230467529296875, 0.022980127334594726, 0.02292927932739258, 0.02281692886352539, 0.022878623962402343, 0.02273689651489258, 0.022852832794189454, 0.02274508857727051, 0.02296713638305664, 0.02309113693237305, 0.023303680419921875, 0.023843456268310546, 0.02320089530944824, 0.02318003273010254, 0.023136255264282226, 0.02307276725769043, 0.023121919631958008, 0.022943519592285157, 0.022712223052978514, 0.022789888381958008, 0.02272662353515625, 0.022688352584838867, 0.02292531204223633, 0.022931392669677735, 0.022845056533813475, 0.022714815139770507, 0.022825023651123048, 0.022755199432373047, 0.02299216079711914, 0.022773920059204103, 0.022771360397338868, 0.02288092803955078, 0.02291334342956543, 0.02282700729370117, 0.022754304885864256, 0.02272768020629883, 0.022840927124023438, 0.02295030403137207, 0.023007232666015624, 0.02310086441040039, 0.022925888061523438, 0.02296860885620117, 0.022804063796997072, 0.023142528533935548, 0.02287820816040039, 0.02305753517150879, 0.02299964714050293, 0.022962080001831055, 0.022855520248413086, 0.022878719329833985, 0.022943103790283203, 0.02293212890625, 0.022861791610717774, 0.022897727966308595, 0.02289148712158203, 0.023068351745605467, 0.022826431274414062, 0.022885087966918946, 0.02288630485534668, 0.022950143814086915, 0.022824480056762696, 0.022906944274902342, 0.02279465675354004, 0.022820671081542968, 0.022728832244873046, 0.022711584091186524, 0.022686431884765625, 0.022855072021484374, 0.02286262321472168, 0.02287808036804199, 0.022853631973266602, 0.02273209571838379, 0.022768320083618163, 0.022808832168579103, 0.0227509765625, 0.02565283203125, 0.024979328155517577, 0.02311743927001953, 0.022998239517211912, 0.023208223342895507, 0.02306435203552246, 0.022919904708862304, 0.022838367462158202, 0.022836128234863282, 0.023134464263916014, 0.0227775993347168, 0.02275904083251953, 0.022798015594482423, 0.022766271591186524, 0.02277891159057617, 0.022806848526000977, 0.022878143310546876, 0.023057119369506836, 0.02292736053466797, 0.022882303237915038, 0.02296124839782715, 0.02279311943054199, 0.02291059112548828, 0.022788799285888672, 0.02281228828430176, 0.022822975158691406, 0.022824928283691405, 0.022854751586914062, 0.022913631439208985, 0.0228887996673584, 0.02304217529296875, 0.0232938232421875, 0.022965471267700197, 0.022804351806640626, 0.022792127609252928, 0.022830047607421876, 0.022743040084838868, 0.02299235153198242, 0.023142688751220702, 0.023060640335083007, 
0.022986080169677733, 0.022991903305053712, 0.022981983184814453, 0.022935487747192382, 0.022970272064208985, 0.023142784118652344, 0.022749343872070314, 0.022771711349487304, 0.022982656478881838, 0.02290297508239746, 0.02284880065917969, 0.02287001609802246, 0.022872608184814455, 0.023101280212402344, 0.02295792007446289, 0.02293996810913086, 0.022929407119750975, 0.022908735275268554, 0.022853055953979493, 0.022838016510009766, 0.02313382339477539, 0.0229881591796875, 0.022819456100463868, 0.02284992027282715, 0.022797983169555665, 0.022772064208984377, 0.022928512573242188, 0.02337401580810547, 0.02339295959472656, 0.02331596755981445, 0.023093791961669923, 0.022948896408081055, 0.02295052719116211, 0.02288902473449707, 0.0229434871673584, 0.023023616790771483, 0.022998720169067382, 0.023099647521972657, 0.023083168029785155, 0.023179168701171874, 0.023177215576171875, 0.023164928436279295, 0.023103551864624025, 0.02315667152404785, 0.02347974395751953, 0.022854175567626953, 0.022745119094848634, 0.022714527130126953, 0.02268297576904297, 0.02289695930480957, 0.022900192260742188, 0.022890655517578126, 0.022808799743652342, 0.02275760078430176, 0.022935680389404297, 0.022867488861083984, 0.02302729606628418, 0.023018367767333986, 0.022769664764404295, 0.022934911727905273, 0.022747392654418944, 0.02292473602294922, 0.022822175979614258, 0.02289606475830078, 0.022955520629882813, 0.02279324722290039, 0.022789823532104493, 0.022751232147216797, 0.02270204734802246, 0.02270947265625, 0.022880064010620118, 0.022945823669433593, 0.023249439239501953, 0.022925760269165037, 0.023204864501953124, 0.023020576477050782, 0.022812416076660156, 0.022722784042358397, 0.022845439910888672, 0.022669599533081054, 0.02274070358276367, 0.02270364761352539, 0.022890975952148437, 0.02325299263000488, 0.023522527694702148, 0.023368480682373047, 0.02327315139770508, 0.023083328247070312, 0.02307481575012207, 0.023052383422851562, 0.023291807174682617, 0.023090911865234376, 0.023001375198364257, 0.02289446449279785, 0.023066751480102538, 0.022803871154785157, 0.022884159088134765, 0.02307548713684082, 0.02279814338684082, 0.022778175354003907, 0.022796512603759766, 0.022771488189697264, 0.022755327224731444, 0.022822591781616212, 0.022758975982666015, 0.022983232498168946, 0.02280703926086426, 0.022752384185791015, 0.022757951736450194, 0.022747007369995118, 0.022749311447143556, 0.022829055786132812, 0.023037952423095705, 0.02386684799194336, 0.02290537643432617, 0.0231014404296875, 0.022898687362670898, 0.02286288070678711, 0.022761728286743162, 0.022972864151000978, 0.023299808502197265, 0.02335513687133789, 0.023371807098388674, 0.02301375961303711, 0.022964639663696287, 0.02283139228820801, 0.022888128280639648, 0.022755136489868166, 0.022755071640014647, 0.02286787223815918, 0.022815296173095703, 0.022845279693603514, 0.022760608673095702, 0.02267238426208496, 0.022675455093383787, 0.022724607467651366, 0.022695423126220703, 0.023056896209716796, 0.023402015686035157, 0.02583795166015625, 0.0233287353515625, 0.023772672653198244, 0.024916032791137695, 0.023073343276977538, 0.0228897590637207, 0.022741727828979492, 0.0227326717376709, 0.022755008697509765, 0.02273084831237793, 0.022745439529418945, 0.022788095474243163, 0.02278611183166504, 0.023020095825195312, 0.023185567855834963, 0.02299273681640625, 0.02280841636657715, 0.022865951538085936, 0.02315228843688965, 0.02302934455871582, 0.022780672073364257, 0.022925056457519532, 0.0227270393371582, 0.022719871520996093, 0.022821504592895506, 
0.022708288192749025, 0.022763456344604492, 0.022692928314208983, 0.02261235237121582, 0.022677536010742187, 0.022699647903442383, 0.022864032745361328, 0.02266803169250488, 0.02266262435913086, 0.022878015518188476, 0.02295267105102539, 0.02351103973388672, 0.02397590446472168, 0.02312224006652832, 0.022980384826660157, 0.022921152114868164, 0.022734848022460938, 0.02264396858215332, 0.022686464309692383, 0.02264473533630371, 0.022673311233520507, 0.02266326332092285, 0.022767616271972657, 0.022887807846069336, 0.022841472625732422, 0.022883903503417968, 0.022835424423217773, 0.02279292869567871, 0.022986175537109375, 0.02288467216491699, 0.023006847381591797, 0.022995584487915038, 0.02302889633178711, 0.022965087890625, 0.023240608215332033, 0.02311382484436035, 0.023324352264404297, 0.023298368453979493, 0.02345369529724121, 0.02306812858581543, 0.023046911239624022, 0.02291279983520508, 0.022800384521484376, 0.02292246437072754, 0.02364054489135742, 0.02286412811279297, 0.022814624786376952, 0.02274835205078125, 0.022845632553100587, 0.022809247970581054, 0.022849664688110352, 0.02272643280029297, 0.022915103912353515, 0.02278223991394043, 0.022790367126464844, 0.022759136199951173, 0.022826623916625977, 0.022891168594360353, 0.022773759841918945, 0.022787263870239258, 0.022776927947998047, 0.022875104904174805, 0.022964767456054688, 0.022986976623535157, 0.023015167236328123, 0.02287129592895508, 0.0229101448059082, 0.022929216384887697, 0.022947423934936522, 0.02331235122680664, 0.02329644775390625, 0.02333251190185547, 0.023123807907104492, 0.023361984252929686, 0.023103551864624025, 0.02316441535949707, 0.02295564842224121, 0.022856128692626952, 0.022811071395874023, 0.022782976150512696, 0.022803327560424805, 0.022728832244873046, 0.022831104278564454, 0.022816768646240236, 0.02264371109008789, 0.022789024353027345, 0.022777631759643556, 0.02274924850463867, 0.022898944854736328, 0.02303104019165039, 0.02300595283508301, 0.02282700729370117, 0.022687744140625, 0.022875711441040038, 0.02274870491027832, 0.024890239715576173, 0.0234335994720459, 0.02303299140930176, 0.022954496383666992, 0.022847488403320314, 0.02291433525085449, 0.022921024322509767, 0.022788511276245118, 0.02275139236450195, 0.022825471878051756, 0.023136095046997072, 0.0230581111907959, 0.023255359649658202, 0.022833152770996092, 0.022846687316894532, 0.02277395248413086, 0.022864479064941406, 0.022825088500976563, 0.02290675163269043, 0.023021696090698242, 0.023119968414306642, 0.022980831146240235, 0.02284339141845703, 0.022781408309936524, 0.022746944427490236, 0.022780672073364257, 0.022838560104370118, 0.022872543334960936, 0.022975872039794922, 0.022845855712890627, 0.023024063110351562, 0.023427072525024413, 0.023791519165039063, 0.023356639862060546, 0.023630111694335938, 0.023197439193725584, 0.023139007568359377, 0.02323676872253418, 0.023261247634887697, 0.023143775939941408, 0.022929439544677733, 0.022805055618286132, 0.0230830078125, 0.022816768646240236, 0.022798015594482423, 0.022788415908813475, 0.02287548828125, 0.022848255157470704, 0.022680896759033203, 0.02277436828613281, 0.022733152389526366, 0.02273859214782715, 0.022689504623413084, 0.022792127609252928, 0.02323699188232422, 0.023495744705200196, 0.023534303665161134, 0.023574975967407225, 0.023758399963378907, 0.023736000061035156, 0.023355840682983398, 0.023485567092895506, 0.02344473648071289, 0.02319887924194336, 0.02299958419799805, 0.02309119987487793, 0.02285753631591797, 0.02288425636291504, 0.022806047439575195, 
0.022835968017578125, 0.02275881576538086, 0.022929664611816405, 0.022877824783325194, 0.022807487487792967, 0.022929183959960936, 0.02306435203552246, 0.022728384017944334, 0.02270262336730957, 0.022730560302734376, 0.0227326717376709, 0.022819135665893556, 0.022807647705078125, 0.022864992141723633, 0.023018272399902343, 0.02290496063232422, 0.022855680465698244, 0.0228621768951416, 0.02328329658508301, 0.025274112701416017, 0.02329631996154785, 0.02306185531616211, 0.02320275115966797, 0.02304732894897461, 0.022903520584106444, 0.022896480560302735, 0.02302367973327637, 0.02305836868286133, 0.022826496124267577, 0.02282956886291504, 0.022814336776733397, 0.022800735473632813, 0.02284956741333008, 0.02288217544555664, 0.022797760009765626, 0.022987455368041993, 0.022796287536621093, 0.022837568283081054, 0.023020864486694336, 0.022875808715820314, 0.022893312454223633, 0.022958175659179687, 0.022877216339111328, 0.02294028854370117, 0.022746463775634766, 0.02284339141845703, 0.02272115135192871, 0.022710559844970703, 0.022758495330810546, 0.022780799865722658, 0.022734848022460938, 0.022775808334350587, 0.022705568313598632, 0.022692447662353517, 0.022702016830444337, 0.022805887222290037, 0.022871904373168946, 0.02286672019958496, 0.022865983963012697, 0.023535743713378906, 0.023089023590087892, 0.023046112060546874, 0.023091232299804688, 0.023205312728881836, 0.022978879928588866, 0.022966848373413087, 0.022822591781616212, 0.02296438407897949, 0.022875648498535156, 0.023268863677978514, 0.023106592178344727, 0.0234116153717041, 0.02316147232055664, 0.022989343643188477, 0.022949600219726564, 0.022956064224243164]",tokens/s,43.5219019429379,,, 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( 
File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1592.553472,1830.682624,0.0,1428.160512,1322.516992,s,1,8.579087890625,8.579087890625,0.0,8.579087890625,8.579087890625,8.579087890625,8.579087890625,[8.579087890625],,kWh,4.1737750074995954e-05,4.596587948120488e-06,1.7295291613983466e-05,6.362962963709991e-05,,MB,1700.1472,1851.654144,0.0,1434.451968,1320.892416,s,10,5.887068969726563,0.5887068969726562,0.002141463340372638,0.5880539855957031,0.5899028503417969,0.5923191009521485,0.5942521014404297,"[0.5947353515625, 0.5876989135742188, 0.5864642333984375, 0.5877103271484375, 0.5887935791015625, 0.5885071411132813, 0.5878301391601563, 0.58827783203125, 0.5893659057617188, 0.587685546875]",tokens/s,434.8513688500077,kWh,1.7513177812250824e-05,1.930867402998922e-06,1.162322825282496e-05,3.10672734680747e-05,tokens/kWh,8240182.398467322,MB,1711.067136,1851.654144,0.0,1434.451968,1373.031936,s,10,14.25769384765625,1.425769384765625,0.005509510047553519,1.4245712890625,1.4316825561523439,1.4353286437988282,1.4382455139160157,"[1.4221181640625, 1.4226575927734375, 1.4265272216796876, 1.430872314453125, 1.42288427734375, 1.4389747314453125, 1.4196661376953126, 1.4275069580078126, 1.42625830078125, 1.4202281494140625]",tokens/s,44.18666908769138,kWh,4.105419946816699e-05,4.528824133159441e-06,2.03284917529712e-05,6.591151535429763e-05,tokens/kWh,955826.9091728932,,s,630,14.255010454177844,0.022627000720917233,0.000444445885649772,0.022513312339782716,0.02293103103637695,0.023188226795196533,0.024895756340026876,"[0.02306915283203125, 0.022718816757202148, 0.022874111175537108, 0.02303977584838867, 0.02273711967468262, 0.02283087921142578, 0.0226627197265625, 0.02416307258605957, 0.022568864822387694, 0.02259548759460449, 0.022365631103515624, 0.022317728042602538, 0.022458335876464845, 0.022343103408813476, 0.022264415740966798, 0.022351871490478514, 0.022402496337890626, 0.022411840438842773, 0.022400447845458984, 0.022173248291015624, 0.02230784034729004, 0.02236524772644043, 0.022520767211914063, 0.02249318313598633, 0.022411264419555665, 0.022396928787231447, 0.022280096054077148, 0.022401119232177736, 0.02259676742553711, 0.022428192138671876, 0.022882560729980468, 0.022661184310913084, 0.022362112045288086, 0.022388063430786132, 0.022198816299438477, 0.02248886489868164, 0.02239523124694824, 0.02281062316894531, 0.022400064468383787, 0.022287296295166015, 0.022329343795776366, 0.025315263748168945, 0.023358623504638673, 0.022596511840820312, 0.02256220817565918, 0.02258799934387207, 0.022564767837524414, 0.02250556755065918, 0.022362112045288086, 0.022517087936401368, 
0.022385311126708985, 0.02227519989013672, 0.022375295639038086, 0.02243097686767578, 0.022466848373413086, 0.022350303649902342, 0.022327167510986328, 0.02247283172607422, 0.022616064071655274, 0.022427648544311524, 0.022408800125122072, 0.022448543548583985, 0.022312959671020507, 0.023020864486694336, 0.022981279373168944, 0.02254643249511719, 0.02289664077758789, 0.023041215896606446, 0.02282169532775879, 0.022512895584106445, 0.022417375564575195, 0.022840095520019532, 0.02245180892944336, 0.02250998306274414, 0.02230259132385254, 0.022404767990112304, 0.022594015121459962, 0.022660512924194336, 0.02237295913696289, 0.022437759399414062, 0.022345855712890626, 0.022408384323120117, 0.02226259231567383, 0.023354816436767577, 0.022518335342407228, 0.022411264419555665, 0.0226343994140625, 0.022671295166015626, 0.022478847503662108, 0.02253225517272949, 0.0225545597076416, 0.02241542434692383, 0.02244812774658203, 0.02249932861328125, 0.02243174362182617, 0.022714368820190428, 0.022874111175537108, 0.02264678382873535, 0.022635936737060547, 0.02256752014160156, 0.02253209686279297, 0.022427648544311524, 0.022380544662475587, 0.022433792114257813, 0.02241535949707031, 0.02235923194885254, 0.022303136825561523, 0.022614431381225587, 0.02262188720703125, 0.022763776779174804, 0.022756895065307616, 0.0227043514251709, 0.022753536224365236, 0.022693439483642577, 0.02282512092590332, 0.022801919937133788, 0.022635360717773438, 0.022823968887329103, 0.022587743759155274, 0.02248963165283203, 0.022347871780395507, 0.022403072357177735, 0.022406848907470703, 0.022421152114868163, 0.02236892890930176, 0.0223287353515625, 0.023021568298339845, 0.022808576583862306, 0.02253824043273926, 0.022935552597045897, 0.022708223342895507, 0.022760608673095702, 0.022424415588378908, 0.022394304275512696, 0.022487039566040038, 0.022605567932128905, 0.022520639419555663, 0.022338815689086914, 0.022557216644287108, 0.0239466552734375, 0.022810592651367187, 0.023189695358276367, 0.022745759963989257, 0.022775840759277344, 0.02299673652648926, 0.0228702392578125, 0.02271392059326172, 0.022781791687011718, 0.022698591232299805, 0.022738752365112306, 0.022595775604248046, 0.022444032669067384, 0.02242355155944824, 0.022564863204956053, 0.02242092704772949, 0.022415231704711915, 0.02259014320373535, 0.022573055267333983, 0.02243132781982422, 0.02251152038574219, 0.02250102424621582, 0.022647647857666015, 0.022675296783447266, 0.022472864151000978, 0.022744287490844728, 0.022602527618408204, 0.022500511169433593, 0.022440799713134764, 0.022509567260742186, 0.02231817626953125, 0.02246953582763672, 0.02329190444946289, 0.022640640258789063, 0.022618112564086915, 0.022719680786132814, 0.02278278350830078, 0.02265033531188965, 0.022544031143188478, 0.022567264556884764, 0.022454816818237303, 0.022593536376953126, 0.022456031799316406, 0.02262236785888672, 0.022453664779663086, 0.02254102325439453, 0.022517023086547853, 0.022567295074462892, 0.02260380744934082, 0.022454591751098634, 0.023242528915405274, 0.023134431838989257, 0.02329097557067871, 0.02303267288208008, 0.02302601623535156, 0.02293052864074707, 0.022753856658935548, 0.02255673599243164, 0.022577152252197266, 0.022589439392089843, 0.022512863159179688, 0.023074880599975586, 0.02245257568359375, 0.02238912010192871, 0.022394880294799805, 0.022546207427978516, 0.02238870429992676, 0.022388448715209962, 0.02249497604370117, 0.022514240264892578, 0.02325503921508789, 0.022749088287353517, 0.025444671630859374, 0.022666400909423828, 0.02275209617614746, 
0.022926624298095704, 0.022974496841430665, 0.023009599685668944, 0.022839679718017578, 0.022708223342895507, 0.02282035255432129, 0.02277596855163574, 0.022988479614257814, 0.022852256774902345, 0.022891872406005858, 0.022675840377807618, 0.02264908790588379, 0.022728511810302734, 0.02265519905090332, 0.022479967117309572, 0.02239785575866699, 0.022470111846923827, 0.02237494468688965, 0.022417407989501953, 0.02247270393371582, 0.023197696685791015, 0.022412960052490234, 0.022530399322509765, 0.02273017692565918, 0.022495807647705077, 0.02239897537231445, 0.02229043197631836, 0.022487039566040038, 0.02251296043395996, 0.0226366081237793, 0.022588031768798828, 0.02262015914916992, 0.022519807815551757, 0.022481952667236328, 0.02236240005493164, 0.022409280776977538, 0.0223504638671875, 0.022347776412963868, 0.023099519729614257, 0.02319548797607422, 0.022884384155273437, 0.023621599197387697, 0.02298908805847168, 0.022591424942016602, 0.022483072280883788, 0.022394880294799805, 0.022318464279174804, 0.02235251235961914, 0.022443328857421875, 0.022481407165527344, 0.02231110382080078, 0.02242515182495117, 0.022314943313598633, 0.02236467170715332, 0.022401023864746093, 0.022358015060424806, 0.02248294448852539, 0.022380191802978514, 0.02286422348022461, 0.022368255615234374, 0.022451616287231444, 0.023175167083740233, 0.024475231170654296, 0.022753856658935548, 0.022604223251342773, 0.02265212821960449, 0.022716352462768555, 0.02238345527648926, 0.02248908805847168, 0.022384639739990234, 0.02224127960205078, 0.022429407119750975, 0.022303007125854493, 0.022363359451293946, 0.022381343841552735, 0.02248089599609375, 0.022429695129394533, 0.022376447677612304, 0.022766944885253906, 0.022530719757080077, 0.022296575546264647, 0.02250739288330078, 0.022521984100341796, 0.022784000396728517, 0.0227061767578125, 0.022672704696655274, 0.022824960708618162, 0.02264339256286621, 0.022417407989501953, 0.022556671142578123, 0.022486848831176756, 0.02237664031982422, 0.022425024032592774, 0.02257276725769043, 0.022430559158325196, 0.0225218563079834, 0.022531871795654298, 0.022349088668823243, 0.022481632232666016, 0.02250569534301758, 0.022425600051879883, 0.02336528015136719, 0.023157087326049805, 0.02291097640991211, 0.022550432205200196, 0.022605920791625978, 0.025067520141601563, 0.022847488403320314, 0.02270207977294922, 0.022622207641601562, 0.022511615753173828, 0.022516799926757813, 0.022299583435058595, 0.022468608856201173, 0.022537471771240235, 0.022446752548217774, 0.022411231994628907, 0.022365888595581054, 0.022403263092041017, 0.022582847595214842, 0.022454336166381837, 0.022299264907836912, 0.025324640274047853, 0.022758304595947267, 0.02260083198547363, 0.022866815567016602, 0.02269388771057129, 0.022666431427001952, 0.022580032348632813, 0.022568191528320312, 0.022493343353271484, 0.022602336883544922, 0.022595584869384764, 0.022588544845581055, 0.022438783645629883, 0.022585248947143553, 0.022683679580688478, 0.02258131217956543, 0.02295327949523926, 0.02625119972229004, 0.026949663162231446, 0.022755456924438477, 0.022604480743408203, 0.023961055755615236, 0.02283318328857422, 0.023306655883789062, 0.02275267219543457, 0.022686304092407225, 0.022577152252197266, 0.022785888671875, 0.022386848449707033, 0.022347776412963868, 0.022775808334350587, 0.02241049575805664, 0.022532863616943358, 0.022587007522583007, 0.022370687484741213, 0.02251366424560547, 0.022633567810058593, 0.02239731216430664, 0.022345855712890626, 0.022401439666748048, 0.022376224517822264, 0.022496543884277343, 
0.023789695739746094, 0.023591167449951173, 0.022910272598266602, 0.02249388885498047, 0.02266886329650879, 0.022405311584472655, 0.022334848403930664, 0.02234048080444336, 0.02273689651489258, 0.022459423065185547, 0.022385631561279297, 0.0224420166015625, 0.022456287384033203, 0.022433792114257813, 0.022318304061889647, 0.022362911224365234, 0.022261760711669923, 0.02251366424560547, 0.022402143478393553, 0.02240982437133789, 0.02231737518310547, 0.022353919982910156, 0.02224742317199707, 0.02226790428161621, 0.02266316795349121, 0.022382144927978517, 0.022438304901123047, 0.0223635196685791, 0.022373023986816405, 0.022321151733398437, 0.02237753677368164, 0.022465471267700196, 0.022511615753173828, 0.022337535858154296, 0.02249318313598633, 0.022375871658325195, 0.022381120681762696, 0.022396928787231447, 0.02230271911621094, 0.022560768127441407, 0.02305023956298828, 0.02243302345275879, 0.022454816818237303, 0.022751455307006837, 0.02290892791748047, 0.023162879943847657, 0.02290483283996582, 0.022771072387695313, 0.022818496704101562, 0.022696895599365233, 0.022491071701049806, 0.022433759689331055, 0.022407072067260742, 0.02269817543029785, 0.022482784271240234, 0.02240287971496582, 0.022427839279174806, 0.0223600959777832, 0.022495359420776368, 0.022343679428100584, 0.022349599838256837, 0.02244630432128906, 0.022405120849609376, 0.023186431884765626, 0.023136255264282226, 0.02397292709350586, 0.02283737564086914, 0.022723392486572267, 0.02249648094177246, 0.022638559341430664, 0.022367040634155275, 0.022474176406860353, 0.02243846321105957, 0.024389631271362306, 0.02261155128479004, 0.022484800338745118, 0.02248534393310547, 0.022256959915161134, 0.023073183059692384, 0.022561248779296876, 0.022509632110595704, 0.022449823379516603, 0.022446432113647462, 0.022360063552856444, 0.022372352600097657, 0.022394880294799805, 0.022453439712524413, 0.02259436798095703, 0.02253523254394531, 0.022539167404174804, 0.022499359130859375, 0.02254140853881836, 0.022389663696289062, 0.02242355155944824, 0.02249932861328125, 0.02241279983520508, 0.02247862434387207, 0.02282979202270508, 0.022382591247558595, 0.022574207305908204, 0.02254038429260254, 0.022418176651000977, 0.022337568283081054, 0.022462240219116213, 0.022649055480957032, 0.022921215057373046, 0.02290483283996582, 0.02287820816040039, 0.02317103958129883, 0.023053760528564452, 0.023017215728759765, 0.023005504608154297, 0.022808704376220703, 0.022786239624023437, 0.022720447540283205, 0.022708127975463867, 0.022557056427001954, 0.02251366424560547, 0.02255462455749512, 0.022463552474975584, 0.02260883140563965, 0.022577152252197266, 0.022351871490478514, 0.02240716743469238, 0.022539648056030273, 0.022391008377075194, 0.024040607452392577, 0.022946815490722656, 0.022506784439086915, 0.022400768280029296, 0.022797279357910157, 0.02250048065185547, 0.022422399520874024, 0.022341087341308595, 0.022613920211791993, 0.022348415374755858, 0.022478847503662108, 0.022331071853637696, 0.02237446403503418, 0.02243609619140625, 0.02243097686767578, 0.022919488906860352, 0.022460319519042968, 0.023509376525878905, 0.025780384063720702, 0.022829055786132812, 0.02259548759460449, 0.022437759399414062, 0.02226799964904785, 0.022452159881591795, 0.022326559066772462, 0.022518688201904297, 0.022540288925170897, 0.02256662368774414, 0.022539775848388673, 0.02230556869506836, 0.022509408950805665, 0.022396640777587892, 0.022419071197509764, 0.022458240509033202, 0.022352096557617187, 0.022371040344238282, 0.023512191772460937, 0.02230281639099121, 
0.02243779182434082, 0.02254732894897461, 0.022405120849609376, 0.022380544662475587, 0.022280191421508787, 0.022406784057617188, 0.022303104400634766, 0.02253004837036133, 0.02235580825805664, 0.02218943977355957, 0.02247145652770996, 0.02267091178894043, 0.02271072006225586, 0.022718463897705078, 0.02283478355407715, 0.022870304107666016, 0.02288447952270508, 0.02266012763977051, 0.022694879531860352, 0.022683647155761717, 0.022796287536621093, 0.022836767196655273, 0.02292937660217285, 0.022608383178710938, 0.022474687576293947, 0.023437599182128906, 0.02297088050842285, 0.022859935760498048, 0.022863872528076173, 0.022759424209594727, 0.022631935119628906, 0.022718463897705078, 0.022641151428222657, 0.022347007751464844, 0.022382368087768556, 0.022395200729370117, 0.022413856506347658, 0.022519935607910158, 0.022632448196411133, 0.02278915214538574, 0.022615007400512695, 0.022394880294799805, 0.02233283233642578, 0.02238319969177246, 0.022312959671020507, 0.02291097640991211, 0.023418815612792968, 0.022555936813354494, 0.022471168518066405, 0.022455968856811524, 0.02242419242858887, 0.022529951095581056, 0.022568864822387694, 0.022505664825439455, 0.022368255615234374, 0.022478847503662108, 0.022517568588256837, 0.02250156784057617, 0.022757375717163086, 0.022798336029052735, 0.02255411148071289, 0.022419488906860352, 0.0225960636138916, 0.02246553611755371, 0.022379520416259766, 0.02231705665588379, 0.02227347183227539, 0.022307296752929688, 0.02244207954406738, 0.022222208023071288, 0.02238528060913086, 0.02225472068786621, 0.02223807907104492, 0.022357887268066406, 0.02232048034667969, 0.022317792892456053, 0.02232041549682617, 0.022737472534179688, 0.02274940872192383, 0.02261743927001953, 0.022454944610595703, 0.0224071044921875, 0.022548479080200197, 0.022531583786010743, 0.02268832015991211, 0.02253004837036133, 0.02242953681945801, 0.02245964813232422]",tokens/s,44.19498688023478,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,6756.352,7532.838912,0.0,7147.094016,7138.9184,s,1,11.2779287109375,11.2779287109375,0.0,11.2779287109375,11.2779287109375,11.2779287109375,11.2779287109375,[11.2779287109375],,kWh,0.00012285618032916926,1.3544524021034196e-05,4.141058868400993e-05,0.00017781129303421337,,MB,1658.454016,8235.384832,0.0,7818.182656,7724.300288,s,10,6.599942260742187,0.6599942260742186,0.0011834421750699803,0.6601267700195312,0.6611359252929687,0.6611798706054688,0.6612150268554687,"[0.657084228515625, 0.6592981567382813, 0.6593634643554688, 0.6597883911132812, 0.6612238159179687, 0.6611261596679687, 0.659956298828125, 0.6607059326171875, 0.6602972412109375, 
0.6610985717773438]",tokens/s,387.88218121655495,kWh,1.9300234251301162e-05,2.1278048142389366e-06,1.2834732489998973e-05,3.426277155553907e-05,tokens/kWh,7471666.429116238,MB,1669.709824,8382.185472,0.0,7964.983296,7904.605696,s,10,31.234401855468747,3.123440185546875,0.003393312229633914,3.1237308349609374,3.126921484375,3.127561328125,3.128073203125,"[3.12097216796875, 3.1151640625, 3.122500244140625, 3.123650146484375, 3.123130615234375, 3.124876220703125, 3.12531640625, 3.128201171875, 3.1238115234375, 3.126779296875]",tokens/s,20.170067700198167,kWh,9.113876920202566e-05,1.005372563922487e-05,6.063607628660275e-05,0.0001618285711278533,tokens/kWh,389300.84818104585,,s,630,31.23043832015992,0.04957212431771415,0.0006837881821820815,0.04951942443847657,0.05028725891113281,0.050501586723327635,0.05228945510864258,"[0.05229404830932617, 0.04924726486206055, 0.048699390411376955, 0.04861433410644531, 0.04938265609741211, 0.049009407043457034, 0.048062496185302735, 0.04846182250976563, 0.048435199737548826, 0.048621025085449215, 0.04875929641723633, 0.04865622329711914, 0.04888595199584961, 0.04975571060180664, 0.04939206314086914, 0.04888576126098633, 0.050941471099853516, 0.04860956954956055, 0.04957408142089844, 0.05008303833007813, 0.05009692764282227, 0.04969007873535156, 0.04933635330200195, 0.04950886535644531, 0.04964076614379883, 0.04886393737792969, 0.04845772933959961, 0.0488133430480957, 0.04878204727172852, 0.04905801773071289, 0.04899612808227539, 0.04888723373413086, 0.04993286514282227, 0.04931584167480469, 0.04939913558959961, 0.04944963073730469, 0.05062022399902344, 0.050356414794921874, 0.049329502105712894, 0.04990428924560547, 0.050477054595947264, 0.04998553466796875, 0.04983145523071289, 0.04940390396118164, 0.0495533447265625, 0.04993097686767578, 0.04915529632568359, 0.04918291091918945, 0.049230239868164063, 0.04956595230102539, 0.04944406509399414, 0.049885440826416015, 0.049487903594970704, 0.05050624084472656, 0.050290431976318356, 0.050026496887207034, 0.049964862823486327, 0.05171775817871094, 0.05016428756713867, 0.049748031616210935, 0.050286529541015625, 0.05026128005981445, 0.04974460983276367, 0.051806209564208984, 0.049367263793945314, 0.05043996810913086, 0.04859904098510742, 0.04875471878051758, 0.04871881484985351, 0.048923809051513674, 0.04835311889648437, 0.04866444778442383, 0.04911030578613281, 0.04933334350585938, 0.04896044921875, 0.04862854385375977, 0.04880998229980469, 0.048634078979492186, 0.049198654174804686, 0.04935027313232422, 0.049224159240722654, 0.0500153923034668, 0.04955046463012695, 0.05061145782470703, 0.048900543212890626, 0.048640159606933596, 0.049086463928222655, 0.04927897644042969, 0.049186817169189455, 0.049432575225830076, 0.049118270874023436, 0.04869625473022461, 0.04875785446166992, 0.04889692687988281, 0.049685791015625, 0.049226207733154295, 0.049051807403564456, 0.049056991577148434, 0.04967417526245117, 0.049193920135498045, 0.0491102409362793, 0.05019279861450195, 0.05076623916625977, 0.04979475021362305, 0.04998096084594727, 0.04955791854858398, 0.04980310440063476, 0.050076160430908206, 0.04947558212280274, 0.049530879974365234, 0.04914755249023438, 0.04939107131958008, 0.049196063995361326, 0.04932182312011719, 0.04943657684326172, 0.04954841613769531, 0.04942742538452148, 0.049435680389404296, 0.0502149772644043, 0.050689151763916016, 0.049923519134521484, 0.05029683303833008, 0.05013948822021484, 0.049632991790771484, 0.04983840179443359, 0.04998345565795898, 0.05265817642211914, 0.049253761291503904, 
0.04834572982788086, 0.04858230209350586, 0.04887180709838867, 0.049219104766845705, 0.048736705780029296, 0.04883433532714844, 0.0487303352355957, 0.049273918151855466, 0.04952115249633789, 0.04913747024536133, 0.04945369720458984, 0.04914960098266601, 0.04863216018676758, 0.04906335830688477, 0.04931436920166016, 0.049315231323242184, 0.04944342422485352, 0.05050102233886719, 0.04982230377197266, 0.04942006301879883, 0.04920499038696289, 0.04938979339599609, 0.04910515213012695, 0.04972499084472656, 0.04910150527954102, 0.04879129409790039, 0.0489246711730957, 0.04924224090576172, 0.04917769622802735, 0.049411937713623046, 0.04985747146606445, 0.04909366226196289, 0.04890419387817383, 0.049802207946777345, 0.04958003234863281, 0.049616897583007816, 0.04997110366821289, 0.04968425750732422, 0.05024169540405273, 0.049780609130859375, 0.04983427047729492, 0.04938476943969727, 0.04955206298828125, 0.05004102325439453, 0.050171104431152344, 0.04973833465576172, 0.049602497100830076, 0.04910905456542969, 0.049324031829833984, 0.050302207946777346, 0.04969548797607422, 0.04930319976806641, 0.04971526336669922, 0.050049312591552736, 0.04984012985229492, 0.05012249755859375, 0.050476768493652346, 0.05059449768066406, 0.05013484954833984, 0.05042300796508789, 0.05053523254394531, 0.0521673583984375, 0.0492275505065918, 0.04897177505493164, 0.04862486267089844, 0.04881919860839844, 0.0487256965637207, 0.04908284759521484, 0.0490239372253418, 0.04876995086669922, 0.04944486236572266, 0.04918272018432617, 0.04920729446411133, 0.04977641677856445, 0.048883201599121094, 0.04900486373901367, 0.04887184143066406, 0.04991999816894531, 0.049307647705078124, 0.049805313110351565, 0.04964352035522461, 0.049999168395996094, 0.049538944244384764, 0.049345088958740235, 0.04894732666015625, 0.04934672164916992, 0.049096672058105466, 0.05008915328979492, 0.049175201416015626, 0.04853721618652344, 0.04922214508056641, 0.04969884872436523, 0.04984832000732422, 0.04929523086547852, 0.0492562255859375, 0.04926473617553711, 0.04995635223388672, 0.05000217437744141, 0.05028710556030273, 0.04942870330810547, 0.05004880142211914, 0.0498458251953125, 0.05029318237304688, 0.04990566253662109, 0.049154369354248044, 0.050286270141601565, 0.04981788635253906, 0.049484798431396484, 0.049248416900634764, 0.04950278472900391, 0.04987279891967773, 0.04990969467163086, 0.049778079986572264, 0.04945987319946289, 0.049517696380615234, 0.04978281784057617, 0.04982470321655273, 0.04966195297241211, 0.050677726745605466, 0.05019241714477539, 0.049823646545410154, 0.05018624114990235, 0.049979488372802736, 0.05026816177368164, 0.05372111892700195, 0.049839134216308596, 0.048888416290283204, 0.04875446319580078, 0.04873801422119141, 0.04883545684814453, 0.04895052719116211, 0.04859980773925781, 0.04915798568725586, 0.0493487663269043, 0.04918502426147461, 0.04910579299926758, 0.048939231872558595, 0.04917353439331055, 0.04903715133666992, 0.04926211166381836, 0.04953718566894531, 0.04895340728759766, 0.04970703887939453, 0.05048297500610351, 0.05055027389526367, 0.04938947296142578, 0.04914591979980469, 0.04889676666259766, 0.049360736846923825, 0.04889574432373047, 0.04888348770141602, 0.0491280632019043, 0.049565696716308595, 0.04928694534301758, 0.04978825759887695, 0.0497918701171875, 0.049626590728759766, 0.049533470153808594, 0.0490885124206543, 0.04907827377319336, 0.04933456039428711, 0.049649375915527344, 0.049893375396728515, 0.04976230239868164, 0.050253822326660154, 0.04948992156982422, 0.049543102264404296, 
0.04974188613891602, 0.04979017639160156, 0.04965420913696289, 0.04912112045288086, 0.049174945831298826, 0.04928521728515625, 0.05003468704223633, 0.04974387359619141, 0.049653568267822266, 0.050305118560791014, 0.049860702514648435, 0.04962508773803711, 0.050783809661865235, 0.050534561157226564, 0.05028483200073242, 0.049737728118896485, 0.04994182586669922, 0.04982444763183594, 0.04995072174072265, 0.04961452865600586, 0.05257577514648438, 0.0496830062866211, 0.048714942932128906, 0.04853744125366211, 0.04910374450683594, 0.04842700958251953, 0.048823455810546874, 0.04878752136230469, 0.04873516845703125, 0.04939353561401367, 0.049568992614746094, 0.0495807991027832, 0.049168384552001954, 0.0491069450378418, 0.04921964645385742, 0.04975609588623047, 0.04964556884765625, 0.04893491363525391, 0.0492151985168457, 0.049895423889160156, 0.05014550399780274, 0.04950460815429687, 0.04943027114868164, 0.049022079467773434, 0.049158432006835937, 0.04919353485107422, 0.0490967025756836, 0.04844454574584961, 0.04903926467895508, 0.04920828628540039, 0.049560577392578124, 0.049331199645996096, 0.04938956832885742, 0.049360126495361326, 0.04991052627563477, 0.04989888000488281, 0.049938495635986326, 0.049621566772460934, 0.04990950393676758, 0.05003852844238281, 0.050471424102783206, 0.04956332778930664, 0.04957011032104492, 0.049408000946044923, 0.05015283203125, 0.049371776580810545, 0.05039718246459961, 0.04911248016357422, 0.04961251068115234, 0.0495296630859375, 0.04942416000366211, 0.049661502838134766, 0.05014601516723633, 0.05022515106201172, 0.05000150299072265, 0.05050204849243164, 0.049928192138671876, 0.05100339126586914, 0.05024153518676758, 0.050288639068603515, 0.05017497634887695, 0.04965494537353515, 0.05003180694580078, 0.05318355178833008, 0.04977555084228515, 0.04889798355102539, 0.048834625244140624, 0.04851507186889648, 0.048418815612792966, 0.049339649200439456, 0.04860166549682617, 0.04893920135498047, 0.049151649475097654, 0.04932006454467774, 0.049268959045410156, 0.04897792053222656, 0.04905497741699219, 0.04856089782714844, 0.0490250244140625, 0.04907759857177734, 0.050231681823730466, 0.05029679870605469, 0.05006572723388672, 0.050282497406005856, 0.04941196823120117, 0.04954278564453125, 0.04928291320800781, 0.04907676696777344, 0.04882163238525391, 0.04958662414550781, 0.049495872497558595, 0.049670654296875, 0.04905990219116211, 0.04958563232421875, 0.05042633438110351, 0.04979916763305664, 0.048885566711425785, 0.04866393661499024, 0.04907321548461914, 0.049513439178466796, 0.050004032135009764, 0.049982177734375, 0.05024787139892578, 0.05031711959838867, 0.05003382492065429, 0.05010678482055664, 0.049443264007568356, 0.04947148895263672, 0.04962303924560547, 0.050286270141601565, 0.05019065475463867, 0.04974387359619141, 0.0497151985168457, 0.049549312591552735, 0.049259807586669924, 0.04968649673461914, 0.049576702117919924, 0.04949414443969727, 0.04930057525634766, 0.04982352066040039, 0.05015974426269531, 0.05004777526855469, 0.05024736022949219, 0.05036614227294922, 0.05003945541381836, 0.05022521591186523, 0.05248758316040039, 0.04952288055419922, 0.04850735855102539, 0.048560222625732424, 0.04900454330444336, 0.048936351776123044, 0.04923043060302734, 0.04927900695800781, 0.04854102325439453, 0.048753311157226566, 0.04925027084350586, 0.049344512939453126, 0.049111038208007815, 0.04973891067504883, 0.04963024139404297, 0.049372608184814454, 0.04928134536743164, 0.049194751739501955, 0.04939353561401367, 0.050375102996826175, 0.04977667236328125, 
0.04914992141723633, 0.04919244766235351, 0.048717567443847656, 0.04899699020385742, 0.04946255874633789, 0.04976688003540039, 0.04957865524291992, 0.04987158584594727, 0.0517927360534668, 0.04921769714355469, 0.0495552978515625, 0.049045631408691406, 0.04989750289916992, 0.04958425521850586, 0.04976998519897461, 0.0498939208984375, 0.04981129455566406, 0.05014527893066406, 0.05041743850708008, 0.050157791137695314, 0.04975001525878906, 0.049397758483886715, 0.04935430526733398, 0.04967264175415039, 0.05030691146850586, 0.04980060958862305, 0.049514270782470705, 0.04972438430786133, 0.04955046463012695, 0.04948432159423828, 0.049447265625, 0.04993648147583008, 0.05026544189453125, 0.05033407974243164, 0.049514366149902345, 0.05014764785766602, 0.05004492950439453, 0.05036576080322266, 0.05098566436767578, 0.04988313674926758, 0.04962508773803711, 0.04945305633544922, 0.052278209686279296, 0.04900601577758789, 0.04868316650390625, 0.04880217742919922, 0.04887551879882813, 0.04850483322143555, 0.04843507385253906, 0.048369792938232424, 0.049598464965820314, 0.04941033554077148, 0.04968742370605469, 0.04897468948364258, 0.04906716918945313, 0.050068321228027346, 0.04921548843383789, 0.04883638381958008, 0.049412353515625, 0.04894512176513672, 0.0494161262512207, 0.051516864776611326, 0.04955353546142578, 0.04940780639648437, 0.04943926239013672, 0.04952284622192383, 0.04918476867675781, 0.049030303955078125, 0.049302303314208984, 0.04952262496948242, 0.04927203369140625, 0.049458080291748044, 0.04926054382324219, 0.049293312072753906, 0.049465248107910156, 0.04970915222167969, 0.049563518524169924, 0.049876415252685546, 0.049581024169921876, 0.04957539367675781, 0.04968038558959961, 0.04997536087036133, 0.049882720947265625, 0.04989193725585937, 0.04964352035522461, 0.04957107162475586, 0.04950835037231445, 0.04940876770019531, 0.04983516693115234, 0.0494857292175293, 0.049392574310302736, 0.05018435287475586, 0.04991984176635742, 0.04955136108398438, 0.05036860656738281, 0.049708992004394534, 0.049979358673095706, 0.049772544860839846, 0.049829246520996094, 0.049662593841552735, 0.050429344177246094, 0.05006192016601563, 0.05055862426757812, 0.05003475189208984, 0.050011520385742185, 0.0542234878540039, 0.04984515380859375, 0.04880624008178711, 0.048740001678466795, 0.04848729705810547, 0.04848012924194336, 0.048859329223632814, 0.04921343994140625, 0.04913356781005859, 0.0488037109375, 0.04998960113525391, 0.04967951965332031, 0.04916940689086914, 0.048965633392333986, 0.04901004791259766, 0.04917926406860352, 0.049314945220947266, 0.049580734252929685, 0.050307262420654295, 0.04992147064208984, 0.05027305603027344, 0.04983990478515625, 0.0493201904296875, 0.04915126419067383, 0.048839134216308595, 0.04870553588867187, 0.049037311553955076, 0.04894499206542969, 0.048906208038330075, 0.04931945419311524, 0.049618881225585935, 0.04966064071655273, 0.04969267272949219, 0.049238113403320315, 0.04964956665039062, 0.04961795043945313, 0.04956454467773438, 0.04988240051269531, 0.05064966583251953, 0.050587039947509765, 0.05024003219604492, 0.049645729064941406, 0.04951055908203125, 0.049358848571777345, 0.04935270309448242, 0.04917657470703125, 0.04945510482788086, 0.049317886352539066, 0.04919222259521484, 0.04946409606933594, 0.05045996856689453, 0.04999977493286133, 0.0497999038696289, 0.04984239959716797, 0.05048092651367188, 0.049854465484619144, 0.04994249725341797, 0.049801246643066406, 0.05031731033325195, 0.05060403060913086, 0.050240928649902344, 0.049782623291015626, 
0.05035919952392578]",tokens/s,20.17262753540419,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4278.43584,4733.140992,0.0,4347.396096,4328.833024,s,1,10.1750439453125,10.1750439453125,0.0,10.1750439453125,10.1750439453125,10.1750439453125,10.1750439453125,[10.1750439453125],,kWh,8.300547219995071e-05,9.142800463155415e-06,2.719835509196189e-05,0.00011934662775506803,,MB,1406.38208,5219.680256,0.0,4804.575232,4748.27776,s,10,3.9685216674804686,0.39685216674804685,0.004237245006404738,0.3973748016357422,0.39992364501953126,0.40153520202636717,0.40282444763183595,"[0.38561407470703124, 0.39849429321289065, 0.3971178283691406, 0.39640155029296875, 0.39763177490234375, 0.39804180908203124, 0.39572735595703123, 0.3967807006835937, 0.39956552124023437, 0.40314675903320313]",tokens/s,645.0764830081653,kWh,1.1504594151123314e-05,1.2687551058847038e-06,7.653873644460821e-06,2.0427222901468838e-05,tokens/kWh,12532295.811076311,MB,1406.38208,5328.73216,0.0,4913.627136,4878.100992,s,10,19.958466796875,1.9958466796875,0.007244767989749116,1.9945481567382812,2.0042439086914063,2.008423162841797,2.0117665661621094,"[1.9989632568359375, 2.0126024169921877, 1.98665869140625, 1.9944241943359375, 1.993689697265625, 2.003315185546875, 1.9948114013671876, 1.994672119140625, 1.9917440185546875, 1.9875858154296875]",tokens/s,31.565550921909608,kWh,5.8782871383879096e-05,6.4825083149375076e-06,3.897019143253919e-05,0.00010423557113135581,tokens/kWh,604400.1996267524,,s,630,19.95400669479371,0.031673026499672546,0.0007923343902497728,0.03150599956512451,0.03205948143005371,0.03241843147277832,0.034947525863647466,"[0.034504638671875, 0.03198796844482422, 0.031593280792236327, 0.03150627136230469, 0.03173990440368652, 0.031655231475830076, 0.03189113616943359, 0.031529151916503906, 0.03134342384338379, 0.03170918464660644, 0.03188707160949707, 0.031662368774414064, 0.03159215927124023, 0.031629600524902345, 0.0322633285522461, 0.03176499176025391, 0.03168905639648437, 0.03172259140014649, 0.03174083137512207, 0.0319815673828125, 0.031837375640869144, 0.03157689666748047, 0.03230310440063477, 0.031741439819335936, 0.032002559661865236, 0.03190105628967285, 0.03170495986938476, 0.031517183303833005, 0.03150572776794434, 0.03161520004272461, 0.031365055084228516, 0.03156662368774414, 0.0315043830871582, 0.031320064544677735, 0.031473663330078124, 0.031428255081176755, 0.03144675254821777, 0.03142239952087402, 0.03140678405761719, 0.03144499206542969, 0.031526912689208986, 0.03133235168457031, 0.031543296813964845, 0.03183001518249512, 0.03167840003967285, 0.03177068710327149, 0.03162931251525879, 0.03186188888549805, 0.03152371215820313, 0.03165798377990723, 0.03178700828552246, 0.03202252960205078, 0.032040576934814456, 0.0318363208770752, 0.03188883209228516, 0.031738527297973634, 0.031651647567749026, 0.03167878341674805, 0.0318396167755127, 0.03163580894470215, 
0.031582304000854496, 0.031582399368286135, 0.031559104919433596, 0.03427423858642578, 0.03202211380004883, 0.03180175971984863, 0.03209142303466797, 0.032739200592041016, 0.031769439697265624, 0.032379966735839844, 0.03237065505981445, 0.03200739288330078, 0.031751935958862304, 0.03162118339538574, 0.03182931137084961, 0.03151935958862305, 0.03167628860473633, 0.03169020843505859, 0.03150422477722168, 0.031660863876342774, 0.0314768009185791, 0.03153308868408203, 0.03158905601501465, 0.031549087524414064, 0.031545791625976566, 0.03141228866577148, 0.03152041625976563, 0.03166780853271484, 0.031609664916992186, 0.03138553619384766, 0.031406143188476565, 0.03159654426574707, 0.03144659233093262, 0.03163961601257324, 0.03164512062072754, 0.03192518424987793, 0.03243139266967773, 0.03240419387817383, 0.03192127990722656, 0.03185552024841309, 0.03195305633544922, 0.03293753433227539, 0.03246425628662109, 0.0320781135559082, 0.0317609920501709, 0.03232460784912109, 0.031921152114868165, 0.03187711906433106, 0.031512479782104495, 0.03154134368896484, 0.03136716842651367, 0.03140377616882324, 0.0320797119140625, 0.03158009529113769, 0.03206601715087891, 0.03948524856567383, 0.03180972862243652, 0.03144908714294434, 0.0316246395111084, 0.03159667205810547, 0.031558080673217775, 0.031507648468017575, 0.03171615982055664, 0.031448768615722655, 0.03152707290649414, 0.031447168350219726, 0.03474643325805664, 0.03206480026245117, 0.031650463104248044, 0.03136409568786621, 0.03132310485839844, 0.03117625617980957, 0.03136108779907226, 0.03246531295776367, 0.03243008041381836, 0.031764480590820314, 0.03203071975708008, 0.031365184783935546, 0.03147555160522461, 0.03136428833007812, 0.031241119384765623, 0.03143270492553711, 0.031188768386840822, 0.031166688919067383, 0.03116646385192871, 0.031045631408691408, 0.031368896484375, 0.03147603225708008, 0.03182796859741211, 0.031432191848754884, 0.0316441593170166, 0.03171455955505371, 0.03145395278930664, 0.03198671913146973, 0.03150752067565918, 0.03138956832885742, 0.03131599998474121, 0.031228992462158205, 0.03125529670715332, 0.031149919509887696, 0.031158624649047853, 0.03104934310913086, 0.031169151306152342, 0.031330047607421876, 0.03128233528137207, 0.03138665580749512, 0.031340351104736326, 0.031254528045654296, 0.031149311065673826, 0.031154943466186524, 0.031115264892578126, 0.0313093433380127, 0.0312604808807373, 0.03134864044189453, 0.03133884811401367, 0.03155401611328125, 0.03167430305480957, 0.031518207550048825, 0.03148643112182617, 0.031363199234008786, 0.03166630363464355, 0.03166924858093262, 0.03147855949401855, 0.03199788856506348, 0.03205945587158203, 0.031774751663208006, 0.03172259140014649, 0.031589248657226565, 0.03158758354187012, 0.03508284759521484, 0.032074016571044923, 0.03179206466674805, 0.03158841514587402, 0.031441631317138674, 0.031364416122436525, 0.031300287246704105, 0.031172447204589844, 0.031332511901855466, 0.031426559448242186, 0.03181977653503418, 0.03167231941223145, 0.0319213752746582, 0.03182406425476074, 0.03173407936096191, 0.03161084747314453, 0.03169664001464844, 0.031492671966552734, 0.03158595275878906, 0.03178940773010254, 0.031600831985473636, 0.03143417549133301, 0.03151907157897949, 0.03152067184448242, 0.03142255973815918, 0.031592512130737306, 0.03149411201477051, 0.03154115104675293, 0.03145123291015625, 0.031596607208251956, 0.032815040588378905, 0.0340766716003418, 0.03163571166992187, 0.031795072555541995, 0.03155110359191895, 0.031566015243530275, 0.031553600311279295, 0.03194675254821777, 
0.03130572891235352, 0.03129548835754394, 0.031270912170410156, 0.031645696640014646, 0.0314202880859375, 0.03142803192138672, 0.03175868797302246, 0.03220864105224609, 0.03184422492980957, 0.03196944046020508, 0.03161555290222168, 0.03144307136535644, 0.03151449584960937, 0.03141836738586426, 0.031219104766845703, 0.03130019187927246, 0.031252479553222655, 0.031291391372680666, 0.03136716842651367, 0.031176351547241212, 0.0311298885345459, 0.031242303848266602, 0.031413951873779294, 0.03135315132141113, 0.031410175323486327, 0.03476326370239258, 0.03191398429870605, 0.03188281631469726, 0.03156796836853027, 0.031607135772705075, 0.03154086494445801, 0.03171958351135254, 0.03176470375061035, 0.03163171195983887, 0.03222288131713867, 0.03184633636474609, 0.03163676834106445, 0.031909727096557615, 0.03159516716003418, 0.031840543746948245, 0.031559968948364256, 0.031626976013183594, 0.03140812873840332, 0.03149004745483398, 0.03146076774597168, 0.03148201560974121, 0.03146998405456543, 0.03178249549865723, 0.03162361526489258, 0.03144198417663574, 0.03136195182800293, 0.031395872116088866, 0.03152422332763672, 0.03148249626159668, 0.03155104064941406, 0.03161734390258789, 0.03198124885559082, 0.03213545608520508, 0.032888671875, 0.03211705780029297, 0.03206553649902344, 0.03191324806213379, 0.03160288047790527, 0.031554239273071286, 0.031480960845947266, 0.031301599502563476, 0.03189632034301758, 0.03129068756103515, 0.031496896743774414, 0.031362464904785156, 0.03134934425354004, 0.03140812873840332, 0.03142000007629395, 0.031297536849975584, 0.031439104080200196, 0.031213407516479493, 0.03135110473632813, 0.031295711517333985, 0.03125430488586426, 0.031364320755004886, 0.03132259178161621, 0.031281503677368164, 0.03121504020690918, 0.031170303344726563, 0.031292192459106444, 0.031337472915649416, 0.031564352035522464, 0.03161542320251465, 0.03520857620239258, 0.03299596786499023, 0.03181977653503418, 0.03161686325073242, 0.03158633613586426, 0.03141446495056152, 0.031500223159790036, 0.031243967056274413, 0.03135238456726074, 0.031383712768554686, 0.03160534477233887, 0.0313118724822998, 0.03130102348327637, 0.03142300796508789, 0.031201471328735353, 0.031352703094482424, 0.0314204158782959, 0.03134409523010254, 0.03152256011962891, 0.03143955230712891, 0.03188086318969727, 0.0328135986328125, 0.0315817928314209, 0.0314453125, 0.032061439514160156, 0.03151241683959961, 0.03145084762573242, 0.031624799728393556, 0.031890560150146484, 0.031652992248535156, 0.03142457580566406, 0.031341087341308596, 0.03141427230834961, 0.03133011245727539, 0.0312772159576416, 0.031420127868652344, 0.031277376174926756, 0.031508480072021484, 0.03148185539245606, 0.03163340759277344, 0.03188310432434082, 0.03209231948852539, 0.03243212890625, 0.03187302398681641, 0.03200204849243164, 0.031954784393310544, 0.031899423599243165, 0.03270243072509765, 0.032817569732666016, 0.03189145660400391, 0.03217635345458984, 0.03188105583190918, 0.03342329788208008, 0.031917695999145505, 0.03169747161865234, 0.031618560791015625, 0.03181113624572754, 0.032041728973388674, 0.031876863479614256, 0.03178483200073242, 0.03172764778137207, 0.031922527313232425, 0.03152076721191406, 0.034934303283691404, 0.031924640655517575, 0.0314839038848877, 0.03160483169555664, 0.03136537551879883, 0.0315228157043457, 0.03214912033081055, 0.032341663360595706, 0.03203145599365234, 0.03200614547729492, 0.031647743225097655, 0.03177276802062988, 0.031472991943359375, 0.03137334442138672, 0.031412895202636716, 0.03143257522583008, 
0.031215200424194334, 0.031431072235107424, 0.031235616683959962, 0.0314638729095459, 0.03306047821044922, 0.03476726531982422, 0.03201830291748047, 0.0316122875213623, 0.03194275283813477, 0.03129206466674805, 0.03133235168457031, 0.031235296249389647, 0.03138764762878418, 0.03163321685791016, 0.031534048080444334, 0.03128319931030273, 0.03135008049011231, 0.03140847969055176, 0.03142678451538086, 0.03128128051757813, 0.031388864517211915, 0.031765312194824216, 0.03171676826477051, 0.03164630317687988, 0.03144607925415039, 0.031427391052246095, 0.03152089691162109, 0.03141753578186035, 0.03136185646057129, 0.03144905662536621, 0.03154092788696289, 0.0316312313079834, 0.031498720169067386, 0.03149836730957031, 0.03142643165588379, 0.03160063934326172, 0.03134883117675781, 0.031458976745605466, 0.03150569534301758, 0.03205971145629883, 0.031385759353637695, 0.03139020729064941, 0.03137971115112305, 0.031325952529907226, 0.03126848030090332, 0.031338880538940427, 0.03133263969421387, 0.03495292663574219, 0.03203209686279297, 0.0315927677154541, 0.03146611213684082, 0.03142563247680664, 0.031390752792358395, 0.031231552124023437, 0.03147932815551758, 0.03129587173461914, 0.03121299171447754, 0.031126304626464842, 0.03344790267944336, 0.0315578556060791, 0.031311071395874024, 0.03125532722473145, 0.031085983276367187, 0.031176288604736327, 0.0313659839630127, 0.031283359527587894, 0.031178752899169923, 0.031022335052490236, 0.031058687210083008, 0.031022335052490236, 0.0309718074798584, 0.03109084892272949, 0.031084768295288084, 0.03174448013305664, 0.031866880416870115, 0.032093376159667966, 0.03177350425720215, 0.03185004806518555, 0.03176041603088379, 0.031271167755126957, 0.03135087966918945, 0.031066207885742186, 0.03150188827514649, 0.031637920379638675, 0.03165776062011719, 0.031248607635498048, 0.03144806480407715, 0.03137980842590332, 0.031234720230102538, 0.031393503189086916, 0.03129164886474609, 0.031407743453979495, 0.042160545349121094, 0.03157606315612793, 0.03191219139099121, 0.03121059226989746, 0.03164179229736328, 0.03134511947631836, 0.03117670440673828, 0.03125657653808594, 0.0312928638458252, 0.031160448074340822, 0.031078304290771484, 0.03124073600769043, 0.031117311477661135, 0.03162521553039551, 0.03154944038391113, 0.0314550724029541, 0.031457439422607425, 0.031492095947265625, 0.03520857620239258, 0.031949440002441404, 0.031706783294677736, 0.03143123245239258, 0.03145619201660156, 0.031378271102905274, 0.0314204158782959, 0.03127657508850098, 0.03130627250671387, 0.03384108734130859, 0.036982784271240236, 0.031453184127807614, 0.031397727966308596, 0.0313796157836914, 0.03126416015625, 0.03137596893310547, 0.03145113563537598, 0.031410175323486327, 0.03129491233825683, 0.03128595161437988, 0.031112543106079103, 0.03116080093383789, 0.031193023681640626, 0.03128246307373047, 0.0312673282623291, 0.03137267112731933, 0.031689695358276364, 0.0321003532409668, 0.0315230712890625, 0.031395584106445315, 0.031550687789916994, 0.031660480499267576, 0.031344863891601564, 0.031230079650878907, 0.031163711547851563, 0.03111356735229492, 0.0312604808807373, 0.031495840072631835, 0.031089536666870116, 0.031078048706054687, 0.031225984573364257, 0.03148559951782227, 0.03143721580505371, 0.03132431983947754, 0.03133235168457031, 0.03151817512512207, 0.03197151947021484, 0.03238332748413086, 0.03163654327392578, 0.03148627281188965, 0.03165228843688965, 0.031248159408569336, 0.031394208908081055, 0.03129958343505859, 0.03133440017700195, 0.03137875175476074, 0.03129337692260742, 
0.031083263397216798, 0.03136511993408203, 0.03147699165344238, 0.03141401672363281, 0.0314233283996582, 0.03190185546875, 0.03453804779052734, 0.0321231689453125, 0.031966943740844724, 0.03208569717407227, 0.03182419204711914, 0.032074848175048826, 0.03145161628723145, 0.0315534725189209, 0.03156428718566894, 0.03165593528747559, 0.03146735954284668, 0.031402143478393554, 0.031492095947265625, 0.03212467193603516, 0.031359359741210936, 0.03151040077209473, 0.03170508766174317, 0.0314654712677002, 0.031282272338867184, 0.03128207969665527, 0.0313054084777832, 0.03117692756652832, 0.031104991912841797, 0.031444639205932615, 0.03156841659545898, 0.03129542350769043, 0.031176000595092773, 0.03115014457702637, 0.03126950454711914, 0.03128236770629883, 0.031684511184692385, 0.031804319381713866, 0.03169209671020508, 0.031652448654174804, 0.031701087951660156, 0.03218022537231445, 0.03164303970336914, 0.031384159088134765, 0.03124835205078125, 0.03139792060852051, 0.03142019271850586, 0.031279327392578125, 0.03143270492553711, 0.03129126358032226, 0.03116819190979004, 0.03123040008544922, 0.031215488433837892, 0.031306976318359374, 0.03149017524719238, 0.031675167083740234, 0.0313606071472168, 0.031351232528686525, 0.031366399765014645, 0.03138412857055664, 0.031398048400878904, 0.03146751976013184, 0.031337472915649416, 0.03141961669921875, 0.03167004776000976, 0.03120332717895508, 0.03130572891235352, 0.03137318420410156, 0.031484031677246095]",tokens/s,31.572606426175867,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() 
File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,2216.98048,2487.0912,0.0,2101.346304,1978.345472,s,1,8.9420341796875,8.9420341796875,0.0,8.9420341796875,8.9420341796875,8.9420341796875,8.9420341796875,[8.9420341796875],,kWh,4.735461085830745e-05,5.216259670568882e-06,1.5532234648008014e-05,6.810310517688434e-05,,MB,2285.740032,2782.789632,0.0,2367.684608,2241.29024,s,10,1.5768295593261719,0.1576829559326172,0.003307141787163014,0.15898407745361326,0.15985,0.1604330032348633,0.1608994058227539,"[0.16101600646972655, 0.15891900634765624, 0.1590491485595703, 0.15066517639160157, 0.159065185546875, 0.15972044372558594, 0.15938882446289063, 0.15177468872070313, 0.1586065673828125, 0.15862451171875]",tokens/s,1623.5109145810072,kWh,4.919577375416743e-06,5.425391620124654e-07,3.2591322369333413e-06,8.721248774362548e-06,tokens/kWh,29353594.49354906,MB,2288.04608,2866.675712,0.0,2451.570688,2334.829056,s,10,19.519980712890625,1.9519980712890626,0.005570624626799112,1.952057189941406,1.9570929931640626,1.9585798828125,1.95976939453125,"[1.9384727783203124, 1.9559625244140626, 1.9496724853515626, 1.94893017578125, 1.95485009765625, 1.9523582763671874, 1.9567625732421876, 1.951756103515625, 1.95114892578125, 
1.9600667724609375]",tokens/s,32.27462205349209,kWh,5.6696505888748e-05,6.2534482927091025e-06,3.133656210626464e-05,9.428651628772176e-05,tokens/kWh,668176.1346208951,,s,630,19.51235071563721,0.030971985262916196,0.0005675866797720919,0.030839728355407714,0.03137550659179687,0.031780087852478024,0.03350711292266847,"[0.03184931182861328, 0.031127552032470703, 0.030776960372924805, 0.030730112075805664, 0.030722560882568358, 0.030525440216064452, 0.030643360137939453, 0.030622047424316408, 0.030784000396728517, 0.030484447479248045, 0.030523487091064453, 0.030394079208374024, 0.030955968856811525, 0.031303455352783206, 0.03105353546142578, 0.030738271713256837, 0.030977888107299803, 0.030683744430541993, 0.031123455047607423, 0.031055072784423828, 0.03102329635620117, 0.030820671081542968, 0.03079804801940918, 0.03077849578857422, 0.030827199935913086, 0.030723392486572267, 0.030689792633056642, 0.030691104888916014, 0.030640800476074218, 0.030514528274536133, 0.030600160598754884, 0.030409919738769532, 0.030488767623901368, 0.03035103988647461, 0.030780031204223634, 0.030273536682128906, 0.030345216751098632, 0.03142860794067383, 0.030650367736816408, 0.030561407089233397, 0.030772096633911134, 0.030486080169677736, 0.03052332878112793, 0.030458368301391602, 0.03061350440979004, 0.030404895782470704, 0.03118396759033203, 0.030925439834594726, 0.030924800872802735, 0.03059017562866211, 0.030787839889526367, 0.030537887573242186, 0.03072559928894043, 0.030653343200683594, 0.030679040908813477, 0.03082838439941406, 0.030819839477539062, 0.030836671829223634, 0.030835424423217773, 0.03101081657409668, 0.030851072311401367, 0.030748287200927735, 0.03109107208251953, 0.031244287490844725, 0.03098214340209961, 0.030787519454956055, 0.03085318374633789, 0.03095347213745117, 0.03074822425842285, 0.030773696899414064, 0.03075052833557129, 0.03117001533508301, 0.031441631317138674, 0.031397184371948245, 0.031097536087036134, 0.031021055221557618, 0.030988288879394532, 0.03098841667175293, 0.030912384033203125, 0.031354496002197266, 0.03133273506164551, 0.031397216796875, 0.031185504913330078, 0.03144300842285156, 0.03137055969238281, 0.03250844955444336, 0.03257088088989258, 0.0315644474029541, 0.031285247802734374, 0.03130982398986817, 0.03127222442626953, 0.03093987274169922, 0.03075049591064453, 0.030835935592651367, 0.03078656005859375, 0.03078963279724121, 0.030681055068969728, 0.030781471252441406, 0.030575679779052733, 0.030823360443115233, 0.030713855743408205, 0.030726144790649414, 0.030885120391845704, 0.030950143814086915, 0.0306376953125, 0.031001184463500978, 0.0309901123046875, 0.030804000854492187, 0.03082566452026367, 0.03172153663635254, 0.030822399139404297, 0.030937919616699217, 0.031018911361694337, 0.03106355285644531, 0.030821887969970704, 0.030872575759887694, 0.031076351165771485, 0.03100428771972656, 0.030853248596191405, 0.031023168563842775, 0.030718143463134766, 0.030676448822021484, 0.030824415206909178, 0.03092073631286621, 0.030714399337768556, 0.030904319763183592, 0.03175712013244629, 0.03113161659240723, 0.030948863983154298, 0.031042112350463866, 0.03137455940246582, 0.03150483131408691, 0.031500640869140624, 0.031457088470458985, 0.03120476722717285, 0.030869279861450195, 0.031286272048950195, 0.031071712493896484, 0.031005119323730467, 0.030809215545654297, 0.03104047966003418, 0.030686527252197265, 0.030687936782836912, 0.030830463409423827, 0.03076460838317871, 0.03111302375793457, 0.031034112930297852, 0.03144233512878418, 0.03256934356689453, 
0.032032577514648435, 0.031064512252807618, 0.03094153594970703, 0.030840831756591795, 0.030609119415283204, 0.03068316841125488, 0.030557727813720702, 0.030726879119873048, 0.030487743377685547, 0.030756895065307616, 0.030515199661254884, 0.03065519905090332, 0.0304267520904541, 0.030740928649902344, 0.031166080474853516, 0.03138403129577637, 0.031116544723510744, 0.03112201690673828, 0.030805728912353517, 0.030820608139038086, 0.03073801612854004, 0.030873472213745118, 0.030737024307250976, 0.0308404483795166, 0.030628160476684572, 0.030858463287353515, 0.030774368286132812, 0.030758655548095704, 0.03056435203552246, 0.03070492744445801, 0.030531936645507813, 0.03069580841064453, 0.030644224166870116, 0.030822208404541016, 0.030646656036376955, 0.030668607711791994, 0.03078144073486328, 0.030826208114624023, 0.03061759948730469, 0.03110121536254883, 0.03138819122314453, 0.03124377632141113, 0.03132467269897461, 0.03078758430480957, 0.030801919937133788, 0.0306461124420166, 0.03057417678833008, 0.030705280303955078, 0.03071433639526367, 0.030669279098510742, 0.03126380729675293, 0.030880352020263672, 0.030781791687011718, 0.030611455917358397, 0.030793855667114258, 0.030559648513793947, 0.03278249740600586, 0.03151628875732422, 0.031027711868286133, 0.030967391967773438, 0.031025760650634764, 0.030935039520263673, 0.030793376922607422, 0.03067846488952637, 0.030959711074829102, 0.030853952407836914, 0.03097372817993164, 0.030791488647460938, 0.030685184478759765, 0.03170041656494141, 0.03227756881713867, 0.031068063735961913, 0.030868480682373047, 0.030810720443725587, 0.030867519378662108, 0.030898527145385744, 0.031047679901123046, 0.030868799209594726, 0.03075916862487793, 0.030813919067382813, 0.03070844841003418, 0.03070534324645996, 0.030793344497680664, 0.03083465576171875, 0.03096342468261719, 0.030802656173706054, 0.03066908836364746, 0.03060531234741211, 0.03076710319519043, 0.030611455917358397, 0.030966848373413087, 0.030718399047851563, 0.030933727264404298, 0.030932767868041992, 0.030935039520263673, 0.030862720489501953, 0.030968448638916016, 0.030801151275634767, 0.030570751190185548, 0.03063654327392578, 0.030906368255615234, 0.03081216049194336, 0.03091187286376953, 0.03206332778930664, 0.031395872116088866, 0.03117679977416992, 0.030920703887939452, 0.031268863677978515, 0.030988288879394532, 0.030932416915893556, 0.031133888244628906, 0.03133273506164551, 0.031129152297973632, 0.03099283218383789, 0.030768768310546875, 0.03087798309326172, 0.03099849510192871, 0.03125056076049805, 0.030965375900268554, 0.031246719360351564, 0.031086591720581053, 0.030926847457885744, 0.030627840042114256, 0.030705631256103514, 0.030836639404296876, 0.030652544021606446, 0.030598304748535157, 0.030828639984130858, 0.03067366409301758, 0.030889984130859374, 0.030883840560913086, 0.03087545585632324, 0.030855199813842774, 0.03112771224975586, 0.03100057601928711, 0.031393440246582034, 0.0306693115234375, 0.031143903732299805, 0.031117088317871095, 0.030678239822387696, 0.030644224166870116, 0.030624639511108397, 0.030959423065185548, 0.033390945434570315, 0.032239070892333986, 0.031107200622558593, 0.030879999160766603, 0.030902271270751954, 0.030744575500488282, 0.031397024154663083, 0.031216480255126952, 0.031056032180786133, 0.031077823638916015, 0.03098579216003418, 0.03106287956237793, 0.030889984130859374, 0.030725408554077148, 0.030941343307495116, 0.03075488090515137, 0.030870208740234373, 0.030696319580078124, 0.030763776779174804, 0.030785728454589843, 0.030791296005249023, 
0.030680864334106446, 0.030818336486816405, 0.03179887962341309, 0.03115216064453125, 0.03090060806274414, 0.030789152145385742, 0.030920192718505858, 0.030663455963134766, 0.030797632217407226, 0.030737024307250976, 0.031216543197631837, 0.031144479751586913, 0.031338783264160154, 0.030885408401489258, 0.030853279113769533, 0.03083033561706543, 0.031181280136108398, 0.03144220733642578, 0.031412256240844726, 0.031179391860961914, 0.031273151397705076, 0.03273648071289063, 0.03127561569213867, 0.031139936447143555, 0.030963775634765624, 0.030783519744873047, 0.03087775993347168, 0.030639904022216797, 0.031080223083496093, 0.031281152725219724, 0.03183788871765137, 0.03115078353881836, 0.03117465591430664, 0.030869279861450195, 0.030851295471191406, 0.030539775848388673, 0.03052115249633789, 0.03039583969116211, 0.030563072204589845, 0.030621696472167968, 0.030789472579956054, 0.03058294486999512, 0.031123455047607423, 0.03135487937927246, 0.030891935348510743, 0.03214899063110352, 0.031039104461669922, 0.03079654312133789, 0.030855392456054686, 0.030672800064086913, 0.030765024185180664, 0.030559423446655274, 0.03073324775695801, 0.030635295867919923, 0.030622432708740235, 0.030705663681030275, 0.030826496124267577, 0.03075071907043457, 0.030849023818969725, 0.0307589111328125, 0.0307957763671875, 0.030918272018432617, 0.030865312576293946, 0.030924768447875978, 0.03074300765991211, 0.03134521675109863, 0.030942880630493164, 0.031096479415893555, 0.031019680023193358, 0.033554561614990236, 0.03208396911621094, 0.032093952178955075, 0.03155299186706543, 0.03118713569641113, 0.030945791244506835, 0.030789888381958008, 0.030815168380737303, 0.03084182357788086, 0.030599103927612305, 0.03079974365234375, 0.030578815460205078, 0.0305930233001709, 0.030527103424072267, 0.03076335906982422, 0.030719072341918945, 0.030659551620483397, 0.030706655502319335, 0.03120639991760254, 0.030776800155639647, 0.031036991119384766, 0.030743520736694337, 0.030920703887939452, 0.031004608154296873, 0.03083795166015625, 0.030927200317382813, 0.0311506233215332, 0.03073023986816406, 0.030865407943725585, 0.030651968002319337, 0.030908287048339842, 0.030728544235229492, 0.03082032012939453, 0.03109503936767578, 0.0313175048828125, 0.03125049591064453, 0.0317335033416748, 0.03112825584411621, 0.031018848419189452, 0.030924095153808593, 0.030894943237304687, 0.03094118309020996, 0.0307957763671875, 0.030817567825317384, 0.030792415618896483, 0.030701536178588867, 0.03085011291503906, 0.030838783264160157, 0.030898975372314452, 0.030963903427124025, 0.03102889633178711, 0.03115679931640625, 0.03180521583557129, 0.03197929573059082, 0.031188768386840822, 0.031108543395996092, 0.03117171287536621, 0.031100799560546875, 0.030906463623046877, 0.03139043235778809, 0.030896127700805662, 0.03086329650878906, 0.030706783294677735, 0.036430751800537106, 0.03202585601806641, 0.031119424819946288, 0.03083900833129883, 0.03105580711364746, 0.03077587127685547, 0.030608512878417968, 0.030489055633544922, 0.030646080017089843, 0.03048681640625, 0.030568735122680664, 0.030480447769165038, 0.030623743057250977, 0.030530656814575195, 0.030815135955810546, 0.030502912521362304, 0.03072751998901367, 0.03041756820678711, 0.030558176040649414, 0.030394239425659178, 0.03049247932434082, 0.030480735778808592, 0.030667776107788085, 0.030766080856323243, 0.030815839767456055, 0.030653087615966797, 0.030821311950683595, 0.030763839721679686, 0.030772287368774413, 0.030606271743774414, 0.030866687774658202, 0.030655231475830078, 
0.03077462387084961, 0.031000608444213866, 0.035461406707763675, 0.03094144058227539, 0.030975519180297853, 0.030807968139648437, 0.03092515182495117, 0.03080019187927246, 0.03099443244934082, 0.03100592041015625, 0.030960384368896483, 0.030834720611572265, 0.03099852752685547, 0.030947168350219725, 0.031166624069213868, 0.030920703887939452, 0.03122380828857422, 0.031106752395629884, 0.030847328186035156, 0.030668319702148436, 0.03077734375, 0.030764991760253907, 0.031052288055419923, 0.03072764778137207, 0.03064681625366211, 0.03057833671569824, 0.03065875244140625, 0.03170918464660644, 0.03136102485656738, 0.03140755271911621, 0.03510300827026367, 0.031231775283813476, 0.03088025665283203, 0.030857376098632813, 0.03056800079345703, 0.030791200637817383, 0.03068377685546875, 0.03067286491394043, 0.0319073600769043, 0.03244083023071289, 0.030847103118896484, 0.03072528076171875, 0.030870367050170898, 0.030943231582641603, 0.031069952011108397, 0.031066335678100587, 0.031018592834472655, 0.030892255783081055, 0.030705919265747072, 0.03100864028930664, 0.030750816345214843, 0.03066169548034668, 0.030468927383422852, 0.030709312438964843, 0.03075948715209961, 0.03072204780578613, 0.030797760009765626, 0.0306627197265625, 0.03051033592224121, 0.03056870460510254, 0.030417407989501953, 0.030961376190185547, 0.030927135467529298, 0.03520512008666992, 0.03114729690551758, 0.030829280853271485, 0.030682783126831054, 0.03086956787109375, 0.030646688461303712, 0.03072502326965332, 0.03061359977722168, 0.030670976638793944, 0.030494848251342774, 0.0304769287109375, 0.030721183776855468, 0.030742944717407225, 0.030647808074951172, 0.031008800506591796, 0.030654624938964845, 0.030468000411987304, 0.030538591384887695, 0.03078348731994629, 0.030533567428588867, 0.03064771270751953, 0.03038070487976074, 0.030423072814941405, 0.030492639541625975, 0.030746047973632812, 0.030781824111938475, 0.03071164894104004, 0.03156803131103516, 0.03357519912719727, 0.033252960205078126, 0.031045791625976562, 0.03093529510498047, 0.03073184013366699, 0.030896575927734374, 0.030734336853027344, 0.03077120018005371, 0.0305664005279541, 0.030875648498535156, 0.030876991271972656, 0.031054527282714843, 0.030940479278564453, 0.031145984649658204, 0.03091219139099121, 0.030825471878051756, 0.030668800354003906, 0.030672895431518556, 0.03060121536254883, 0.030697471618652345, 0.030524991989135743, 0.030592992782592775, 0.03279008102416992, 0.03324127960205078, 0.030830623626708985, 0.030785951614379883, 0.030654752731323242, 0.030719999313354493, 0.031223295211791992, 0.03106447982788086, 0.03118195152282715, 0.030942176818847655, 0.0309652156829834, 0.032088607788085935, 0.03377766418457031, 0.031455232620239255, 0.030844831466674806, 0.030768287658691405, 0.030486495971679687, 0.03085001564025879, 0.030699520111083983, 0.03082966423034668, 0.030749311447143556, 0.03082604789733887, 0.03125091171264648, 0.0314145278930664, 0.030908416748046875, 0.030810111999511718, 0.03057619285583496, 0.030822528839111327, 0.030789503097534178, 0.03079007911682129, 0.03145292854309082, 0.030776607513427735, 0.0306278076171875, 0.0315548152923584, 0.03102044868469238, 0.030920192718505858, 0.03068921661376953, 0.030968095779418944, 0.030726783752441405, 0.03084659194946289]",tokens/s,32.287242535832334,,, 
4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1068.511232,874.381312,0.0,488.636416,482.553856,s,1,7.98183837890625,7.98183837890625,0.0,7.98183837890625,7.98183837890625,7.98183837890625,7.98183837890625,[7.98183837890625],,kWh,2.343048233751688e-05,2.5769095888066954e-06,7.625839433977877e-06,3.363323136030145e-05,,MB,1330.31936,1031.667712,0.0,616.562688,582.974464,s,10,0.2700037460327149,0.027000374603271482,0.0002624142901477012,0.02691862392425537,0.02718552951812744,0.027460684680938718,0.027680808811187743,"[0.02773583984375, 0.0271243839263916, 0.02685366439819336, 0.026768768310546875, 0.02682089614868164, 0.026913631439208986, 0.026998464584350585, 0.026898752212524413, 0.026923616409301757, 0.026965728759765627]",tokens/s,9481.349935381339,kWh,8.617879821535171e-07,9.50398238166745e-08,5.716425418763235e-07,1.528470347846515e-06,tokens/kWh,167487711.07053682,MB,1343.619072,1044.250624,0.0,629.1456,597.192192,s,10,11.647482666015625,1.1647482666015627,0.004898799594412482,1.1642495727539064,1.1705009155273436,1.1725100402832032,1.1741173400878906,"[1.16166455078125, 1.160717529296875, 1.1663692626953126, 1.1745191650390625, 1.1672080078125, 1.162334228515625, 1.1561163330078126, 1.170054443359375, 1.16498681640625, 1.1635123291015625]",tokens/s,54.08894076641803,kWh,3.363824998243029e-05,3.7098338809409246e-06,1.382553564251802e-05,5.117361950588922e-05,tokens/kWh,1231103.0685009444,,s,630,11.642304039001482,0.01847984768095471,0.0003897269460732169,0.01839267158508301,0.01878403205871582,0.018971789360046388,0.02027256567001343,"[0.01791007995605469, 0.018309120178222657, 0.01842790412902832, 0.01836031913757324, 0.01823744010925293, 0.018284543991088868, 0.01848320007324219, 0.018546207427978516, 0.018405855178833006, 0.018378496170043945, 0.01839948844909668, 0.01845248031616211, 0.01826201629638672, 0.018439552307128907, 0.018306880950927733, 0.018223936080932618, 0.018288768768310548, 0.018340768814086913, 0.018365215301513672, 0.018468223571777343, 0.01840620803833008, 0.01837376022338867, 0.018340736389160155, 0.018300479888916015, 0.018548864364624024, 0.01841798400878906, 0.018415552139282226, 0.018260032653808593, 0.018436063766479494, 0.018481151580810547, 0.018442272186279297, 0.01858355140686035, 0.018687999725341797, 0.018874368667602538, 0.018840959548950195, 0.01878223991394043, 0.01876643180847168, 0.01861222457885742, 0.018532352447509767, 0.01878326416015625, 0.018344928741455078, 0.01839308738708496, 0.018329599380493163, 0.01846419143676758, 0.019204671859741212, 0.01851408004760742, 0.018402816772460938, 0.018299232482910155, 0.018298879623413086, 0.01851414489746094, 0.01843315124511719, 0.01846444892883301, 0.018379743576049806, 0.018310432434082032, 0.018217695236206054, 0.018441600799560545, 0.01833353614807129, 0.018297088623046874, 0.0182392635345459, 0.01830169677734375, 0.018323455810546875, 
0.018276351928710938, 0.018531391143798828, 0.01806608009338379, 0.018579456329345705, 0.01841766357421875, 0.018259359359741212, 0.018258527755737306, 0.018239488601684572, 0.018292736053466797, 0.018292255401611328, 0.01832700729370117, 0.018219263076782226, 0.018905855178833007, 0.01846272087097168, 0.018391040802001952, 0.018249408721923828, 0.01865555191040039, 0.018374656677246092, 0.018309120178222657, 0.018263904571533204, 0.01834614372253418, 0.018320608139038085, 0.018171775817871095, 0.01837926483154297, 0.018235807418823243, 0.018310335159301756, 0.019129119873046874, 0.018668960571289063, 0.01847875213623047, 0.018541536331176757, 0.01842495918273926, 0.018410144805908205, 0.018471136093139648, 0.018738880157470703, 0.018608448028564453, 0.01830860710144043, 0.01832713508605957, 0.01854755210876465, 0.01852422332763672, 0.018440031051635743, 0.01835212707519531, 0.018376863479614258, 0.018464767456054687, 0.018560895919799803, 0.01868742370605469, 0.018643648147583007, 0.01858355140686035, 0.018493440628051756, 0.01853443145751953, 0.018368480682373046, 0.018431167602539062, 0.01834880065917969, 0.01832486343383789, 0.018315263748168945, 0.018252479553222657, 0.018366464614868162, 0.018512031555175782, 0.018452320098876953, 0.018343936920166014, 0.018400928497314454, 0.018230688095092772, 0.018375616073608398, 0.018274303436279296, 0.018216800689697266, 0.01834614372253418, 0.01804844856262207, 0.01839366340637207, 0.018427072525024416, 0.01862646484375, 0.018768800735473632, 0.01872489547729492, 0.019552223205566405, 0.023007104873657227, 0.019200128555297853, 0.019005439758300782, 0.01891231918334961, 0.01890764808654785, 0.018811328887939453, 0.01864646339416504, 0.018754112243652345, 0.019007680892944336, 0.018783039093017578, 0.018525375366210937, 0.018641855239868162, 0.018389888763427734, 0.018384992599487306, 0.01847625541687012, 0.01839084815979004, 0.018170560836791992, 0.018301120758056642, 0.018286336898803712, 0.018321695327758788, 0.01832953643798828, 0.018278047561645507, 0.01826799964904785, 0.018344480514526366, 0.018114112854003907, 0.018257375717163085, 0.01831216049194336, 0.01824358367919922, 0.018192384719848635, 0.018327327728271486, 0.018229087829589843, 0.018166143417358397, 0.01818377685546875, 0.01819273567199707, 0.018253664016723632, 0.018342111587524416, 0.018268159866333008, 0.018242847442626952, 0.018432096481323244, 0.018581855773925782, 0.01846086311340332, 0.018274400711059572, 0.018229248046875, 0.01813222312927246, 0.018297599792480468, 0.018341888427734376, 0.018235424041748046, 0.01831318473815918, 0.018285663604736328, 0.018192960739135743, 0.018235263824462892, 0.018432479858398437, 0.018431999206542968, 0.018349504470825194, 0.01830339241027832, 0.018302175521850587, 0.017978015899658202, 0.018443519592285157, 0.018578176498413087, 0.018970624923706055, 0.01883510398864746, 0.019079328536987305, 0.018853151321411132, 0.018794431686401367, 0.018863071441650392, 0.01839446449279785, 0.018340320587158204, 0.018347808837890625, 0.018178239822387695, 0.01827452850341797, 0.01861427116394043, 0.018253503799438478, 0.01904047966003418, 0.020543584823608397, 0.01845248031616211, 0.018280448913574218, 0.018386943817138672, 0.018210336685180663, 0.01827599906921387, 0.01837251281738281, 0.01838172721862793, 0.018679040908813477, 0.021601247787475585, 0.01938803291320801, 0.018491552352905272, 0.018482431411743164, 0.01845734405517578, 0.01840233612060547, 0.01840220832824707, 0.018421823501586915, 0.018331647872924805, 0.018509088516235353, 
0.01856732749938965, 0.018584192276000975, 0.018642879486083983, 0.01830672073364258, 0.018279808044433594, 0.0184616641998291, 0.018581504821777343, 0.018905120849609373, 0.019007455825805663, 0.019113983154296875, 0.018814783096313476, 0.019028160095214845, 0.01884339141845703, 0.01856742477416992, 0.018757631301879883, 0.01872892761230469, 0.018800319671630858, 0.018619871139526366, 0.018465663909912108, 0.018376256942749022, 0.018375104904174804, 0.018294048309326173, 0.018352863311767578, 0.01841766357421875, 0.018374656677246092, 0.018421567916870118, 0.018375871658325195, 0.018174720764160157, 0.01843814468383789, 0.01835212707519531, 0.01840668869018555, 0.01835696029663086, 0.018356224060058594, 0.0185382080078125, 0.018419456481933594, 0.018516191482543944, 0.01845417594909668, 0.018487712860107423, 0.01835433578491211, 0.018371679306030272, 0.018463743209838866, 0.018308671951293945, 0.01848297691345215, 0.018610015869140625, 0.018450847625732424, 0.01847881507873535, 0.018399328231811524, 0.018504064559936525, 0.0184486083984375, 0.018345983505249023, 0.018300928115844727, 0.018300928115844727, 0.018331647872924805, 0.018470687866210936, 0.018382623672485353, 0.01864543914794922, 0.0186060791015625, 0.01842495918273926, 0.01845747184753418, 0.018415071487426757, 0.018394655227661132, 0.01841868782043457, 0.018328960418701173, 0.018489343643188477, 0.0184652156829834, 0.018362144470214843, 0.01840336036682129, 0.018311424255371092, 0.018370687484741213, 0.01844416046142578, 0.01867788887023926, 0.018875904083251953, 0.01872537612915039, 0.019150655746459962, 0.019177663803100587, 0.02027849578857422, 0.019062719345092773, 0.018971296310424806, 0.018741439819335938, 0.01862620735168457, 0.01859644889831543, 0.018469696044921876, 0.018422719955444335, 0.01841971206665039, 0.018384000778198243, 0.01851456069946289, 0.018386207580566406, 0.018342111587524416, 0.018428287506103515, 0.018411903381347655, 0.018074527740478515, 0.018374656677246092, 0.01841971206665039, 0.018347135543823244, 0.018467807769775392, 0.018343711853027345, 0.018258047103881837, 0.018349151611328125, 0.018276767730712892, 0.020844768524169922, 0.020983455657958984, 0.01852070426940918, 0.0184516487121582, 0.018520896911621093, 0.018317312240600587, 0.018267711639404296, 0.01822358322143555, 0.018331615447998047, 0.018324512481689453, 0.01834102439880371, 0.01842521667480469, 0.018330047607421875, 0.01830860710144043, 0.0182042236328125, 0.018274303436279296, 0.01828755187988281, 0.018181568145751954, 0.01840185546875, 0.01824729537963867, 0.018243967056274416, 0.01826121520996094, 0.018243520736694337, 0.01816281509399414, 0.01830473518371582, 0.01844223976135254, 0.01866547203063965, 0.01883139228820801, 0.018669536590576172, 0.018645088195800782, 0.018685567855834962, 0.01858086395263672, 0.01843292808532715, 0.01855619239807129, 0.018352256774902344, 0.018304864883422853, 0.01829315185546875, 0.01842620849609375, 0.01842959976196289, 0.01827465629577637, 0.01823744010925293, 0.018267328262329102, 0.018336576461791994, 0.018336896896362306, 0.01840185546875, 0.018327871322631837, 0.018249727249145507, 0.018490560531616212, 0.018283008575439453, 0.018288223266601563, 0.018451168060302735, 0.01828860855102539, 0.01817923164367676, 0.0181973762512207, 0.01805459213256836, 0.01825164794921875, 0.01824332809448242, 0.018152383804321288, 0.01816166305541992, 0.01825382423400879, 0.018376575469970704, 0.018253952026367187, 0.018395135879516602, 0.018361536026000977, 0.01843471908569336, 0.018312576293945313, 
0.018500383377075196, 0.0183470401763916, 0.018448896408081054, 0.018465248107910157, 0.018329280853271485, 0.01835856056213379, 0.01826736068725586, 0.018151840209960936, 0.018286048889160158, 0.018256832122802734, 0.018235200881958007, 0.018188480377197266, 0.018268159866333008, 0.018318784713745116, 0.018626848220825196, 0.018366592407226563, 0.01834409523010254, 0.018425439834594725, 0.018421247482299806, 0.01840163230895996, 0.018481504440307616, 0.018505632400512697, 0.018335840225219727, 0.018292959213256837, 0.018370399475097655, 0.01826540756225586, 0.018291231155395507, 0.018268415451049805, 0.018241600036621095, 0.018263904571533204, 0.01829043197631836, 0.018208160400390624, 0.018199552536010744, 0.018464767456054687, 0.018259519577026366, 0.018291135787963868, 0.018233343124389647, 0.018268159866333008, 0.01820809555053711, 0.018205343246459962, 0.01818009567260742, 0.018112512588500978, 0.01824496078491211, 0.018571935653686523, 0.020258047103881835, 0.01854745674133301, 0.018297887802124022, 0.01828963279724121, 0.018333696365356447, 0.01820057678222656, 0.018358272552490236, 0.018082239151000976, 0.018315200805664063, 0.018296575546264647, 0.018473472595214844, 0.018540351867675782, 0.01824995231628418, 0.018261791229248047, 0.020020671844482422, 0.0214083194732666, 0.01859993553161621, 0.01842911911010742, 0.01854342460632324, 0.01854217529296875, 0.018815040588378906, 0.01893827247619629, 0.018886592864990233, 0.018623519897460937, 0.018659936904907228, 0.018609792709350585, 0.0185, 0.01894419288635254, 0.01886400032043457, 0.018821407318115234, 0.01897043228149414, 0.018550975799560547, 0.018439680099487304, 0.01848575973510742, 0.018501407623291017, 0.01827452850341797, 0.018362720489501952, 0.018294431686401366, 0.018331647872924805, 0.018565120697021483, 0.018409343719482423, 0.018271839141845703, 0.018432287216186522, 0.018510080337524413, 0.01850374412536621, 0.018479040145874023, 0.01832668876647949, 0.018356767654418946, 0.01835811233520508, 0.018242015838623046, 0.018300928115844727, 0.019058687210083008, 0.018276351928710938, 0.018253440856933593, 0.01835251235961914, 0.01858064079284668, 0.01834480094909668, 0.018431999206542968, 0.01836835289001465, 0.01841279983520508, 0.01840140724182129, 0.018279199600219728, 0.018386943817138672, 0.018406496047973633, 0.018539648056030273, 0.018582496643066406, 0.018701120376586913, 0.01860812759399414, 0.0187064323425293, 0.018475103378295898, 0.01820400047302246, 0.01859446334838867, 0.018707584381103516, 0.018790943145751953, 0.018579839706420898, 0.019159008026123046, 0.018970239639282228, 0.018976863861083985, 0.018651424407958986, 0.01884160041809082, 0.018671615600585938, 0.018655231475830078, 0.018636255264282225, 0.0185795841217041, 0.018472896575927735, 0.018393119812011718, 0.018307647705078124, 0.018388671875, 0.01849772834777832, 0.018358240127563475, 0.01860406494140625, 0.0185446720123291, 0.018458431243896484, 0.018327455520629882, 0.018335935592651367, 0.018360288619995117, 0.01860848045349121, 0.018770879745483398, 0.01858438491821289, 0.01840764808654785, 0.018365535736083984, 0.01849020767211914, 0.01849942398071289, 0.018449888229370118, 0.018315359115600584, 0.018291263580322265, 0.01842777633666992, 0.018198528289794923, 0.01824563217163086, 0.018319360733032225, 0.018206464767456056, 0.01819059181213379, 0.018278144836425782, 0.018290111541748047, 0.01821779251098633, 0.018417535781860353, 0.018290464401245116, 0.018429279327392578, 0.018434463500976564, 0.01841779136657715, 0.018972192764282227, 
0.018676671981811523, 0.018296831130981444, 0.01824358367919922, 0.01829417610168457, 0.01839164733886719, 0.01839923286437988, 0.018343936920166014, 0.01887958335876465, 0.018324384689331053, 0.01832748794555664, 0.01856518363952637, 0.018532352447509767, 0.018210752487182617, 0.018466976165771483, 0.01981494331359863, 0.018306432723999025, 0.01830681610107422, 0.018256736755371095, 0.018239488601684572, 0.018448383331298827, 0.01840742492675781, 0.018315263748168945, 0.0183721923828125, 0.01902864074707031, 0.01853004837036133, 0.018711807250976563, 0.018459392547607423, 0.018310495376586914, 0.018993152618408202, 0.018377632141113282, 0.018437536239624023, 0.018432031631469725, 0.018361663818359374, 0.018328575134277342, 0.018481151580810547, 0.01843791961669922, 0.01861974334716797, 0.01838092803955078, 0.01830784034729004, 0.018403392791748047, 0.018288063049316405, 0.018507583618164063, 0.018277055740356447, 0.018307008743286134, 0.018261344909667968, 0.018264511108398437, 0.018264320373535155, 0.01851126480102539, 0.018401920318603517, 0.018370208740234376, 0.018356576919555664, 0.018323007583618163, 0.018807231903076174, 0.01841971206665039, 0.01859324836730957, 0.01865747261047363, 0.0183024959564209, 0.01838947105407715, 0.018362720489501952, 0.018327552795410155, 0.01835212707519531, 0.01882931137084961, 0.019381824493408202, 0.01835024070739746, 0.018683231353759766, 0.018392255783081055, 0.018239231109619142, 0.018370559692382812, 0.01850979232788086, 0.01853001594543457, 0.018358015060424806, 0.018216703414916994, 0.018342304229736327, 0.018330015182495118, 0.018391040802001952]",tokens/s,54.11300013206266,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,841.719808,545.128448,0.0,159.383552,141.514752,s,1,7.45429833984375,7.45429833984375,0.0,7.45429833984375,7.45429833984375,7.45429833984375,7.45429833984375,[7.45429833984375],,kWh,1.1628886587520051e-05,1.2755721778985396e-06,3.8019474860040425e-06,1.6706406251422634e-05,,MB,1281.634304,614.334464,0.0,199.22944,184.525824,s,24,0.1747963857650757,0.007283182740211488,8.49372612190771e-05,0.007258400201797485,0.007406406307220459,0.00743914897441864,0.0074986657285690305,"[0.007421631813049317, 0.007154816150665283, 0.007322624206542969, 0.007225664138793946, 0.007247392177581787, 0.0071940159797668456, 0.0072210879325866695, 0.007442240238189697, 0.0072520642280578616, 0.0072427201271057125, 0.007326303958892822, 0.007234687805175781, 0.007308576107025146, 0.007294496059417724, 0.0071981439590454105, 0.007370880126953125, 0.007292928218841553, 0.007274112224578857, 0.007230016231536865, 0.007342751979827881, 0.00720959997177124, 0.007515520095825195, 0.007264736175537109, 
0.007209375858306884]",tokens/s,35149.468183269324,kWh,2.158622621147808e-07,2.3805693911814966e-08,1.2653445362721762e-07,3.6620240965381334e-07,tokens/kWh,699066945.6326288,MB,1294.815232,616.431616,0.0,201.326592,184.528384,s,24,9.85175799560547,0.41048991648356115,0.001486690970234041,0.4104195556640625,0.41268056640625,0.4127638778686524,0.413219460144043,"[0.40989993286132814, 0.4102464294433594, 0.4127764892578125, 0.41049798583984376, 0.4085834045410156, 0.40962896728515624, 0.41265292358398437, 0.40917745971679687, 0.412353515625, 0.4095642700195313, 0.40911978149414063, 0.41335177612304685, 0.4126924133300781, 0.4103411254882813, 0.41070068359375, 0.4102667236328125, 0.4106561279296875, 0.40796697998046877, 0.4120779113769531, 0.41074017333984375, 0.41053860473632814, 0.4109443969726562, 0.4082868347167969, 0.4086930847167969]",tokens/s,153.47514633169547,kWh,1.1895025141885919e-05,1.3118295208650998e-06,4.455502545331876e-06,1.7662357208082898e-05,tokens/kWh,3566907.8174440414,,s,1512,9.840797049045566,0.0065084636567761655,0.00010930041597396237,0.0064865598678588866,0.006570876741409302,0.006639065527915954,0.0069387881278991734,"[0.006753119945526123, 0.006612991809844971, 0.006550591945648193, 0.006553952217102051, 0.0065848641395568844, 0.006570047855377197, 0.006538432121276855, 0.006528895854949951, 0.00655238389968872, 0.006766016006469726, 0.0065231361389160155, 0.006551871776580811, 0.006593887805938721, 0.006544159889221191, 0.006527103900909424, 0.006577023983001709, 0.006489247798919678, 0.006522719860076904, 0.006485663890838623, 0.006531136035919189, 0.006518144130706787, 0.006441184043884277, 0.006463520050048828, 0.006484479904174805, 0.00640835189819336, 0.006455296039581298, 0.006446303844451905, 0.006447199821472168, 0.006472352027893066, 0.006455488204956054, 0.006484896183013916, 0.006448063850402832, 0.006458432197570801, 0.006468287944793701, 0.0064655041694641115, 0.006453536033630371, 0.006498303890228272, 0.006454783916473389, 0.006463488101959228, 0.0064865598678588866, 0.0064917440414428715, 0.006494944095611572, 0.0064609918594360355, 0.00647756814956665, 0.006499711990356445, 0.006416607856750489, 0.006484511852264404, 0.006449567794799805, 0.006479680061340332, 0.00646454381942749, 0.006435167789459228, 0.006446720123291016, 0.006449728012084961, 0.006464191913604736, 0.0064767680168151855, 0.006450208187103271, 0.006469088077545166, 0.006459551811218262, 0.006446944236755371, 0.0064719681739807125, 0.006504447937011719, 0.006565887928009034, 0.006467008113861084, 0.006549503803253174, 0.006459455966949463, 0.006456480026245117, 0.0064765758514404295, 0.006470655918121338, 0.006502495765686035, 0.006482848167419434, 0.006496255874633789, 0.006457568168640137, 0.006518496036529541, 0.00647811222076416, 0.006466879844665528, 0.006453728199005127, 0.006424352169036866, 0.006471903800964356, 0.006410111904144287, 0.006467711925506592, 0.006450719833374023, 0.006449632167816162, 0.006471680164337158, 0.0064839677810668945, 0.006455296039581298, 0.006551551818847656, 0.006507775783538818, 0.006465407848358154, 0.006475872039794922, 0.006488863945007324, 0.006453248023986816, 0.006456319808959961, 0.006483232021331787, 0.0064939198493957516, 0.006557695865631104, 0.00694271993637085, 0.006622399806976318, 0.006588768005371093, 0.006585824012756348, 0.006736127853393554, 0.006549248218536377, 0.006552576065063476, 0.006536191940307618, 0.006513023853302002, 0.006480512142181397, 0.006508351802825928, 0.006453440189361572, 0.006490111827850342, 
0.0064143362045288085, 0.006475776195526123, 0.006488224029541015, 0.006410367965698242, 0.006499167919158936, 0.006564511775970459, 0.006490399837493896, 0.0064735360145568846, 0.006465663909912109, 0.006469664096832276, 0.006588384151458741, 0.0064880638122558594, 0.006500607967376709, 0.006506239891052246, 0.006532159805297852, 0.0065279359817504885, 0.006490431785583496, 0.006532800197601318, 0.006535615921020508, 0.006501984119415283, 0.006537439823150634, 0.006506144046783447, 0.00658070421218872, 0.006514624118804931, 0.006480000019073486, 0.006490335941314697, 0.006471360206604004, 0.006432479858398438, 0.006492159843444824, 0.006480000019073486, 0.006678783893585205, 0.006555967807769776, 0.006471360206604004, 0.006513919830322266, 0.006453120231628418, 0.006507391929626465, 0.006485568046569824, 0.006461664199829102, 0.006481344223022461, 0.006522784233093262, 0.006474624156951905, 0.006553599834442139, 0.006475967884063721, 0.006462656021118164, 0.006490047931671143, 0.006460192203521728, 0.006508448123931885, 0.006537343978881836, 0.0064561920166015625, 0.006530399799346924, 0.006502048015594482, 0.006457503795623779, 0.006440159797668457, 0.006476640224456787, 0.00647760009765625, 0.006494080066680908, 0.006483712196350098, 0.006566271781921386, 0.006500256061553955, 0.006459712028503418, 0.0065062718391418455, 0.006487904071807861, 0.007268511772155762, 0.006591616153717041, 0.007170944213867187, 0.00717628812789917, 0.006530399799346924, 0.006585087776184082, 0.006857600212097168, 0.006582367897033692, 0.006533984184265137, 0.00652288007736206, 0.006552639961242676, 0.006455679893493652, 0.006500927925109863, 0.006495872020721436, 0.00650051212310791, 0.006452799797058106, 0.00650438404083252, 0.006577023983001709, 0.006500192165374756, 0.006404096126556396, 0.006447167873382569, 0.006483903884887695, 0.006453248023986816, 0.006469855785369873, 0.0064776639938354495, 0.006451072216033936, 0.006537280082702637, 0.006469632148742676, 0.006494207859039307, 0.006524479866027832, 0.006488480091094971, 0.006503615856170654, 0.006537343978881836, 0.0064867520332336425, 0.006526112079620361, 0.006505311965942383, 0.006475776195526123, 0.006616223812103272, 0.006458240032196045, 0.006477791786193848, 0.006496255874633789, 0.006500671863555909, 0.0065016641616821285, 0.006516160011291504, 0.006484799861907959, 0.00652243185043335, 0.0065623998641967775, 0.006577568054199219, 0.006525536060333252, 0.006469567775726318, 0.006504511833190918, 0.006592512130737305, 0.00651580810546875, 0.006513216018676758, 0.006525279998779297, 0.0065532798767089845, 0.006519392013549805, 0.006502111911773681, 0.006527135848999024, 0.006551392078399658, 0.006461440086364746, 0.006543136119842529, 0.006545472145080567, 0.006490047931671143, 0.0065190081596374514, 0.006495776176452636, 0.006502272129058838, 0.0065309758186340335, 0.006562431812286377, 0.006522143840789795, 0.006521024227142334, 0.006507232189178467, 0.006520639896392822, 0.0064962878227233884, 0.0065, 0.006505087852478027, 0.00651804780960083, 0.0064826240539550785, 0.006566976070404053, 0.006491007804870605, 0.006535232067108154, 0.0064468798637390134, 0.006410079956054688, 0.006564000129699707, 0.006518303871154785, 0.0065400958061218265, 0.0065716800689697265, 0.00648524808883667, 0.006492928028106689, 0.006448128223419189, 0.006466559886932373, 0.006470911979675293, 0.006474720001220703, 0.006471456050872803, 0.0064637441635131835, 0.0064716157913208006, 0.0066780800819396975, 0.00646070384979248, 0.006501279830932617, 
0.006484032154083252, 0.006479872226715088, 0.006443136215209961, 0.006454592227935791, 0.006465184211730957, 0.006471648216247559, 0.006458303928375244, 0.006486015796661377, 0.0064757437705993655, 0.006428575992584228, 0.006532544136047363, 0.00643552017211914, 0.0064962878227233884, 0.006469888210296631, 0.006458144187927246, 0.006462111949920655, 0.006449440002441406, 0.006479519844055176, 0.006534815788269043, 0.006521247863769532, 0.006471487998962402, 0.006495903968811035, 0.0064766077995300295, 0.006481311798095703, 0.006429535865783691, 0.006450623989105225, 0.006475615978240967, 0.006496416091918945, 0.006485311985015869, 0.006466559886932373, 0.006455296039581298, 0.006465023994445801, 0.006451263904571534, 0.006513088226318359, 0.0064880638122558594, 0.006506368160247803, 0.006461088180541992, 0.006479519844055176, 0.006425407886505127, 0.006450592041015625, 0.006418047904968261, 0.006456287860870361, 0.0064568638801574705, 0.006445087909698486, 0.006494656085968018, 0.00644652795791626, 0.00637395191192627, 0.006495520114898682, 0.0064700798988342285, 0.006453728199005127, 0.006438560009002685, 0.006515039920806885, 0.006455296039581298, 0.0064423041343688965, 0.006488704204559327, 0.006530752182006836, 0.006457280158996582, 0.0064700798988342285, 0.0064551677703857425, 0.006472991943359375, 0.006521503925323486, 0.006465727806091308, 0.006483456134796142, 0.00647219181060791, 0.006487743854522705, 0.0065006399154663086, 0.006467008113861084, 0.006484576225280762, 0.006559743881225586, 0.006456543922424316, 0.00647430419921875, 0.006479135990142822, 0.0064757437705993655, 0.006568831920623779, 0.0064707517623901365, 0.006491136074066162, 0.006508607864379883, 0.006540639877319336, 0.006474175930023193, 0.006487648010253907, 0.0064412798881530765, 0.006488160133361816, 0.0065414719581604, 0.006440959930419922, 0.006498496055603028, 0.006502208232879639, 0.006502016067504883, 0.006865471839904785, 0.006468768119812012, 0.006472095966339111, 0.0064596481323242185, 0.006516736030578613, 0.006749887943267822, 0.006504191875457764, 0.006484032154083252, 0.006539743900299072, 0.006475552082061768, 0.006500703811645508, 0.0064655041694641115, 0.006475647926330566, 0.006493631839752197, 0.006464064121246338, 0.006524703979492188, 0.006498559951782227, 0.006471712112426758, 0.006516992092132568, 0.006470880031585693, 0.0064865598678588866, 0.0064774398803710935, 0.006456992149353027, 0.006474336147308349, 0.006457695960998535, 0.006471744060516357, 0.006491392135620117, 0.006447872161865234, 0.00648799991607666, 0.006463615894317627, 0.00648528003692627, 0.006546239852905273, 0.006471231937408447, 0.00648419189453125, 0.0064941120147705075, 0.006510687828063965, 0.006473855972290039, 0.006480832099914551, 0.006478943824768066, 0.0065493440628051755, 0.006729728221893311, 0.006510591983795166, 0.006551551818847656, 0.006513919830322266, 0.006548128128051758, 0.007454880237579346, 0.006526912212371826, 0.0066007041931152345, 0.006526976108551025, 0.006483136177062988, 0.006504511833190918, 0.006531295776367188, 0.00658409595489502, 0.006516831874847412, 0.0065419840812683105, 0.006519904136657715, 0.006566527843475342, 0.006496096134185791, 0.006509247779846191, 0.006540512084960938, 0.006467840194702148, 0.006695199966430664, 0.006496575832366944, 0.00646668815612793, 0.006510591983795166, 0.0064917120933532715, 0.0065119037628173825, 0.007063263893127441, 0.006500351905822754, 0.006516416072845459, 0.006520256042480469, 0.006564735889434814, 0.0065552000999450685, 0.006539296150207519, 
0.006574463844299316, 0.006481472015380859, 0.006623712062835694, 0.006519968032836914, 0.006476640224456787, 0.006475200176239013, 0.006505248069763184, 0.006561567783355713, 0.0064880638122558594, 0.00659660816192627, 0.0065177597999572755, 0.006424448013305664, 0.0065001602172851565, 0.006474048137664795, 0.006570271968841553, 0.006532608032226562, 0.006476160049438477, 0.00647756814956665, 0.0064934401512145995, 0.006450016021728516, 0.006492224216461182, 0.006488031864166259, 0.006617055892944336, 0.0065493760108947755, 0.006489247798919678, 0.0064807038307189944, 0.006526527881622314, 0.006488671779632568, 0.00650377607345581, 0.006509119987487793, 0.006491615772247315, 0.006576096057891846, 0.0064784960746765135, 0.006453375816345214, 0.0064899840354919434, 0.006436960220336914, 0.006448480129241943, 0.00644323205947876, 0.006455264091491699, 0.006523263931274414, 0.006471392154693604, 0.006459392070770263, 0.0065047359466552735, 0.006497824192047119, 0.00650438404083252, 0.00646608018875122, 0.0064778242111206055, 0.006487872123718262, 0.006520256042480469, 0.006474495887756348, 0.006475103855133057, 0.006484416007995606, 0.006482143878936767, 0.006448991775512695, 0.006465184211730957, 0.006504992008209229, 0.006496223926544189, 0.006458399772644043, 0.0064436478614807126, 0.0064535999298095705, 0.0064737281799316405, 0.006434815883636475, 0.006456992149353027, 0.006451136112213135, 0.006433184146881103, 0.0066416640281677245, 0.006501728057861328, 0.006470304012298584, 0.0065084161758422856, 0.0064718079566955565, 0.006465695858001709, 0.006479487895965576, 0.006455679893493652, 0.006563360214233398, 0.0063414077758789066, 0.006419424057006836, 0.006465983867645264, 0.0064349441528320315, 0.006467807769775391, 0.0064629120826721195, 0.0064477438926696775, 0.006682559967041015, 0.0064778242111206055, 0.006489439964294434, 0.006453760147094726, 0.006468768119812012, 0.006500480175018311, 0.0064534077644348145, 0.0064642238616943355, 0.006530816078186035, 0.006455552101135254, 0.0064759039878845214, 0.006893439769744873, 0.00671123218536377, 0.006697023868560791, 0.007575168132781983, 0.006544960021972656, 0.006586463928222657, 0.006539775848388672, 0.006508768081665039, 0.006489952087402343, 0.006498464107513428, 0.006481247901916504, 0.006498976230621338, 0.006479872226715088, 0.0065099201202392576, 0.006740640163421631, 0.006473055839538574, 0.006508480072021485, 0.006522784233093262, 0.006515423774719239, 0.006523071765899658, 0.006524256229400635, 0.0064906878471374515, 0.0065781760215759275, 0.006486015796661377, 0.006503520011901856, 0.006468255996704101, 0.006438176155090332, 0.006478271961212158, 0.006459231853485107, 0.006597311973571778, 0.006518015861511231, 0.0064990720748901365, 0.00656112003326416, 0.006482592105865478, 0.006447455883026123, 0.0065716800689697265, 0.006493504047393799, 0.006439648151397705, 0.006719456195831299, 0.006533120155334473, 0.006496255874633789, 0.006498303890228272, 0.006572095870971679, 0.006492095947265625, 0.006731776237487793, 0.006414400100708008, 0.0065474557876586915, 0.0064635839462280275, 0.006471936225891113, 0.006482719898223877, 0.006421023845672607, 0.006460864067077637, 0.006524127960205078, 0.006473408222198486, 0.006499839782714844, 0.006554111957550048, 0.006483359813690186, 0.006521440029144287, 0.006457632064819336, 0.006578144073486328, 0.006509376049041748, 0.006439871788024902, 0.006481120109558106, 0.006506432056427002, 0.006468448162078857, 0.006504447937011719, 0.006481919765472412, 0.006547391891479492, 
0.006526144027709961, 0.006546463966369629, 0.006497856140136719, 0.006445631980895996, 0.006512063980102539, 0.006460864067077637, 0.006482783794403076, 0.006474944114685058, 0.006547584056854248, 0.0064288959503173825, 0.006623456001281738, 0.006476191997528076, 0.006525055885314941, 0.006658944129943847, 0.006466400146484375, 0.006526688098907471, 0.006498559951782227, 0.006506112098693848, 0.006553696155548096, 0.006537536144256592, 0.0064880638122558594, 0.0064839677810668945, 0.0064759359359741215, 0.006452608108520508, 0.006472415924072265, 0.0064386558532714844, 0.006463200092315674, 0.006443295955657959, 0.006457503795623779, 0.0065218877792358396, 0.006453248023986816, 0.0064663681983947756, 0.0064856958389282226, 0.006465439796447754, 0.006459807872772217, 0.006457536220550537, 0.006526783943176269, 0.006555647850036621, 0.006444575786590576, 0.006517216205596924, 0.006424736022949218, 0.0065001602172851565, 0.006510623931884766, 0.006475776195526123, 0.006477503776550293, 0.0065392317771911625, 0.0064802241325378415, 0.006491936206817627, 0.00648419189453125, 0.006518784046173095, 0.006520703792572021, 0.006460768222808838, 0.006506432056427002, 0.0066015682220458985, 0.0064748477935791015, 0.006478176116943359, 0.006452991962432862, 0.006465663909912109, 0.0064795842170715335, 0.006458335876464843, 0.006471648216247559, 0.006547584056854248, 0.006537119865417481, 0.006477952003479004, 0.006451072216033936, 0.0064430079460144046, 0.006491392135620117, 0.006491231918334961, 0.0064815678596496585, 0.006500351905822754, 0.006505887985229492, 0.0064745922088623046, 0.0064570879936218266, 0.006477952003479004, 0.0064858880043029785, 0.0064572482109069825, 0.006463903903961182, 0.006477503776550293, 0.006499392032623291, 0.006528192043304444, 0.006473408222198486, 0.006485504150390625, 0.006519072055816651, 0.006469247817993164, 0.006488736152648926, 0.006453023910522461, 0.0064555201530456545, 0.00655401611328125, 0.006461023807525635, 0.006490111827850342, 0.0065372161865234375, 0.006442912101745605, 0.00647760009765625, 0.006465792179107666, 0.00646457576751709, 0.0065337600708007815, 0.00644543981552124, 0.006477952003479004, 0.006487711906433106, 0.006457119941711426, 0.0064926080703735355, 0.006491392135620117, 0.006525440216064453, 0.006449344158172607, 0.006418399810791016, 0.006504032135009765, 0.006474175930023193, 0.006732992172241211, 0.006570879936218262, 0.006501920223236084, 0.006496064186096192, 0.006576064109802246, 0.006658783912658691, 0.006539135932922363, 0.006736000061035156, 0.006486015796661377, 0.006529024124145508, 0.006469632148742676, 0.006500127792358398, 0.006490335941314697, 0.006576352119445801, 0.00651254415512085, 0.006478943824768066, 0.006509344100952149, 0.006524928092956543, 0.006469024181365967, 0.006543968200683594, 0.006634624004364014, 0.006564000129699707, 0.006531807899475097, 0.0067358717918396, 0.006501376152038574, 0.006797311782836914, 0.006605343818664551, 0.006558176040649414, 0.006544640064239502, 0.0065953278541564945, 0.006500256061553955, 0.006556992053985596, 0.00656444787979126, 0.006570303916931152, 0.006592383861541748, 0.0065413122177124024, 0.006551616191864014, 0.006543295860290527, 0.006643712043762207, 0.00651254415512085, 0.00651478385925293, 0.006522463798522949, 0.006511007785797119, 0.006571104049682617, 0.006589536190032959, 0.006731584072113037, 0.006696959972381592, 0.006516479969024659, 0.006529280185699463, 0.0065025601387023926, 0.00653056001663208, 0.0065437121391296385, 0.006509888172149658, 
0.006574783802032471, 0.006574079990386963, 0.006474976062774658, 0.006513440132141114, 0.006629663944244385, 0.006461152076721191, 0.0064338879585266115, 0.006440000057220459, 0.00646281623840332, 0.006535776138305664, 0.0064592638015747075, 0.006496255874633789, 0.006476064205169677, 0.006477344036102295, 0.00654150390625, 0.006987775802612305, 0.007839744091033935, 0.007479328155517578, 0.006868671894073486, 0.006563968181610107, 0.006479263782501221, 0.006474495887756348, 0.006461664199829102, 0.006501503944396973, 0.006507008075714111, 0.0065229439735412596, 0.006467264175415039, 0.006482336044311523, 0.006457344055175781, 0.006513728141784668, 0.006449215888977051, 0.0064865598678588866, 0.0064793601036071775, 0.006445919990539551, 0.00652288007736206, 0.006487904071807861, 0.006436192035675049, 0.006481855869293213, 0.006476384162902832, 0.006491936206817627, 0.006510528087615966, 0.0065391998291015625, 0.0065914239883422855, 0.006551231861114502, 0.006500351905822754, 0.006494239807128906, 0.006483200073242187, 0.0065133762359619145, 0.006504447937011719, 0.006446623802185058, 0.00652288007736206, 0.006539296150207519, 0.006478271961212158, 0.006528351783752441, 0.006498976230621338, 0.006485375881195068, 0.006489855766296387, 0.00647049617767334, 0.006504479885101318, 0.006551616191864014, 0.006451136112213135, 0.006485663890838623, 0.006458879947662354, 0.006482880115509033, 0.0064982080459594724, 0.0064839677810668945, 0.0064876160621643066, 0.006531807899475097, 0.0064544639587402345, 0.006401760101318359, 0.006479743957519531, 0.006463615894317627, 0.006489215850830078, 0.006472959995269776, 0.006523744106292724, 0.006506048202514649, 0.006501567840576172, 0.006472415924072265, 0.006459519863128662, 0.006491680145263672, 0.006521567821502685, 0.006448319911956787, 0.006493087768554688, 0.006549215793609619, 0.006499423980712891, 0.0065033278465271, 0.006471839904785157, 0.006490047931671143, 0.006555552005767822, 0.006460832118988037, 0.006541600227355957, 0.006541888236999512, 0.006612160205841064, 0.006676544189453125, 0.006535840034484863, 0.006526815891265869, 0.00652016019821167, 0.0064963197708129885, 0.0065008320808410645, 0.00650051212310791, 0.0064980478286743165, 0.006531231880187989, 0.00651475191116333, 0.006479872226715088, 0.006545184135437011, 0.006490335941314697, 0.006492159843444824, 0.006501440048217773, 0.006534080028533936, 0.006486368179321289, 0.006524576187133789, 0.006512639999389648, 0.006510591983795166, 0.0064839677810668945, 0.00646668815612793, 0.006532192230224609, 0.006476992130279541, 0.006527584075927734, 0.006432767868041992, 0.006497663974761963, 0.006705023765563965, 0.006463295936584472, 0.0064419198036193845, 0.0064984321594238285, 0.006457280158996582, 0.0066211199760437016, 0.006449120044708252, 0.006475872039794922, 0.006512576103210449, 0.0065212159156799315, 0.006514304161071778, 0.006489888191223144, 0.006379807949066162, 0.006565792083740234, 0.006463903903961182, 0.006492159843444824, 0.006479551792144775, 0.0064430079460144046, 0.0065372161865234375, 0.0064774398803710935, 0.006476160049438477, 0.006590303897857666, 0.00647811222076416, 0.006496255874633789, 0.0065493760108947755, 0.006467520236968994, 0.006461472034454346, 0.0064327998161315915, 0.006461567878723144, 0.006585472106933594, 0.0064683518409729005, 0.006478847980499268, 0.006570847988128662, 0.0064159040451049806, 0.006611455917358398, 0.006463615894317627, 0.006475776195526123, 0.006488160133361816, 0.006533023834228516, 0.006478879928588867, 0.006498559951782227, 
0.006408927917480468, 0.0067452797889709475, 0.006454080104827881, 0.006460864067077637, 0.0064642238616943355, 0.0065324797630310055, 0.006448671817779541, 0.00649721622467041, 0.0064774398803710935, 0.006513023853302002, 0.006462783813476563, 0.006504320144653321, 0.006480671882629394, 0.006449183940887451, 0.006516543865203858, 0.006518271923065186, 0.006466239929199219, 0.0065140161514282225, 0.006630144119262695, 0.006547359943389892, 0.0065413122177124024, 0.006502143859863281, 0.006570079803466797, 0.006522784233093262, 0.006508800029754638, 0.006520256042480469, 0.006543168067932129, 0.006556511878967285, 0.00664089584350586, 0.006606880187988281, 0.006596352100372314, 0.006613120079040528, 0.006535327911376953, 0.006572927951812744, 0.006458176136016846, 0.006536511898040771, 0.0065352640151977535, 0.006499040126800537, 0.006490111827850342, 0.006526976108551025, 0.0064839677810668945, 0.006467648029327392, 0.006515647888183594, 0.006478879928588867, 0.0065084161758422856, 0.0065203838348388676, 0.006547999858856201, 0.00653875207901001, 0.006469503879547119, 0.006490911960601807, 0.006493887901306152, 0.006426784038543701, 0.006489312171936035, 0.006502687931060791, 0.0064572482109069825, 0.006531871795654297, 0.006741824150085449, 0.006492127895355224, 0.006459455966949463, 0.0064503679275512696, 0.006501152038574219, 0.006520832061767578, 0.006436863899230957, 0.006606847763061524, 0.006496255874633789, 0.006504447937011719, 0.00651251220703125, 0.006498079776763916, 0.006490464210510254, 0.006488160133361816, 0.0064851841926574704, 0.00662172794342041, 0.006546751976013183, 0.006464384078979492, 0.006511936187744141, 0.0064455680847167966, 0.006449344158172607, 0.006496255874633789, 0.006446752071380615, 0.006428927898406983, 0.006505887985229492, 0.0064824318885803225, 0.006475423812866211, 0.006451263904571534, 0.0064618239402770995, 0.0064943041801452635, 0.006445055961608887, 0.006492159843444824, 0.006858367919921875, 0.006496799945831299, 0.006489952087402343, 0.006459392070770263, 0.006471680164337158, 0.00665718412399292, 0.006492224216461182, 0.006515488147735595, 0.006471680164337158, 0.006361055850982666, 0.00647603178024292, 0.006428671836853027, 0.00648089599609375, 0.006445888042449951, 0.006454527854919434, 0.006474815845489502, 0.006510560035705566, 0.0064633917808532716, 0.006469632148742676, 0.006754303932189941, 0.006700319766998291, 0.006564832210540771, 0.006428415775299072, 0.00650812816619873, 0.00647430419921875, 0.006498271942138672, 0.006514560222625732, 0.006471903800964356, 0.00649619197845459, 0.006478015899658203, 0.006472991943359375, 0.0065030078887939454, 0.006467360019683838, 0.006528863906860352, 0.006512703895568847, 0.006572127819061279, 0.006486112117767334, 0.006496448040008545, 0.006430496215820312, 0.006497791767120361, 0.006443359851837158, 0.006445151805877685, 0.006543360233306885, 0.0064737281799316405, 0.006516575813293457, 0.006737343788146973, 0.006455071926116943, 0.0065049281120300294, 0.006459871768951416, 0.006480000019073486, 0.006557151794433593, 0.006494624137878418, 0.006500319957733154, 0.006479936122894287, 0.006449408054351806, 0.006475488185882569, 0.0065937919616699215, 0.006658624172210694, 0.006553791999816895, 0.0064778242111206055, 0.006491583824157715, 0.006534976005554199, 0.006482687950134278, 0.006479328155517578, 0.0064596481323242185, 0.007002399921417236, 0.006496255874633789, 0.006602399826049805, 0.0064650559425354, 0.006497087955474853, 0.006495359897613525, 0.0064787201881408695, 0.0064009599685668946, 
0.006450463771820068, 0.0065316481590271, 0.006458623886108399, 0.006461696147918701, 0.006475967884063721, 0.00648748779296875, 0.006449600219726562, 0.006463007926940918, 0.006533247947692871, 0.006544159889221191, 0.006453248023986816, 0.006461440086364746, 0.006476992130279541, 0.00641926383972168, 0.00643891191482544, 0.0064778242111206055, 0.006469759941101074, 0.006509568214416504, 0.006521024227142334, 0.006494656085968018, 0.0064650559425354, 0.006415071964263916, 0.006475776195526123, 0.0064958400726318356, 0.006437280178070068, 0.006467296123504639, 0.006490399837493896, 0.006426176071166992, 0.006427103996276855, 0.006442463874816894, 0.006519296169281006, 0.00642252779006958, 0.006440288066864014, 0.0064243202209472655, 0.006437888145446777, 0.006516479969024659, 0.006442431926727295, 0.00644374418258667, 0.006467199802398681, 0.006414239883422852, 0.006447264194488525, 0.006426943778991699, 0.006463488101959228, 0.00650271987915039, 0.0064488320350646975, 0.006450719833374023, 0.006437024116516113, 0.0064453759193420414, 0.006488351821899414, 0.0064254398345947265, 0.006441664218902588, 0.006807007789611816, 0.00653385591506958, 0.006461440086364746, 0.0064222078323364256, 0.006500351905822754, 0.006484288215637207, 0.00644220781326294, 0.006447904109954834, 0.0064767999649047855, 0.006471903800964356, 0.00644755220413208, 0.006375616073608399, 0.006476992130279541, 0.006493087768554688, 0.006429567813873291, 0.006468255996704101, 0.0064469118118286135, 0.006506591796875, 0.00647811222076416, 0.006906976222991943, 0.006738815784454346, 0.0064876160621643066, 0.00644758415222168, 0.006481919765472412, 0.006483456134796142, 0.006495808124542236, 0.006463424205780029, 0.006419456005096436, 0.00643071985244751, 0.0064817600250244144, 0.00644268798828125, 0.006440927982330322, 0.006851071834564209, 0.006448480129241943, 0.00660752010345459, 0.006458687782287598, 0.006466047763824463, 0.006460927963256836, 0.006421472072601318, 0.00643452787399292, 0.006454912185668946, 0.006437056064605713, 0.006528704166412353, 0.006437280178070068, 0.006466752052307129, 0.007567872047424316, 0.0072052798271179195, 0.006458911895751953, 0.00647215986251831, 0.0065697917938232426, 0.006527520179748535, 0.006586016178131104, 0.006486015796661377, 0.00644700813293457, 0.0064943041801452635, 0.006467584133148193, 0.006481919765472412, 0.006549503803253174, 0.006498303890228272, 0.006555647850036621, 0.006475391864776611, 0.006472064018249512, 0.006492159843444824, 0.00647760009765625, 0.006473055839538574, 0.0065790719985961916, 0.006580416202545166, 0.006603936195373535, 0.006549983978271484, 0.006526144027709961, 0.006503359794616699, 0.006508607864379883, 0.0064849920272827145, 0.006669407844543457, 0.0064785280227661135, 0.006487455844879151, 0.006453855991363526, 0.006453248023986816, 0.006491936206817627, 0.006465695858001709, 0.006422048091888428, 0.006469727993011475, 0.0064291200637817385, 0.006699007987976074, 0.0065066561698913575, 0.006491648197174072, 0.0064966402053833, 0.006427999973297119, 0.006451839923858642, 0.0064462399482727055, 0.006462399959564209, 0.006524608135223388, 0.006704800128936768, 0.00661737585067749, 0.0068590397834777835, 0.006475776195526123, 0.006462463855743408, 0.006487040042877197, 0.0064570879936218266, 0.0064651198387146, 0.0065088639259338376, 0.006467936038970948, 0.006463488101959228, 0.0065474557876586915, 0.006475776195526123, 0.006487071990966797, 0.006441952228546143, 0.006499616146087647, 0.006504288196563721, 0.006450047969818115, 
0.006479455947875977, 0.006449408054351806, 0.006455039978027344, 0.00653763198852539, 0.0064522562026977535, 0.006487008094787598, 0.006440447807312012, 0.006501023769378662, 0.006507455825805664, 0.006418399810791016, 0.006470592021942139, 0.006444831848144531, 0.006469855785369873, 0.006459392070770263, 0.0064563841819763185, 0.006514815807342529, 0.006574048042297363, 0.0064551677703857425, 0.006451744079589844, 0.006721983909606934, 0.007510015964508057, 0.006510528087615966, 0.006504511833190918, 0.006538943767547607, 0.006480192184448242, 0.006487264156341553, 0.006466335773468017, 0.006384223937988281, 0.006453152179718018, 0.006461535930633545, 0.006464960098266602, 0.006517183780670166, 0.006465184211730957, 0.006416863918304443, 0.006445216178894043, 0.006487904071807861, 0.006453055858612061, 0.0064412479400634765, 0.0064306240081787105, 0.006499904155731201, 0.006475296020507813, 0.006424736022949218, 0.006469535827636719, 0.006467967987060547, 0.0064373440742492676, 0.006559743881225586, 0.006445055961608887, 0.006477791786193848, 0.006502431869506836, 0.006477727890014649, 0.006490015983581543, 0.006451551914215088, 0.006434783935546875, 0.006471551895141601, 0.0064471039772033695, 0.00645740795135498, 0.006555647850036621, 0.0064122238159179685, 0.0064691839218139644, 0.006469312191009521, 0.006462431907653808, 0.006485792160034179, 0.006475776195526123, 0.006479072093963623, 0.006458144187927246, 0.006528639793395996, 0.006471551895141601, 0.006487584114074707, 0.006613120079040528, 0.006515488147735595, 0.00650867223739624, 0.006500288009643555, 0.007693727970123291, 0.006551424026489258, 0.006522624015808105, 0.006544352054595947, 0.006543039798736572, 0.0064423041343688965, 0.006456319808959961, 0.0064102401733398436, 0.0065146880149841305, 0.006539552211761474, 0.006485407829284668, 0.006508287906646729, 0.006447296142578125, 0.006557536125183105, 0.006752863883972168, 0.006866464138031006, 0.006455711841583252, 0.0064924159049987796, 0.006427103996276855, 0.006448448181152344, 0.00641868782043457, 0.006415743827819825, 0.006438784122467041, 0.0064174079895019534, 0.0064143362045288085, 0.0064737281799316405, 0.006427743911743164, 0.00645798397064209, 0.006414720058441162, 0.0064306240081787105, 0.006541376113891602, 0.006440896034240723, 0.006469632148742676, 0.006467584133148193, 0.006489120006561279, 0.006470623970031738, 0.006469632148742676, 0.006472991943359375, 0.006457952022552491, 0.0065578241348266605, 0.006498303890228272, 0.006689856052398682, 0.006582848072052002, 0.006540703773498535, 0.006456160068511963, 0.006468959808349609, 0.006497056007385254, 0.006392064094543457, 0.0064584641456604, 0.006408864021301269, 0.006494207859039307, 0.006504447937011719, 0.00670739221572876, 0.006497280120849609, 0.006490943908691406, 0.0064471039772033695, 0.006518400192260742, 0.006645823955535889, 0.006945248126983643, 0.0067582402229309085, 0.0065781760215759275, 0.00653107213973999, 0.0068055038452148435, 0.0068384318351745604, 0.006667424201965332, 0.006488863945007324, 0.006477727890014649, 0.006583392143249512, 0.006535711765289306, 0.006497951984405517, 0.006494944095611572, 0.006550943851470948, 0.006490719795227051, 0.0064839677810668945, 0.006481919765472412, 0.006561791896820069, 0.006512639999389648, 0.006457344055175781, 0.0064839677810668945, 0.006493951797485352, 0.006455552101135254, 0.006363296031951905, 0.0064358081817626955, 0.006505343914031982, 0.0064651198387146, 0.0064182720184326175, 0.006454016208648682, 0.00643782377243042, 
0.006461728096008301, 0.006473567962646485, 0.006400767803192139, 0.006496255874633789, 0.00642252779006958, 0.006440159797668457, 0.006460192203521728, 0.0064245758056640625, 0.006444447994232177, 0.006475776195526123, 0.006419199943542481, 0.006464831829071045, 0.006463103771209717, 0.00645417594909668, 0.006480063915252686, 0.006420512199401856, 0.006448927879333496, 0.006464863777160644, 0.006441823959350586, 0.006459199905395508, 0.006453440189361572, 0.006430272102355957, 0.006465663909912109, 0.006412352085113525, 0.006447296142578125, 0.006456895828247071, 0.006544832229614258, 0.006505343914031982, 0.0064653120040893556, 0.006515039920806885, 0.006457215785980225, 0.006465536117553711, 0.006479616165161133, 0.0064268798828125, 0.006469247817993164, 0.006517055988311767, 0.00642416000366211, 0.006528927803039551, 0.0065582718849182126, 0.006455296039581298, 0.006479296207427978, 0.006853184223175049, 0.006473567962646485, 0.006477791786193848, 0.006413887977600097, 0.006433311939239502, 0.006584288120269776, 0.006467936038970948, 0.006459167957305908, 0.006445055961608887, 0.006453152179718018, 0.00670531177520752, 0.006496543884277344, 0.006579872131347656, 0.006492479801177979, 0.006440288066864014, 0.006362783908843994, 0.006451327800750733, 0.00645356798171997, 0.006411871910095215, 0.006432864189147949, 0.006494207859039307, 0.0064535999298095705, 0.006446752071380615, 0.006463488101959228, 0.006463488101959228, 0.006457151889801026, 0.006515999794006347, 0.006464767932891846, 0.006475423812866211, 0.006473504066467285, 0.006471072196960449, 0.006459680080413819, 0.006445248126983643, 0.006476128101348877, 0.0064666237831115725, 0.006490528106689453, 0.00649232006072998, 0.006545792102813721, 0.0064471039772033695, 0.006631616115570068, 0.006453311920166015, 0.006508287906646729, 0.0066375679969787596, 0.0064791679382324215, 0.006517439842224121, 0.00647324800491333, 0.006528831958770752, 0.006471712112426758, 0.0064841279983520505, 0.006456128120422363, 0.006409887790679932, 0.00643071985244751, 0.006431072235107422, 0.006407839775085449, 0.006550559997558594, 0.006425568103790283, 0.006428671836853027, 0.006459455966949463, 0.006426559925079346, 0.00645743989944458, 0.0064552001953125, 0.0064570879936218266, 0.00660316801071167, 0.006534111976623535, 0.006677472114562989, 0.00654531192779541, 0.006455039978027344, 0.0064856958389282226, 0.006481696128845215, 0.006496384143829346, 0.006504288196563721, 0.006503232002258301, 0.00645356798171997, 0.0064815678596496585, 0.006490143775939941, 0.006504447937011719, 0.006483935832977295, 0.006440639972686768]",tokens/s,153.6460911107445,,, 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File 
""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,882.614272,566.099968,0.0,163.577856,154.631168,s,1,7.68910546875,7.68910546875,0.0,7.68910546875,7.68910546875,7.68910546875,7.68910546875,[7.68910546875],,kWh,1.546030797496011e-05,1.6967620879155147e-06,4.775559376002381e-06,2.1932629438878006e-05,,MB,1367.609344,616.431616,0.0,199.22944,187.147776,s,30,0.20090300703048708,0.006696766901016236,0.00027029082893048677,0.006594256162643432,0.006941673564910889,0.007249966549873352,0.007618569593429565,"[0.0069144959449768065, 0.0065948481559753415, 0.0066397438049316405, 0.006707903861999512, 0.006601727962493896, 0.0065799360275268556, 0.006830080032348633, 0.006639296054840088, 0.006542175769805908, 0.006691679954528808, 0.0064934401512145995, 0.006485663890838623, 0.0065912318229675295, 0.006669343948364258, 0.006534143924713135, 0.006574368000030518, 0.0066516480445861816, 0.006534207820892334, 0.006492032051086426, 0.006587935924530029, 0.007302080154418945, 0.006880000114440918, 0.007186272144317627, 0.006526527881622314, 0.006557951927185059, 0.006593664169311523, 0.006574463844299316, 0.006583295822143555, 0.0077478399276733395, 0.00659500789642334]",tokens/s,38227.40193647056,kWh,1.9393589831395713e-07,2.138757439014679e-08,8.935869090364703e-08,3.0468216360775097e-07,tokens/kWh,840219844.0784851,MB,1383.202816,618.528768,0.0,201.326592,187.150336,s,30,9.995746276855472,0.333191542561849,0.0023377450345365047,0.3326029968261719,0.336141259765625,0.33689898071289065,0.3397462664794922,"[0.3361131591796875, 0.3354996337890625, 0.3336417541503906, 0.33282452392578127, 0.3355817565917969, 0.3338935852050781, 0.3312887268066406, 0.33277188110351563, 0.3363941650390625, 0.3315314025878906, 0.3300597534179687, 0.3319615478515625, 0.332053466796875, 0.3326657104492188, 0.33731201171875, 0.33071585083007815, 0.33211712646484376, 0.3319349060058594, 0.3308146057128906, 0.330283203125, 0.3357464599609375, 0.34074053955078126, 0.3323275146484375, 0.332540283203125, 0.33309930419921874, 0.3306569519042969, 0.33187164306640626, 0.332939697265625, 0.3338788146972656, 0.3324862976074219]",tokens/s,189.08042957994817,kWh,9.566398322659205e-06,1.0550100363183292e-06,3.6630794962305372e-06,1.4284487855208075e-05,tokens/kWh,4410378.631602841,,s,1890,9.98184825181962,0.005281401191438947,0.00015715138370240168,0.005253312110900878,0.005339718580245971,0.00541615343093872,0.005895056114196777,"[0.00535536003112793, 0.005427264213562012, 0.005453023910522461, 0.005419551849365234, 0.005464064121246338, 0.005375999927520752, 0.005341343879699707, 0.005723999977111816, 0.0053309440612792965, 0.005513216018676758, 0.005516767978668213, 0.005339680194854736, 0.005359615802764893, 0.0053944320678710935, 0.005320703983306885, 0.0053504958152770996, 0.005350560188293457, 0.005324543952941895, 0.005348544120788574, 0.005329376220703125, 0.00527180814743042, 
0.0052808961868286135, 0.005280735969543457, 0.005244927883148193, 0.005253280162811279, 0.005279232025146485, 0.005236320018768311, 0.005286655902862549, 0.00537824010848999, 0.005268320083618164, 0.005282688140869141, 0.005359712123870849, 0.0053422718048095705, 0.005510079860687256, 0.005281792163848877, 0.005285888195037842, 0.00525545597076416, 0.005269152164459228, 0.0052594561576843266, 0.005301375865936279, 0.005305024147033691, 0.005245215892791748, 0.005242527961730957, 0.005392352104187012, 0.0052470722198486325, 0.005271615982055664, 0.005304128170013428, 0.005386655807495117, 0.005219295978546142, 0.005243711948394776, 0.005232639789581299, 0.005208352088928223, 0.005244639873504639, 0.005224448204040527, 0.005242879867553711, 0.005246143817901611, 0.00528380823135376, 0.005251232147216797, 0.005241536140441895, 0.005908480167388916, 0.005262720108032227, 0.005249375820159912, 0.005244383811950684, 0.005177087783813476, 0.005202208042144775, 0.0053021440505981446, 0.005244416236877441, 0.005225088119506836, 0.005695487976074219, 0.005318880081176758, 0.005330719947814941, 0.005273024082183838, 0.005285696029663086, 0.0052310719490051266, 0.005558847904205322, 0.005329919815063477, 0.0052600960731506346, 0.005375904083251953, 0.00529798412322998, 0.005263552188873291, 0.005279744148254394, 0.005287327766418457, 0.005429855823516846, 0.005414783954620361, 0.00529363203048706, 0.005894911766052246, 0.0052631678581237795, 0.00537395191192627, 0.005253119945526123, 0.0052633600234985355, 0.005322751998901368, 0.00532089614868164, 0.005330560207366944, 0.00563424015045166, 0.005260831832885742, 0.005287487983703613, 0.005403488159179687, 0.005275807857513428, 0.005274623870849609, 0.005275743961334229, 0.005300704002380371, 0.00529033613204956, 0.00533462381362915, 0.005328927993774414, 0.005259647846221924, 0.005311872005462646, 0.005317247867584228, 0.005273600101470947, 0.005320608139038086, 0.005255263805389404, 0.005261312007904053, 0.005283840179443359, 0.005255167961120606, 0.005400576114654541, 0.0053002238273620605, 0.005244927883148193, 0.005290112018585205, 0.005296000003814697, 0.005223487854003906, 0.005270463943481445, 0.0052644162178039555, 0.005286880016326904, 0.005305408000946045, 0.00532150411605835, 0.005268671989440918, 0.005251455783843994, 0.005212384223937988, 0.005295904159545899, 0.0052527041435241695, 0.0052631678581237795, 0.0053499841690063474, 0.005330976009368897, 0.005316927909851075, 0.005342144012451172, 0.0052641282081604, 0.005330431938171387, 0.00531935977935791, 0.005351200103759766, 0.005261312007904053, 0.0052674560546875, 0.005306496143341065, 0.005244800090789795, 0.005242623805999756, 0.005277184009552002, 0.005231359958648682, 0.005285024166107178, 0.00528879976272583, 0.005283840179443359, 0.005242623805999756, 0.005238399982452392, 0.00525171184539795, 0.005250527858734131, 0.005277760028839111, 0.0052391037940979004, 0.005238175868988037, 0.005283840179443359, 0.005225215911865234, 0.005272960186004639, 0.005278336048126221, 0.005294079780578613, 0.005246975898742676, 0.005269087791442871, 0.005299935817718506, 0.005242976188659668, 0.005253727912902832, 0.00537391996383667, 0.005273248195648193, 0.005261888027191162, 0.005393824100494385, 0.005523519992828369, 0.005431456089019775, 0.005341087818145752, 0.005259840011596679, 0.005297887802124024, 0.005339072227478027, 0.005267007827758789, 0.005291552066802979, 0.00535430383682251, 0.0052696638107299805, 0.0053002238273620605, 0.005314208030700683, 0.005238527774810791, 
0.005257823944091797, 0.005296127796173096, 0.0052295680046081545, 0.00528227186203003, 0.005287551879882812, 0.005305408000946045, 0.005275680065155029, 0.005195871829986572, 0.005287936210632324, 0.005314559936523438, 0.005310463905334473, 0.005288032054901123, 0.005302175998687744, 0.005265344142913819, 0.0052975997924804685, 0.005288767814636231, 0.0053010878562927246, 0.005312895774841309, 0.005280352115631103, 0.005289760112762451, 0.005308800220489502, 0.005231647968292236, 0.00528217601776123, 0.005232927799224854, 0.0052880640029907226, 0.005248447895050049, 0.005260896205902099, 0.005288959980010987, 0.005243040084838867, 0.005269343852996826, 0.005261312007904053, 0.005269216060638428, 0.0052882242202758785, 0.0052594561576843266, 0.00526694393157959, 0.005314879894256592, 0.005312191963195801, 0.005222623825073242, 0.005253407955169678, 0.005269023895263672, 0.005232287883758545, 0.005265376091003418, 0.0052640318870544435, 0.005268703937530517, 0.005239583969116211, 0.005281792163848877, 0.005336863994598389, 0.005236991882324219, 0.005303743839263916, 0.005257952213287354, 0.005250879764556885, 0.005253119945526123, 0.005265696048736572, 0.005274847984313965, 0.005229055881500244, 0.005249375820159912, 0.005252384185791016, 0.005275519847869873, 0.005355616092681884, 0.005230720043182373, 0.0052774081230163574, 0.005261888027191162, 0.005252480030059814, 0.005275296211242676, 0.005283840179443359, 0.005516255855560303, 0.005265408039093018, 0.005296127796173096, 0.0052427840232849125, 0.005249536037445069, 0.005204256057739258, 0.005286240100860596, 0.005256927967071533, 0.005389056205749512, 0.0053060798645019535, 0.005331232070922852, 0.00537724781036377, 0.005318431854248047, 0.005483232021331787, 0.005327072143554688, 0.005351103782653808, 0.005316768169403076, 0.005279712200164795, 0.005344863891601563, 0.00525171184539795, 0.0053697280883789065, 0.005332096099853516, 0.005310463905334473, 0.005892864227294922, 0.005291711807250976, 0.005224063873291016, 0.005255807876586914, 0.005247039794921875, 0.005220352172851562, 0.005255136013031006, 0.005268511772155762, 0.00526585578918457, 0.005257791996002197, 0.005705728054046631, 0.006196959972381592, 0.00531279993057251, 0.0052899842262268066, 0.00527510404586792, 0.00529036808013916, 0.005275807857513428, 0.005274879932403565, 0.005287712097167969, 0.005289184093475342, 0.005312255859375, 0.005281856060028076, 0.005288991928100586, 0.005292960166931152, 0.005294079780578613, 0.005285280227661133, 0.005294047832489014, 0.005267615795135498, 0.0053170881271362306, 0.005269055843353272, 0.005247424125671387, 0.005285376071929931, 0.00522489595413208, 0.0052715840339660645, 0.005285855770111084, 0.005255424022674561, 0.0053051838874816895, 0.005273791790008545, 0.0053023681640625, 0.0052722558975219725, 0.0052420802116394044, 0.005296224117279052, 0.005214911937713623, 0.005268671989440918, 0.005260223865509033, 0.0052408318519592285, 0.0052715520858764645, 0.005274784088134765, 0.005292895793914795, 0.005262944221496582, 0.005282080173492432, 0.005347104072570801, 0.005386591911315918, 0.005290143966674805, 0.005302303791046142, 0.005281055927276611, 0.005261216163635254, 0.005413792133331299, 0.00528764820098877, 0.005330687999725342, 0.005273568153381348, 0.0052700481414794925, 0.005267199993133545, 0.005279007911682129, 0.005351200103759766, 0.005345215797424316, 0.005368832111358642, 0.005259263992309571, 0.005285247802734375, 0.0053151359558105465, 0.005257279872894287, 0.005277696132659912, 0.005296127796173096, 
0.005269504070281982, 0.005285888195037842, 0.005308063983917236, 0.0053023681640625, 0.005255296230316162, 0.00529798412322998, 0.005293407917022705, 0.005286880016326904, 0.005281792163848877, 0.0052665920257568355, 0.005262080192565918, 0.005273695945739746, 0.00551526403427124, 0.005395904064178467, 0.00540012788772583, 0.0052911038398742675, 0.0052137279510498045, 0.005275680065155029, 0.005259903907775879, 0.005277120113372803, 0.005249311923980713, 0.005287968158721924, 0.005247168064117432, 0.00524454402923584, 0.005275807857513428, 0.005232639789581299, 0.005272575855255127, 0.005285888195037842, 0.005224864006042481, 0.005294623851776123, 0.005370048046112061, 0.005287807941436767, 0.005253119945526123, 0.005261312007904053, 0.005269536018371582, 0.0052037758827209474, 0.00527513599395752, 0.00528659200668335, 0.0052367358207702636, 0.005276703834533691, 0.005260255813598633, 0.005211487770080566, 0.005222208023071289, 0.005225312232971191, 0.005230688095092774, 0.005218368053436279, 0.005220191955566406, 0.005265408039093018, 0.005214208126068115, 0.005232863903045655, 0.005256991863250732, 0.005189631938934326, 0.005279744148254394, 0.005248735904693604, 0.005240960121154785, 0.005225664138793946, 0.00528601598739624, 0.005303328037261963, 0.005244224071502685, 0.005255167961120606, 0.005248735904693604, 0.0052193598747253414, 0.0052646718025207515, 0.00532528018951416, 0.005246975898742676, 0.005219423770904541, 0.005235616207122803, 0.005193727970123291, 0.005242879867553711, 0.005241919994354248, 0.005231423854827881, 0.0052343039512634274, 0.0052408638000488285, 0.005246431827545166, 0.005213183879852295, 0.005266687870025635, 0.005265312194824219, 0.005234911918640137, 0.005276288032531738, 0.005246848106384277, 0.005240064144134521, 0.005234655857086182, 0.0052646718025207515, 0.005231999874114991, 0.005224127769470215, 0.005252863883972168, 0.005263967990875244, 0.005243103981018066, 0.005220352172851562, 0.005285823822021484, 0.005256864070892334, 0.005259552001953125, 0.00529641580581665, 0.005226336002349854, 0.005297408103942871, 0.005278560161590576, 0.0054694080352783205, 0.005277632236480713, 0.005249023914337158, 0.0052995200157165525, 0.005257952213287354, 0.005625823974609375, 0.005341184139251709, 0.005328127861022949, 0.005327199935913086, 0.005349472045898438, 0.005302591800689697, 0.0052674560546875, 0.005309887886047363, 0.00522431993484497, 0.005262239933013916, 0.005293856143951416, 0.0052325439453125, 0.005883456230163574, 0.005288479804992675, 0.0053024001121521, 0.005249120235443115, 0.005234399795532227, 0.005259007930755615, 0.0052350077629089355, 0.005332992076873779, 0.005205120086669922, 0.005233535766601563, 0.005259263992309571, 0.00529363203048706, 0.005384640216827392, 0.0052408318519592285, 0.005220352172851562, 0.005349311828613281, 0.005304384231567383, 0.005222400188446045, 0.005242879867553711, 0.005249120235443115, 0.005232736110687256, 0.00521401596069336, 0.005230016231536865, 0.005243296146392822, 0.005238944053649902, 0.005216256141662597, 0.005291007995605469, 0.005260191917419434, 0.005219808101654053, 0.0052230401039123535, 0.005210112094879151, 0.005232416152954102, 0.005232863903045655, 0.005221759796142578, 0.005204607963562011, 0.005237760066986084, 0.005242208003997803, 0.00527235221862793, 0.00524294376373291, 0.0052576642036437984, 0.005230112075805664, 0.005214240074157715, 0.005274432182312012, 0.0052256321907043456, 0.005253983974456787, 0.005234879970550537, 0.005222208023071289, 0.005279744148254394, 
0.005144192218780518, 0.005269120216369629, 0.005194079875946045, 0.005335616111755371, 0.005369696140289306, 0.0052464637756347654, 0.005268191814422608, 0.00524502420425415, 0.005286943912506103, 0.005230783939361572, 0.005300127983093262, 0.005724832057952881, 0.0054148159027099605, 0.007038976192474365, 0.00538812780380249, 0.005424287796020508, 0.005253344058990478, 0.005579552173614502, 0.005689504146575927, 0.005252384185791016, 0.005230368137359619, 0.005273952007293701, 0.005212607860565185, 0.005223455905914306, 0.0052715520858764645, 0.005263679981231689, 0.00538486385345459, 0.005267007827758789, 0.005235136032104492, 0.005238783836364746, 0.005281216144561768, 0.005212736129760742, 0.005230527877807617, 0.005730368137359619, 0.0052592320442199705, 0.005348703861236572, 0.005271679878234863, 0.005217887878417969, 0.005288928031921387, 0.005466400146484375, 0.00534006404876709, 0.00534006404876709, 0.0052854399681091305, 0.005220799922943115, 0.005252384185791016, 0.005345088005065918, 0.005222367763519287, 0.005237567901611328, 0.0052347202301025395, 0.005298399925231933, 0.0052408638000488285, 0.005248479843139649, 0.0052841281890869144, 0.005224448204040527, 0.005259263992309571, 0.005238976001739502, 0.005202079772949219, 0.005256480216979981, 0.005318272113800049, 0.005483136177062988, 0.00533897590637207, 0.0052605757713317875, 0.005239808082580567, 0.0051923198699951175, 0.005302175998687744, 0.005242911815643311, 0.005208064079284668, 0.005208032131195068, 0.0052276158332824705, 0.005231552124023438, 0.005199327945709229, 0.005204512119293213, 0.005230591773986816, 0.005207808017730713, 0.005212416172027588, 0.005231840133666992, 0.0052202239036560055, 0.005227424144744873, 0.005250495910644531, 0.005229311943054199, 0.005205023765563965, 0.005311264038085938, 0.005207071781158448, 0.005207007884979248, 0.005198912143707275, 0.005317567825317383, 0.00532480001449585, 0.005248799800872803, 0.005239327907562256, 0.005203648090362549, 0.005234687805175781, 0.005203968048095703, 0.005211264133453369, 0.005230944156646728, 0.005485119819641113, 0.0052939200401306155, 0.0052817602157592776, 0.00528604793548584, 0.005229951858520508, 0.0052228479385375975, 0.0052791361808776855, 0.005388639926910401, 0.0053272957801818845, 0.0052779197692871095, 0.00521398401260376, 0.005208384037017822, 0.005201344013214111, 0.005219615936279297, 0.005202047824859619, 0.00523145580291748, 0.005242432117462158, 0.00604204797744751, 0.005258528232574463, 0.0052594242095947265, 0.005215807914733887, 0.005219327926635742, 0.005259263992309571, 0.005203968048095703, 0.005212480068206787, 0.005209792137145996, 0.005283840179443359, 0.005222400188446045, 0.005242847919464112, 0.0052358717918395995, 0.005223135948181152, 0.005220511913299561, 0.0051404800415039064, 0.005201920032501221, 0.005179391860961914, 0.005211423873901367, 0.005287712097167969, 0.005247583866119385, 0.005228544235229492, 0.005224800109863282, 0.005227583885192871, 0.005217152118682861, 0.005230656147003174, 0.005310463905334473, 0.005681215763092041, 0.005246784210205078, 0.005207615852355957, 0.005220928192138672, 0.0052154879570007326, 0.00530508804321289, 0.005203968048095703, 0.005265408039093018, 0.005191232204437256, 0.005188127994537353, 0.005202047824859619, 0.005213568210601807, 0.005171616077423096, 0.00522649621963501, 0.005201632022857666, 0.0052024321556091305, 0.0051975998878479, 0.00519923210144043, 0.005195648193359375, 0.005181215763092041, 0.005276639938354492, 0.005209375858306884, 0.005223135948181152, 
0.005207871913909912, 0.005238048076629639, 0.005174176216125488, 0.005193408012390137, 0.005219967842102051, 0.005173952102661133, 0.0052080960273742675, 0.005211296081542968, 0.005241663932800293, 0.005211904048919678, 0.005224703788757324, 0.005244768142700195, 0.0052304320335388186, 0.005191999912261963, 0.005211935997009277, 0.00521449613571167, 0.005199071884155274, 0.005202943801879883, 0.005212063789367676, 0.005192895889282226, 0.005200799942016602, 0.005228256225585938, 0.005231840133666992, 0.005230944156646728, 0.005265984058380127, 0.005641600131988525, 0.005240767955780029, 0.005229119777679443, 0.005228320121765137, 0.005210112094879151, 0.005239840030670166, 0.005258207798004151, 0.005253119945526123, 0.005296224117279052, 0.005267072200775147, 0.00523686408996582, 0.005251232147216797, 0.005298367977142334, 0.005218463897705078, 0.005211840152740479, 0.005211711883544922, 0.005316160202026367, 0.005291903972625732, 0.005280735969543457, 0.005318655967712403, 0.005285344123840332, 0.005244639873504639, 0.00525600004196167, 0.005218560218811035, 0.005275392055511475, 0.0052256321907043456, 0.005235328197479248, 0.0053060798645019535, 0.005247488021850586, 0.005217567920684814, 0.005221439838409424, 0.005232287883758545, 0.005208191871643067, 0.005227871894836426, 0.005247519969940185, 0.005254303932189942, 0.00527180814743042, 0.005253727912902832, 0.005261312007904053, 0.0052009282112121585, 0.00521727991104126, 0.005582176208496094, 0.005257247924804687, 0.005362271785736084, 0.00530841588973999, 0.0052492799758911135, 0.005322656154632568, 0.005273471832275391, 0.005228640079498291, 0.005226175785064697, 0.005259200096130371, 0.005220831871032715, 0.005209951877593994, 0.005217696189880371, 0.005233183860778808, 0.005212160110473632, 0.005206016063690186, 0.0052531838417053224, 0.005283775806427002, 0.0052462081909179685, 0.005559040069580078, 0.005214303970336914, 0.005254496097564698, 0.005220608234405517, 0.005247168064117432, 0.005355936050415039, 0.00516096019744873, 0.005265312194824219, 0.005212255954742432, 0.005210048198699952, 0.005247231960296631, 0.005227968215942382, 0.005216639995574951, 0.005228544235229492, 0.005197824001312256, 0.00521830415725708, 0.00520304012298584, 0.005248032093048096, 0.005218239784240722, 0.005212096214294434, 0.005238527774810791, 0.00520249605178833, 0.005237887859344483, 0.005354047775268555, 0.005208064079284668, 0.005250495910644531, 0.005204544067382812, 0.00526310396194458, 0.005218624114990234, 0.0052408638000488285, 0.005274623870849609, 0.0052087678909301756, 0.005241055965423584, 0.0052304000854492185, 0.0052187199592590335, 0.00525494384765625, 0.005246431827545166, 0.00523145580291748, 0.005209792137145996, 0.0052633600234985355, 0.005320703983306885, 0.005244927883148193, 0.005349088191986084, 0.005257728099822998, 0.005217823982238769, 0.0052326078414916995, 0.005275839805603027, 0.005240352153778076, 0.005253695964813233, 0.005318816184997559, 0.005260863780975342, 0.005260704040527343, 0.005274752140045166, 0.005286911964416504, 0.005253888130187988, 0.005280928134918213, 0.005215424060821533, 0.005262944221496582, 0.005280992031097412, 0.005247776031494141, 0.005238976001739502, 0.005601215839385987, 0.005264512062072754, 0.005336927890777588, 0.005284832000732422, 0.0053043198585510255, 0.005492127895355224, 0.005525856018066406, 0.005333248138427734, 0.0051753602027893066, 0.0052288641929626465, 0.005263296127319336, 0.005736639976501465, 0.0052304000854492185, 0.005273183822631836, 0.005208479881286621, 
0.005269504070281982, 0.0052531838417053224, 0.005897632122039795, 0.005417791843414307, 0.005266240119934082, 0.0052391037940979004, 0.005236415863037109, 0.005251776218414306, 0.00524832010269165, 0.005208735942840576, 0.005263423919677735, 0.005234272003173828, 0.005331552028656006, 0.005265312194824219, 0.0051938238143920895, 0.005244927883148193, 0.005224448204040527, 0.005231808185577393, 0.005477344036102295, 0.005287615776062011, 0.005247488021850586, 0.005428415775299072, 0.0054583997726440426, 0.0052304320335388186, 0.005222559928894043, 0.005222400188446045, 0.00525219202041626, 0.005264287948608399, 0.005222400188446045, 0.005275648117065429, 0.005194752216339111, 0.0052336640357971195, 0.005234687805175781, 0.005233983993530273, 0.00533519983291626, 0.005267263889312744, 0.005230944156646728, 0.005214240074157715, 0.005255263805389404, 0.005214719772338867, 0.005201663970947266, 0.005265408039093018, 0.005226431846618652, 0.005218560218811035, 0.0052566719055175785, 0.005239424228668213, 0.005228384017944336, 0.005203839778900147, 0.00523635196685791, 0.005194079875946045, 0.005251232147216797, 0.005226655960083008, 0.005245888233184814, 0.005204768180847168, 0.005255167961120606, 0.0053209919929504395, 0.005219295978546142, 0.005231520175933838, 0.005223775863647461, 0.005304351806640625, 0.0052271361351013184, 0.005269504070281982, 0.005256735801696778, 0.0052226881980896, 0.0052531838417053224, 0.005242464065551758, 0.00521446418762207, 0.005224160194396973, 0.005337855815887451, 0.0052706880569458004, 0.005302944183349609, 0.00530847978591919, 0.005260799884796143, 0.005316800117492676, 0.005323071956634521, 0.005305856227874756, 0.005429471969604492, 0.005402847766876221, 0.005772672176361084, 0.007046080112457275, 0.0060076479911804195, 0.00547219181060791, 0.005344064235687256, 0.005287968158721924, 0.005328224182128906, 0.005364128112792969, 0.005275968074798584, 0.005284095764160156, 0.005280576229095459, 0.005339136123657226, 0.005293344020843506, 0.005267072200775147, 0.005529183864593506, 0.005290559768676758, 0.005425055980682373, 0.005277567863464355, 0.005451871871948242, 0.005228544235229492, 0.005359615802764893, 0.005375552177429199, 0.00524128007888794, 0.005269504070281982, 0.005275648117065429, 0.005248735904693604, 0.005238751888275147, 0.005310783863067627, 0.005207392215728759, 0.005248991966247558, 0.005427743911743164, 0.005425312042236328, 0.005374303817749023, 0.005311391830444336, 0.005259136199951172, 0.0052675199508667, 0.0052434239387512205, 0.005318943977355957, 0.005203423976898193, 0.005284736156463623, 0.005256864070892334, 0.00517852783203125, 0.005206943988800049, 0.0052503361701965335, 0.005237311840057373, 0.005203392028808594, 0.005259552001953125, 0.005233024120330811, 0.005222400188446045, 0.0052163200378417965, 0.005254687786102295, 0.00520252799987793, 0.0052360320091247555, 0.005218815803527832, 0.005242688179016113, 0.005226463794708252, 0.005230688095092774, 0.005269375801086426, 0.0051981120109558104, 0.005238239765167236, 0.005231103897094727, 0.005180416107177735, 0.005231616020202637, 0.00524012804031372, 0.00523744010925293, 0.0052297282218933105, 0.005223264217376709, 0.005270976066589356, 0.005186304092407227, 0.005235775947570801, 0.005228831768035889, 0.005209695816040039, 0.005206912040710449, 0.005230336189270019, 0.005222591876983642, 0.0052388482093811035, 0.0052566399574279785, 0.0052650880813598635, 0.005266272068023682, 0.005195807933807373, 0.005254655838012695, 0.005216767787933349, 0.005208064079284668, 
0.005264448165893555, 0.005214303970336914, 0.00525593614578247, 0.00526259183883667, 0.005233503818511963, 0.005306367874145508, 0.005246304035186768, 0.005253791809082031, 0.005244128227233887, 0.005292128086090088, 0.005257919788360595, 0.0053002238273620605, 0.005306591987609863, 0.005265183925628662, 0.005279744148254394, 0.005271520137786865, 0.00532697582244873, 0.00528988790512085, 0.005252831935882568, 0.005306208133697509, 0.00522431993484497, 0.005162176132202149, 0.00522649621963501, 0.005281599998474121, 0.005203231811523438, 0.005269216060638428, 0.0052674560546875, 0.005892096042633056, 0.0052873601913452145, 0.005240896224975586, 0.005259136199951172, 0.005234784126281739, 0.005257376194000244, 0.005218688011169433, 0.005227968215942382, 0.005362048149108887, 0.005214272022247314, 0.005238560199737549, 0.00525164794921875, 0.0052156481742858885, 0.005239168167114258, 0.005228064060211182, 0.005243328094482422, 0.00521014404296875, 0.005205471992492675, 0.005239327907562256, 0.005316832065582275, 0.005265376091003418, 0.005946303844451904, 0.005267807960510254, 0.005233183860778808, 0.005412864208221436, 0.005266880035400391, 0.005234687805175781, 0.00527350378036499, 0.0052120318412780765, 0.00527561616897583, 0.0052600960731506346, 0.005209727764129639, 0.005246335983276367, 0.005256480216979981, 0.005228223800659179, 0.005238815784454346, 0.00526307201385498, 0.0052165441513061525, 0.005233791828155518, 0.005344223976135254, 0.005277599811553955, 0.005249311923980713, 0.005199711799621582, 0.005209983825683594, 0.005203968048095703, 0.005238239765167236, 0.005241407871246338, 0.005191584110260009, 0.005231904029846191, 0.0052009282112121585, 0.005233727931976319, 0.005241536140441895, 0.005217631816864013, 0.005221024036407471, 0.005168896198272705, 0.005224703788757324, 0.005240640163421631, 0.005213664054870605, 0.005194047927856446, 0.0052492480278015135, 0.005203199863433838, 0.005198592185974121, 0.005208064079284668, 0.005230591773986816, 0.005214208126068115, 0.00520304012298584, 0.005255616188049316, 0.005214719772338867, 0.005222367763519287, 0.005229919910430908, 0.005243711948394776, 0.005254655838012695, 0.0052370882034301755, 0.005269504070281982, 0.00523686408996582, 0.005280960083007813, 0.005279647827148438, 0.005329760074615478, 0.005324736118316651, 0.005273215770721436, 0.005257599830627442, 0.005255360126495362, 0.005251167774200439, 0.005276607990264893, 0.00521295976638794, 0.00524729585647583, 0.005183008193969727, 0.005207295894622803, 0.0052130880355834965, 0.0052605757713317875, 0.005245759963989258, 0.005242335796356201, 0.0053805441856384275, 0.005345280170440674, 0.005387904167175293, 0.005286272048950195, 0.005302559852600097, 0.005260223865509033, 0.005341983795166015, 0.005252511978149414, 0.005241407871246338, 0.005545695781707763, 0.00525548791885376, 0.0052304320335388186, 0.00527891206741333, 0.005202015876770019, 0.0052276158332824705, 0.00525494384765625, 0.0054637761116027835, 0.005238175868988037, 0.005312384128570557, 0.005254144191741943, 0.005254784107208252, 0.005267072200775147, 0.005231359958648682, 0.0052014079093933106, 0.005261023998260498, 0.005238944053649902, 0.005255807876586914, 0.005219327926635742, 0.005171199798583984, 0.005203199863433838, 0.005223167896270752, 0.005217951774597168, 0.005215712070465088, 0.005229440212249756, 0.0052013759613037105, 0.0052434239387512205, 0.005211520195007324, 0.005236703872680664, 0.005228608131408691, 0.005247583866119385, 0.0052111678123474125, 0.005200863838195801, 
0.0052674560546875, 0.005201920032501221, 0.005220352172851562, 0.00524012804031372, 0.005171552181243897, 0.005253536224365234, 0.005203904151916504, 0.005238368034362793, 0.005197728157043457, 0.0052024321556091305, 0.005236639976501465, 0.005212575912475586, 0.005238463878631592, 0.0052369279861450195, 0.005240640163421631, 0.005224448204040527, 0.005264448165893555, 0.005241792201995849, 0.005260767936706543, 0.005269983768463135, 0.0051896958351135255, 0.005230656147003174, 0.0052103037834167485, 0.005230336189270019, 0.005195903778076172, 0.0053062400817871095, 0.005251071929931641, 0.005210112094879151, 0.005261312007904053, 0.005234496116638183, 0.0052143998146057125, 0.005236832141876221, 0.005212063789367676, 0.00521830415725708, 0.005578303813934326, 0.005298175811767578, 0.005255616188049316, 0.005322751998901368, 0.0053002238273620605, 0.005261312007904053, 0.005504672050476074, 0.005279679775238037, 0.00522108793258667, 0.005236415863037109, 0.005215231895446777, 0.005286464214324951, 0.005223104000091553, 0.005259136199951172, 0.00525708818435669, 0.0051370558738708496, 0.005203680038452148, 0.005213632106781006, 0.005263936042785644, 0.005867584228515625, 0.005199808120727539, 0.005279744148254394, 0.005216256141662597, 0.005283167839050293, 0.005350048065185547, 0.0052297601699829105, 0.005223231792449951, 0.005210112094879151, 0.005208064079284668, 0.0052221441268920895, 0.005219935894012451, 0.005186207771301269, 0.005222335815429687, 0.005228384017944336, 0.005219744205474853, 0.005212992191314697, 0.0051957440376281736, 0.005312128067016602, 0.005203616142272949, 0.005196256160736084, 0.005324351787567139, 0.00519001579284668, 0.0051933760643005375, 0.005299039840698242, 0.0052171840667724605, 0.005257343769073487, 0.005239424228668213, 0.005217631816864013, 0.005197728157043457, 0.005223360061645508, 0.005220352172851562, 0.0051998400688171385, 0.005209375858306884, 0.00525171184539795, 0.005224575996398926, 0.005212160110473632, 0.005242879867553711, 0.0052008957862854, 0.00518064022064209, 0.005296319961547852, 0.0052399678230285645, 0.005195648193359375, 0.005211775779724121, 0.005220704078674317, 0.005183551788330078, 0.005204288005828858, 0.005286111831665039, 0.005189407825469971, 0.005191584110260009, 0.005213696002960205, 0.005212992191314697, 0.005183872222900391, 0.005211775779724121, 0.005222400188446045, 0.005207520008087158, 0.005196288108825684, 0.0052449598312377926, 0.00522649621963501, 0.0063203201293945315, 0.006229599952697754, 0.0054520959854125976, 0.006143167972564697, 0.0052518720626831055, 0.00531059217453003, 0.0052772479057312015, 0.0052576642036437984, 0.005281792163848877, 0.005222400188446045, 0.00524729585647583, 0.005209919929504395, 0.005231679916381836, 0.005210944175720215, 0.005211935997009277, 0.005251391887664795, 0.005201824188232422, 0.00532480001449585, 0.005312672138214111, 0.005228384017944336, 0.005246975898742676, 0.005265120029449463, 0.005243167877197265, 0.005296127796173096, 0.005317952156066895, 0.0052698559761047365, 0.005345632076263427, 0.005312863826751709, 0.005281439781188965, 0.0052633600234985355, 0.005209727764129639, 0.005261824131011963, 0.005231776237487793, 0.005231296062469483, 0.0055517759323120116, 0.00525267219543457, 0.005509952068328858, 0.0052984638214111325, 0.005258975982666015, 0.005274943828582764, 0.005268159866333008, 0.005257567882537842, 0.0052377920150756836, 0.005279903888702393, 0.005238368034362793, 0.005274144172668457, 0.0052796158790588375, 0.005231167793273926, 0.0055701441764831544, 
0.0052741761207580565, 0.005242623805999756, 0.005236544132232666, 0.005261504173278809, 0.005240255832672119, 0.00523529577255249, 0.00536572790145874, 0.005242879867553711, 0.005248127937316894, 0.005226880073547363, 0.005270304203033448, 0.005217152118682861, 0.005243584156036377, 0.005253151893615722, 0.0051806077957153324, 0.005249216079711914, 0.005235328197479248, 0.005255424022674561, 0.005243711948394776, 0.005253632068634034, 0.005261760234832763, 0.005206016063690186, 0.005271711826324463, 0.005270624160766601, 0.005296800136566162, 0.0052770237922668455, 0.005273856163024902, 0.005531744003295898, 0.005280064105987549, 0.006780960083007812, 0.007360576152801513, 0.005941343784332275, 0.008478976249694825, 0.0068784642219543455, 0.005280384063720703, 0.00525494384765625, 0.005320256233215332, 0.005274400234222412, 0.005320703983306885, 0.005240479946136475, 0.0052408318519592285, 0.00532480001449585, 0.005228384017944336, 0.005228831768035889, 0.005267327785491944, 0.00521830415725708, 0.005243040084838867, 0.005273056030273437, 0.00524726390838623, 0.0052614078521728514, 0.005239007949829101, 0.005194591999053955, 0.0052211198806762695, 0.005250368118286133, 0.0052297601699829105, 0.005209792137145996, 0.005230591773986816, 0.005380191802978516, 0.005246880054473877, 0.0052841281890869144, 0.005297247886657715, 0.005202784061431885, 0.005256768226623535, 0.005232736110687256, 0.005204095840454102, 0.005266687870025635, 0.005272192001342773, 0.005208191871643067, 0.005224736213684082, 0.005215775966644287, 0.005262847900390625, 0.005215199947357177, 0.005227392196655274, 0.005267744064331055, 0.005198592185974121, 0.005258912086486816, 0.0052368960380554195, 0.005182496070861816, 0.005325664043426514, 0.005224575996398926, 0.0053002238273620605, 0.005218175888061524, 0.005234911918640137, 0.005242911815643311, 0.005213344097137451, 0.0052128958702087405, 0.005217440128326416, 0.005198719978332519, 0.0052165122032165525, 0.005248735904693604, 0.005201344013214111, 0.0052741761207580565, 0.005224448204040527, 0.00522876787185669, 0.005287680149078369, 0.005254208087921142, 0.00526639986038208, 0.005280992031097412, 0.005251552104949951, 0.005291423797607422, 0.005286687850952148, 0.005339263916015625, 0.005608704090118408, 0.0052436480522155765, 0.005279200077056885, 0.005277599811553955, 0.0052847681045532224, 0.0052551040649414064, 0.005285791873931885, 0.0053266558647155765, 0.0052911357879638675, 0.005245888233184814, 0.005212160110473632, 0.005254784107208252, 0.005263743877410888, 0.005234687805175781, 0.0053610877990722654, 0.005263679981231689, 0.005252863883972168, 0.005241343975067139, 0.005264607906341553, 0.005280352115631103, 0.0052410240173339845, 0.005242527961730957, 0.005255519866943359, 0.005214208126068115, 0.005251071929931641, 0.005284863948822022, 0.005284863948822022, 0.005299295902252197, 0.0053014721870422365, 0.005289792060852051, 0.005267327785491944, 0.005287487983703613, 0.005265503883361816, 0.0053292479515075685, 0.005369855880737305, 0.005232639789581299, 0.005255231857299805, 0.0052503361701965335, 0.005218016147613525, 0.005220928192138672, 0.005238783836364746, 0.005252352237701416, 0.005298175811767578, 0.005239488124847412, 0.0052531838417053224, 0.005268832206726074, 0.0052292160987854, 0.005253119945526123, 0.005253119945526123, 0.005230591773986816, 0.0052287039756774905, 0.00535964822769165, 0.005269279956817627, 0.005266880035400391, 0.0052557759284973145, 0.005228544235229492, 0.005314623832702637, 0.005228127956390381, 
0.005232480049133301, 0.005322400093078613, 0.005251935958862305, 0.0052408318519592285, 0.005248640060424805, 0.005270143985748291, 0.005340127944946289, 0.005261280059814453, 0.005245759963989258, 0.005287936210632324, 0.005338111877441406, 0.005552927970886231, 0.005334239959716797, 0.005461056232452393, 0.005281023979187012, 0.005290688037872315, 0.005232480049133301, 0.005244351863861084, 0.005348159790039063, 0.005263264179229736, 0.005233759880065918, 0.005255263805389404, 0.005244991779327393, 0.0053788161277770995, 0.00532480001449585, 0.005307712078094482, 0.005244800090789795, 0.005262144088745117, 0.005222591876983642, 0.005240992069244384, 0.005271200180053711, 0.005216256141662597, 0.005312287807464599, 0.0052594242095947265, 0.00525113582611084, 0.005210336208343506, 0.005254176139831543, 0.005282464027404785, 0.00522438383102417, 0.005236480236053467, 0.005230303764343262, 0.005238624095916748, 0.005228640079498291, 0.005147935867309571, 0.0052436161041259765, 0.005230271816253662, 0.005243264198303223, 0.005252543926239014, 0.005251488208770752, 0.005244863986968994, 0.0052633600234985355, 0.0052256321907043456, 0.005239007949829101, 0.005226399898529053, 0.0053136320114135745, 0.005248960018157959, 0.005304255962371826, 0.00524886417388916, 0.005218368053436279, 0.005207647800445556, 0.005259679794311523, 0.005197824001312256, 0.005257215976715088, 0.005256768226623535, 0.0052444801330566405, 0.005243775844573975, 0.005242879867553711, 0.0052839360237121585, 0.005238687992095948, 0.005265408039093018, 0.00530841588973999, 0.005246816158294677, 0.005285920143127442, 0.005334688186645508, 0.00523305606842041, 0.0052370882034301755, 0.005269216060638428, 0.005251071929931641, 0.005261312007904053, 0.005275648117065429, 0.005234911918640137, 0.005240608215332032, 0.005435391902923584, 0.005284160137176514, 0.005297247886657715, 0.005304927825927734, 0.00524889612197876, 0.005273888111114502, 0.005533535957336425, 0.005273600101470947, 0.005404575824737549, 0.005338399887084961, 0.00528659200668335, 0.005281951904296875, 0.005307871818542481, 0.005237152099609375, 0.0052614078521728514, 0.005256927967071533, 0.005247424125671387, 0.005229472160339355, 0.005294047832489014, 0.0052623038291931155, 0.005277696132659912, 0.005506080150604248, 0.005764063835144043, 0.005260672092437744, 0.00514246416091919, 0.00525219202041626, 0.005248640060424805, 0.005268896102905273, 0.005395552158355713, 0.005242879867553711, 0.0052564477920532225, 0.005282495975494385, 0.00525705623626709, 0.005232639789581299, 0.005228544235229492, 0.005230303764343262, 0.005229983806610107, 0.005251776218414306, 0.0053515520095825195, 0.005228096008300781, 0.005234560012817383, 0.005266047954559326, 0.005220352172851562, 0.005214208126068115, 0.005253119945526123, 0.005206016063690186, 0.005259263992309571, 0.005229951858520508, 0.005229407787322998, 0.005207839965820312, 0.005226304054260254, 0.0052204480171203615, 0.005222432136535645, 0.005205664157867431, 0.005224127769470215, 0.005204319953918457, 0.005230976104736328, 0.005246975898742676, 0.005220352172851562, 0.005214528083801269, 0.0053695359230041505, 0.005261312007904053, 0.005238207817077636, 0.005237311840057373, 0.005242879867553711, 0.005224448204040527, 0.005227871894836426, 0.005214272022247314, 0.0051943678855896, 0.005234079837799072, 0.005227071762084961, 0.005226624011993408, 0.005261184215545655, 0.005295135974884033, 0.005223392009735107, 0.005214208126068115, 0.005233727931976319, 0.005257952213287354, 0.005224671840667725, 
0.005238016128540039, 0.005247744083404541, 0.005240032196044922, 0.005229152202606201, 0.005279935836791992, 0.0052081279754638676, 0.005259200096130371, 0.005257215976715088, 0.005164383888244629, 0.005224512100219726, 0.005233312129974365, 0.005238239765167236, 0.005238719940185547, 0.005249567985534668, 0.005230591773986816, 0.005244768142700195, 0.005214367866516113, 0.005250207901000977, 0.005237696170806885, 0.005218207836151123, 0.005201920032501221, 0.005275263786315918, 0.005229951858520508, 0.0052605757713317875, 0.0052644162178039555, 0.005227200031280518, 0.005199423789978027, 0.005328447818756104, 0.00523967981338501, 0.005238783836364746, 0.005224575996398926, 0.005234399795532227, 0.005225664138793946, 0.005225567817687988, 0.005341055870056152, 0.005254335880279541, 0.0052641921043395995, 0.005304031848907471, 0.005417247772216797, 0.0054254398345947264, 0.005438432216644287, 0.005268064022064209, 0.005262879848480224, 0.005274335861206055, 0.005296031951904297, 0.005273183822631836, 0.005275839805603027, 0.005269728183746338, 0.005257215976715088, 0.005464223861694336, 0.005264544010162354, 0.005270495891571045, 0.005305984020233154, 0.0052470722198486325, 0.005232639789581299, 0.0053628158569335935, 0.005225599765777588, 0.005209248065948486, 0.005230591773986816, 0.005245535850524902, 0.005214208126068115, 0.00521235179901123, 0.005244128227233887, 0.005208352088928223, 0.005237376213073731, 0.005270400047302246, 0.005233376026153564, 0.005246719837188721, 0.005241439819335937, 0.005276991844177246, 0.005220799922943115, 0.005139232158660889, 0.005239808082580567, 0.005238783836364746, 0.005238783836364746, 0.005231679916381836, 0.005240960121154785, 0.005200607776641846, 0.005232831954956055, 0.005262432098388672, 0.005225279808044434, 0.005269408226013184, 0.005244895935058593, 0.005214176177978516, 0.005261055946350098, 0.005304768085479737, 0.0052328000068664554, 0.005227327823638916, 0.005223648071289062, 0.005234464168548584, 0.005228544235229492, 0.005264448165893555, 0.005259967803955078, 0.0052646718025207515, 0.005263775825500488, 0.005268095970153808, 0.005261248111724854, 0.005246975898742676, 0.005258272171020508, 0.005230912208557129, 0.005257887840270996, 0.005294271945953369, 0.005244128227233887, 0.005270112037658692, 0.0052715840339660645, 0.00525439977645874, 0.005257952213287354, 0.005230591773986816, 0.005254848003387451, 0.00522431993484497, 0.005299903869628906, 0.005252096176147461, 0.005230271816253662, 0.0052531838417053224, 0.0052531838417053224, 0.005205952167510987, 0.005252255916595459, 0.005246111869812012, 0.005218272209167481, 0.00524832010269165, 0.005250912189483642, 0.0052434558868408206, 0.005351295948028564, 0.005300352096557617, 0.005252287864685058, 0.005206624031066895, 0.005230815887451172, 0.005258431911468506, 0.005208159923553467, 0.005843679904937744, 0.005304255962371826, 0.005217696189880371, 0.005269408226013184, 0.0065131840705871585, 0.006428287982940674, 0.005683296203613282, 0.005294303894042968, 0.0053578557968139645, 0.005320256233215332, 0.005282527923583984, 0.005295872211456299, 0.005246848106384277, 0.005261312007904053, 0.005287936210632324, 0.005238783836364746, 0.0052633600234985355, 0.005236671924591064, 0.005223968029022217, 0.005224575996398926, 0.005265823841094971, 0.0052326078414916995, 0.005281824111938477, 0.005253407955169678, 0.005238687992095948, 0.0051987838745117185, 0.005303167819976807, 0.005261312007904053, 0.0051998720169067385, 0.00522649621963501, 0.0052674560546875, 0.005203455924987793, 
0.0052351999282836915, 0.005248799800872803, 0.0052779197692871095, 0.005230591773986816, 0.0052715520858764645, 0.005238783836364746, 0.0052367358207702636, 0.00521727991104126, 0.005244256019592285, 0.005289440155029297, 0.005247136116027832, 0.005236767768859864, 0.005234208106994629, 0.005237215995788574, 0.0053821439743042, 0.005266496181488037, 0.005263904094696045, 0.005268159866333008, 0.005211775779724121, 0.005288032054901123, 0.005249023914337158, 0.0052408318519592285, 0.005283360004425049, 0.0052328000068664554, 0.005264768123626709, 0.0052173762321472165, 0.005271520137786865, 0.005241951942443848, 0.0052293438911437985, 0.005896224021911621, 0.0052715520858764645, 0.0052646718025207515, 0.005283552169799805, 0.005260255813598633, 0.005254271984100342, 0.005229440212249756, 0.00518230390548706, 0.005230591773986816, 0.00531660795211792, 0.005228288173675537, 0.005264736175537109, 0.005209280014038086, 0.0052466878890991215, 0.005216383934020996, 0.005227424144744873, 0.005273920059204101, 0.0052642240524291995, 0.005253952026367187, 0.005239808082580567, 0.005284895896911621, 0.005219295978546142, 0.005255167961120606, 0.005260704040527343, 0.005230303764343262, 0.00525164794921875, 0.005215775966644287, 0.005272287845611573, 0.005206079959869385, 0.0052573118209838865, 0.005314432144165039, 0.005236608028411865, 0.005260735988616943, 0.005234879970550537, 0.005241151809692383, 0.005312448024749756, 0.005555712223052979, 0.005401343822479248, 0.005401663780212403, 0.005342175960540771, 0.005252863883972168, 0.005279263973236084, 0.005300960063934326, 0.005247007846832275, 0.0053983359336853024, 0.005268959999084472, 0.005329504013061524, 0.005245376110076905, 0.005281439781188965, 0.005244927883148193, 0.005230591773986816, 0.005275904178619385, 0.00533622407913208, 0.005337215900421142, 0.0052986559867858884, 0.0052215042114257815, 0.005237919807434082, 0.005226208209991455, 0.005289023876190186, 0.005201056003570557, 0.005248064041137695, 0.005344319820404053, 0.00523638391494751, 0.005261312007904053, 0.005251167774200439, 0.005238207817077636, 0.00525593614578247, 0.005246272087097168, 0.0052681279182434084, 0.005269248008728028]",tokens/s,189.34369190149414,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,916.922368,645.791744,0.0,260.046848,253.883392,s,1,7.69745849609375,7.69745849609375,0.0,7.69745849609375,7.69745849609375,7.69745849609375,7.69745849609375,[7.69745849609375],,kWh,1.5062923866753408e-05,1.6544327113446191e-06,4.799170505997985e-06,2.1516527084096013e-05,,MB,1298.624512,752.746496,0.0,335.54432,313.01632,s,13,0.18993318367004394,0.014610244897695688,0.00010653249771399043,0.01458512020111084,0.01474984302520752,0.014790630340576172,0.014829153442382814,"[0.014838784217834473, 0.014506912231445313, 0.014485471725463868, 0.01458512020111084, 0.014758527755737305, 0.01471510410308838, 0.014640735626220704, 
0.014621919631958007, 0.014580544471740722, 0.014661120414733888, 0.014552255630493165, 0.014503775596618652, 0.014482912063598632]",tokens/s,17521.951328849802,kWh,4.3234876023454467e-07,4.767863230239425e-08,2.8624961171561825e-07,7.662770042525573e-07,tokens/kWh,334082842.8613851,MB,1338.540032,777.91232,0.0,360.710144,313.805312,s,13,10.08658770751953,0.7758913621168873,0.002057490541455394,0.774981201171875,0.7791577880859375,0.7797146606445313,0.7800260864257813,"[0.7746382446289063, 0.7763409423828125, 0.774853515625, 0.7766514282226562, 0.774850830078125, 0.775460205078125, 0.7794551391601563, 0.774981201171875, 0.7779683837890625, 0.7801039428710937, 0.773489013671875, 0.7731815185546875, 0.7746133422851562]",tokens/s,81.19693435961865,kWh,2.2377772783677727e-05,2.467881140145017e-06,8.689633637361633e-06,3.3535287561184374e-05,tokens/kWh,1878618.1536406367,,s,819,10.079915365219113,0.012307588968521507,0.0002471176029672587,0.012261568069458008,0.012458393669128418,0.01257834587097168,0.012938879413604734,"[0.012283904075622559, 0.012406784057617188, 0.012355584144592285, 0.012225760459899902, 0.012147456169128419, 0.012143775939941407, 0.01217625617980957, 0.012195712089538574, 0.012345824241638183, 0.012396191596984863, 0.012365792274475098, 0.012591135978698731, 0.012297439575195312, 0.012213024139404297, 0.012267519950866699, 0.012341504096984863, 0.01234928035736084, 0.012335071563720703, 0.012275456428527832, 0.012156319618225098, 0.012188320159912109, 0.012257568359375, 0.012205920219421386, 0.01215283203125, 0.012152704238891601, 0.012265376091003418, 0.012234944343566895, 0.012159008026123047, 0.012124159812927245, 0.012457792282104491, 0.012208319664001465, 0.012244064331054688, 0.012243871688842773, 0.012305983543395996, 0.012269184112548829, 0.012301247596740722, 0.012259552001953125, 0.012277119636535645, 0.012228896141052246, 0.012389439582824707, 0.012325823783874512, 0.012611583709716797, 0.012510304450988769, 0.01256287956237793, 0.012294624328613281, 0.012284223556518554, 0.012286848068237304, 0.012336288452148438, 0.01249244785308838, 0.012462080001831055, 0.012285951614379884, 0.012257280349731446, 0.012542207717895508, 0.012280863761901855, 0.012227295875549316, 0.012183520317077636, 0.012154368400573731, 0.012283679962158203, 0.012199968338012694, 0.012206527709960937, 0.012267807960510253, 0.012165056228637696, 0.01213814353942871, 0.011916064262390136, 0.012279999732971192, 0.012285759925842285, 0.012197888374328614, 0.012339200019836426, 0.01245139217376709, 0.01268396759033203, 0.012751872062683106, 0.012298144340515137, 0.012275712013244629, 0.012394623756408691, 0.01227235221862793, 0.012269599914550781, 0.012297216415405274, 0.012321760177612304, 0.01234563159942627, 0.012272480010986329, 0.012247936248779297, 0.012549216270446778, 0.01230508804321289, 0.012203776359558105, 0.012229087829589844, 0.012232704162597656, 0.012226783752441406, 0.012562335968017578, 0.012445856094360351, 0.012320480346679687, 0.012395808219909668, 0.01259712028503418, 0.012292511940002441, 0.012216768264770507, 0.012163071632385255, 0.012228320121765137, 0.012496512413024902, 0.012293984413146972, 0.012207039833068848, 0.012171232223510742, 0.012281696319580078, 0.012304608345031737, 0.012142751693725586, 0.012201951980590821, 0.012297311782836913, 0.012354175567626953, 0.01229430389404297, 0.012291392326354981, 0.012194399833679198, 0.012304320335388183, 0.012263392448425293, 0.01219760036468506, 0.012224831581115723, 0.012281279563903808, 0.01235206413269043, 
0.012275712013244629, 0.012324864387512208, 0.012272671699523926, 0.01224988842010498, 0.012348992347717285, 0.012339839935302735, 0.012318431854248047, 0.01242959976196289, 0.012419072151184082, 0.012386303901672363, 0.012441727638244629, 0.012057951927185059, 0.01243126392364502, 0.012411904335021973, 0.012232959747314453, 0.012242079734802246, 0.012232671737670899, 0.012227199554443359, 0.012240896224975586, 0.012349439620971679, 0.012293984413146972, 0.01223855972290039, 0.012376511573791504, 0.012267519950866699, 0.012338687896728515, 0.012278271675109862, 0.012314623832702636, 0.012308480262756348, 0.012240351676940918, 0.012333600044250489, 0.012333215713500977, 0.012285792350769043, 0.012255231857299804, 0.01236361598968506, 0.012374176025390624, 0.012296256065368652, 0.012631903648376465, 0.012314047813415527, 0.01337001609802246, 0.012283904075622559, 0.012195839881896972, 0.01220137596130371, 0.012237407684326173, 0.012220416069030762, 0.012177344322204589, 0.012232831954956055, 0.012734399795532227, 0.012281855583190919, 0.01245792007446289, 0.012212512016296386, 0.01225500774383545, 0.012226559638977052, 0.012165023803710937, 0.012310336112976075, 0.01218899154663086, 0.012340191841125489, 0.012225536346435547, 0.012116127967834473, 0.012176383972167968, 0.012279647827148438, 0.012218655586242676, 0.012396320343017578, 0.01226643180847168, 0.01222499179840088, 0.01227939224243164, 0.01214560031890869, 0.012209216117858886, 0.012131263732910156, 0.012238335609436036, 0.012255743980407715, 0.012195839881896972, 0.012256799697875976, 0.012179488182067872, 0.012210623741149902, 0.011980192184448242, 0.012214879989624023, 0.012228416442871095, 0.012300479888916015, 0.012212224006652832, 0.012189696311950684, 0.012216320037841797, 0.012290207862854004, 0.012230496406555175, 0.012171263694763184, 0.012140768051147461, 0.012829824447631836, 0.012419743537902832, 0.012263423919677734, 0.012345343589782716, 0.012248736381530761, 0.012269920349121093, 0.012357439994812012, 0.012499263763427734, 0.012350848197937012, 0.012212096214294433, 0.012169792175292968, 0.012208160400390624, 0.012159008026123047, 0.012205183982849121, 0.012249983787536622, 0.012430368423461914, 0.01246127986907959, 0.012375040054321289, 0.012431903839111328, 0.012436991691589355, 0.012400447845458984, 0.012419808387756347, 0.012380319595336914, 0.012396224021911621, 0.012347359657287598, 0.012347776412963868, 0.01232051181793213, 0.012423423767089844, 0.01242473602294922, 0.01237007999420166, 0.012431679725646972, 0.012381792068481445, 0.01245635223388672, 0.012369407653808593, 0.012673536300659179, 0.012345343589782716, 0.012309727668762207, 0.012270463943481445, 0.01224931240081787, 0.012254912376403809, 0.012354847908020019, 0.012229344367980958, 0.012543999671936035, 0.012185279846191405, 0.012209600448608398, 0.012212575912475586, 0.01218819236755371, 0.01215897560119629, 0.012230239868164062, 0.01252188777923584, 0.012392288208007812, 0.01226153564453125, 0.01194803237915039, 0.012240544319152832, 0.012202336311340331, 0.012140543937683105, 0.01226137638092041, 0.01233619213104248, 0.012144960403442383, 0.012210399627685546, 0.012275872230529785, 0.012220671653747558, 0.012309856414794922, 0.01223465633392334, 0.012235872268676758, 0.012287903785705566, 0.012267264366149902, 0.012265055656433106, 0.012337568283081055, 0.012212224006652832, 0.012147775650024414, 0.012245951652526855, 0.012277759552001954, 0.012347392082214356, 0.012283904075622559, 0.012292096138000488, 0.012183327674865723, 
0.012210399627685546, 0.012185600280761719, 0.012322400093078613, 0.012240927696228027, 0.012333439826965333, 0.012134400367736817, 0.01220751953125, 0.012314816474914551, 0.01232096004486084, 0.012236607551574708, 0.012255647659301757, 0.012412927627563476, 0.0124518404006958, 0.012279744148254395, 0.01218995189666748, 0.012280672073364259, 0.01222550392150879, 0.012253439903259278, 0.012227680206298828, 0.012327584266662597, 0.012229791641235352, 0.012290080070495606, 0.012145471572875977, 0.012244159698486329, 0.012217472076416016, 0.012244671821594237, 0.01217740821838379, 0.012193984031677246, 0.01219155216217041, 0.012254976272583007, 0.012275967597961426, 0.012336735725402831, 0.01244816017150879, 0.012434432029724121, 0.013038592338562012, 0.012578016281127929, 0.012798527717590331, 0.01284432029724121, 0.012050399780273438, 0.012325599670410156, 0.01227558422088623, 0.01223692798614502, 0.012273407936096192, 0.012226816177368163, 0.012685312271118163, 0.012299807548522949, 0.012335583686828614, 0.012279808044433594, 0.01222054386138916, 0.01225312042236328, 0.01238419246673584, 0.012256256103515625, 0.012368576049804687, 0.012310208320617676, 0.01245024013519287, 0.012224736213684083, 0.012193535804748535, 0.01223628807067871, 0.01240342426300049, 0.012431008338928223, 0.012329248428344727, 0.012230719566345215, 0.01221350383758545, 0.012232895851135253, 0.012314559936523438, 0.012208928108215332, 0.012836928367614747, 0.012310336112976075, 0.012941408157348632, 0.012942912101745606, 0.01236409568786621, 0.012239104270935058, 0.012226304054260255, 0.012217920303344726, 0.012144927978515624, 0.012187583923339844, 0.012333279609680175, 0.01233619213104248, 0.012214336395263672, 0.012266016006469727, 0.012290207862854004, 0.012211423873901367, 0.01222544002532959, 0.012207903861999513, 0.012179743766784669, 0.012245152473449707, 0.0122325439453125, 0.012199584007263184, 0.012200096130371093, 0.012165311813354493, 0.012281855583190919, 0.01223203182220459, 0.012210111618041992, 0.012276448249816895, 0.012192799568176269, 0.012274880409240723, 0.012236576080322265, 0.01214022445678711, 0.012147007942199707, 0.01215897560119629, 0.012842047691345214, 0.011927424430847168, 0.012173312187194824, 0.012246463775634766, 0.012198464393615722, 0.01216044807434082, 0.012106304168701171, 0.012141983985900879, 0.012227168083190918, 0.012386303901672363, 0.012224800109863281, 0.012232416152954102, 0.0123853759765625, 0.012327839851379394, 0.012245311737060546, 0.012369536399841308, 0.012445792198181153, 0.012307456016540527, 0.012239839553833008, 0.012246335983276367, 0.012200639724731446, 0.012285951614379884, 0.012217535972595214, 0.012431679725646972, 0.012586688041687012, 0.012425663948059083, 0.012355744361877441, 0.012395744323730468, 0.012429856300354003, 0.012440383911132813, 0.012586239814758302, 0.012415424346923828, 0.012371935844421388, 0.012363967895507812, 0.012609343528747558, 0.012326911926269531, 0.012320799827575684, 0.012333024024963378, 0.012453887939453125, 0.012398591995239258, 0.012651743888854981, 0.012407391548156739, 0.012468416213989258, 0.012788031578063965, 0.012472000122070313, 0.012484607696533203, 0.0125665283203125, 0.012554112434387208, 0.012496031761169434, 0.012415167808532714, 0.012411552429199219, 0.01241648006439209, 0.012380831718444825, 0.012288000106811523, 0.012328960418701173, 0.01242959976196289, 0.012488415718078613, 0.012447744369506837, 0.012561440467834473, 0.012420063972473144, 0.012349439620971679, 0.012250911712646485, 0.01218992042541504, 
0.012150208473205566, 0.011901023864746094, 0.012146688461303711, 0.012195263862609863, 0.012173888206481933, 0.01226956844329834, 0.012226559638977052, 0.012191743850708007, 0.012191776275634766, 0.012162112236022949, 0.012219296455383302, 0.012230655670166016, 0.01217353630065918, 0.012324192047119141, 0.012388287544250488, 0.012323231697082519, 0.012425312042236328, 0.012278783798217773, 0.012214336395263672, 0.012265824317932128, 0.012185503959655761, 0.012222816467285156, 0.01221446418762207, 0.012281056404113769, 0.012271743774414062, 0.012355999946594238, 0.012292511940002441, 0.01233737564086914, 0.012426655769348144, 0.012594719886779786, 0.012473183631896973, 0.012491807937622071, 0.012909055709838867, 0.012495327949523925, 0.01250432014465332, 0.01241494369506836, 0.01248681640625, 0.012351584434509278, 0.012267487525939941, 0.01225376033782959, 0.01225267219543457, 0.01227836799621582, 0.012370976448059082, 0.012206944465637207, 0.012203295707702637, 0.012261599540710448, 0.012233247756958008, 0.012243231773376466, 0.012162783622741698, 0.01213152027130127, 0.01230726432800293, 0.01235148811340332, 0.012267871856689453, 0.012236384391784668, 0.012205408096313477, 0.012215007781982421, 0.012281855583190919, 0.012388416290283203, 0.012206015586853027, 0.01228492832183838, 0.012382304191589356, 0.012329343795776368, 0.012245696067810058, 0.012290143966674804, 0.01197708797454834, 0.012320704460144043, 0.012249152183532714, 0.012218367576599122, 0.012148351669311523, 0.012623871803283691, 0.01335103988647461, 0.012714240074157715, 0.012398655891418458, 0.012424192428588866, 0.012399552345275879, 0.012341312408447266, 0.012285887718200683, 0.012269184112548829, 0.012257663726806641, 0.012255231857299804, 0.01233129596710205, 0.01215401554107666, 0.012192319869995118, 0.012123616218566894, 0.01223475170135498, 0.012173407554626465, 0.01222428798675537, 0.012142496109008789, 0.012171775817871093, 0.012224479675292969, 0.012196127891540526, 0.01224124813079834, 0.01215452766418457, 0.012207232475280761, 0.012229503631591797, 0.01217859172821045, 0.012482943534851075, 0.012411007881164551, 0.012400992393493652, 0.012345343589782716, 0.012273152351379395, 0.012200672149658203, 0.012219679832458497, 0.012228256225585937, 0.012288864135742188, 0.012158816337585449, 0.012361408233642578, 0.012509663581848145, 0.01257596778869629, 0.012440352439880372, 0.012476192474365235, 0.012523743629455566, 0.012531423568725586, 0.012552543640136719, 0.012548255920410156, 0.012408927917480468, 0.012352512359619141, 0.012277728080749511, 0.012247967720031738, 0.01229190444946289, 0.012251168251037598, 0.012668992042541504, 0.012318783760070801, 0.012379743576049805, 0.012334783554077149, 0.012380736351013184, 0.012523520469665527, 0.012052767753601075, 0.012375040054321289, 0.012753888130187989, 0.012707967758178711, 0.012554112434387208, 0.012523424148559571, 0.01243289566040039, 0.012853856086730958, 0.012414976119995117, 0.01229804801940918, 0.012351615905761719, 0.01222867202758789, 0.012205792427062987, 0.01220406436920166, 0.012253439903259278, 0.012283904075622559, 0.012220416069030762, 0.012197759628295899, 0.012214400291442872, 0.012191904067993165, 0.012170687675476074, 0.012180959701538086, 0.012161824226379394, 0.012158495903015137, 0.012139360427856445, 0.0121364803314209, 0.012195584297180175, 0.012262399673461915, 0.012209152221679688, 0.012185824394226075, 0.012152768135070801, 0.012148320198059082, 0.012146944046020508, 0.01233510398864746, 0.01233078384399414, 0.012230751991271972, 
0.012233920097351074, 0.012172224044799805, 0.012195839881896972, 0.012249279975891113, 0.012231776237487793, 0.012270719528198241, 0.012199520111083984, 0.012381471633911133, 0.01219388771057129, 0.01213811206817627, 0.012144831657409668, 0.012192576408386231, 0.012173151969909668, 0.012154687881469726, 0.012239199638366699, 0.01604956817626953, 0.015190624237060546, 0.012525568008422852, 0.012365823745727538, 0.012296511650085449, 0.0121845121383667, 0.01218454360961914, 0.012277088165283202, 0.012138815879821777, 0.012116095542907716, 0.012080896377563476, 0.012520895957946778, 0.012127455711364745, 0.012243743896484374, 0.012189215660095214, 0.012278240203857422, 0.012240063667297364, 0.012144960403442383, 0.012179391860961914, 0.012186176300048828, 0.012230655670166016, 0.012228032112121583, 0.012126560211181641, 0.012196063995361328, 0.01222822380065918, 0.012205856323242188, 0.012175968170166016, 0.012223936080932618, 0.012196191787719727, 0.012183008193969726, 0.0122291841506958, 0.012289248466491699, 0.012280511856079101, 0.012275551795959473, 0.01221820831298828, 0.012223263740539551, 0.012142720222473145, 0.012215999603271485, 0.012242048263549805, 0.012106623649597168, 0.012193792343139649, 0.012248127937316895, 0.01228816032409668, 0.012183744430541993, 0.01216368007659912, 0.012096544265747071, 0.01263430404663086, 0.015132767677307129, 0.012437408447265624, 0.012221216201782226, 0.012208191871643067, 0.012181599617004395, 0.012224063873291016, 0.012202176094055175, 0.01229366397857666, 0.012317248344421387, 0.012226816177368163, 0.012226304054260255, 0.012247072219848634, 0.012271583557128906, 0.012267519950866699, 0.012232704162597656, 0.012203968048095704, 0.012243391990661621, 0.012211775779724121, 0.012152159690856933, 0.012304991722106933, 0.012162464141845703, 0.012176223754882812, 0.012298175811767578, 0.012142016410827636, 0.01216534423828125, 0.0122225923538208, 0.012208288192749023, 0.01220355224609375, 0.01197987174987793, 0.012517696380615234, 0.012302559852600098, 0.013451040267944336, 0.012927359580993652, 0.012331328392028808, 0.012324543952941894, 0.012316543579101563, 0.012388511657714844, 0.012266495704650878, 0.012288448333740234, 0.012179840087890624, 0.01223040008544922, 0.012183456420898438, 0.01225164794921875, 0.012314304351806641, 0.012193280220031738, 0.0121659517288208, 0.012183775901794433, 0.012243840217590332, 0.012186528205871582, 0.012247039794921874, 0.012218111991882325, 0.012220671653747558, 0.012210176467895508, 0.01223641586303711, 0.012165568351745605, 0.012195775985717774, 0.012253184318542481, 0.01223641586303711, 0.012259136199951172, 0.012194368362426758, 0.012223872184753418, 0.012305024147033692, 0.012197823524475097, 0.012224575996398927, 0.012300288200378418, 0.012427424430847168, 0.01241487979888916, 0.012289664268493653, 0.012321408271789552, 0.012281536102294922, 0.012285152435302734, 0.012303135871887206, 0.01230406379699707, 0.012163583755493163, 0.012240480422973633, 0.012202207565307616, 0.01221555233001709, 0.012177791595458985, 0.012138527870178223, 0.012198271751403809, 0.012175071716308593, 0.012187199592590332, 0.01213206386566162, 0.012174592018127442, 0.012185567855834962, 0.012168416023254395, 0.012149279594421386, 0.012135616302490234, 0.012145471572875977, 0.012186752319335938, 0.012149951934814452, 0.011879232406616211, 0.012197888374328614, 0.012207615852355956, 0.012148480415344238, 0.012219327926635742, 0.012185407638549804, 0.012238847732543945, 0.012232704162597656, 0.012187840461730957, 
0.012194687843322755, 0.0121845121383667, 0.012165120124816894, 0.012283040046691894, 0.012220479965209962, 0.01215129566192627, 0.012228032112121583, 0.012204383850097656, 0.01211680030822754, 0.012205696105957032, 0.012219903945922851, 0.012178239822387695, 0.012308223724365235, 0.012193792343139649, 0.012209440231323242, 0.012257247924804688, 0.012229375839233399, 0.01229641628265381, 0.012457088470458984, 0.012436127662658692, 0.012402688026428223, 0.012388352394104005, 0.01236787223815918, 0.012588831901550293, 0.01237014389038086, 0.01240995216369629, 0.01258131217956543, 0.012411007881164551, 0.012349216461181641, 0.012324511528015137, 0.012327615737915038, 0.012261568069458008, 0.012221759796142578, 0.012212479591369629, 0.012194111824035644, 0.012292256355285644, 0.012382207870483398, 0.012285568237304688, 0.012312416076660157, 0.012225055694580078, 0.012738335609436035, 0.012339424133300782, 0.012234496116638184, 0.012460288047790527, 0.01226694393157959, 0.012380736351013184, 0.012263680458068848, 0.012253952026367188, 0.012339743614196777, 0.012368351936340331, 0.012355584144592285, 0.012371359825134277, 0.012316608428955078, 0.01228444766998291]",tokens/s,81.25068220572274,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1499.660288,1868.43136,0.0,1465.909248,1358.169088,s,1,8.6935634765625,8.6935634765625,0.0,8.6935634765625,8.6935634765625,8.6935634765625,8.6935634765625,[8.6935634765625],,kWh,4.7340809162415095e-05,5.20366422133716e-06,1.8270847950002667e-05,7.081532133375493e-05,,MB,1490.415616,1889.40288,0.0,1472.200704,1356.544512,s,10,0.5137330513000489,0.05137330513000489,0.0001881615490391906,0.05131383895874024,0.05153486557006836,0.05170012054443359,0.051832324523925784,"[0.05186537551879883, 0.051330303192138674, 0.051235904693603514, 0.05119846343994141, 0.0512973747253418, 0.05128755187988281, 0.0514194221496582, 0.051398017883300784, 0.05149814224243164, 0.05120249557495117]",tokens/s,4983.13276422781,kWh,1.5551860853059947e-06,1.715099518223524e-07,1.0293108116276736e-06,2.7560068487560208e-06,tokens/kWh,92888013.00169148,MB,1517.850624,1889.40288,0.0,1472.200704,1409.728,s,10,14.371477416992187,1.4371477416992184,0.006329999267512961,1.4343649291992189,1.4465791015624998,1.447636535644531,1.4484824829101561,"[1.4295303955078125, 1.431884765625, 1.4347640380859374, 1.438283203125, 1.4486939697265624, 1.431526611328125, 1.4463441162109374, 1.4430374755859374, 1.433447021484375, 1.4339658203125]",tokens/s,43.836829138743695,kWh,4.146890259219674e-05,4.5736335849092666e-06,1.8715593873170942e-05,6.475813005027696e-05,tokens/kWh,972850.8212187105,,s,630,14.369021972656247,0.02280797138516865,0.0004680408693745384,0.022686448097229004,0.023116051292419434,0.023417549991607667,0.025308745937347412,"[0.023054208755493164, 0.023095232009887695, 0.02275779151916504, 0.0226790714263916, 0.022626943588256836, 
0.022697984695434572, 0.022771711349487304, 0.023744672775268556, 0.022721920013427734, 0.02268617630004883, 0.022458368301391602, 0.02251366424560547, 0.02253209686279297, 0.02251094436645508, 0.022424224853515626, 0.022601375579833983, 0.022729055404663086, 0.022525888442993164, 0.022636032104492186, 0.022444608688354493, 0.0224944953918457, 0.022512351989746094, 0.022561887741088867, 0.022522783279418944, 0.02248294448852539, 0.022441984176635742, 0.022471872329711914, 0.022696767807006836, 0.022806144714355468, 0.023240800857543945, 0.023001279830932617, 0.02309049606323242, 0.022909727096557617, 0.022680639266967773, 0.02254739189147949, 0.022624256134033204, 0.022630304336547852, 0.022528095245361326, 0.022634111404418945, 0.022589824676513673, 0.022595584869384764, 0.022853631973266602, 0.02287820816040039, 0.022648832321166993, 0.0227061767578125, 0.023126016616821288, 0.02271980857849121, 0.022624959945678712, 0.022728063583374022, 0.02278054428100586, 0.022579200744628908, 0.022601728439331056, 0.02265657615661621, 0.022589887619018555, 0.02263222312927246, 0.022595808029174803, 0.022673408508300782, 0.022494880676269532, 0.02267375946044922, 0.02248067283630371, 0.02273711967468262, 0.022648704528808593, 0.02257673645019531, 0.022850112915039064, 0.02273695945739746, 0.02262553596496582, 0.022608640670776368, 0.02331648063659668, 0.026220544815063477, 0.02293129539489746, 0.022923423767089845, 0.022826847076416017, 0.02283126449584961, 0.02277769660949707, 0.0227426872253418, 0.02301388740539551, 0.022648672103881835, 0.02266332817077637, 0.022568960189819336, 0.02268569564819336, 0.022609920501708985, 0.022730752944946288, 0.022566911697387695, 0.022556640625, 0.022701631546020506, 0.02271836853027344, 0.02262396812438965, 0.023165760040283204, 0.022700031280517577, 0.022726688385009765, 0.022626304626464845, 0.022648832321166993, 0.02243071937561035, 0.02254745674133301, 0.02241535949707031, 0.022401023864746093, 0.022453407287597656, 0.022492000579833984, 0.022635583877563478, 0.0224815673828125, 0.0224617919921875, 0.022520767211914063, 0.022509056091308592, 0.02255878448486328, 0.022602176666259764, 0.02270412826538086, 0.022568992614746094, 0.02267747116088867, 0.022589439392089843, 0.02252345657348633, 0.022546880722045897, 0.022575103759765625, 0.02245987129211426, 0.022569503784179688, 0.02254643249511719, 0.022912063598632813, 0.02293596839904785, 0.02268592071533203, 0.023675167083740234, 0.022609952926635743, 0.022544384002685547, 0.022560768127441407, 0.02248908805847168, 0.022556671142578123, 0.02251366424560547, 0.022550527572631835, 0.0226824951171875, 0.02265497589111328, 0.024236032485961914, 0.023414047241210937, 0.023042783737182618, 0.02292531204223633, 0.022902624130249023, 0.022710432052612306, 0.022564863204956053, 0.022546112060546877, 0.022638912200927733, 0.02256025505065918, 0.023187328338623046, 0.022864511489868164, 0.022618112564086915, 0.022740991592407226, 0.02265088081359863, 0.022599679946899414, 0.022558048248291017, 0.022577728271484375, 0.022635936737060547, 0.022595264434814452, 0.022553375244140625, 0.022615455627441407, 0.022690399169921875, 0.022595584869384764, 0.022646272659301758, 0.022749727249145507, 0.022708288192749025, 0.02309747123718262, 0.02286796760559082, 0.022961599349975586, 0.02350752067565918, 0.02313363265991211, 0.022849311828613283, 0.022950399398803712, 0.02295814323425293, 0.022748800277709962, 0.022768224716186523, 0.022845439910888672, 0.022835039138793947, 0.022745248794555663, 0.022618112564086915, 
0.022566911697387695, 0.02267955207824707, 0.022618112564086915, 0.022558719635009765, 0.02286755180358887, 0.023255456924438478, 0.022709280014038085, 0.022895488739013672, 0.0229388484954834, 0.02254911994934082, 0.022685951232910156, 0.02260905647277832, 0.022510431289672853, 0.022614015579223632, 0.022384031295776367, 0.02258799934387207, 0.022466560363769532, 0.022550527572631835, 0.022677248001098632, 0.022453855514526368, 0.02266540718078613, 0.02268815994262695, 0.022568511962890624, 0.022802879333496093, 0.02267046356201172, 0.02252672004699707, 0.022547744750976564, 0.022640863418579103, 0.022890752792358398, 0.023263616561889647, 0.022994047164916993, 0.02266396713256836, 0.022636640548706056, 0.022595584869384764, 0.02265907287597656, 0.02259724807739258, 0.022438240051269532, 0.022495264053344725, 0.022589439392089843, 0.022444032669067384, 0.022531776428222655, 0.02252627182006836, 0.02247270393371582, 0.02255820846557617, 0.022657215118408205, 0.022524032592773437, 0.02295417594909668, 0.02540284729003906, 0.025274688720703126, 0.026018016815185546, 0.022853631973266602, 0.02277779197692871, 0.02271366310119629, 0.02258406448364258, 0.02273040008544922, 0.022645088195800783, 0.02272643280029297, 0.022634208679199217, 0.022704639434814454, 0.02264678382873535, 0.022586591720581056, 0.02252060890197754, 0.022611263275146485, 0.022714368820190428, 0.022701791763305664, 0.022692832946777344, 0.02265907287597656, 0.022718463897705078, 0.022763519287109374, 0.02277350425720215, 0.02259174346923828, 0.022841344833374022, 0.022773759841918945, 0.02265497589111328, 0.022689792633056642, 0.022693344116210937, 0.02272649574279785, 0.02250979232788086, 0.022739423751831054, 0.02354380798339844, 0.02281881523132324, 0.02263596725463867, 0.022767871856689454, 0.02255574417114258, 0.022532575607299803, 0.022634752273559572, 0.02246441650390625, 0.023254304885864257, 0.022494207382202147, 0.023017471313476562, 0.02502979278564453, 0.02490847969055176, 0.022795616149902345, 0.022698816299438478, 0.022752288818359376, 0.02272559928894043, 0.022751039505004882, 0.022648992538452147, 0.022536224365234375, 0.02249318313598633, 0.02263859176635742, 0.022622207641601562, 0.022648735046386717, 0.022621280670166017, 0.02353424072265625, 0.022606176376342775, 0.0225664005279541, 0.022673023223876952, 0.022813024520874022, 0.02259612846374512, 0.022599552154541017, 0.022596832275390624, 0.02266803169250488, 0.022917280197143553, 0.023146400451660155, 0.023011423110961913, 0.02389606475830078, 0.023420415878295898, 0.023245279312133788, 0.023144479751586913, 0.02290995216369629, 0.02306355285644531, 0.022850847244262694, 0.02295881652832031, 0.022761472702026365, 0.02276515197753906, 0.022859968185424805, 0.022832735061645508, 0.022872512817382812, 0.02294508743286133, 0.022774656295776366, 0.02311577606201172, 0.022773439407348633, 0.023204160690307618, 0.022788095474243163, 0.025322656631469726, 0.025578336715698244, 0.024153535842895507, 0.022871679306030273, 0.02282182312011719, 0.022755327224731444, 0.022657024383544923, 0.02257094383239746, 0.02256697654724121, 0.022771711349487304, 0.022648832321166993, 0.022906463623046876, 0.022739360809326172, 0.022664640426635744, 0.022442047119140624, 0.0227346248626709, 0.022600704193115235, 0.022602752685546876, 0.022587648391723635, 0.02264726448059082, 0.02266726493835449, 0.022588447570800783, 0.022643680572509765, 0.022664543151855468, 0.022650720596313477, 0.022542591094970702, 0.02247123146057129, 0.02249318313598633, 0.02256447982788086, 
0.022632255554199218, 0.022542911529541014, 0.02271808052062988, 0.022615936279296874, 0.02267385673522949, 0.022528064727783202, 0.022486976623535156, 0.02250553512573242, 0.022769664764404295, 0.022591232299804687, 0.022561023712158203, 0.022940959930419922, 0.022655231475830078, 0.02272425651550293, 0.02488812828063965, 0.024528608322143555, 0.02261020851135254, 0.02257459259033203, 0.0226856632232666, 0.02254902458190918, 0.022775455474853514, 0.02258902359008789, 0.022530143737792968, 0.02254300880432129, 0.022603776931762694, 0.022553855895996094, 0.022625024795532227, 0.022543903350830077, 0.022555103302001955, 0.022587072372436522, 0.022707775115966798, 0.022632192611694336, 0.022577184677124024, 0.02272559928894043, 0.022669023513793945, 0.02288217544555664, 0.02294825553894043, 0.02306252861022949, 0.022772832870483397, 0.02270198440551758, 0.02271334457397461, 0.022583295822143554, 0.02262835121154785, 0.02261958312988281, 0.023161407470703124, 0.02271753692626953, 0.022762208938598632, 0.02274937629699707, 0.022703712463378906, 0.022730335235595703, 0.023222591400146483, 0.022616575241088867, 0.02263859176635742, 0.02318060874938965, 0.022725311279296875, 0.022811904907226562, 0.023159551620483398, 0.025169919967651368, 0.024788991928100586, 0.022902271270751954, 0.022963935852050782, 0.022856479644775392, 0.023055967330932618, 0.02306502342224121, 0.022877920150756837, 0.022845056533813475, 0.02280860710144043, 0.023064416885375978, 0.023067136764526368, 0.022886655807495118, 0.024186847686767578, 0.024782175064086913, 0.02304275131225586, 0.022847488403320314, 0.0225729923248291, 0.022677568435668944, 0.022689599990844727, 0.022822431564331055, 0.022829727172851564, 0.022775680541992187, 0.022816896438598633, 0.02275472068786621, 0.022665952682495116, 0.022632287979125976, 0.022743072509765625, 0.022771711349487304, 0.022804479598999023, 0.022607872009277344, 0.02268569564819336, 0.022487039566040038, 0.022794240951538085, 0.022542335510253905, 0.022640640258789063, 0.02273689651489258, 0.02305843162536621, 0.022591232299804687, 0.022540544509887694, 0.02303385543823242, 0.02266032028198242, 0.022823711395263672, 0.02262601661682129, 0.023049856185913088, 0.02313488006591797, 0.023357440948486328, 0.023005184173583985, 0.022916576385498048, 0.02292176055908203, 0.023076864242553712, 0.022896160125732423, 0.022783712387084962, 0.022904895782470704, 0.022766016006469728, 0.022698015213012696, 0.02250160026550293, 0.022738943099975584, 0.022657024383544923, 0.022648832321166993, 0.022568960189819336, 0.022781568527221678, 0.022683008193969727, 0.02299292755126953, 0.02256175994873047, 0.022568960189819336, 0.023767040252685546, 0.02266262435913086, 0.022524448394775392, 0.02263408088684082, 0.022682016372680663, 0.022591487884521484, 0.02255462455749512, 0.022520992279052736, 0.022500192642211914, 0.022675455093383787, 0.02265452766418457, 0.02290323257446289, 0.023525375366210938, 0.02329190444946289, 0.02305638313293457, 0.02315673637390137, 0.02336489677429199, 0.023091232299804688, 0.023118528366088867, 0.023250944137573244, 0.02289580726623535, 0.022749919891357422, 0.023291391372680666, 0.022901344299316406, 0.022747135162353514, 0.022700031280517577, 0.022972415924072266, 0.02267955207824707, 0.022779903411865234, 0.022656896591186523, 0.02266070365905762, 0.022794784545898436, 0.02250547218322754, 0.022583295822143554, 0.02257254409790039, 0.022646303176879882, 0.022932064056396483, 0.025800384521484376, 0.023781440734863282, 0.02317580795288086, 0.023121055603027345, 
0.02299785614013672, 0.023104768753051758, 0.02315340805053711, 0.02297884750366211, 0.02276937675476074, 0.022798015594482423, 0.02275974464416504, 0.022752639770507812, 0.022689855575561524, 0.022552255630493165, 0.022626720428466796, 0.022577632904052736, 0.022793567657470704, 0.022829727172851564, 0.02267955207824707, 0.022568384170532228, 0.0226693115234375, 0.023181184768676758, 0.02268025588989258, 0.02271980857849121, 0.022735551834106447, 0.022729856491088867, 0.022663679122924805, 0.022606208801269533, 0.022627967834472656, 0.022761856079101563, 0.02270412826538086, 0.02263238334655762, 0.0225598087310791, 0.022746112823486327, 0.02263039970397949, 0.02282464027404785, 0.02256108856201172, 0.022650079727172853, 0.022608671188354492, 0.022595584869384764, 0.022761056900024414, 0.022792608261108398, 0.022808576583862306, 0.022942880630493163, 0.023625600814819337, 0.023487455368041994, 0.02308710479736328, 0.022795551300048827, 0.022847999572753908, 0.022698207855224608, 0.022659008026123046, 0.022789279937744142, 0.022687999725341797, 0.02268841552734375, 0.02289664077758789, 0.022556255340576172, 0.022577568054199217, 0.02262015914916992, 0.022517759323120116, 0.02371788787841797, 0.02266111946105957, 0.02284316825866699, 0.022532320022583006, 0.022511615753173828, 0.02255241584777832, 0.022892351150512694, 0.022817119598388672, 0.022673408508300782, 0.02255462455749512, 0.022652448654174803, 0.02266556739807129, 0.02270572853088379, 0.022612543106079103, 0.022992095947265624, 0.02276950454711914, 0.022678272247314453, 0.02266080093383789, 0.022577472686767578, 0.02258051109313965, 0.02244272041320801, 0.022416448593139647, 0.02245315170288086, 0.022476831436157228, 0.022605567932128905, 0.022683679580688478, 0.022562688827514648, 0.02278348731994629, 0.023468896865844725, 0.022748191833496093, 0.02272355270385742, 0.02272051239013672, 0.023021568298339845, 0.02271027183532715, 0.02286755180358887, 0.022865375518798827, 0.022688703536987306, 0.022396928787231447, 0.022536191940307617, 0.022611967086791994, 0.022558719635009765, 0.02242742347717285, 0.022431968688964844, 0.022714368820190428, 0.022634496688842775, 0.025856000900268555, 0.023027360916137694, 0.022609312057495116, 0.022836063385009767, 0.02287830352783203, 0.02268671989440918, 0.022801183700561525, 0.022782175064086915, 0.02267136001586914, 0.022589439392089843, 0.02263859176635742, 0.022618112564086915, 0.022609920501708985, 0.022890495300292968, 0.02285977554321289, 0.022597631454467772, 0.022593536376953126, 0.02263987159729004, 0.022700288772583007, 0.022673919677734376, 0.022718175888061524, 0.022645023345947264, 0.022792192459106447, 0.022687744140625, 0.022956031799316406, 0.023164928436279295, 0.02287820816040039, 0.022716064453125, 0.022769695281982423, 0.02272287940979004, 0.02271139144897461, 0.02275200080871582, 0.02282636833190918]",tokens/s,43.84432017703558,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,896.4096,564.002816,0.0,178.25792,176.190464,s,1,7.7239228515625,7.7239228515625,0.0,7.7239228515625,7.7239228515625,7.7239228515625,7.7239228515625,[7.7239228515625],,kWh,1.6435881262539926e-05,1.8058186215866887e-06,5.708060122011638e-06,2.3949760006138254e-05,,MB,1300.15232,668.860416,0.0,253.755392,220.752384,s,12,0.1842765111923218,0.015356375932693483,0.0001134301599095308,0.015313920021057128,0.015512354946136475,0.015530354928970336,0.01554145001411438,"[0.015210495948791505, 0.015519007682800293, 0.015327296257019042, 0.015204095840454101, 0.015278079986572265, 0.015441696166992187, 0.01545248031616211, 0.01554422378540039, 0.015287263870239258, 0.015300543785095214, 0.015263999938964844, 0.015447327613830567]",tokens/s,16670.599959394065,kWh,4.629168807910301e-07,5.1048778862044606e-08,3.0616858443107444e-07,8.201342440841491e-07,tokens/kWh,312144020.1364563,MB,1339.035648,681.443328,0.0,266.338304,220.754944,s,12,10.119113830566405,0.8432594858805338,0.0036995593296089615,0.8423731384277344,0.847173095703125,0.8491366180419923,0.8510133587646485,"[0.8453490600585938, 0.8422355346679687, 0.8408296508789063, 0.8394060668945312, 0.8425107421875, 0.8443201904296875, 0.8514825439453125, 0.8385535888671874, 0.8404120483398437, 0.8467759399414062, 0.8472172241210938, 0.840021240234375]",tokens/s,74.71009938798997,kWh,2.4482295118862938e-05,2.699966765399952e-06,9.372566010402038e-06,3.655482789466493e-05,tokens/kWh,1723438.5614271944,,s,756,10.112754016876226,0.013376658752481773,0.0002303777155777851,0.013340975761413574,0.013574031829833983,0.013678232192993163,0.01433374390602112,"[0.01309164810180664, 0.013361184120178223, 0.013434368133544922, 0.013365728378295898, 0.01347334384918213, 0.01346230411529541, 0.013401856422424316, 0.013589983940124511, 0.013431232452392578, 0.013283040046691895, 0.013295743942260742, 0.01326095962524414, 0.01317033576965332, 0.01319257640838623, 0.013359231948852539, 0.013539551734924316, 0.013467519760131837, 0.013408896446228027, 0.013297792434692382, 0.013273183822631835, 0.013368736267089844, 0.013470208168029785, 0.013661312103271485, 0.013884448051452636, 0.013831744194030762, 0.013846847534179687, 0.013703136444091797, 0.013522944450378417, 0.013434111595153808, 0.013445887565612793, 0.01357209587097168, 0.013469504356384278, 0.01335110378265381, 0.013373439788818359, 0.013257023811340331, 0.013230079650878907, 0.013211615562438965, 0.01344655990600586, 0.013244864463806152, 0.013303680419921875, 0.013184415817260741, 0.013092543601989746, 0.013086784362792968, 0.013687647819519044, 0.013236224174499512, 0.013283328056335449, 0.013142016410827637, 0.013405247688293457, 0.013153216361999513, 0.013158432006835937, 0.013178655624389649, 0.013459648132324219, 0.013268768310546875, 0.013244288444519043, 0.013217599868774414, 0.013211711883544922, 0.013202112197875976, 0.0133438081741333, 0.013220864295959473, 0.013258496284484863, 0.01368665599822998, 0.014367039680480958, 0.014911328315734863, 0.013129728317260742, 0.013471360206604005, 0.013559552192687988, 0.013564543724060059, 0.013372672080993653, 0.013344832420349121, 0.013341376304626465, 0.013438976287841797, 0.013294655799865723, 0.013327584266662598, 0.01329974365234375, 0.013386560440063476, 0.013255711555480956, 0.013388704299926758, 0.013816767692565918, 0.013653183937072754, 0.013498975753784179, 0.013405952453613281, 0.01332646369934082, 0.01331222438812256, 0.01358614444732666, 0.013330559730529785, 
0.013334431648254395, 0.013352479934692383, 0.013234272003173828, 0.01316431999206543, 0.01406601619720459, 0.013318079948425293, 0.013343584060668946, 0.013355936050415039, 0.013332991600036622, 0.013304256439208984, 0.013308064460754394, 0.0132258243560791, 0.013082752227783203, 0.013152128219604492, 0.013318143844604492, 0.013336352348327637, 0.013285247802734376, 0.013245823860168458, 0.013482975959777831, 0.01333846378326416, 0.01328927993774414, 0.013314399719238281, 0.013165727615356445, 0.013241024017333984, 0.013314208030700684, 0.013318143844604492, 0.013487648010253907, 0.013391551971435546, 0.013415200233459473, 0.01339187240600586, 0.013306015968322754, 0.013328224182128907, 0.013242048263549805, 0.013203231811523438, 0.013263615608215332, 0.013247967720031738, 0.013390144348144532, 0.013345855712890625, 0.013544384002685548, 0.013359071731567383, 0.013480159759521484, 0.012918496131896972, 0.01328985595703125, 0.013162943840026855, 0.013189087867736817, 0.013300959587097167, 0.013400863647460938, 0.013387776374816895, 0.01337923240661621, 0.013328736305236816, 0.013375807762145996, 0.013401791572570801, 0.013520671844482421, 0.0133471040725708, 0.013356351852416992, 0.01344927978515625, 0.013346688270568847, 0.013420607566833496, 0.013249024391174317, 0.013158528327941895, 0.013247679710388184, 0.013248576164245605, 0.013246912002563477, 0.013318464279174805, 0.013326335906982421, 0.013284480094909668, 0.013335424423217774, 0.013670016288757324, 0.013748191833496093, 0.013476032257080078, 0.013408479690551757, 0.013344767570495606, 0.013297151565551758, 0.013410816192626953, 0.013287424087524414, 0.013445119857788086, 0.013324416160583497, 0.013312000274658203, 0.013314175605773925, 0.013287296295166015, 0.013323519706726074, 0.013439007759094238, 0.013307680130004882, 0.013318400382995605, 0.013240608215332031, 0.013549663543701173, 0.013356351852416992, 0.013234848022460938, 0.014380895614624023, 0.013450976371765136, 0.013294112205505372, 0.01335103988647461, 0.013110464096069335, 0.013085503578186036, 0.013107199668884278, 0.013232128143310547, 0.013254783630371093, 0.013427616119384766, 0.013279423713684083, 0.013220735549926758, 0.013305760383605958, 0.013242367744445802, 0.013169983863830566, 0.013344863891601562, 0.012925663948059081, 0.01323641586303711, 0.013102720260620117, 0.013111680030822755, 0.013137855529785157, 0.013110752105712891, 0.013090399742126465, 0.013184000015258789, 0.01346668815612793, 0.013387776374816895, 0.013343456268310547, 0.013334752082824707, 0.01325875186920166, 0.013414400100708008, 0.013282464027404786, 0.013311936378479003, 0.013238368034362792, 0.013306655883789063, 0.013243488311767578, 0.01325766372680664, 0.01324169635772705, 0.013171360015869141, 0.013191167831420898, 0.013262847900390624, 0.013321824073791504, 0.013230496406555176, 0.013412351608276368, 0.013164095878601074, 0.013117888450622558, 0.013094911575317383, 0.013139967918395995, 0.013164544105529785, 0.013137920379638672, 0.01310099220275879, 0.013168543815612792, 0.013082079887390137, 0.013116095542907715, 0.013127679824829102, 0.013299776077270507, 0.013229472160339355, 0.013394047737121582, 0.013449024200439454, 0.013377920150756836, 0.013951199531555176, 0.014219264030456542, 0.013974687576293946, 0.013345919609069824, 0.013432736396789552, 0.013265888214111328, 0.013304672241210937, 0.013479071617126465, 0.013262816429138184, 0.013347423553466797, 0.013365535736083985, 0.013307904243469238, 0.013502592086791993, 0.013444992065429687, 0.01347379207611084, 
0.013487903594970704, 0.013488320350646973, 0.013545503616333008, 0.01365552043914795, 0.01330799961090088, 0.013035360336303712, 0.01338764762878418, 0.013364928245544434, 0.013336576461791993, 0.013359359741210938, 0.013312352180480958, 0.013460607528686523, 0.013515647888183593, 0.013320192337036133, 0.013405728340148925, 0.013320927619934081, 0.01329139232635498, 0.013358976364135742, 0.01338806438446045, 0.01346326446533203, 0.013401280403137207, 0.01343283176422119, 0.013352767944335937, 0.01327616024017334, 0.013374655723571778, 0.01317356777191162, 0.013332736015319825, 0.013485183715820312, 0.0134967041015625, 0.013631072044372559, 0.013580096244812013, 0.013525856018066407, 0.013550911903381348, 0.013372096061706543, 0.013211647987365722, 0.013242367744445802, 0.013262080192565919, 0.013186911582946777, 0.013325216293334961, 0.01330799961090088, 0.013485440254211426, 0.013273440361022949, 0.013381376266479492, 0.013353343963623046, 0.013317184448242187, 0.0132642240524292, 0.013205151557922363, 0.013252608299255371, 0.01323801612854004, 0.013483839988708496, 0.01337500762939453, 0.013307040214538574, 0.013327263832092285, 0.013383711814880371, 0.0134585599899292, 0.013522624015808105, 0.013477727890014649, 0.013258912086486817, 0.01325875186920166, 0.013260800361633301, 0.013561856269836426, 0.013547327995300293, 0.01338105583190918, 0.013286272048950195, 0.013277119636535644, 0.01327507209777832, 0.01336729621887207, 0.013518400192260742, 0.013309568405151366, 0.013520352363586426, 0.01369536018371582, 0.01345577621459961, 0.013638015747070312, 0.013346015930175781, 0.013238816261291505, 0.013290592193603516, 0.013287808418273926, 0.01337609577178955, 0.013399007797241212, 0.013347968101501465, 0.01335689640045166, 0.01325881576538086, 0.013344703674316406, 0.013279423713684083, 0.013600288391113281, 0.01334928035736084, 0.013674367904663086, 0.013516799926757812, 0.013412447929382324, 0.01334988784790039, 0.013423520088195801, 0.013421759605407715, 0.013291872024536133, 0.013392352104187011, 0.01339964771270752, 0.013278656005859375, 0.01341766357421875, 0.013405920028686524, 0.013399231910705566, 0.013394271850585938, 0.01338435173034668, 0.01332975959777832, 0.013363743782043457, 0.013563743591308594, 0.013471903800964356, 0.013472767829895019, 0.013425663948059082, 0.013486432075500489, 0.013420096397399902, 0.013430879592895508, 0.013417535781860352, 0.013358304023742676, 0.013207263946533203, 0.013277183532714844, 0.013217535972595215, 0.013256511688232421, 0.013338111877441406, 0.013330847740173339, 0.013783007621765138, 0.013745856285095216, 0.013343680381774902, 0.013345952033996583, 0.013347519874572753, 0.01351689624786377, 0.01336678409576416, 0.013266880035400391, 0.013192992210388183, 0.01320854377746582, 0.013280063629150391, 0.01334169578552246, 0.013416447639465333, 0.013049856185913086, 0.01328335952758789, 0.013235424041748047, 0.013278176307678222, 0.013336031913757325, 0.013332799911499023, 0.013459456443786622, 0.013598015785217285, 0.01351750373840332, 0.013495776176452636, 0.013439519882202149, 0.013448896408081055, 0.013476351737976074, 0.013561152458190917, 0.013705792427062989, 0.013576128005981446, 0.013631487846374512, 0.013720992088317872, 0.013575967788696288, 0.01358080005645752, 0.013440383911132812, 0.014476415634155273, 0.01532694435119629, 0.013672224044799805, 0.013508319854736328, 0.013660767555236816, 0.013565952301025391, 0.013592576026916504, 0.013694975852966309, 0.01348192024230957, 0.013432895660400391, 0.013377792358398437, 
0.013468480110168457, 0.013498623847961426, 0.01336905574798584, 0.013460576057434082, 0.013438847541809082, 0.01338326358795166, 0.01342300796508789, 0.013479680061340331, 0.013532447814941406, 0.013626336097717285, 0.013387776374816895, 0.013351231575012208, 0.013415295600891114, 0.013294719696044921, 0.013289471626281739, 0.013366239547729492, 0.013396672248840332, 0.013365280151367188, 0.013284832000732423, 0.013212191581726073, 0.013401408195495606, 0.013422975540161133, 0.013263168334960938, 0.014318623542785645, 0.013560799598693848, 0.013412351608276368, 0.013293120384216308, 0.013341312408447265, 0.013406016349792481, 0.013428383827209473, 0.013474143981933594, 0.013224287986755371, 0.013472064018249512, 0.013313664436340331, 0.013280960083007812, 0.013412480354309081, 0.01329315185546875, 0.013366239547729492, 0.01347539234161377, 0.013414752006530762, 0.013590080261230469, 0.013650464057922364, 0.013591903686523438, 0.01358944034576416, 0.013473440170288085, 0.013570015907287597, 0.01341859245300293, 0.013320320129394532, 0.013319616317749023, 0.013275520324707031, 0.013256768226623535, 0.013301759719848634, 0.01341859245300293, 0.013674400329589843, 0.013372544288635255, 0.013468607902526856, 0.013461440086364747, 0.013477312088012696, 0.013298175811767577, 0.013318079948425293, 0.01327295970916748, 0.013165951728820801, 0.01329644775390625, 0.01320918369293213, 0.013300191879272462, 0.013115391731262208, 0.013153632164001465, 0.013143808364868163, 0.013319071769714355, 0.013317631721496583, 0.013228704452514648, 0.013307744026184083, 0.013186400413513183, 0.013191328048706055, 0.01316710376739502, 0.013143263816833495, 0.013198111534118652, 0.013131775856018067, 0.013295040130615235, 0.013168255805969238, 0.013252799987792968, 0.013214464187622071, 0.013229184150695801, 0.013155232429504395, 0.013141792297363281, 0.013152447700500489, 0.013172608375549316, 0.013110624313354493, 0.013195232391357422, 0.013265727996826172, 0.013223936080932617, 0.01313587188720703, 0.013109024047851563, 0.013288736343383789, 0.013062015533447265, 0.01343843173980713, 0.013327008247375488, 0.013389439582824706, 0.0133569917678833, 0.0132991361618042, 0.013319168090820312, 0.013311776161193847, 0.013294143676757813, 0.013254591941833496, 0.013453023910522461, 0.013368351936340332, 0.013541728019714356, 0.01337612819671631, 0.013429023742675782, 0.013291232109069824, 0.013266655921936035, 0.013264191627502442, 0.013564895629882813, 0.013586432456970214, 0.013416447639465333, 0.013466879844665528, 0.01344102382659912, 0.013385472297668457, 0.013266079902648926, 0.01345903968811035, 0.013310272216796875, 0.013309056282043458, 0.013332639694213867, 0.01333350372314453, 0.013307904243469238, 0.013292832374572754, 0.013241888046264649, 0.01315135955810547, 0.01321782398223877, 0.013343520164489746, 0.013689760208129884, 0.013408127784729005, 0.013291647911071777, 0.013309951782226562, 0.013363295555114747, 0.013301983833312988, 0.013324992179870605, 0.013429759979248047, 0.013352959632873536, 0.0132259521484375, 0.013297696113586426, 0.01321945571899414, 0.013341055870056153, 0.013381216049194336, 0.013233728408813476, 0.013187456130981445, 0.013263327598571777, 0.013375200271606446, 0.013226271629333496, 0.013202431678771973, 0.013452287673950195, 0.013203519821166992, 0.013213631629943847, 0.013297696113586426, 0.013262495994567872, 0.013284704208374024, 0.013278112411499024, 0.013090815544128418, 0.013338624000549316, 0.01620137596130371, 0.014048864364624023, 0.013456128120422363, 
0.013334527969360351, 0.013291520118713379, 0.01330121612548828, 0.0132675199508667, 0.013251968383789062, 0.013382240295410157, 0.013272224426269531, 0.013244928359985352, 0.013291232109069824, 0.013743935585021972, 0.013468480110168457, 0.013395968437194825, 0.013285120010375977, 0.01328335952758789, 0.013248607635498047, 0.013274784088134766, 0.013338208198547363, 0.013366144180297851, 0.01325376033782959, 0.013366144180297851, 0.013424832344055176, 0.013415519714355468, 0.013261504173278808, 0.01321337604522705, 0.013256064414978027, 0.01362224006652832, 0.013721599578857421, 0.01354751968383789, 0.013435999870300292, 0.013335455894470214, 0.01335647964477539, 0.01338419246673584, 0.013231391906738282, 0.013333279609680176, 0.013299712181091309, 0.013309599876403809, 0.013322591781616211, 0.013387167930603027, 0.013333087921142578, 0.013931872367858887, 0.01343561553955078, 0.014352224349975585, 0.013485312461853027, 0.01345587158203125, 0.01336582374572754, 0.013352831840515137, 0.013379487991333008, 0.013397184371948243, 0.013291872024536133, 0.013371040344238281, 0.01325062370300293, 0.013183712005615234, 0.013305727958679198, 0.013228256225585938, 0.013612288475036621, 0.013305952072143554, 0.013326368331909179, 0.013244864463806152, 0.013453887939453126, 0.013269375801086426, 0.013242079734802245, 0.013318143844604492, 0.013307904243469238, 0.013382880210876465, 0.013408896446228027, 0.01375766372680664, 0.013404159545898438, 0.01335308837890625, 0.014082048416137695, 0.014215359687805176, 0.013742719650268555, 0.01452847957611084, 0.013348896026611328, 0.013376799583435059, 0.013463647842407226, 0.013265536308288575, 0.013311264038085937, 0.013421279907226562, 0.013238335609436035, 0.013223872184753419, 0.013133824348449707, 0.013196672439575195, 0.013217439651489258, 0.013359519958496094, 0.013292096138000488, 0.013356127738952638, 0.01324665641784668, 0.013340895652770995, 0.013199263572692872, 0.013117183685302734, 0.01312451171875, 0.01334227180480957, 0.013310336112976074, 0.01325055980682373, 0.013141087532043457, 0.013211647987365722, 0.01344809627532959, 0.013479743957519531, 0.01351692771911621, 0.0135513277053833, 0.013580191612243652, 0.013623519897460938, 0.013471967697143554, 0.013374848365783691, 0.013468000411987304, 0.013360992431640625, 0.013516448020935058, 0.013446208000183105, 0.01357795238494873, 0.013685888290405273, 0.013642975807189942, 0.01367568016052246, 0.01353324794769287, 0.013438591957092285, 0.013595680236816406, 0.013434528350830078, 0.013530688285827637, 0.013449695587158203, 0.013399200439453125, 0.013368576049804688, 0.01355129623413086, 0.013035327911376952, 0.013380895614624024, 0.013433759689331054, 0.013348863601684571, 0.013411840438842774, 0.013604800224304199, 0.013517631530761718, 0.013389087677001954, 0.013379103660583496, 0.013239232063293457, 0.013243519783020019, 0.013230976104736327, 0.013142208099365234, 0.01322492790222168, 0.013157183647155762, 0.013131263732910157, 0.013242719650268554, 0.013101247787475586, 0.013503775596618653, 0.013382368087768555, 0.013273088455200196, 0.013215231895446777, 0.013271136283874512, 0.01344758415222168, 0.013301728248596191, 0.013166624069213867, 0.013199263572692872, 0.01319536018371582, 0.013098688125610351, 0.013134143829345702, 0.013158304214477539, 0.013260800361633301, 0.013221983909606933, 0.013307744026184083, 0.013226143836975098, 0.01331100845336914, 0.013310848236083984, 0.013271488189697266, 0.013522720336914063, 0.013332287788391113, 0.013294943809509277, 0.013176639556884766, 
0.013290399551391602, 0.013314240455627442, 0.013210783958435059, 0.01320576000213623, 0.013196991920471192, 0.01321340847015381, 0.013258848190307617, 0.013279616355895996, 0.01320400047302246, 0.0134552001953125, 0.01360092830657959, 0.013543647766113281, 0.013448991775512696, 0.013422592163085938, 0.013526752471923828, 0.013723936080932617, 0.013444352149963378, 0.013576479911804199, 0.013536959648132325, 0.013554335594177246, 0.013657376289367676]",tokens/s,74.75708385058937,,, 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,6758.752256,7532.838912,0.0,7147.094016,7138.9184,s,1,11.2900458984375,11.2900458984375,0.0,11.2900458984375,11.2900458984375,11.2900458984375,11.2900458984375,[11.2900458984375],,kWh,0.00012377358869168043,1.3645896831450578e-05,3.999642088597932e-05,0.00017741590640911033,,MB,1679.691776,8235.384832,0.0,7818.182656,7724.300288,s,10,7.568954528808593,0.7568954528808594,0.005086869802917714,0.7572083435058594,0.7616975524902344,0.7621363739013671,0.7624874310302734,"[0.7443909301757813, 0.7568643798828125, 0.7561492919921875, 0.754361328125, 0.753830810546875, 0.7575523071289062, 0.7616000366210938, 0.7613030395507813, 0.7625751953125, 0.7603272094726562]",tokens/s,338.2237256488,kWh,2.210387129583372e-05,2.437696245962112e-06,1.469921810857184e-05,3.924078565036767e-05,tokens/kWh,6523824.529940353,MB,1690.918912,8382.185472,0.0,7964.983296,7904.605696,s,10,34.23303637695312,3.4233036376953123,0.005141226833617375,3.4223874511718746,3.429089599609375,3.4320592041015625,3.434434887695313,"[3.4163046875, 3.419956787109375, 3.423561279296875, 3.423797607421875, 3.421213623046875, 3.420406982421875, 3.41865185546875, 3.4284296875, 3.42568505859375, 3.43502880859375]",tokens/s,18.40327551032365,kWh,0.00010046165967666334,1.108149392744313e-05,6.672251369542985e-05,0.00017826566729953634,tokens/kWh,353405.122558694,,s,630,34.22917131805422,0.054332017965165395,0.0005970174243463809,0.05430491256713867,0.054852021408081056,0.05505815715789795,0.05684820362091065,"[0.05681958389282227, 0.05398134231567383, 0.05329734420776367, 0.053286815643310545, 0.05347903823852539, 0.053520862579345706, 0.05406515121459961, 0.05396684646606445, 0.053626880645751954, 0.05446441650390625, 0.054046016693115234, 0.05363177490234375, 0.053599712371826175, 0.0535332145690918, 0.0536607666015625, 0.05361756896972656, 0.05401139068603516, 0.05474969482421875, 0.0546058235168457, 0.054357601165771485, 0.05404444885253906, 0.054045310974121095, 0.05364326477050781, 0.053817344665527345, 0.0542740478515625, 0.05401804733276367, 0.05525279998779297, 0.05406943893432617, 0.05391155242919922, 0.05399286270141602, 0.05419068908691406, 0.05409177780151367, 0.05390911865234375, 0.05415769577026367, 0.054591041564941406, 0.05486227035522461, 0.05455257415771484, 0.05442736053466797, 0.05421903991699219, 0.05422694396972656, 0.05410512161254883, 0.054163936614990235, 0.054306880950927734, 0.054908737182617184, 0.05429417419433594, 0.054321376800537106, 0.054378463745117185, 0.05414582443237305, 0.054214656829833986, 0.05430051040649414, 0.05422694396972656, 0.05425971221923828, 0.054430110931396485, 0.054822654724121095, 0.054749183654785157, 0.054488990783691404, 0.05451510238647461, 0.05465568161010742, 0.05450284957885742, 0.054349536895751956, 0.05442646408081055, 0.05436006546020508, 0.05441939163208008, 0.05723148727416992, 
0.0545552978515625, 0.05358111953735351, 0.053400257110595706, 0.0539607048034668, 0.05351833724975586, 0.05373443222045898, 0.05350630569458008, 0.053440574645996095, 0.05357833480834961, 0.05342390441894531, 0.053543201446533205, 0.05357068634033203, 0.05388991928100586, 0.05430886459350586, 0.053759998321533206, 0.053910945892333986, 0.05461660766601562, 0.054949695587158204, 0.05415756988525391, 0.05390742492675781, 0.05428022384643555, 0.05388262557983398, 0.053940479278564456, 0.053796062469482424, 0.053943073272705075, 0.053741504669189456, 0.053919681549072264, 0.05386252975463867, 0.0538109130859375, 0.05378460693359375, 0.05424972915649414, 0.054575103759765625, 0.05425356674194336, 0.05441126251220703, 0.05469734573364258, 0.05443443298339844, 0.054492351531982425, 0.05459641647338867, 0.05448294448852539, 0.05433910369873047, 0.05413251113891602, 0.05440153503417969, 0.05446259307861328, 0.05446044921875, 0.054349857330322264, 0.054454078674316404, 0.05529600143432617, 0.05467359924316406, 0.054581375122070314, 0.05451327896118164, 0.054661022186279294, 0.0547344970703125, 0.054736927032470704, 0.054542816162109375, 0.054526142120361325, 0.054797470092773436, 0.054868606567382815, 0.05488163375854492, 0.054596481323242185, 0.05462572860717774, 0.054250049591064456, 0.055024864196777344, 0.0570599365234375, 0.054421886444091794, 0.053628929138183595, 0.05353267288208008, 0.0538603515625, 0.05379276657104492, 0.053710849761962894, 0.053904735565185546, 0.05355382537841797, 0.053503040313720704, 0.053674144744873045, 0.05391651153564453, 0.05371078491210937, 0.053866497039794924, 0.05396889495849609, 0.054097183227539064, 0.05386928176879883, 0.05483683013916016, 0.055158592224121096, 0.05472905731201172, 0.05429888153076172, 0.05423478317260742, 0.05408777618408203, 0.054087936401367186, 0.054429054260253906, 0.05412460708618164, 0.053891647338867185, 0.05391360092163086, 0.05402188873291015, 0.05401750564575195, 0.05387676620483398, 0.05412531280517578, 0.05417516708374023, 0.05416198348999023, 0.05418188858032227, 0.05482841491699219, 0.05518399810791016, 0.05462835311889649, 0.0543744010925293, 0.054224479675292966, 0.05485353469848633, 0.055222686767578126, 0.05460591888427734, 0.054295616149902345, 0.05428499221801758, 0.054120704650878905, 0.0543109130859375, 0.05404844665527344, 0.0543246078491211, 0.05420473480224609, 0.054419681549072264, 0.05491331100463867, 0.05454655838012695, 0.054691841125488284, 0.05500915145874023, 0.05480051040649414, 0.05480243301391602, 0.05503347015380859, 0.05444972610473633, 0.05481350326538086, 0.05452703857421875, 0.05462089538574219, 0.05467772674560547, 0.05647993469238281, 0.05450131225585937, 0.053521217346191405, 0.05363711929321289, 0.053731006622314455, 0.053516609191894535, 0.05390335845947265, 0.053596160888671876, 0.05354857635498047, 0.053440799713134764, 0.05366316986083984, 0.05368627166748047, 0.053501953125, 0.053835742950439455, 0.054010238647460934, 0.05403894424438477, 0.05404876708984375, 0.05457676696777344, 0.05468758392333985, 0.054319454193115235, 0.05431315231323242, 0.0539005126953125, 0.053916126251220706, 0.053851806640625, 0.05374019241333008, 0.05416755294799805, 0.05406220626831055, 0.05400870513916016, 0.05401804733276367, 0.0541014404296875, 0.053992000579833985, 0.054023616790771486, 0.053960289001464844, 0.05427276611328125, 0.05465929412841797, 0.05518854522705078, 0.0547685775756836, 0.05465887832641601, 0.05458553695678711, 0.05431500625610351, 0.05452339172363281, 0.054728702545166014, 
0.0544793586730957, 0.05445017623901367, 0.05426995086669922, 0.05445632171630859, 0.05432726287841797, 0.05455465698242187, 0.054439521789550784, 0.05472275161743164, 0.05480780792236328, 0.05491811370849609, 0.055115360260009766, 0.05519161605834961, 0.05493727874755859, 0.05486147308349609, 0.054811038970947266, 0.05520444869995117, 0.054838367462158207, 0.05485251235961914, 0.054814720153808595, 0.054438945770263675, 0.05466211318969726, 0.05720700836181641, 0.05498643112182617, 0.05388848114013672, 0.05360316848754883, 0.05354086303710937, 0.05353472137451172, 0.053622814178466795, 0.05353219223022461, 0.05347135925292969, 0.053700927734375, 0.053483200073242185, 0.05365996932983398, 0.05360435104370117, 0.05419120025634765, 0.05411219024658203, 0.05390419387817383, 0.05398748779296875, 0.05491097640991211, 0.054763519287109375, 0.05416479873657227, 0.054104576110839846, 0.05390668869018555, 0.05418899154663086, 0.054035713195800784, 0.05400185775756836, 0.053788959503173826, 0.05380448150634766, 0.05382771301269531, 0.053664321899414065, 0.05429177474975586, 0.053932064056396486, 0.05387680053710937, 0.05402291107177734, 0.054161407470703124, 0.05446819305419922, 0.054681598663330076, 0.05477827072143555, 0.05474009704589844, 0.05489753723144531, 0.05437548828125, 0.05470633697509766, 0.05496092987060547, 0.05451980972290039, 0.05429452896118164, 0.054386688232421876, 0.05408153533935547, 0.05401164627075195, 0.0541739501953125, 0.05444812774658203, 0.054510848999023434, 0.05434982299804687, 0.05433808135986328, 0.05446268844604492, 0.05488844680786133, 0.05498992156982422, 0.055255008697509767, 0.05507174301147461, 0.05478345489501953, 0.054331329345703124, 0.054571552276611326, 0.054714366912841796, 0.05505379104614258, 0.054566654205322265, 0.05767782211303711, 0.05457100677490234, 0.0538372802734375, 0.05382556915283203, 0.05370316696166992, 0.05352374267578125, 0.053523136138916017, 0.05367606353759766, 0.053471233367919924, 0.053892574310302734, 0.05402601623535156, 0.05372963333129883, 0.05361679840087891, 0.05397119903564453, 0.05415116882324219, 0.05433734512329102, 0.05437849426269531, 0.054636737823486325, 0.05459929656982422, 0.054370689392089847, 0.05408476638793945, 0.053957473754882815, 0.05406515121459961, 0.05388611221313477, 0.05368902587890625, 0.05380656051635742, 0.05448348617553711, 0.054491294860839846, 0.05417574310302734, 0.054251518249511715, 0.05392588806152344, 0.054131969451904294, 0.05417184066772461, 0.0547968635559082, 0.0547567024230957, 0.05458515167236328, 0.05460054397583008, 0.05473689651489258, 0.05422489547729492, 0.05437849426269531, 0.05422694396972656, 0.05432729721069336, 0.054191265106201175, 0.05416159820556641, 0.05439052963256836, 0.054168449401855466, 0.05422403335571289, 0.053984127044677734, 0.05403647994995117, 0.05424326324462891, 0.05434374237060547, 0.05455392074584961, 0.05480419158935547, 0.05465139389038086, 0.05454486465454102, 0.05463040161132812, 0.05480838394165039, 0.05475049591064453, 0.0547103042602539, 0.05439958572387695, 0.054427745819091794, 0.05436569595336914, 0.05443654251098633, 0.05671116638183594, 0.053999393463134766, 0.05348515319824219, 0.054046497344970704, 0.05371990585327149, 0.05387855911254883, 0.0537663688659668, 0.05359820938110352, 0.053352127075195314, 0.0536640625, 0.05376934432983398, 0.0535513916015625, 0.05371145629882813, 0.05382144165039063, 0.05383113479614258, 0.05390800094604492, 0.05455001449584961, 0.054456832885742185, 0.05474496078491211, 0.054534271240234376, 0.05449932861328125, 
0.05440499114990234, 0.05422502517700195, 0.053852161407470706, 0.05395769500732422, 0.053976001739501955, 0.05387654495239258, 0.05423068618774414, 0.053846561431884765, 0.05378662490844727, 0.053735424041748046, 0.05490892791748047, 0.05390502548217774, 0.05404915237426758, 0.05454643249511719, 0.05445017623901367, 0.05464473724365235, 0.05480799865722656, 0.054561344146728516, 0.05422079849243164, 0.054212608337402345, 0.05425475311279297, 0.0545780143737793, 0.05403366470336914, 0.05413894271850586, 0.05438943862915039, 0.054271743774414065, 0.05448051071166992, 0.054460159301757814, 0.05449382400512695, 0.054319358825683596, 0.054605056762695316, 0.054907550811767576, 0.05469318389892578, 0.0550530891418457, 0.054828609466552734, 0.054766014099121095, 0.05447270584106445, 0.05428140640258789, 0.05442643356323242, 0.05435391998291016, 0.054319103240966796, 0.05445017623901367, 0.056436065673828126, 0.054589599609375, 0.053923969268798826, 0.05345727920532226, 0.05382144165039063, 0.053550880432128904, 0.05342025756835937, 0.053610496520996094, 0.05358544158935547, 0.05374211120605469, 0.05394364929199219, 0.05385891342163086, 0.05376335906982422, 0.05423791885375977, 0.054417407989501954, 0.05418803024291992, 0.053981182098388675, 0.05425932693481445, 0.054851966857910155, 0.0546693115234375, 0.054556671142578124, 0.054294399261474606, 0.05407551956176758, 0.05377228927612305, 0.05372108840942383, 0.05496579360961914, 0.05425164794921875, 0.053939743041992186, 0.05397135925292969, 0.05393641662597656, 0.053986881256103514, 0.05452243041992187, 0.05434352111816406, 0.05450070571899414, 0.054625087738037106, 0.05445206451416015, 0.054519649505615234, 0.054577472686767575, 0.05490835189819336, 0.05583929443359375, 0.054373950958251954, 0.05437027359008789, 0.05473068618774414, 0.05458998489379883, 0.054468608856201174, 0.05455424118041992, 0.05449766540527344, 0.05462790298461914, 0.054609375, 0.05513929748535156, 0.05461196899414063, 0.05444371032714844, 0.05425900650024414, 0.05453516769409179, 0.054691841125488284, 0.05480038452148438, 0.05544550323486328, 0.05644249725341797, 0.05465923309326172, 0.054683872222900394, 0.0543744010925293, 0.05446640014648438, 0.05465718460083008, 0.05685984039306641, 0.05430064010620117, 0.05374809646606445, 0.053682113647460936, 0.0539218864440918, 0.05372361755371094, 0.05363097763061524, 0.053872638702392575, 0.05378662490844727, 0.05465497589111328, 0.05399334335327149, 0.05375347137451172, 0.053830142974853515, 0.0539156494140625, 0.05395455932617187, 0.054261760711669924, 0.053996639251708986, 0.054537120819091796, 0.055574367523193356, 0.05428035354614258, 0.054712318420410154, 0.054392833709716794, 0.05414092636108398, 0.054230785369873045, 0.054161376953125, 0.053855712890625, 0.05380799865722656, 0.05385209655761719, 0.053695552825927736, 0.053795135498046875, 0.05392448043823242, 0.0538152961730957, 0.0541102066040039, 0.05423212814331055, 0.05479625701904297, 0.05478623962402344, 0.05471903991699219, 0.05446268844604492, 0.05453004837036133, 0.05465292739868164, 0.05455795288085938, 0.054811393737792966, 0.05435158538818359, 0.05455462265014648, 0.054562881469726564, 0.05506172943115235, 0.0547147216796875, 0.054200992584228516, 0.05440716934204102, 0.054340736389160156, 0.0542155532836914, 0.054368160247802735, 0.05490800094604492, 0.05481475067138672, 0.05467644882202148, 0.05468569564819336, 0.054788097381591794, 0.0547512321472168, 0.054730751037597655, 0.05461590576171875, 0.05449916839599609, 0.055368000030517575, 
0.054400894165039064, 0.05831935882568359, 0.054284320831298825, 0.05373894500732422, 0.05367251205444336, 0.053550430297851566, 0.054033088684082034, 0.05392176055908203, 0.05379398345947266, 0.05415919876098633, 0.05367087936401367, 0.05363097763061524, 0.053945568084716795, 0.05389596939086914, 0.05393407821655274, 0.054155265808105466, 0.05390335845947265, 0.05391769790649414, 0.054748798370361326, 0.054929630279541015, 0.05451984024047852, 0.05428031921386719, 0.05410201644897461, 0.05390447998046875, 0.05441142272949219, 0.05466515350341797, 0.05454111862182617, 0.054251518249511715, 0.05389926528930664, 0.053856254577636715, 0.05401356887817383, 0.053873023986816405, 0.05396889495849609, 0.05394131088256836, 0.05427500915527344, 0.05471641540527344, 0.05484134292602539, 0.05479542541503906, 0.054462528228759764, 0.0545759048461914, 0.0546611213684082, 0.054752288818359376, 0.05446934509277344, 0.054730785369873046, 0.05430294418334961, 0.054476158142089844, 0.054519905090332034, 0.054505760192871094, 0.05444988632202148, 0.054364704132080076, 0.05432320022583008, 0.05471641540527344, 0.05442559814453125, 0.054988670349121097, 0.055037601470947266, 0.05529411315917969, 0.05606185531616211, 0.05790460968017578, 0.05487712097167969, 0.0568197135925293, 0.05477580642700195, 0.05472051239013672, 0.054642688751220705, 0.05446569442749023]",tokens/s,18.405353554899126,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,11151.880192,14593.949696,0.0,14191.427584,13325.783552,s,1,18.104853515625,18.104853515625,0.0,18.104853515625,18.104853515625,18.104853515625,18.104853515625,[18.104853515625],,kWh,0.00032298358085000324,3.562022408437369e-05,0.000148114007380018,0.000506717812314395,,MB,2093.285376,14608.62976,0.0,14191.427584,12591.007744,s,10,96.04759375,9.604759375,0.01036137556502717,9.602073242187501,9.620193945312499,9.62156767578125,9.62266666015625,"[9.61227734375, 9.6018515625, 9.594748046875, 9.5932080078125, 9.59368359375, 9.59691796875, 9.602294921875, 9.6097822265625, 9.619888671875, 9.62294140625]",tokens/s,26.65345273160474,kWh,0.00028153631824208257,3.105325828531778e-05,0.00018718837197279626,0.0004997779485001966,tokens/kWh,512227.4817611311,MB,2099.28192,14608.62976,0.0,14191.427584,12934.19776,s,10,44.40920751953125,4.440920751953125,0.004921353083231648,4.440137451171875,4.447161181640625,4.447932641601563,4.448549809570313,"[4.44300634765625, 4.44698974609375, 4.4347666015625, 4.43511083984375, 4.4360458984375, 4.4380341796875, 4.43829150390625, 4.4419833984375, 4.44627490234375, 
4.4487041015625]",tokens/s,14.186247293940674,kWh,0.00012840012222333066,1.4165132573752944e-05,8.538376275140646e-05,0.00022794901754849008,tokens/kWh,276377.58950462873,,s,630,44.38109857940676,0.07044618822128054,0.0006309081468677406,0.07040760040283203,0.07127052230834961,0.07149790115356446,0.07216985633850098,"[0.07223929595947266, 0.06930931091308594, 0.07061395263671875, 0.06986697387695312, 0.06974012756347656, 0.06933187103271485, 0.06968915557861328, 0.0698656997680664, 0.06994841766357422, 0.06978179168701172, 0.07010755157470704, 0.07017836761474609, 0.06966057586669921, 0.07040444946289062, 0.07009126281738282, 0.07025459289550781, 0.07000198364257812, 0.07044729614257812, 0.07005430603027343, 0.07011952209472656, 0.07014553833007812, 0.07000278472900391, 0.07013155364990234, 0.07025721740722657, 0.07034060668945312, 0.07027283477783203, 0.07028486633300782, 0.07038019561767578, 0.07019312286376952, 0.07118572998046875, 0.07095069122314453, 0.07023420715332031, 0.07032915496826173, 0.07039961242675781, 0.07031027221679688, 0.07007830047607422, 0.0702751693725586, 0.07068678283691407, 0.07058409881591797, 0.07043276977539062, 0.07045346832275391, 0.07090995025634765, 0.07057103729248047, 0.070951904296875, 0.07076659393310547, 0.07112258911132813, 0.07144847869873047, 0.07068080139160156, 0.07056182098388672, 0.07032611083984375, 0.0709287338256836, 0.07081574249267578, 0.07059865570068359, 0.07087923431396484, 0.07127161407470703, 0.0711396484375, 0.07112550354003906, 0.07116595458984375, 0.07127040100097656, 0.07085670471191406, 0.07150105285644531, 0.07086361694335938, 0.07123356628417969, 0.07178803253173828, 0.06957100677490234, 0.06915455627441407, 0.06948441314697265, 0.0698476791381836, 0.07003558349609375, 0.06995545959472656, 0.06957209777832031, 0.06959129333496093, 0.07009065246582032, 0.0698883514404297, 0.07055359649658204, 0.06970572662353515, 0.0700600357055664, 0.07008665466308593, 0.07010870361328125, 0.07010269165039062, 0.06971065521240234, 0.07077680206298828, 0.07113014221191406, 0.07055625915527344, 0.07026319885253907, 0.0701455078125, 0.07017449951171875, 0.0711094741821289, 0.07069305419921874, 0.07053897857666015, 0.07072147369384765, 0.07016044616699219, 0.06992691040039062, 0.0700638427734375, 0.07014979553222657, 0.07036172485351562, 0.07069286346435547, 0.07021568298339843, 0.07089904022216797, 0.0701016616821289, 0.07041433715820312, 0.0704524154663086, 0.0705584945678711, 0.07055283355712891, 0.07079507446289063, 0.0703375015258789, 0.07024845123291015, 0.07091161346435547, 0.07125644683837891, 0.07123516845703125, 0.07115200042724609, 0.07095053100585938, 0.07149404907226563, 0.07106511688232423, 0.07168048095703125, 0.07150387573242188, 0.07094464111328125, 0.07100838470458984, 0.07133353424072265, 0.07117654418945313, 0.07143833923339844, 0.07084044647216797, 0.07147711944580078, 0.0707215347290039, 0.07107353973388672, 0.0716242904663086, 0.07168137359619141, 0.06910633850097657, 0.06895795440673828, 0.06943974304199219, 0.06962989044189453, 0.06979385375976563, 0.06960463714599609, 0.07001570892333984, 0.06965862274169922, 0.07016140747070312, 0.06957158660888672, 0.06965452575683594, 0.0695889892578125, 0.07024230194091798, 0.07090198516845703, 0.0699840316772461, 0.06952323150634766, 0.06960969543457031, 0.07021772766113281, 0.06978559875488281, 0.07039810943603515, 0.07020076751708984, 0.06998409271240234, 0.07005036926269531, 0.0701665267944336, 0.07039295959472656, 0.0701013412475586, 0.07029404449462891, 0.07012992095947265, 
0.07051427459716797, 0.0700594253540039, 0.07015296173095703, 0.07019020843505859, 0.07031222534179687, 0.07015071868896484, 0.07056591796875, 0.06995967864990234, 0.07021062469482423, 0.07064002990722656, 0.07034099578857422, 0.07043087768554687, 0.07037542724609375, 0.0707747802734375, 0.07099795532226562, 0.07047782135009766, 0.0704840316772461, 0.07097676849365234, 0.07101074981689454, 0.07054777526855469, 0.0713154525756836, 0.07056364440917968, 0.0703428497314453, 0.0709570541381836, 0.07136982727050781, 0.07087200164794921, 0.07125186920166016, 0.07134201812744141, 0.07092031860351562, 0.07095091247558594, 0.07074406433105469, 0.07130028533935547, 0.07080429077148437, 0.07124342346191406, 0.07217001342773438, 0.06939254760742188, 0.06927494049072265, 0.06924921417236328, 0.06920966339111329, 0.06968313598632812, 0.06949750518798828, 0.06968742370605469, 0.06943142700195312, 0.06933878326416015, 0.07002566528320313, 0.06989222717285157, 0.06993827056884766, 0.0701612777709961, 0.07105903625488282, 0.07040850830078126, 0.06987171173095703, 0.06983369445800781, 0.06968745422363282, 0.06980073547363282, 0.07006617736816406, 0.06987744140625, 0.06993132781982422, 0.07006208038330078, 0.07033446502685547, 0.06997196960449219, 0.06962995147705078, 0.07021148681640625, 0.07041756439208985, 0.07072576141357421, 0.07006905364990235, 0.06999654388427734, 0.07027069091796875, 0.07023554992675782, 0.07033916473388672, 0.07033449554443359, 0.07011763000488282, 0.07057408142089844, 0.0711981430053711, 0.07089536285400391, 0.07032886505126953, 0.07052845001220703, 0.07054627227783203, 0.07047401428222656, 0.07089737701416016, 0.07134822082519532, 0.07071068572998047, 0.07036579132080079, 0.07046758270263671, 0.07053107452392578, 0.0702525405883789, 0.07132364654541015, 0.07121846771240234, 0.0712998046875, 0.07079116821289062, 0.07116995239257813, 0.07065200042724609, 0.07055564880371094, 0.07113865661621094, 0.07080931091308594, 0.07157036590576171, 0.07077299499511719, 0.07154662322998047, 0.07219219207763672, 0.06940198516845703, 0.0697900161743164, 0.06960902404785156, 0.06907357025146485, 0.06931436920166016, 0.06964972686767579, 0.06982259368896485, 0.06998278045654296, 0.06985318756103516, 0.06988159942626954, 0.07025606536865234, 0.06982943725585937, 0.07055564880371094, 0.07056342315673828, 0.07003177642822266, 0.0697993927001953, 0.06963279724121094, 0.07000233459472656, 0.0698326416015625, 0.06984825897216797, 0.06995862579345703, 0.06991462707519532, 0.07067648315429688, 0.07079232025146484, 0.07036390686035156, 0.07074230194091796, 0.07043666839599609, 0.0704205093383789, 0.07050214385986328, 0.07024435424804687, 0.07018495941162109, 0.07002700805664062, 0.069949951171875, 0.07003481292724609, 0.07023270416259765, 0.07064307403564453, 0.0705799331665039, 0.07012239837646485, 0.07033650970458985, 0.07045305633544922, 0.07071762847900391, 0.07076454162597656, 0.07035494232177734, 0.0702730255126953, 0.07044818878173828, 0.07013193511962891, 0.07073251342773437, 0.07069631958007813, 0.07030780792236328, 0.07032447814941406, 0.07144080352783203, 0.07138508605957031, 0.07069449615478515, 0.07046950531005859, 0.07097398376464843, 0.07135193634033203, 0.07119500732421875, 0.07088057708740235, 0.07159059143066407, 0.07090287780761718, 0.07132048034667969, 0.07075801849365235, 0.07410736083984375, 0.06971376037597657, 0.06922988891601563, 0.06919007873535156, 0.06996428680419922, 0.06926509094238281, 0.06932009887695313, 0.0696493148803711, 0.07008460998535156, 0.06966242980957031, 
0.06993743896484375, 0.06999244689941406, 0.06969686126708985, 0.06999945831298827, 0.07151404571533203, 0.07041401672363282, 0.06989228820800782, 0.06983270263671874, 0.0697733154296875, 0.06926646423339844, 0.06995247650146484, 0.069987548828125, 0.07026547241210937, 0.07001718139648437, 0.06996991729736328, 0.07002726745605468, 0.07053926086425781, 0.07028931427001953, 0.07068681335449219, 0.07046723175048829, 0.07047532653808594, 0.07034905242919921, 0.0703529281616211, 0.06973490905761719, 0.07018495941162109, 0.07010294342041015, 0.07028729248046875, 0.069978271484375, 0.07046553802490234, 0.07020748901367188, 0.07072073364257812, 0.0712609634399414, 0.07103308868408204, 0.0706434555053711, 0.0702402572631836, 0.07148134613037109, 0.07082166290283202, 0.07113446044921876, 0.07049314880371094, 0.07141500854492187, 0.07060546875, 0.07046361541748047, 0.07104512023925781, 0.07065132904052734, 0.07076838684082032, 0.07075718688964844, 0.07148258972167969, 0.07071625518798828, 0.0713031005859375, 0.07147708892822266, 0.07053046417236328, 0.07034710693359375, 0.0709615707397461, 0.07238944244384765, 0.06946406555175781, 0.06938009643554688, 0.06909951782226563, 0.07003545379638672, 0.06975692749023438, 0.07002022552490235, 0.07017504119873047, 0.07014630126953125, 0.06983209228515624, 0.06950364685058594, 0.07009001922607422, 0.0698061752319336, 0.06992082977294922, 0.0699126739501953, 0.06969417572021484, 0.06982450866699219, 0.06989004516601563, 0.06995558166503907, 0.07051417541503906, 0.06999100494384766, 0.06997609710693359, 0.0704963836669922, 0.070534912109375, 0.07039398193359375, 0.07006912231445313, 0.07007849884033203, 0.07019171142578125, 0.07043827056884766, 0.07030592346191407, 0.0705849609375, 0.07017670440673827, 0.07018688201904297, 0.07021408081054688, 0.07006822204589844, 0.07005187225341797, 0.07063350677490235, 0.07055967712402343, 0.07074972534179688, 0.07040156555175782, 0.07053791809082031, 0.0703900146484375, 0.07079856109619141, 0.07036905670166016, 0.07092473602294921, 0.07120310211181641, 0.07110684967041016, 0.07121510314941407, 0.07105945587158204, 0.07047987365722656, 0.07051372528076172, 0.07125004577636719, 0.07064806365966797, 0.07054422760009765, 0.07145043182373047, 0.07117549133300781, 0.07067305755615234, 0.07053472137451172, 0.07135273742675781, 0.07103388977050781, 0.0708018569946289, 0.07082201385498046, 0.07102444458007813, 0.07229235076904297, 0.06942626953125, 0.06956124877929687, 0.06933503723144531, 0.06956543731689453, 0.0698986587524414, 0.06969200134277344, 0.07021363067626953, 0.06974259185791015, 0.06964157104492187, 0.06986380767822266, 0.06977881622314454, 0.06972099304199218, 0.07067849731445312, 0.07132943725585937, 0.07054137420654297, 0.06960934448242187, 0.06965702056884765, 0.06965657806396484, 0.07029497528076172, 0.0701645736694336, 0.06993558502197265, 0.06984230041503907, 0.07003404998779297, 0.07038771057128906, 0.07015987396240235, 0.07033087921142578, 0.07222271728515625, 0.071225341796875, 0.07032198333740235, 0.07000816345214844, 0.06965261077880859, 0.0701201629638672, 0.07042800140380859, 0.07052960205078125, 0.07043081665039062, 0.07040716552734375, 0.07091097259521484, 0.07063961791992188, 0.07033219146728516, 0.07109040069580078, 0.07107328033447266, 0.07198274993896485, 0.069901123046875, 0.07057820892333984, 0.07062092590332031, 0.07103049468994141, 0.07044534301757813, 0.07101055908203124, 0.07074114990234374, 0.07084534454345703, 0.07051872253417969, 0.07117401885986328, 0.07089177703857422, 0.07046685028076172, 
0.07072972869873047, 0.07153298950195312, 0.07201769256591797, 0.07059699249267579, 0.0710165786743164, 0.07067225646972657, 0.07085874938964844, 0.07065340423583985, 0.07193670654296876, 0.06989635467529297, 0.06932889556884765, 0.06945587158203125, 0.0705433578491211, 0.0694988784790039, 0.06958921813964844, 0.06970681762695312, 0.07007279968261719, 0.06985289764404297, 0.06994588470458984, 0.0702298583984375, 0.07080361938476562, 0.07059584045410157, 0.0705396499633789, 0.06979164886474609, 0.06985980987548829, 0.06962934112548828, 0.07080435180664063, 0.07085798645019531, 0.07009664154052735, 0.07000899505615234, 0.07021823883056641, 0.07008876800537109, 0.07012147521972656, 0.07030691528320313, 0.07050665283203125, 0.07163766479492187, 0.0705103988647461, 0.0699676513671875, 0.06976665496826172, 0.07008966064453125, 0.06998831939697266, 0.07162684631347656, 0.07004774475097657, 0.07031193542480468, 0.07041574096679687, 0.07044115447998046, 0.07025504302978515, 0.07060479736328125, 0.07083737945556641, 0.07063622283935547, 0.07053648376464844, 0.07062416076660157, 0.07075020599365234, 0.07072531127929688, 0.07040803527832032, 0.0704311981201172, 0.07181107330322266, 0.07055359649658204, 0.07099305725097656, 0.07160099029541016, 0.0706242904663086, 0.07169478607177734, 0.0707630386352539, 0.07102464294433594, 0.07145062255859375, 0.07089695739746094, 0.07132640075683594, 0.07105945587158204, 0.07106559753417968, 0.07152588653564453, 0.0720656967163086, 0.07183497619628906, 0.06978806304931641, 0.06940803527832032, 0.06967011260986328, 0.06992044830322265, 0.07015225219726562, 0.06943949127197266, 0.06953369903564453, 0.0700211181640625, 0.06960332489013672, 0.07000198364257812, 0.06947500610351562, 0.07092018890380859, 0.0705638427734375, 0.07038521575927735, 0.06997036743164063, 0.06987980651855469, 0.06979993438720702, 0.06996173095703125, 0.07003129577636719, 0.07043897247314453, 0.07004569244384766, 0.07035011291503906, 0.07039663696289063, 0.07041139221191406, 0.07051558685302735, 0.07058841705322266, 0.07147929382324218, 0.07087664031982421, 0.07067820739746093, 0.07022627258300781, 0.07015078735351563, 0.07051363372802734, 0.07082816314697266, 0.07057884979248047, 0.07028543853759765, 0.07044710540771484, 0.07023817443847656, 0.07049766540527344, 0.0704784927368164, 0.07048191833496094, 0.07199334716796875, 0.0711553955078125, 0.07132521820068359, 0.07090057373046875, 0.07147309112548828, 0.07027916717529296, 0.07072882843017578, 0.07076338958740234, 0.07091814422607422, 0.07100621032714843, 0.07061913299560547, 0.07155615997314453, 0.0711239013671875, 0.07216947174072266, 0.07105677032470703, 0.07075603485107422, 0.07126108551025391, 0.0712314224243164, 0.07094496154785156, 0.0713542709350586, 0.07113289642333985, 0.07118252563476563]",tokens/s,14.19523220843222,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track 
self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in 
launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File 
""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4357.275648,4562.28864,0.0,4183.81824,4182.069248,s,1,10.0476240234375,10.0476240234375,0.0,10.0476240234375,10.0476240234375,10.0476240234375,10.0476240234375,[10.0476240234375],,kWh,9.00516337291691e-05,9.925974722610089e-06,2.9063078805999232e-05,0.00012904068725777844,,MB,1646.116864,4715.380736,0.0,4307.550208,4281.174016,s,10,4.240579620361328,0.4240579620361328,0.00973513499326288,0.4238329620361328,0.43182900390625,0.439088330078125,0.444895791015625,"[0.4058084716796875, 0.418193603515625, 0.42089401245117186, 0.44634765625, 0.41901727294921876, 0.42609442138671877, 0.4232594299316406, 0.4263424377441406, 0.4302158203125, 0.424406494140625]",tokens/s,603.6910585779474,kWh,1.2225260772569512e-05,1.3473687572735923e-06,8.141916235750024e-06,2.1714545765593125e-05,tokens/kWh,11789332.49460986,MB,1650.315264,4730.0608,0.0,4322.230272,4281.176576,s,10,25.663558593750004,2.5663558593749998,0.012182986936632783,2.57035546875,2.5787344970703123,2.580027038574219,2.5810610717773437,"[2.57439501953125, 2.548342529296875, 2.56631591796875, 2.55032080078125, 2.564474853515625, 2.54906787109375, 2.581319580078125, 2.574916259765625, 2.57595849609375, 2.578447265625]",tokens/s,24.54842720656159,kWh,7.52577669924302e-05,8.299891720148061e-06,4.9471185410250034e-05,0.00013302884412282824,tokens/kWh,473581.5034356821,,s,630,25.659648288726828,0.04072960045829652,0.0006686577631998439,0.04065139198303223,0.04114664154052734,0.0414242654800415,0.043639575233459474,"[0.04099299240112304, 0.04076748657226562, 0.0403513298034668, 0.040348064422607424, 0.040390655517578124, 0.04044976043701172, 0.04084288024902344, 0.040583839416503904, 0.040483905792236326, 0.04080841445922852, 0.040733566284179686, 0.04058486557006836, 0.04051555252075195, 0.04062460708618164, 0.04030054473876953, 0.040328670501708984, 0.041082431793212894, 0.040731616973876957, 0.04061721420288086, 0.04047743988037109, 0.040271713256835935, 0.04044809722900391, 0.04028422546386719, 0.04057843017578125, 0.040628864288330076, 0.04068460845947266, 0.04068038558959961, 0.040687614440917966, 0.040904705047607424, 0.04079372787475586, 0.040857982635498044, 0.04057702255249023, 0.04013804626464844, 0.040585536956787106, 0.04038899230957031, 0.04036812973022461, 0.044386302947998044, 0.040521728515625, 0.04073174285888672, 0.04049382400512695, 
0.04045225524902344, 0.04076544189453125, 0.040882080078125, 0.04057097625732422, 0.040286209106445314, 0.04068924713134765, 0.0414744644165039, 0.040427520751953126, 0.04086579132080078, 0.04296636962890625, 0.04254991912841797, 0.04094976043701172, 0.04068153762817383, 0.04109436798095703, 0.04098735809326172, 0.04190127944946289, 0.04121049499511719, 0.0405667839050293, 0.041056190490722656, 0.04073494338989258, 0.043607585906982424, 0.040876510620117185, 0.04141670227050781, 0.040854686737060546, 0.04049292755126953, 0.040171966552734376, 0.04034000015258789, 0.03998310470581055, 0.040302593231201174, 0.04025139236450195, 0.040443904876708986, 0.039964672088623046, 0.04060160064697266, 0.04005449676513672, 0.04046169662475586, 0.04043571090698242, 0.040393630981445314, 0.04053401565551758, 0.04067327880859375, 0.04053561782836914, 0.04061743927001953, 0.04064969635009766, 0.040687614440917966, 0.04075929641723633, 0.04046950531005859, 0.040313152313232424, 0.04031900787353516, 0.04016604614257813, 0.040122207641601564, 0.04009795379638672, 0.04019200134277344, 0.040136703491210936, 0.040385601043701175, 0.04028067016601562, 0.04058534240722656, 0.04008572769165039, 0.04019171142578125, 0.040245025634765626, 0.04042598342895508, 0.04023651123046875, 0.04015977478027344, 0.04037129592895508, 0.0407213134765625, 0.04067327880859375, 0.041166847229003906, 0.040754814147949216, 0.04069209671020508, 0.040541248321533205, 0.040721343994140624, 0.04042918395996094, 0.040647041320800784, 0.04050124740600586, 0.04046761703491211, 0.040309600830078125, 0.04012236785888672, 0.04023455810546875, 0.04039081573486328, 0.0405261116027832, 0.04043881607055664, 0.040522720336914064, 0.040687614440917966, 0.04068966293334961, 0.04067728042602539, 0.040742687225341793, 0.040640064239501957, 0.040653568267822265, 0.04099398422241211, 0.04044800186157226, 0.04026655960083008, 0.04081011199951172, 0.040527328491210934, 0.04061385726928711, 0.04032198333740234, 0.04034956741333008, 0.04076300811767578, 0.04047513580322266, 0.04068556976318359, 0.04076748657226562, 0.04049100875854492, 0.04095795059204101, 0.04068124771118164, 0.04091507339477539, 0.04103177642822266, 0.04173551940917969, 0.040768161773681644, 0.040687614440917966, 0.04060979080200195, 0.04059340667724609, 0.04070348739624023, 0.04064230346679688, 0.04059734344482422, 0.04052470397949219, 0.0408675537109375, 0.04050758361816406, 0.040502750396728515, 0.0406165771484375, 0.040574977874755856, 0.04029849624633789, 0.04055219268798828, 0.040470592498779295, 0.0402413444519043, 0.04036198425292969, 0.04030809783935547, 0.04056537628173828, 0.040531967163085936, 0.040529918670654294, 0.040650718688964846, 0.04053763198852539, 0.04080275344848633, 0.04061916732788086, 0.040428447723388675, 0.04046438217163086, 0.040542209625244144, 0.04080361557006836, 0.040780513763427735, 0.040515583038330076, 0.04065033721923828, 0.04058563232421875, 0.0456736946105957, 0.04099555206298828, 0.040759361267089844, 0.04069375991821289, 0.04068966293334961, 0.040769535064697264, 0.040837120056152344, 0.04097359848022461, 0.04070883178710937, 0.04064460754394531, 0.04068486404418945, 0.04122544097900391, 0.04059017562866211, 0.04028416061401367, 0.040118270874023435, 0.03983740615844727, 0.040118560791015626, 0.040114177703857425, 0.04040003204345703, 0.040219009399414064, 0.040522144317626956, 0.044087360382080075, 0.04056883239746094, 0.0404128303527832, 0.04004079818725586, 0.039798782348632815, 0.039847934722900394, 0.04000153732299805, 0.039907329559326174, 
0.040019168853759765, 0.039772544860839844, 0.039874561309814455, 0.03970294570922851, 0.03973222351074219, 0.03986735916137695, 0.03997494506835937, 0.039858177185058595, 0.03974553680419922, 0.039923583984375, 0.03997708892822266, 0.03990300750732422, 0.03969660949707031, 0.03993971252441406, 0.03968447875976563, 0.03996604919433594, 0.04004726409912109, 0.040288257598876956, 0.04018380737304687, 0.039858081817626956, 0.04016537475585937, 0.04063036727905273, 0.04063961410522461, 0.040573406219482425, 0.04217897415161133, 0.04079526519775391, 0.04056972885131836, 0.04063772964477539, 0.040677921295166015, 0.04031507110595703, 0.04016742324829101, 0.04041849517822266, 0.040756031036376955, 0.040316001892089844, 0.04020316696166992, 0.04023471832275391, 0.04056092834472656, 0.04045004653930664, 0.04056063842773437, 0.04100505447387695, 0.04126271820068359, 0.041216129302978514, 0.0410994873046875, 0.04076752090454101, 0.04764665603637695, 0.0408870735168457, 0.0409354248046875, 0.044077056884765625, 0.040892414093017575, 0.04066918563842774, 0.040768959045410155, 0.04060422515869141, 0.04060774230957031, 0.040515583038330076, 0.040359584808349606, 0.04047407913208008, 0.0401847038269043, 0.04207820892333984, 0.04047872161865235, 0.04040703964233398, 0.04056790542602539, 0.04019292831420898, 0.040298336029052736, 0.04082854461669922, 0.04355945587158203, 0.04052550506591797, 0.04044800186157226, 0.04027628707885742, 0.040307807922363284, 0.04035676956176758, 0.04023436737060547, 0.04010841751098633, 0.040048736572265625, 0.04021878433227539, 0.04014460754394531, 0.04010604858398437, 0.04014435195922852, 0.040686336517333985, 0.040738815307617186, 0.04076508712768555, 0.04229974365234375, 0.04100710296630859, 0.040949825286865235, 0.04104595184326172, 0.04077699279785156, 0.04058595275878906, 0.04066099166870117, 0.04074905776977539, 0.04057692718505859, 0.040597599029541014, 0.04060160064697266, 0.040683135986328126, 0.04040073776245117, 0.040280609130859374, 0.040787967681884765, 0.04067932891845703, 0.04069116973876953, 0.04039539337158203, 0.040431167602539064, 0.040470977783203126, 0.04036713409423828, 0.04043056106567383, 0.04029206466674805, 0.040519966125488284, 0.04089782333374024, 0.04095049667358398, 0.04093743896484375, 0.04056659317016602, 0.04129014587402344, 0.04072652816772461, 0.040720382690429685, 0.040738815307617186, 0.04065894317626953, 0.040795551300048825, 0.04112847900390625, 0.040642623901367185, 0.040643840789794924, 0.040825599670410155, 0.040736766815185545, 0.04103987121582031, 0.040420574188232423, 0.04053433609008789, 0.040444385528564455, 0.04039833450317383, 0.040544063568115234, 0.04047123336791992, 0.04053942489624023, 0.040544319152832034, 0.04041779327392578, 0.04059337615966797, 0.04062432098388672, 0.04055622482299805, 0.04060150527954102, 0.040547870635986326, 0.04041558456420898, 0.04052431869506836, 0.040320510864257815, 0.040405502319335936, 0.04021247863769531, 0.03996057510375976, 0.04013596725463867, 0.04001251220703125, 0.04041318511962891, 0.040019966125488284, 0.04008345413208008, 0.040383937835693356, 0.04028473663330078, 0.04034307098388672, 0.04022630310058594, 0.040285152435302736, 0.040352832794189455, 0.04075411224365234, 0.04010531234741211, 0.04061983871459961, 0.03988966369628906, 0.040185951232910154, 0.040030208587646485, 0.04005068969726563, 0.04032716751098633, 0.0401899528503418, 0.04045388793945313, 0.04029254531860352, 0.040321086883544924, 0.04050124740600586, 0.040204032897949216, 0.040081375122070315, 0.04038684844970703, 
0.040564735412597655, 0.040836673736572265, 0.04063686370849609, 0.04072998428344726, 0.04122224044799805, 0.04087174224853515, 0.04069660949707031, 0.040787967681884765, 0.04343804931640625, 0.04214700698852539, 0.04054307174682617, 0.040537792205810545, 0.040487232208251955, 0.04087398529052735, 0.04053305435180664, 0.0409804801940918, 0.0404284782409668, 0.04140415954589844, 0.040936958312988284, 0.041293567657470706, 0.04073497772216797, 0.04078054428100586, 0.04114636611938476, 0.04106668853759766, 0.04201852798461914, 0.04207388687133789, 0.04095827102661133, 0.04076697540283203, 0.040540672302246096, 0.040574943542480466, 0.04081462478637695, 0.04078182220458984, 0.0406418571472168, 0.040468544006347654, 0.040604286193847657, 0.04063987350463867, 0.04069807815551758, 0.04055231857299805, 0.04050998306274414, 0.04066707229614258, 0.04618656158447266, 0.041025535583496094, 0.040567806243896484, 0.040399360656738284, 0.04029087829589844, 0.0422808952331543, 0.040648704528808595, 0.04089993667602539, 0.0407374382019043, 0.040986625671386716, 0.040546302795410154, 0.04086067199707031, 0.040799232482910154, 0.040854686737060546, 0.04072739028930664, 0.04140031814575195, 0.04088761520385742, 0.04054822540283203, 0.040690494537353517, 0.04067737579345703, 0.04038246536254883, 0.04083907318115235, 0.04056892776489258, 0.04052896118164063, 0.040840129852294925, 0.04074086380004883, 0.040836639404296875, 0.04127043151855469, 0.04026230239868164, 0.04004617691040039, 0.04057148742675781, 0.04025328063964844, 0.04026377487182617, 0.04096006393432617, 0.04070367813110352, 0.040955520629882815, 0.040904705047607424, 0.04114912033081055, 0.04098777770996094, 0.04095616149902344, 0.04092911911010742, 0.04118403244018555, 0.04270284652709961, 0.04107385635375976, 0.04081903839111328, 0.04087424087524414, 0.04071855926513672, 0.04085097503662109, 0.04062460708618164, 0.040595169067382815, 0.04101705551147461, 0.04064716720581055, 0.040745025634765626, 0.041223838806152345, 0.041655937194824216, 0.04113248062133789, 0.041070655822753904, 0.04078976058959961, 0.04148681640625, 0.04094473648071289, 0.04082575988769531, 0.04088576126098633, 0.0408089599609375, 0.04131430435180664, 0.04141993713378906, 0.04292204666137695, 0.04059011077880859, 0.04065206527709961, 0.040893150329589845, 0.04141424179077149, 0.040610206604003905, 0.040376319885253906, 0.04057907104492187, 0.04045759963989258, 0.040645248413085935, 0.040776927947998046, 0.040715038299560545, 0.040476673126220705, 0.04062412643432617, 0.0405852165222168, 0.04048076629638672, 0.040531967163085936, 0.040720382690429685, 0.04074291229248047, 0.040905887603759766, 0.04070896148681641, 0.040709983825683596, 0.04045225524902344, 0.040720382690429685, 0.0405931510925293, 0.04134003067016601, 0.040792831420898436, 0.04067136001586914, 0.04076892852783203, 0.04091494369506836, 0.0404376335144043, 0.04088905715942383, 0.04075462341308594, 0.040722782135009766, 0.0406036491394043, 0.040623870849609375, 0.04082710266113281, 0.04079232025146484, 0.040613056182861325, 0.04066387176513672, 0.040892353057861326, 0.04120787048339844, 0.041059486389160155, 0.041427806854248045, 0.04118233489990234, 0.040768383026123046, 0.040761119842529295, 0.0414681282043457, 0.04094486236572266, 0.04093417739868164, 0.040787967681884765, 0.04103577423095703, 0.041084129333496096, 0.041175838470458984, 0.040787582397460935, 0.04093705749511719, 0.041401119232177735, 0.040632320404052735, 0.04073583984375, 0.04072259140014649, 0.04100771331787109, 0.04110316848754883, 
0.04105865478515625, 0.04094748687744141, 0.04100527954101563, 0.04057702255249023, 0.04117913436889648, 0.041027454376220705, 0.04105023956298828, 0.04093952178955078, 0.04084326553344726, 0.04074700927734375, 0.04077881622314453, 0.04068652725219726, 0.04084051132202148, 0.04099142456054688, 0.040812545776367185, 0.04061798477172852, 0.040764480590820315, 0.04083808135986328, 0.040853023529052734, 0.0406258544921875, 0.04068022537231445, 0.04072652816772461, 0.04070713424682617, 0.04085036849975586, 0.041095169067382815, 0.04085747146606445, 0.04129443359375, 0.04072857666015625, 0.04048486328125, 0.040796161651611325, 0.0406036491394043, 0.040783199310302734, 0.04070467376708985, 0.040748638153076173, 0.04072284698486328, 0.04085964965820312, 0.04078387069702148, 0.04085075378417969, 0.04069036865234375, 0.04116070556640625, 0.04066089630126953, 0.040581214904785154, 0.040897567749023436, 0.040653087615966796, 0.04106924819946289, 0.04173587036132813, 0.04215225601196289, 0.04080543899536133, 0.04059996795654297, 0.04066678237915039, 0.04084415817260742, 0.04085760116577149, 0.04125491333007812, 0.040690719604492186, 0.040778720855712894, 0.040738815307617186, 0.04037587356567383, 0.040673023223876954, 0.04062627029418946, 0.04074911880493164, 0.04069839859008789, 0.0406828498840332, 0.04088611221313477, 0.04094806289672852, 0.040914913177490235, 0.04126092910766602, 0.041097854614257814, 0.041455615997314454, 0.040963680267333984, 0.04129219055175781, 0.04134092712402344, 0.04219638442993164, 0.040944225311279295, 0.040630271911621094, 0.04078579330444336, 0.04084953689575195, 0.041284862518310546, 0.04091494369506836, 0.040794017791748044, 0.04064547348022461, 0.04052374267578125, 0.040554527282714845, 0.040564735412597655, 0.040417278289794925, 0.04067737579345703, 0.04070809555053711, 0.04365264129638672, 0.04082457733154297, 0.0409463996887207]",tokens/s,24.552168171252035,,, 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", 
line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, 
Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmphu5n64in/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1174, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 894, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 507, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 436, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 
71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 113952 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpyh_zsdh5/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained 
self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in 
from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 96.12 MiB is free. Process 293709 has 14.64 GiB memory in use. Of the allocated memory 14.23 GiB is allocated by PyTorch, and 323.24 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3894, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 85, in _process_model_before_weight_loading model, has_been_replaced = replace_with_awq_linear( File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear _, has_been_replaced = replace_with_awq_linear( File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear _, has_been_replaced = replace_with_awq_linear( File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear _, has_been_replaced = replace_with_awq_linear( [Previous line repeated 1 more time] File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 166, in replace_with_awq_linear model._modules[name] = target_cls( File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemm.py"", line 131, in __init__ assert out_features % (32 // self.w_bit) == 0 AssertionError " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = 
scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in 
_flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", 
line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 
854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 29534 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpqk80fxs8/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpx4ipzc_j/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, 
in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report 
= scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp1nj5d4lt/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = 
scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 
67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in 
launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, 
**kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1039, in forward outputs = self.model( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 816, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 551, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 384, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File 
""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, 
q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1069, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 881, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 479, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1069, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 881, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 479, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # 
type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl 
return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 3 more times] File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 805, in _apply param_applied = fn(param) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.55 GiB is free. Process 225087 has 13.19 GiB memory in use. Of the allocated memory 13.09 GiB is allocated by PyTorch, and 880.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in 
_call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File 
""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp8w2wbor4/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( 
ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl 
return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,919.105536,578.68288,0.0,176.160768,154.500608,s,1,10.0747802734375,10.0747802734375,0.0,10.0747802734375,10.0747802734375,10.0747802734375,10.0747802734375,[10.0747802734375],,kWh,1.8840223354162808e-05,2.070879002957847e-06,6.10750488600105e-06,2.7018607243121703e-05,,MB,1384.685568,660.471808,0.0,243.269632,200.402944,s,17,0.21332748794555662,0.012548675761503331,0.00013371747206850152,0.012522272109985352,0.012712550354003906,0.01274401912689209,0.012804906578063966,"[0.012820128440856934, 0.01269916820526123, 0.01242959976196289, 0.01244159984588623, 0.012516703605651856, 0.012624320030212402, 0.012724991798400878, 0.012397472381591796, 0.01254041576385498, 0.012704256057739258, 0.012415936470031738, 0.012563103675842286, 0.012454624176025391, 0.012407039642333985, 0.012689599990844727, 0.012522272109985352, 0.012376255989074707]",tokens/s,20400.558980522357,kWh,3.6570128713450635e-07,4.0330179204704985e-08,1.850788169573936e-07,5.91110283296605e-07,tokens/kWh,433083313.27327853,MB,1419.890688,675.151872,0.0,257.949696,200.405504,s,17,9.84460040283203,0.5790941413430607,0.004406247438370094,0.5777682495117188,0.5858223632812499,0.5875574096679688,0.5884948315429688,"[0.5776289672851562, 0.5751454467773438, 0.5766144409179688, 0.5812184448242188, 0.5753554077148437, 0.5777682495117188, 0.5872644653320312, 0.5759959716796875, 0.57974267578125, 0.5848609619140624, 0.5764119873046875, 0.5790549926757812, 0.57115625, 0.5812243041992188, 0.5800532836914063, 0.5763753662109375, 0.5887291870117187]",tokens/s,108.79060156590018,kWh,1.6418961541106175e-05,1.8107257671662856e-06,5.69628391282036e-06,2.392597122109282e-05,tokens/kWh,2633121.950111686,,s,1071,9.833752511024459,0.009181841746988306,0.0002607534372296272,0.009144703865051269,0.009337151527404785,0.009478511810302735,0.010154534053802487,"[0.010020256042480469, 0.009593376159667969, 0.009131232261657714, 0.009132384300231933, 0.009153311729431152, 0.009393664360046386, 0.009007328033447266, 0.008967904090881348, 0.009011167526245117, 0.009075008392333984, 0.008989888191223145, 0.009141247749328613, 0.008959232330322265, 0.009027135848999024, 0.009038847923278808, 0.009078783988952637, 0.009105119705200195, 0.009076543807983398, 0.009210335731506347, 0.009002528190612792, 0.008981184005737304, 0.009348896026611328, 0.009086463928222656, 0.0091112642288208, 0.008880191802978516, 0.008901344299316407, 0.008962047576904298, 0.008945599555969238, 0.008962271690368652, 0.009043519973754882, 0.00915670394897461, 0.009019424438476562, 0.009080320358276368, 0.009053888320922852, 0.009110495567321778, 0.009052160263061524, 0.009080896377563477, 0.009307680130004882, 0.009130399703979492, 0.009060192108154296, 0.009133440017700195, 0.01030412769317627, 0.00914038372039795, 0.009110976219177246, 0.009128512382507325, 0.009136128425598144, 
0.009363360404968261, 0.009225760459899902, 0.009040160179138184, 0.009099552154541016, 0.009182687759399413, 0.009097760200500488, 0.009156607627868652, 0.009186559677124024, 0.009190143585205078, 0.00929792022705078, 0.009189375877380371, 0.009152192115783692, 0.009204031944274903, 0.009551872253417968, 0.009371135711669922, 0.009150848388671876, 0.009195072174072266, 0.008968192100524902, 0.00921718406677246, 0.009165663719177247, 0.009492256164550782, 0.009373920440673828, 0.009484288215637206, 0.00923033618927002, 0.009089280128479003, 0.009389792442321777, 0.009195679664611817, 0.009049983978271484, 0.009139583587646485, 0.009132672309875488, 0.008961055755615234, 0.009079775810241698, 0.009115039825439453, 0.009091679573059081, 0.009021759986877442, 0.009037504196166992, 0.009093088150024414, 0.009037856101989747, 0.00906611156463623, 0.009109888076782226, 0.009228032112121582, 0.009335040092468262, 0.009107456207275391, 0.00904576015472412, 0.009084416389465331, 0.009140992164611816, 0.009062399864196777, 0.009274592399597168, 0.009212703704833984, 0.009119296073913574, 0.00904851245880127, 0.009181440353393555, 0.009983455657958985, 0.009131744384765625, 0.008884799957275391, 0.008986623764038085, 0.009301664352416993, 0.009042271614074707, 0.008947232246398925, 0.009032159805297852, 0.00905129623413086, 0.009189248085021973, 0.009120512008666991, 0.009018815994262696, 0.009175840377807618, 0.009090399742126465, 0.009061023712158204, 0.009265151977539063, 0.008957759857177734, 0.008840928077697754, 0.008894463539123536, 0.009037887573242188, 0.00911625576019287, 0.009113408088684083, 0.009039392471313476, 0.009023296356201171, 0.00893616008758545, 0.009033920288085938, 0.009031423568725586, 0.008870176315307617, 0.008547295570373535, 0.008986623764038085, 0.009009471893310547, 0.008998592376708985, 0.00910483169555664, 0.008944128036499023, 0.008895775794982911, 0.008962528228759765, 0.009002335548400879, 0.00899283218383789, 0.009016223907470703, 0.0090316801071167, 0.009099264144897461, 0.009097439765930176, 0.009160767555236817, 0.009098272323608398, 0.009140768051147462, 0.009152671813964845, 0.009328703880310058, 0.009163040161132813, 0.009258655548095704, 0.009120896339416503, 0.009094335556030274, 0.00904099178314209, 0.009030240058898926, 0.009015040397644043, 0.009111424446105958, 0.009011584281921387, 0.008973407745361327, 0.009012191772460938, 0.009001184463500976, 0.008955039978027345, 0.009222847938537598, 0.009087200164794923, 0.009135616302490235, 0.009432703971862792, 0.009325087547302246, 0.009234496116638184, 0.00926035213470459, 0.009164480209350586, 0.009212863922119141, 0.009185471534729005, 0.00910476779937744, 0.009159104347229004, 0.00942080020904541, 0.009219903945922852, 0.009193663597106933, 0.009166848182678223, 0.009183263778686524, 0.009523167610168456, 0.009439231872558594, 0.009475647926330566, 0.009242239952087403, 0.009156736373901367, 0.009155263900756836, 0.009180447578430177, 0.009337151527404785, 0.009219584465026855, 0.009122336387634277, 0.009152416229248048, 0.00933731174468994, 0.009261055946350098, 0.00928275203704834, 0.008819040298461913, 0.009033503532409668, 0.009156512260437013, 0.009152704238891602, 0.009276000022888184, 0.00911571216583252, 0.00919705581665039, 0.009156640052795411, 0.00918899154663086, 0.00916982364654541, 0.009142144203186035, 0.009406463623046875, 0.00916812801361084, 0.00910211181640625, 0.009052127838134765, 0.009267200469970703, 0.009623552322387695, 0.00919961643218994, 0.009168895721435547, 
0.00922544002532959, 0.009091967582702637, 0.009113504409790038, 0.009195551872253419, 0.009080256462097168, 0.009195648193359374, 0.009243040084838868, 0.009201663970947266, 0.00916051197052002, 0.00927353572845459, 0.009302080154418945, 0.009131360054016113, 0.009433568000793457, 0.009133664131164551, 0.009097760200500488, 0.009189375877380371, 0.009148415565490722, 0.009100576400756837, 0.009062463760375977, 0.009052831649780274, 0.009164352416992188, 0.009068127632141113, 0.008972928047180176, 0.008974464416503906, 0.00910755157470703, 0.009646080017089843, 0.009002559661865235, 0.009380288124084472, 0.009678848266601562, 0.009800959587097167, 0.010218239784240723, 0.009265151977539063, 0.009299008369445801, 0.009192383766174317, 0.009134079933166504, 0.009095423698425293, 0.009149344444274902, 0.009157792091369629, 0.009258687973022461, 0.009231616020202636, 0.00922652816772461, 0.009195679664611817, 0.009298239707946777, 0.009244064331054687, 0.008675935745239258, 0.009133695602416992, 0.008985312461853027, 0.009000960350036622, 0.008955743789672852, 0.009060511589050293, 0.00884224033355713, 0.00882380771636963, 0.008939583778381348, 0.009059359550476074, 0.009032447814941407, 0.00910268783569336, 0.009073727607727051, 0.009123583793640137, 0.009047871589660644, 0.009063872337341308, 0.00904297637939453, 0.009272064208984375, 0.009173472404479981, 0.009086976051330567, 0.009073311805725098, 0.009018431663513184, 0.008995743751525878, 0.009142144203186035, 0.009244959831237793, 0.009117471694946288, 0.009091008186340331, 0.009246720314025878, 0.009144415855407715, 0.009151840209960937, 0.009094816207885742, 0.009417632102966308, 0.009095040321350098, 0.009209856033325196, 0.009154751777648925, 0.009199904441833497, 0.009236160278320313, 0.009064255714416504, 0.009156384468078614, 0.009144703865051269, 0.009067904472351075, 0.009122015953063965, 0.009316351890563965, 0.009159071922302246, 0.0091909761428833, 0.00904793643951416, 0.00909164810180664, 0.009164416313171386, 0.009216256141662597, 0.009214079856872559, 0.009112832069396972, 0.009142047882080078, 0.009120736122131348, 0.009154399871826173, 0.009074624061584474, 0.009156831741333008, 0.009119744300842286, 0.009455615997314454, 0.009183232307434081, 0.009104960441589356, 0.009577183723449708, 0.009213248252868652, 0.009219679832458496, 0.008863743782043456, 0.009184800148010253, 0.00900937557220459, 0.008936832427978516, 0.008943903923034667, 0.009204319953918457, 0.009027584075927735, 0.009009152412414552, 0.008863807678222656, 0.008877984046936035, 0.008931327819824218, 0.008949312210083007, 0.009311712265014649, 0.00896940803527832, 0.008980287551879882, 0.008959168434143067, 0.009284543991088866, 0.009143936157226562, 0.009081088066101074, 0.009222463607788086, 0.00912377643585205, 0.009084671974182128, 0.009063936233520508, 0.009036288261413575, 0.009254912376403808, 0.00914031982421875, 0.009261183738708496, 0.009322336196899414, 0.009116800308227539, 0.009117695808410644, 0.009159487724304199, 0.00920406436920166, 0.009129631996154785, 0.009247967720031738, 0.00917737579345703, 0.00918716812133789, 0.009167519569396973, 0.00926313591003418, 0.009313664436340331, 0.009253312110900878, 0.009279199600219726, 0.009267168045043945, 0.009279135704040528, 0.00945644760131836, 0.009183263778686524, 0.009172096252441406, 0.009150367736816406, 0.009198528289794922, 0.009172767639160155, 0.009502431869506837, 0.009366016387939453, 0.009185279846191406, 0.009142271995544434, 0.009047807693481446, 0.00902780818939209, 
0.009121824264526367, 0.009142016410827637, 0.009158368110656739, 0.009105152130126953, 0.008993215560913086, 0.009565952301025391, 0.010027615547180176, 0.009195296287536622, 0.0090481595993042, 0.009097599983215332, 0.009206432342529297, 0.009196127891540527, 0.009281951904296875, 0.009244640350341797, 0.009262656211853027, 0.009158143997192383, 0.009174176216125488, 0.009217568397521973, 0.009096991539001465, 0.009232895851135254, 0.00939583969116211, 0.009316448211669923, 0.009341216087341309, 0.009397503852844238, 0.009367775917053223, 0.009314847946166991, 0.0093504638671875, 0.009196096420288086, 0.009211903572082519, 0.009072768211364745, 0.009436991691589356, 0.009119872093200684, 0.009183296203613281, 0.009842687606811524, 0.01128649616241455, 0.010879008293151856, 0.009275615692138672, 0.009309056282043458, 0.009307040214538573, 0.009291775703430176, 0.009199071884155273, 0.00916534423828125, 0.009172991752624511, 0.009399871826171875, 0.00912992000579834, 0.009175552368164062, 0.009207776069641113, 0.009328672409057617, 0.009357312202453612, 0.009120863914489746, 0.0091145601272583, 0.009289695739746093, 0.009072223663330077, 0.00902121639251709, 0.008994688034057617, 0.009066656112670898, 0.009310815811157227, 0.009451007843017578, 0.009250847816467286, 0.009091551780700684, 0.009168671607971192, 0.009302207946777344, 0.00935097599029541, 0.009335136413574219, 0.010127231597900391, 0.009181056022644043, 0.009228416442871094, 0.009132320404052735, 0.009214879989624024, 0.00934995174407959, 0.009205823898315429, 0.008734848022460938, 0.008977855682373047, 0.008972928047180176, 0.009242336273193359, 0.00903001594543457, 0.009162464141845704, 0.009115296363830567, 0.009056832313537598, 0.009039936065673828, 0.009158880233764648, 0.009009984016418457, 0.00904736042022705, 0.009037247657775879, 0.009025759696960449, 0.009047455787658691, 0.009169504165649415, 0.009160703659057617, 0.009158111572265626, 0.009206303596496582, 0.009285280227661133, 0.009041983604431153, 0.009109791755676269, 0.009215616226196289, 0.009123807907104492, 0.0091428804397583, 0.009047871589660644, 0.0091364164352417, 0.009231552124023438, 0.009096927642822265, 0.009111680030822755, 0.009079487800598144, 0.009179264068603516, 0.009076607704162598, 0.009134079933166504, 0.009074432373046875, 0.00926358413696289, 0.009105055809020997, 0.00911577606201172, 0.009211903572082519, 0.009168255805969238, 0.009085760116577149, 0.009119711875915527, 0.009096960067749023, 0.009084256172180175, 0.00910211181640625, 0.009283552169799805, 0.009156607627868652, 0.009125887870788574, 0.00925836753845215, 0.009275168418884277, 0.00919638442993164, 0.009294943809509277, 0.00918393611907959, 0.009091263771057128, 0.009164352416992188, 0.009113727569580078, 0.009059935569763184, 0.009272064208984375, 0.00919961643218994, 0.009180447578430177, 0.009353952407836914, 0.009142271995544434, 0.00923033618927002, 0.008884223937988281, 0.00911507225036621, 0.009475872039794922, 0.009140576362609863, 0.009256928443908692, 0.009126367568969727, 0.008969375610351563, 0.009159520149230957, 0.009088735580444336, 0.009173503875732422, 0.009098912239074707, 0.009228032112121582, 0.009229920387268066, 0.009143072128295899, 0.009168800354003906, 0.009219391822814942, 0.009326848030090332, 0.009152159690856934, 0.009108448028564453, 0.009172415733337403, 0.009187808036804199, 0.008996416091918945, 0.009094719886779784, 0.009196191787719727, 0.009136351585388183, 0.009127936363220214, 0.00912384033203125, 0.009013407707214356, 
0.009192416191101074, 0.00923532772064209, 0.009072575569152832, 0.009073056221008301, 0.009088800430297852, 0.009116831779479981, 0.009136863708496094, 0.009162943840026855, 0.009285440444946289, 0.009157759666442871, 0.009216896057128906, 0.009195520401000976, 0.009228287696838379, 0.00916819190979004, 0.00912275218963623, 0.009211647987365722, 0.00910540771484375, 0.009183232307434081, 0.009342399597167969, 0.009197376251220702, 0.009333503723144532, 0.009193408012390136, 0.009405695915222168, 0.009421471595764161, 0.009483839988708496, 0.00927625560760498, 0.009209759712219238, 0.009218079566955566, 0.009324383735656739, 0.009246815681457519, 0.009274751663208007, 0.009224703788757324, 0.00926848030090332, 0.009253824234008789, 0.009346079826354981, 0.009018239974975585, 0.009218048095703125, 0.009316512107849121, 0.009306976318359375, 0.009362751960754395, 0.009481151580810547, 0.009275808334350585, 0.009265215873718261, 0.009294464111328124, 0.009278592109680175, 0.009273856163024903, 0.00935091209411621, 0.009236767768859863, 0.0093570556640625, 0.009244928359985352, 0.009252960205078126, 0.009238431930541992, 0.009276703834533691, 0.009228608131408692, 0.009510432243347168, 0.009220959663391114, 0.009194879531860352, 0.009251744270324706, 0.009246015548706054, 0.009310784339904786, 0.009240351676940917, 0.009193887710571289, 0.009158432006835937, 0.00950707244873047, 0.009412256240844727, 0.009260704040527343, 0.009257247924804687, 0.009236255645751952, 0.009264991760253907, 0.009243071556091309, 0.0093307523727417, 0.009103296279907226, 0.009125887870788574, 0.009203840255737304, 0.00913804817199707, 0.009138175964355469, 0.009219552040100098, 0.009248479843139649, 0.009267104148864747, 0.009241663932800293, 0.00920150375366211, 0.009114912033081055, 0.009095904350280761, 0.009172991752624511, 0.009162752151489258, 0.009089183807373048, 0.009115263938903809, 0.00910262393951416, 0.009689184188842773, 0.009276032447814941, 0.010402015686035156, 0.009896960258483887, 0.009325568199157714, 0.009196831703186035, 0.009165151596069336, 0.009204095840454101, 0.009165056228637695, 0.009045984268188477, 0.008941216468811035, 0.009068160057067872, 0.009145119667053222, 0.009267392158508301, 0.009209407806396484, 0.009097311973571777, 0.00910598373413086, 0.009199392318725587, 0.009125760078430176, 0.009093024253845216, 0.009052384376525878, 0.009129023551940918, 0.009067456245422363, 0.009187295913696289, 0.009100959777832032, 0.00911366367340088, 0.009208127975463868, 0.009162752151489258, 0.009228608131408692, 0.009126943588256836, 0.00922697639465332, 0.009127327919006348, 0.008999744415283202, 0.008988320350646973, 0.009101375579833985, 0.009062527656555176, 0.009086848258972168, 0.00912723159790039, 0.009120127677917481, 0.009034144401550292, 0.00905942440032959, 0.00902841567993164, 0.009062560081481933, 0.009076576232910156, 0.009110752105712891, 0.009106111526489259, 0.009195615768432617, 0.009232383728027344, 0.009215999603271484, 0.009163040161132813, 0.009155967712402343, 0.009183168411254883, 0.009212032318115234, 0.009248607635498047, 0.009160896301269532, 0.0091211519241333, 0.009202560424804688, 0.009100831985473633, 0.009171360015869141, 0.009123711585998536, 0.009111104011535644, 0.009055871963500977, 0.009049087524414063, 0.009175040245056153, 0.009148287773132324, 0.009158623695373536, 0.009142208099365234, 0.009293919563293456, 0.009229536056518555, 0.009214240074157715, 0.00931875228881836, 0.009191424369812011, 0.009190879821777343, 0.009031264305114747, 
0.009265695571899414, 0.009301952362060547, 0.00920787239074707, 0.009172639846801757, 0.009224703788757324, 0.009191264152526855, 0.009146368026733399, 0.009194592475891113, 0.009211968421936036, 0.009154656410217284, 0.009130047798156739, 0.009116352081298828, 0.009232383728027344, 0.009284640312194824, 0.009168031692504883, 0.009281023979187012, 0.009186880111694336, 0.009151231765747071, 0.00924403190612793, 0.00924556827545166, 0.00916256046295166, 0.00917683219909668, 0.009111743927001953, 0.009183039665222168, 0.00915065574645996, 0.00923971176147461, 0.00928444766998291, 0.009285632133483887, 0.00919161605834961, 0.00918505573272705, 0.009232416152954101, 0.009279423713684083, 0.009642304420471192, 0.009998080253601074, 0.010280960083007813, 0.009906175613403321, 0.009389151573181152, 0.009244704246520996, 0.009139039993286132, 0.009093152046203614, 0.009028736114501954, 0.0090632963180542, 0.009107647895812988, 0.009076543807983398, 0.008859647750854491, 0.008790016174316406, 0.008776896476745606, 0.008782176017761231, 0.008798687934875489, 0.008869759559631348, 0.008877599716186523, 0.00897439956665039, 0.008942111968994141, 0.008943840026855469, 0.009029760360717773, 0.009104415893554688, 0.009208255767822266, 0.009111743927001953, 0.009066495895385742, 0.00935763168334961, 0.009132767677307129, 0.0091976318359375, 0.009099455833435058, 0.009168831825256348, 0.009206720352172851, 0.009188287734985352, 0.009103360176086426, 0.009129983901977539, 0.009048352241516113, 0.008961759567260742, 0.00893887996673584, 0.008970111846923827, 0.008872480392456054, 0.008900832176208497, 0.008861408233642579, 0.008971967697143556, 0.008884991645812988, 0.008871487617492676, 0.008828351974487305, 0.008847295761108398, 0.008944416046142578, 0.008927200317382812, 0.008913056373596191, 0.008804351806640624, 0.008961055755615234, 0.008936415672302246, 0.0090316801071167, 0.00899071979522705, 0.009063615798950195, 0.009096063613891602, 0.00910643196105957, 0.009192704200744629, 0.0091627197265625, 0.009092032432556152, 0.008951744079589844, 0.009197792053222657, 0.009173376083374023, 0.009082400321960449, 0.00900601577758789, 0.009044032096862793, 0.009200799942016601, 0.009112128257751465, 0.0090928316116333, 0.009119711875915527, 0.00908351993560791, 0.009053791999816894, 0.009250368118286133, 0.009057151794433593, 0.00926195240020752, 0.009351967811584472, 0.009060352325439454, 0.009180512428283692, 0.009081503868103028, 0.008989888191223145, 0.009083264350891114, 0.009162495613098145, 0.008972415924072266, 0.009150527954101562, 0.009075200080871582, 0.009179136276245118, 0.00909721565246582, 0.0090316801071167, 0.009274720191955566, 0.009091103553771972, 0.008996959686279296, 0.008856863975524902, 0.008981216430664062, 0.008984576225280762, 0.009602399826049804, 0.009817024230957032, 0.010045120239257813, 0.008965184211730957, 0.00957539176940918, 0.009314432144165039, 0.009202752113342285, 0.009048895835876465, 0.009075839996337891, 0.009163488388061523, 0.009072383880615235, 0.009376416206359863, 0.00921993637084961, 0.009090559959411621, 0.009062432289123535, 0.009077119827270507, 0.009166848182678223, 0.009820480346679688, 0.009123488426208496, 0.009310239791870118, 0.009156607627868652, 0.009228287696838379, 0.009846783638000489, 0.009185279846191406, 0.00914179229736328, 0.009116127967834472, 0.009048064231872559, 0.009002335548400879, 0.009044639587402344, 0.008976672172546387, 0.009010208129882813, 0.009046879768371582, 0.009090559959411621, 0.0090251522064209, 0.009073087692260742, 
0.009170975685119629, 0.00906383991241455, 0.009075551986694337, 0.009191424369812011, 0.009440959930419922, 0.009118016242980958, 0.009046015739440917, 0.009123135566711425, 0.009105952262878417, 0.009143551826477051, 0.009241375923156738, 0.009178624153137208, 0.009171648025512695, 0.009110560417175293, 0.009118816375732423, 0.009120767593383788, 0.00911843204498291, 0.009117728233337402, 0.009148480415344239, 0.009344127655029297, 0.009898048400878906, 0.009800288200378418, 0.009246496200561524, 0.009234496116638184, 0.009292511940002442, 0.008936991691589355, 0.009124320030212402, 0.009240575790405273, 0.009123583793640137, 0.009210111618041993, 0.00914857578277588, 0.008978400230407715, 0.009027456283569336, 0.009027584075927735, 0.009139360427856446, 0.009091744422912598, 0.00910969638824463, 0.00904368019104004, 0.009107935905456542, 0.009051103591918946, 0.009105279922485351, 0.009221280097961426, 0.008888128280639649, 0.008876031875610351, 0.008937472343444825, 0.00893507194519043, 0.009149087905883788, 0.009246080398559571, 0.009056575775146484, 0.009070143699645997, 0.00906396770477295, 0.009109951972961427, 0.009214431762695312, 0.009236063957214356, 0.009165056228637695, 0.00904793643951416, 0.009005344390869141, 0.008939328193664551, 0.008906208038330077, 0.008899295806884765, 0.008932415962219237, 0.008954943656921387, 0.008992639541625977, 0.009136128425598144, 0.009062751770019532, 0.008996512413024902, 0.009030783653259277, 0.0090895357131958, 0.009177472114562988, 0.00917529582977295, 0.00907756805419922, 0.009073472023010254, 0.009072768211364745, 0.009192607879638671, 0.00930844783782959, 0.009179712295532227, 0.009103487968444825, 0.009070528030395509, 0.009482175827026366, 0.009719807624816895, 0.009197567939758301, 0.009116831779479981, 0.009143136024475097, 0.009490528106689454, 0.009621312141418457, 0.012688960075378418, 0.010652192115783691, 0.009240223884582519, 0.008923135757446288, 0.009153568267822265, 0.009137408256530762, 0.009062432289123535, 0.009135040283203125, 0.00923686408996582, 0.009160256385803223, 0.009163583755493164, 0.009117695808410644, 0.009068544387817384, 0.009099264144897461, 0.00909023952484131, 0.009054944038391114, 0.009061504364013671, 0.009119999885559083, 0.00921670436859131, 0.009099295616149902, 0.008999967575073242, 0.009072959899902344, 0.009175711631774903, 0.00913849639892578, 0.009127615928649902, 0.00917199993133545, 0.009212127685546875, 0.009133855819702148, 0.009136639595031738, 0.009177568435668946, 0.00914179229736328, 0.009138912200927734, 0.0090764799118042, 0.00928767967224121, 0.009099264144897461, 0.009019392013549805, 0.009125887870788574, 0.009068544387817384, 0.009070943832397461, 0.009087871551513672, 0.009114687919616699, 0.009930047988891601, 0.009275039672851563, 0.009241344451904297, 0.00919164752960205, 0.009119647979736328, 0.009116831779479981, 0.009087712287902831, 0.009134079933166504, 0.009046015739440917, 0.009136128425598144, 0.009066495895385742, 0.009103679656982422, 0.009160063743591308, 0.00911788845062256, 0.009357119560241699, 0.009056384086608887, 0.009123071670532227, 0.009075679779052735, 0.009049216270446777, 0.009251680374145508, 0.00912553596496582, 0.009073087692260742, 0.009084832191467286, 0.009217984199523925, 0.009119680404663086, 0.008955904006958008, 0.009128000259399414, 0.009123456001281739, 0.009060671806335449, 0.009080608367919922, 0.009411999702453613, 0.009198399543762207, 0.009101311683654785, 0.009091072082519532, 0.00909280014038086, 0.009091232299804687, 
0.009131936073303223, 0.009066752433776856, 0.009145536422729493, 0.009051136016845703, 0.009164799690246582, 0.009086560249328614, 0.009051584243774414, 0.009128576278686523, 0.009215295791625977, 0.009020352363586425, 0.009025312423706055, 0.00903711986541748, 0.009265503883361816, 0.009220576286315918, 0.00921177577972412, 0.009140352249145507, 0.009148384094238281, 0.009152031898498536, 0.00927337646484375, 0.009388447761535645, 0.00926633644104004, 0.009318623542785644, 0.009255616188049317, 0.00925443172454834, 0.009282015800476074, 0.009224191665649414, 0.009211903572082519, 0.009238368034362792, 0.009228511810302735, 0.009322431564331055, 0.009736191749572755, 0.009309375762939453, 0.01171894359588623, 0.011846240043640138, 0.009608384132385253, 0.00934550380706787, 0.009322784423828124, 0.009263104438781738, 0.009400575637817384, 0.009193216323852539, 0.009375743865966797, 0.009285247802734375, 0.009764736175537109, 0.009172639846801757, 0.009376735687255859, 0.009181183815002441, 0.009227807998657227, 0.009144672393798828, 0.010803199768066407, 0.00938105583190918, 0.009494976043701172, 0.009266719818115235]",tokens/s,108.91061156962387,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,837.804032,522.059776,0.0,136.31488,125.60896,s,1,10.019990234375,10.019990234375,0.0,10.019990234375,10.019990234375,10.019990234375,10.019990234375,[10.019990234375],,kWh,1.447449984166743e-05,1.5888696760684177e-06,4.306670112000539e-06,2.0370039629736385e-05,,MB,1280.192512,641.59744,0.0,226.492416,195.515904,s,16,0.21704246425628657,0.013565154016017912,6.976283725045848e-05,0.01355571174621582,0.013654160022735597,0.013662384271621704,0.013671791887283326,"[0.013474464416503906, 0.013544095993041991, 0.013674143791198731, 0.01361353588104248, 0.013649855613708496, 0.01362713623046875, 0.013461664199829101, 0.013466624259948731, 0.013637151718139649, 0.013553919792175293, 0.013580320358276367, 0.013534976005554198, 0.013557503700256348, 0.013658464431762695, 0.013482784271240235, 0.013525823593139648]",tokens/s,18871.883039271936,kWh,3.9866689128529706e-07,4.396547300078106e-08,2.6071552429781444e-07,7.033478885838926e-07,tokens/kWh,363973510.34269196,MB,1312.915456,656.277504,0.0,241.17248,195.518464,s,16,10.154903320312501,0.6346814575195312,0.004070535816518479,0.6341619567871094,0.6389557189941406,0.6404804077148438,0.6437780639648437,"[0.6323868408203125, 0.6377255249023438, 0.6366112670898437, 0.6446024780273437, 0.6391063842773438, 0.6358828125, 0.6335826416015625, 0.6358870239257812, 0.6388050537109375, 0.6347412719726563, 0.6334695434570312, 0.6289264526367188, 0.631905517578125, 0.6315582885742187, 0.6319912109375, 
0.6277210083007813]",tokens/s,99.26239258070855,kWh,1.816251975767309e-05,2.0029564601219822e-06,6.38972382382715e-06,2.6555200041622213e-05,tokens/kWh,2372416.698095091,,s,1008,10.144575751304625,0.01006406324534189,0.00022677404264694238,0.010033775806427002,0.01017954912185669,0.01026013627052307,0.011048656177520752,"[0.009697728157043457, 0.009923520088195801, 0.010001184463500977, 0.009981792449951172, 0.010025471687316894, 0.009966303825378418, 0.010001407623291016, 0.010006239891052246, 0.009975616455078124, 0.009978336334228516, 0.010168319702148437, 0.010086079597473145, 0.010029376029968262, 0.00993280029296875, 0.010029024124145507, 0.009968864440917968, 0.009969632148742675, 0.010029312133789062, 0.011045472145080566, 0.010111295700073242, 0.009949919700622558, 0.009970687866210937, 0.00995529556274414, 0.010034432411193847, 0.010089216232299804, 0.01002905559539795, 0.010014176368713379, 0.0100481595993042, 0.009950976371765136, 0.010038944244384765, 0.009910752296447754, 0.009975808143615723, 0.010115296363830566, 0.01014681625366211, 0.01003395175933838, 0.010043392181396485, 0.00997372817993164, 0.010051808357238769, 0.00996947193145752, 0.0100065279006958, 0.010032959938049316, 0.010170559883117675, 0.010011839866638183, 0.00997868824005127, 0.009992192268371582, 0.009910271644592286, 0.009981696128845214, 0.009988351821899414, 0.01003718376159668, 0.01001478385925293, 0.010004480361938477, 0.010053952217102051, 0.010038816452026367, 0.009998496055603028, 0.010128992080688477, 0.01001961612701416, 0.010042431831359864, 0.010168319702148437, 0.009983839988708497, 0.009994976043701172, 0.009989407539367676, 0.009937631607055663, 0.010030464172363281, 0.009783167839050293, 0.010084799766540527, 0.010026304244995118, 0.010049568176269532, 0.010040063858032227, 0.010010496139526367, 0.00998969554901123, 0.00997212791442871, 0.010189120292663573, 0.01009222412109375, 0.00988758373260498, 0.009979328155517579, 0.009970591545104981, 0.010002304077148437, 0.010095711708068847, 0.010089504241943359, 0.010116991996765136, 0.010117440223693848, 0.010094016075134278, 0.010037599563598633, 0.01018051242828369, 0.010080256462097169, 0.010065983772277833, 0.009968928337097168, 0.010055423736572266, 0.009996928215026856, 0.010053728103637695, 0.010295488357543946, 0.01007436752319336, 0.009954239845275879, 0.010039839744567871, 0.009990431785583497, 0.010045120239257813, 0.0102608642578125, 0.010157631874084474, 0.010013055801391602, 0.010233823776245116, 0.010074080467224121, 0.010052767753601075, 0.010097567558288573, 0.010170368194580079, 0.010190943717956542, 0.010130656242370606, 0.010191360473632812, 0.010106783866882324, 0.010012960433959961, 0.010038880348205567, 0.0101278076171875, 0.010055264472961426, 0.009989824295043945, 0.010099488258361817, 0.010170271873474121, 0.010219136238098144, 0.010555935859680175, 0.010419615745544434, 0.010076288223266602, 0.010237759590148925, 0.010197664260864258, 0.010229472160339356, 0.010545056343078613, 0.010703359603881836, 0.010129247665405273, 0.010156255722045898, 0.009844736099243164, 0.010231616020202637, 0.010066335678100585, 0.01012713623046875, 0.010250240325927735, 0.010112992286682129, 0.010100768089294434, 0.01010208034515381, 0.010068767547607422, 0.010176128387451171, 0.010101023674011231, 0.010267744064331055, 0.010181440353393554, 0.010247488021850586, 0.010201984405517579, 0.010139552116394043, 0.010057344436645507, 0.009970272064208984, 0.010124256134033203, 0.010021696090698242, 0.010174464225769043, 0.010104319572448731, 
0.010017279624938966, 0.009969663619995118, 0.009975808143615723, 0.009955072402954101, 0.010200991630554199, 0.010082592010498046, 0.009994463920593262, 0.00992848014831543, 0.00999385643005371, 0.010192543983459472, 0.010050432205200195, 0.01001052761077881, 0.01004748821258545, 0.010018815994262695, 0.009990464210510254, 0.010087936401367188, 0.00999392032623291, 0.010102656364440918, 0.01003206443786621, 0.01002665615081787, 0.01008233642578125, 0.010135680198669434, 0.010035103797912597, 0.010061408042907715, 0.009971808433532715, 0.010049152374267577, 0.010054304122924804, 0.01030348777770996, 0.010113311767578124, 0.010089504241943359, 0.01010524845123291, 0.010169631958007812, 0.01004377555847168, 0.010097087860107422, 0.010219488143920898, 0.010117152214050292, 0.010319904327392578, 0.01030185604095459, 0.010112704277038575, 0.01012451171875, 0.010113887786865235, 0.011581439971923829, 0.010056735992431641, 0.010009568214416504, 0.010092608451843262, 0.010157759666442872, 0.010096896171569824, 0.010018783569335938, 0.010096159934997559, 0.010011136054992676, 0.010053631782531738, 0.01005190372467041, 0.010069024085998536, 0.010098976135253906, 0.010107263565063477, 0.010092703819274903, 0.010051615715026855, 0.01015120029449463, 0.010058143615722655, 0.010023039817810058, 0.01013161563873291, 0.01007209587097168, 0.010039135932922364, 0.010053600311279297, 0.010051584243774414, 0.010595711708068847, 0.010193535804748535, 0.010101056098937989, 0.010042911529541015, 0.010740063667297363, 0.012150591850280761, 0.012277759552001954, 0.010168319702148437, 0.010066176414489746, 0.010028800010681153, 0.010010335922241211, 0.010092512130737304, 0.011048895835876465, 0.012200096130371093, 0.01024022388458252, 0.010042400360107421, 0.009995519638061523, 0.01009225559234619, 0.010535072326660157, 0.009940128326416015, 0.010033663749694824, 0.010016960144042969, 0.00993721580505371, 0.009998016357421875, 0.009961471557617188, 0.009955327987670898, 0.010153440475463868, 0.010075743675231934, 0.009917375564575195, 0.010059776306152344, 0.010004192352294923, 0.010080544471740723, 0.01002905559539795, 0.009982208251953124, 0.009955072402954101, 0.00993280029296875, 0.009946880340576171, 0.010084768295288087, 0.010034527778625488, 0.009776927947998048, 0.010040863990783692, 0.01003113555908203, 0.009972640037536621, 0.009950976371765136, 0.010057024002075195, 0.010002847671508788, 0.010104543685913086, 0.009994175910949707, 0.01000716781616211, 0.00995132827758789, 0.009922143936157226, 0.00992083168029785, 0.01002086353302002, 0.00999833583831787, 0.010044960021972657, 0.010041695594787597, 0.01000607967376709, 0.009998784065246583, 0.010025088310241698, 0.009918463706970216, 0.010018719673156738, 0.009983743667602539, 0.00991641616821289, 0.009996959686279297, 0.00994876766204834, 0.009844832420349121, 0.009906175613403321, 0.009921888351440429, 0.010035872459411621, 0.009897983551025391, 0.010288576126098632, 0.010179136276245117, 0.010041343688964843, 0.011900927543640137, 0.009942527770996093, 0.009990816116333007, 0.009906047821044922, 0.010124799728393554, 0.010071871757507324, 0.010021056175231934, 0.010112735748291015, 0.010007295608520508, 0.010088255882263183, 0.009996479988098144, 0.009936896324157715, 0.010011839866638183, 0.009994976043701172, 0.01001411247253418, 0.010105536460876465, 0.010070015907287597, 0.010155808448791505, 0.01092630386352539, 0.010493535995483399, 0.010166815757751465, 0.012963680267333985, 0.010704575538635254, 0.010258784294128418, 0.01001471996307373, 
0.010332159996032715, 0.010282431602478027, 0.010018431663513183, 0.010048447608947754, 0.009982784271240235, 0.010074111938476562, 0.010233344078063965, 0.009950943946838379, 0.010004287719726562, 0.01068950366973877, 0.011161600112915039, 0.010110976219177246, 0.01002905559539795, 0.010092415809631348, 0.010062175750732422, 0.010091296195983887, 0.009996447563171387, 0.010074975967407227, 0.010011775970458985, 0.010000639915466309, 0.010189663887023926, 0.00989139175415039, 0.009972000122070312, 0.009981887817382813, 0.009901856422424317, 0.009965920448303222, 0.010872703552246094, 0.009989983558654785, 0.009963680267333984, 0.009967743873596191, 0.010095647811889649, 0.01005196762084961, 0.010145536422729492, 0.01008937644958496, 0.010090304374694823, 0.0100795841217041, 0.010068639755249024, 0.010065664291381836, 0.009957632064819337, 0.009994400024414063, 0.010159968376159668, 0.009969120025634765, 0.010070528030395508, 0.010064127922058106, 0.010055583953857422, 0.009952287673950195, 0.010036064147949218, 0.010037440299987793, 0.010139679908752442, 0.010019935607910157, 0.010017215728759766, 0.009994496345520019, 0.010047648429870605, 0.010145376205444336, 0.010164064407348633, 0.010016256332397461, 0.010024864196777344, 0.010101759910583496, 0.010209407806396485, 0.010010496139526367, 0.010027008056640625, 0.010016768455505372, 0.010024959564208985, 0.010190239906311035, 0.0100665283203125, 0.00990937614440918, 0.009877728462219239, 0.009750783920288086, 0.010055423736572266, 0.01005782413482666, 0.0099934720993042, 0.009994112014770507, 0.010093343734741212, 0.010211647987365723, 0.010065600395202637, 0.009977631568908692, 0.009908288002014161, 0.009989760398864745, 0.01017296028137207, 0.010007712364196777, 0.009991040229797363, 0.010033120155334472, 0.00993280029296875, 0.01001420783996582, 0.010105119705200196, 0.009973919868469239, 0.010014752388000488, 0.009952768325805664, 0.010111519813537598, 0.009950431823730468, 0.009999135971069335, 0.009944928169250489, 0.009928447723388672, 0.009955360412597656, 0.009943552017211914, 0.010266495704650878, 0.01003865623474121, 0.00990681552886963, 0.010317824363708495, 0.010147839546203614, 0.010044639587402343, 0.010087264060974122, 0.009994175910949707, 0.009926655769348144, 0.010150912284851075, 0.009992416381835938, 0.010148639678955079, 0.010067968368530274, 0.010052703857421874, 0.010533791542053222, 0.010044832229614258, 0.010000991821289062, 0.010133503913879394, 0.010028863906860351, 0.00998966407775879, 0.009949055671691895, 0.009982751846313477, 0.010127200126647948, 0.009990303993225098, 0.010035200119018555, 0.010026495933532715, 0.01003337574005127, 0.010148287773132324, 0.010114080429077148, 0.0100196475982666, 0.010012672424316407, 0.010145792007446289, 0.010073887825012206, 0.01013548755645752, 0.010146080017089843, 0.009735551834106446, 0.009988351821899414, 0.01011731243133545, 0.009955424308776856, 0.01005072021484375, 0.00990233612060547, 0.009980640411376952, 0.01000374412536621, 0.010066431999206543, 0.00997100830078125, 0.01005577564239502, 0.009958399772644042, 0.010073856353759765, 0.010182687759399414, 0.010117055892944336, 0.010004575729370118, 0.01008563232421875, 0.01021014404296875, 0.00997772789001465, 0.010022848129272462, 0.010053695678710938, 0.009989567756652832, 0.01006220817565918, 0.010068032264709472, 0.010207136154174805, 0.010138943672180175, 0.010089471817016601, 0.01010489559173584, 0.010037088394165039, 0.010066207885742187, 0.010178272247314452, 0.00997539234161377, 0.0099967041015625, 
0.010080256462097169, 0.010054880142211915, 0.010145728111267089, 0.010957663536071777, 0.010053759574890136, 0.01016204833984375, 0.010174464225769043, 0.010117440223693848, 0.010026687622070313, 0.01003110408782959, 0.010133503913879394, 0.010184991836547851, 0.009940704345703126, 0.010123264312744141, 0.010012031555175781, 0.010125951766967773, 0.01003110408782959, 0.010078207969665527, 0.010281279563903808, 0.010057408332824707, 0.010104991912841798, 0.010047295570373534, 0.010086432456970214, 0.010072064399719239, 0.010141695976257324, 0.010070015907287597, 0.010092160224914552, 0.010119551658630372, 0.010194656372070312, 0.010115584373474122, 0.009959424018859863, 0.010147168159484863, 0.010383328437805176, 0.010271167755126953, 0.010044992446899414, 0.010060480117797851, 0.010069696426391602, 0.0100449275970459, 0.011273023605346679, 0.010005599975585937, 0.01006175994873047, 0.010076895713806152, 0.01016038417816162, 0.010088255882263183, 0.010161600112915038, 0.010049599647521973, 0.010078911781311034, 0.009986047744750976, 0.010043231964111329, 0.01118342399597168, 0.010500800132751465, 0.010162079811096191, 0.010015328407287598, 0.010071616172790527, 0.010084447860717773, 0.010024415969848632, 0.009968095779418946, 0.00999839973449707, 0.010018815994262695, 0.01000169563293457, 0.010058464050292969, 0.0099835844039917, 0.010060031890869141, 0.010008352279663086, 0.010023200035095214, 0.009951616287231445, 0.010133024215698241, 0.010066047668457031, 0.010205568313598633, 0.009957056045532226, 0.010055551528930664, 0.010182784080505372, 0.010123264312744141, 0.01004310417175293, 0.009955615997314453, 0.00998572826385498, 0.010041888236999513, 0.010077376365661622, 0.010141504287719727, 0.010066047668457031, 0.010134304046630859, 0.010067296028137208, 0.010197279930114746, 0.00996787166595459, 0.010158080101013184, 0.010606143951416016, 0.01014243221282959, 0.01013526439666748, 0.010073599815368652, 0.010234368324279786, 0.010157279968261719, 0.010054368019104004, 0.01009779167175293, 0.009900256156921387, 0.010168288230895996, 0.009996767997741698, 0.010025119781494141, 0.010121312141418457, 0.010084256172180176, 0.010012672424316407, 0.01010041618347168, 0.010172736167907715, 0.00999833583831787, 0.009965472221374512, 0.010053919792175294, 0.010002528190612793, 0.009979616165161133, 0.010026911735534667, 0.010098624229431152, 0.01004355239868164, 0.010022368431091309, 0.01004800033569336, 0.010053664207458497, 0.010055808067321778, 0.010155903816223144, 0.010135519981384277, 0.009990431785583497, 0.00999625587463379, 0.009979328155517579, 0.010015071868896485, 0.010028063774108887, 0.010083328247070313, 0.0100797758102417, 0.010236351966857911, 0.009981663703918457, 0.01010416030883789, 0.010271360397338867, 0.010071455955505371, 0.009986080169677735, 0.010126208305358887, 0.01012070369720459, 0.010088864326477051, 0.01005577564239502, 0.010029248237609863, 0.010039104461669921, 0.010074111938476562, 0.010012543678283692, 0.010020992279052735, 0.010079392433166504, 0.01011513614654541, 0.010146592140197754, 0.01005568027496338, 0.010069439888000489, 0.010269248008728027, 0.010205535888671876, 0.010091520309448243, 0.010133248329162597, 0.009965472221374512, 0.010116095542907716, 0.009999903678894043, 0.010111455917358398, 0.010112256050109862, 0.010072128295898437, 0.010012639999389648, 0.009976415634155274, 0.009951647758483886, 0.009812000274658204, 0.010012639999389648, 0.010031295776367187, 0.00993791961669922, 0.009975071907043456, 0.010130399703979493, 0.010062591552734375, 
0.010118528366088867, 0.009988032341003417, 0.010019264221191405, 0.010057984352111816, 0.010041088104248046, 0.010023167610168457, 0.010077856063842773, 0.010062175750732422, 0.010194623947143554, 0.010178879737854004, 0.010022239685058594, 0.009964384078979493, 0.010067359924316406, 0.00998419189453125, 0.009993632316589356, 0.010277312278747558, 0.009947039604187012, 0.009918975830078124, 0.009999872207641602, 0.01006383991241455, 0.010073760032653808, 0.010180928230285645, 0.009892383575439452, 0.010197279930114746, 0.009965567588806153, 0.010088000297546387, 0.009936927795410156, 0.01008566379547119, 0.009966431617736816, 0.010090496063232422, 0.010022720336914063, 0.010152128219604492, 0.010002431869506835, 0.010135552406311036, 0.010285056114196778, 0.010004223823547364, 0.009996800422668458, 0.009972607612609864, 0.010001279830932618, 0.00996678352355957, 0.010038080215454102, 0.009967616081237793, 0.01002905559539795, 0.010092032432556153, 0.01000108814239502, 0.010092479705810547, 0.010014752388000488, 0.010093503952026368, 0.010052096366882325, 0.010071616172790527, 0.01006009578704834, 0.010054368019104004, 0.010199007987976074, 0.010060031890869141, 0.010018400192260742, 0.010004480361938477, 0.009670656204223632, 0.010002431869506835, 0.0100065279006958, 0.00999014377593994, 0.01023744010925293, 0.010082816123962402, 0.010051136016845702, 0.010115519523620605, 0.010049535751342773, 0.010059776306152344, 0.010024671554565429, 0.00993718433380127, 0.010084383964538575, 0.010215392112731933, 0.009906175613403321, 0.009981951713562011, 0.010182208061218261, 0.010250911712646485, 0.009953215599060058, 0.010084192276000976, 0.009983519554138183, 0.009869183540344238, 0.009861503601074219, 0.009958911895751953, 0.009857791900634766, 0.009899999618530273, 0.00974847984313965, 0.009781248092651367, 0.00990937614440918, 0.009884544372558594, 0.009963775634765625, 0.009887519836425782, 0.009887711524963379, 0.009895936012268066, 0.01000163173675537, 0.009912768363952636, 0.009935392379760741, 0.00986911964416504, 0.009982303619384765, 0.009917920112609863, 0.009889984130859375, 0.009904128074645996, 0.010155360221862792, 0.009956352233886719, 0.00988038444519043, 0.009920960426330566, 0.0101626558303833, 0.009944704055786133, 0.009976127624511718, 0.009921664237976075, 0.010003199577331543, 0.01015334415435791, 0.009945343971252442, 0.009902591705322266, 0.009983551979064942, 0.009905856132507324, 0.009941568374633789, 0.009908096313476562, 0.010223936080932618, 0.009838591575622559, 0.009920063972473145, 0.00997539234161377, 0.009993023872375488, 0.009771007537841797, 0.009983039855957032, 0.010095423698425292, 0.00995263957977295, 0.010023008346557618, 0.0099966402053833, 0.010049856185913086, 0.009901535987854005, 0.01005190372467041, 0.009920736312866212, 0.009951168060302735, 0.009869376182556152, 0.009989888191223144, 0.009953791618347169, 0.009842432022094727, 0.009924287796020509, 0.009963839530944825, 0.009907487869262696, 0.009890432357788087, 0.00996771240234375, 0.009895903587341309, 0.010001472473144531, 0.009940159797668458, 0.00989692783355713, 0.009917119979858398, 0.010112704277038575, 0.01006227207183838, 0.009996288299560547, 0.010016608238220214, 0.009978015899658204, 0.010295295715332031, 0.010045439720153808, 0.010031167984008788, 0.010094719886779785, 0.010055520057678223, 0.010115327835083008, 0.010008288383483886, 0.009994239807128906, 0.010118623733520508, 0.009929247856140137, 0.010184703826904297, 0.009998623847961425, 0.010046463966369629, 
0.01003388786315918, 0.010073344230651855, 0.01002131175994873, 0.010171839714050293, 0.010162976264953614, 0.010144960403442383, 0.010070976257324218, 0.010263744354248048, 0.01002783966064453, 0.010116415977478027, 0.010127712249755859, 0.010086720466613769, 0.010035072326660156, 0.01009267234802246, 0.010000384330749512, 0.009922816276550292, 0.01003884792327881, 0.01002019214630127, 0.01002787208557129, 0.010061823844909668, 0.009770943641662597, 0.010102496147155762, 0.010076512336730957, 0.010066143989562988, 0.009987648010253907, 0.010101216316223145, 0.01007795238494873, 0.010094592094421387, 0.010092543601989747, 0.009994239807128906, 0.009914719581604003, 0.010036383628845215, 0.009968128204345703, 0.010055551528930664, 0.009930047988891601, 0.00994927978515625, 0.009898719787597656, 0.010027008056640625, 0.009926464080810547, 0.009867008209228516, 0.009900320053100585, 0.009940352439880371, 0.010865471839904784, 0.011198431968688964, 0.00993996810913086, 0.009887007713317872, 0.00992636775970459, 0.009948896408081056, 0.009957823753356933, 0.010055520057678223, 0.009866399765014649, 0.009924448013305665, 0.010002752304077149, 0.009917119979858398, 0.009820223808288574, 0.00993068790435791, 0.009954527854919433, 0.009976608276367187, 0.009947135925292968, 0.009945088386535645, 0.01005568027496338, 0.010067968368530274, 0.010107935905456543, 0.009930751800537109, 0.00995577621459961, 0.010054240226745606, 0.010016096115112305, 0.010373663902282716, 0.009963583946228028, 0.009994239807128906, 0.009859071731567384, 0.00985484790802002, 0.009873536109924317, 0.010076160430908204, 0.010010047912597657, 0.009983967781066895, 0.009998944282531739, 0.010071423530578614, 0.010055904388427734, 0.009955007553100586, 0.009921279907226563, 0.00999625587463379, 0.00991801643371582, 0.009689279556274414, 0.01000870418548584, 0.0101081600189209, 0.010039360046386718, 0.01004582405090332, 0.010076160430908204, 0.010086720466613769, 0.010062848091125488, 0.00995804786682129, 0.009920543670654296, 0.009956352233886719, 0.010107232093811034, 0.010049311637878418, 0.010124159812927245, 0.01017580795288086, 0.010096863746643066, 0.010041088104248046, 0.010196991920471191, 0.010089056015014648, 0.00996342372894287, 0.010250720024108886, 0.009961600303649903, 0.010034208297729491, 0.009970272064208984, 0.010323967933654785, 0.009974080085754395, 0.00997871971130371, 0.010011520385742188, 0.010008543968200683, 0.00986342430114746, 0.010119232177734375, 0.00986252784729004, 0.010117664337158203, 0.009936927795410156, 0.00999839973449707, 0.009959136009216309, 0.010040608406066895, 0.009937536239624024, 0.01003667163848877, 0.00995132827758789, 0.009998880386352538, 0.01001030445098877, 0.00997212791442871, 0.010065823554992677, 0.009959424018859863, 0.00991436767578125, 0.010094592094421387, 0.009999584197998047, 0.009972512245178223, 0.010158080101013184, 0.010049599647521973, 0.01004684829711914, 0.009978431701660156, 0.009957183837890625, 0.010031583786010743, 0.010030143737792969, 0.00993552017211914, 0.009970687866210937, 0.009972800254821777, 0.009985471725463867, 0.010020992279052735, 0.01004582405090332, 0.010051584243774414, 0.009834943771362304, 0.009922207832336426, 0.009936991691589356, 0.009946720123291015, 0.01004355239868164, 0.009983872413635254, 0.010041983604431152, 0.009947360038757324, 0.01012713623046875, 0.00989788818359375, 0.01003273582458496, 0.009907744407653808, 0.009976799964904786, 0.009926655769348144, 0.00999014377593994, 0.009867263793945312, 0.009961248397827149, 
0.009906623840332031, 0.009858847618103028, 0.009917920112609863, 0.009879967689514161, 0.009754495620727539, 0.009822591781616211, 0.009930208206176757, 0.009863903999328613, 0.009821887969970702, 0.010090496063232422, 0.009969887733459472, 0.009846719741821289, 0.010049247741699218, 0.009853055953979492, 0.009869215965270996, 0.009967743873596191, 0.009945055961608888, 0.009922880172729492, 0.009913439750671387, 0.009916735649108886, 0.010060064315795899, 0.009913887977600098, 0.009906720161437989, 0.009912256240844727, 0.00998528003692627, 0.009863264083862304, 0.009882528305053711, 0.009995327949523927, 0.009849535942077636, 0.009827903747558594, 0.010064543724060059, 0.009989791870117188, 0.010218815803527832, 0.010062496185302734, 0.010229984283447265, 0.010004032135009765, 0.010013055801391602, 0.009918463706970216, 0.009966912269592286, 0.010080960273742676, 0.00992460823059082, 0.009971712112426758, 0.009974080085754395, 0.010161343574523926, 0.009900095939636231, 0.009945343971252442]",tokens/s,99.36344552115625,,, 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,840.634368,522.059776,0.0,136.31488,125.252608,s,1,9.6473408203125,9.6473408203125,0.0,9.6473408203125,9.6473408203125,9.6473408203125,9.6473408203125,[9.6473408203125],,kWh,1.425596275833243e-05,1.5652343235724862e-06,4.30361455400121e-06,2.0124811635906127e-05,,MB,1287.319552,641.59744,0.0,226.492416,195.159552,s,13,0.19372073650360105,0.01490159511566162,0.00017144373868081323,0.014955583572387696,0.015107629203796387,0.015120243644714355,0.015131517906188965,"[0.015110848426818847, 0.014955583572387696, 0.014821791648864746, 0.014790656089782715, 0.014937472343444825, 0.015094752311706544, 0.01495798397064209, 0.015134336471557618, 0.014766207695007324, 0.014543295860290528, 0.01497596836090088, 0.014980031967163085, 0.01465180778503418]",tokens/s,17179.36892077703,kWh,4.4004443138805067e-07,4.852505528797472e-08,2.534096739457833e-07,7.419791606218087e-07,tokens/kWh,345023167.20790595,MB,1319.919616,656.277504,0.0,241.17248,195.162112,s,13,10.557029235839842,0.8120791719876803,0.007326012652424802,0.8123471069335938,0.8213180908203125,0.8231855346679687,0.8249905737304688,"[0.8198651123046875, 0.8103616943359375, 0.8041770629882813, 0.8133671875, 0.8216813354492187, 0.8254418334960938, 0.8123471069335938, 0.8143312377929688, 0.8168601684570312, 0.8032828369140625, 0.801821044921875, 0.8027590942382813, 0.8107335205078126]",tokens/s,77.57864278897642,kWh,2.3350824261560822e-05,2.5752092475806226e-06,8.504311007438839e-06,3.44303445165803e-05,tokens/kWh,1829781.2840548744,,s,819,10.549785240173351,0.012881300659552307,0.0003375919730032391,0.012855168342590331,0.01311066837310791,0.013270953273773194,0.014132797660827636,"[0.01283334445953369, 0.012958111763000489, 0.012929023742675782, 0.013075872421264649, 0.015822976112365723, 0.01448582363128662, 0.014039168357849121, 0.013108639717102051, 0.013168383598327636, 0.012992704391479492, 0.01304851245880127, 0.013221728324890136, 0.012860608100891114, 0.012945759773254394, 0.012819199562072755, 0.012893983840942383, 0.012748543739318847, 0.012765888214111329, 0.012709407806396485, 0.012869728088378907, 0.012935199737548829, 0.012902400016784669, 0.012951552391052246, 0.012929375648498536, 0.012858464241027831, 0.012909119606018066, 0.012928223609924317, 0.012987168312072753, 0.012943488121032715, 0.012953472137451173, 0.012914688110351562, 0.012973631858825684, 0.013132224082946778, 0.01298582363128662, 0.012962335586547852, 0.012847104072570802, 0.012867584228515624, 0.012873087882995606, 0.013007488250732422, 0.0129017276763916, 0.013002400398254394, 0.01297856044769287, 0.012863360404968261, 0.012853568077087402, 0.012894559860229492, 0.012890368461608888, 0.012799936294555664, 0.012745759963989257, 0.012851072311401366, 0.012949503898620606, 0.013034496307373047, 0.013006464004516601, 0.012966336250305176, 0.01296003246307373, 0.012884672164916993, 
0.01291881561279297, 0.012829855918884277, 0.01283459186553955, 0.012916735649108887, 0.012791808128356934, 0.012745759963989257, 0.012741151809692383, 0.012762720108032227, 0.012703807830810546, 0.013013055801391601, 0.012646400451660156, 0.01256383991241455, 0.012601408004760742, 0.012624159812927246, 0.012818719863891602, 0.012744000434875488, 0.012786368370056152, 0.012881919860839844, 0.012843008041381837, 0.012728320121765137, 0.012832703590393067, 0.012908608436584472, 0.013117440223693847, 0.012958847999572753, 0.012995552062988282, 0.01385257625579834, 0.013635040283203125, 0.013767200469970703, 0.012951552391052246, 0.012840959548950195, 0.012828224182128907, 0.012848896026611328, 0.013199999809265136, 0.012818367958068847, 0.012757120132446289, 0.012722175598144531, 0.012680383682250976, 0.012677951812744141, 0.012695551872253418, 0.012629376411437989, 0.012705535888671874, 0.012948703765869141, 0.012951135635375977, 0.013015104293823241, 0.013146016120910644, 0.012998368263244629, 0.013337087631225587, 0.01300057601928711, 0.012855168342590331, 0.012853376388549805, 0.012791168212890626, 0.012769824028015136, 0.012736607551574707, 0.012564000129699706, 0.012644831657409667, 0.012676799774169922, 0.012557791709899903, 0.01249734401702881, 0.01258675193786621, 0.012615967750549316, 0.012617247581481934, 0.012793855667114258, 0.01272713565826416, 0.012668928146362305, 0.012756704330444336, 0.012824864387512206, 0.01302297592163086, 0.012977439880371094, 0.01286348819732666, 0.012868576049804688, 0.012738176345825195, 0.012305952072143555, 0.01262070369720459, 0.012421216011047363, 0.012519583702087403, 0.012525407791137696, 0.012571807861328124, 0.012777631759643554, 0.012579520225524903, 0.012561951637268066, 0.01260591983795166, 0.012556447982788085, 0.012650336265563965, 0.012750847816467285, 0.012800000190734863, 0.012843008041381837, 0.012955072402954102, 0.012724800109863281, 0.01261299228668213, 0.01260812759399414, 0.012572671890258789, 0.012599295616149902, 0.01253171157836914, 0.012771295547485352, 0.01329526424407959, 0.012843392372131347, 0.013197312355041504, 0.013554944038391113, 0.012973088264465332, 0.012995391845703126, 0.012965984344482422, 0.012923423767089843, 0.012865823745727539, 0.01284505558013916, 0.012744864463806153, 0.012867391586303711, 0.013252384185791015, 0.012810015678405761, 0.012726752281188965, 0.01286963176727295, 0.012836864471435547, 0.012756319999694824, 0.012861472129821777, 0.012737152099609374, 0.01262118434906006, 0.012673567771911622, 0.01265180778503418, 0.012612223625183105, 0.012622015953063965, 0.012865535736083985, 0.012838912010192872, 0.012828800201416015, 0.012830431938171388, 0.012816320419311524, 0.012810463905334473, 0.01278771209716797, 0.012775391578674316, 0.012727392196655274, 0.012705920219421386, 0.012671808242797851, 0.012654591560363769, 0.012672608375549316, 0.012537599563598633, 0.012564800262451171, 0.01236137580871582, 0.012687135696411132, 0.01280780792236328, 0.012849760055541993, 0.012770560264587403, 0.012765888214111329, 0.0127258882522583, 0.01277302360534668, 0.012972127914428712, 0.012895071983337402, 0.01283670425415039, 0.012774911880493164, 0.012706303596496582, 0.012547072410583495, 0.012719424247741699, 0.012688672065734864, 0.01275699234008789, 0.012749216079711915, 0.013449248313903808, 0.01300271987915039, 0.013137215614318848, 0.01320633602142334, 0.013037440299987792, 0.012967167854309082, 0.013029215812683105, 0.013017888069152833, 0.012947744369506835, 0.012885855674743653, 0.01291814422607422, 
0.013182815551757812, 0.012798751831054687, 0.012691743850708008, 0.01287241554260254, 0.012841983795166016, 0.013194815635681152, 0.012983872413635253, 0.013003328323364257, 0.012963199615478515, 0.012995327949523926, 0.013152447700500489, 0.013187071800231934, 0.013011232376098632, 0.013038463592529297, 0.012975040435791016, 0.013023136138916015, 0.013008992195129394, 0.012818623542785644, 0.012897248268127442, 0.012772095680236817, 0.012666080474853516, 0.01274342441558838, 0.012793888092041015, 0.013421952247619629, 0.012938912391662599, 0.013044608116149903, 0.012909760475158691, 0.012919103622436524, 0.012939840316772462, 0.012800031661987305, 0.012739871978759766, 0.012897055625915527, 0.012789183616638184, 0.012796704292297362, 0.01255023956298828, 0.012847007751464844, 0.01276211166381836, 0.012843071937561035, 0.012909503936767578, 0.012976127624511719, 0.01304361629486084, 0.013029631614685058, 0.012914239883422851, 0.012963583946228027, 0.013086496353149414, 0.013045696258544922, 0.013822848320007325, 0.013076767921447754, 0.012991488456726074, 0.013009280204772948, 0.012982560157775878, 0.012967935562133789, 0.012988287925720215, 0.013086624145507812, 0.013987711906433106, 0.013049375534057618, 0.012960512161254883, 0.01302022361755371, 0.01307926368713379, 0.01300108814239502, 0.01302723217010498, 0.013021183967590331, 0.013164799690246582, 0.012954751968383789, 0.012966015815734862, 0.012920319557189941, 0.012987199783325196, 0.013190943717956543, 0.013079039573669434, 0.012867232322692872, 0.01301529598236084, 0.012998368263244629, 0.012815648078918458, 0.012788064002990722, 0.012863200187683105, 0.012889023780822753, 0.013180928230285644, 0.012986720085144043, 0.012914624214172363, 0.012762304306030273, 0.0127391357421875, 0.012719231605529785, 0.01287673568725586, 0.012904352188110351, 0.012820480346679687, 0.012920351982116699, 0.012843487739562988, 0.012803680419921875, 0.012933119773864746, 0.013095232009887695, 0.012936287879943848, 0.01293619155883789, 0.013, 0.014121855735778809, 0.014138848304748534, 0.013539679527282715, 0.01332595157623291, 0.012925375938415527, 0.01310694408416748, 0.013369152069091797, 0.013271455764770507, 0.013165472030639648, 0.014908255577087403, 0.01315017604827881, 0.013134048461914063, 0.013057727813720704, 0.01306163215637207, 0.0130764799118042, 0.01313049602508545, 0.012959744453430176, 0.013049856185913086, 0.013127679824829102, 0.013080575942993163, 0.013035519599914551, 0.013115391731262208, 0.013145855903625488, 0.0131976318359375, 0.01305958366394043, 0.013179648399353027, 0.01308134365081787, 0.013030336380004883, 0.013088768005371093, 0.013298912048339844, 0.013017760276794434, 0.013321632385253907, 0.013210335731506347, 0.013197407722473145, 0.0130600004196167, 0.013068160057067872, 0.01312604808807373, 0.013057184219360352, 0.013103679656982422, 0.012937215805053711, 0.012988384246826173, 0.013137887954711915, 0.013088831901550293, 0.012898303985595704, 0.012876095771789551, 0.01292416000366211, 0.012837311744689942, 0.012782976150512696, 0.01281503963470459, 0.012747039794921875, 0.01288976001739502, 0.012770848274230957, 0.012775903701782226, 0.01314406394958496, 0.013291520118713379, 0.013009183883666993, 0.012914463996887207, 0.01386291217803955, 0.013009023666381837, 0.013127424240112305, 0.01309449577331543, 0.01292131233215332, 0.013271039962768554, 0.012910592079162597, 0.012933119773864746, 0.0129617919921875, 0.012967167854309082, 0.012419072151184082, 0.012850815773010255, 0.01276796817779541, 0.012753824234008788, 
0.012776191711425782, 0.012837920188903808, 0.01295680046081543, 0.012850272178649902, 0.012839679718017577, 0.012778783798217773, 0.012701663970947265, 0.012755007743835449, 0.012677727699279785, 0.012727456092834473, 0.012905407905578613, 0.012972031593322754, 0.012916319847106934, 0.012873344421386719, 0.012886816024780274, 0.012886079788208007, 0.013000639915466308, 0.01285910415649414, 0.012923263549804687, 0.012914591789245606, 0.012918656349182128, 0.0128919677734375, 0.012814656257629394, 0.01287782382965088, 0.012891327857971192, 0.012796735763549805, 0.012805376052856445, 0.012843392372131347, 0.01286796760559082, 0.012979680061340333, 0.012970272064208985, 0.013270943641662598, 0.014344544410705566, 0.013049856185913086, 0.013115488052368163, 0.01300499153137207, 0.012953472137451173, 0.012870752334594726, 0.012892255783081055, 0.012984767913818359, 0.012943584442138672, 0.012897407531738281, 0.01279206371307373, 0.012855327606201172, 0.012864159584045411, 0.01278934383392334, 0.01277888011932373, 0.012784607887268066, 0.012797344207763671, 0.01270150375366211, 0.012886816024780274, 0.012985919952392578, 0.012855744361877442, 0.01273855972290039, 0.013025152206420898, 0.0129334716796875, 0.012779295921325684, 0.012711935997009278, 0.012651935577392578, 0.012749407768249512, 0.01304975986480713, 0.013050975799560547, 0.013056927680969239, 0.013307904243469238, 0.013021183967590331, 0.012881919860839844, 0.013092415809631348, 0.012904352188110351, 0.01281488037109375, 0.012822624206542968, 0.012797920227050782, 0.012734175682067871, 0.012868063926696777, 0.012996352195739747, 0.013046048164367675, 0.013060992240905761, 0.01283568000793457, 0.013033472061157226, 0.013041664123535156, 0.012951135635375977, 0.013054368019104003, 0.012957887649536133, 0.013012288093566894, 0.012874239921569825, 0.012924927711486817, 0.0130764799118042, 0.012962976455688476, 0.012851648330688477, 0.012932767868041992, 0.012892959594726562, 0.012865376472473145, 0.014940128326416016, 0.013897151947021484, 0.01306003189086914, 0.012858240127563477, 0.012823519706726074, 0.012749888420104981, 0.012742112159729004, 0.012786080360412597, 0.013293567657470704, 0.01286143970489502, 0.012970208168029785, 0.01296787166595459, 0.012908576011657714, 0.012834815979003907, 0.012724032402038574, 0.01274880027770996, 0.012748191833496094, 0.01274953556060791, 0.012599167823791503, 0.012552191734313965, 0.012497023582458496, 0.012576640129089356, 0.012496735572814941, 0.012638431549072265, 0.012524864196777345, 0.012656800270080567, 0.012845760345458984, 0.012840383529663086, 0.012829024314880371, 0.012870847702026367, 0.012712191581726075, 0.012139200210571289, 0.012552032470703125, 0.012684608459472656, 0.012831040382385254, 0.012796287536621093, 0.012901663780212402, 0.012855104446411133, 0.012938176155090332, 0.012820480346679687, 0.012789759635925293, 0.012824095726013184, 0.012679776191711426, 0.012724096298217774, 0.012680447578430176, 0.012757568359375, 0.013037856101989747, 0.013071423530578613, 0.013109919548034667, 0.012956159591674805, 0.013059679985046386, 0.01290220832824707, 0.012912063598632813, 0.01284547233581543, 0.012915136337280274, 0.012779232025146484, 0.012689951896667481, 0.012730112075805664, 0.012715807914733887, 0.012744928359985351, 0.012865088462829589, 0.012683135986328125, 0.012923456192016602, 0.01300592041015625, 0.012921695709228515, 0.012892224311828614, 0.012899807929992676, 0.012833312034606933, 0.013318016052246093, 0.01286905574798584, 0.013122048377990723, 0.014135199546813965, 
0.013664352416992187, 0.013627584457397462, 0.013012895584106446, 0.015816415786743164, 0.016584991455078125, 0.012927071571350097, 0.012893280029296876, 0.012734368324279785, 0.013029376029968261, 0.012745887756347656, 0.01286128044128418, 0.012877056121826172, 0.012720671653747559, 0.012564191818237305, 0.012724736213684081, 0.012715807914733887, 0.012694848060607911, 0.012591679573059083, 0.012642271995544433, 0.012446335792541504, 0.01229152011871338, 0.012307776451110839, 0.01190668773651123, 0.012325056076049805, 0.01228870391845703, 0.012304384231567383, 0.012273920059204102, 0.01232051181793213, 0.012322367668151856, 0.012448287963867188, 0.01242739200592041, 0.012671072006225586, 0.012773247718811035, 0.012637920379638672, 0.012611807823181153, 0.012557472229003906, 0.01247715187072754, 0.01239635181427002, 0.012390591621398927, 0.012488736152648925, 0.012584223747253417, 0.012454143524169922, 0.012503487586975098, 0.013930399894714356, 0.012739968299865723, 0.012727007865905762, 0.012678655624389648, 0.012899904251098633, 0.012679776191711426, 0.01265494441986084, 0.012611807823181153, 0.012840767860412598, 0.012762656211853027, 0.012962240219116211, 0.012795231819152831, 0.012996383666992187, 0.012927871704101562, 0.01294713592529297, 0.013121279716491699, 0.013029952049255372, 0.012920831680297852, 0.012840959548950195, 0.012989824295043945, 0.012978336334228516, 0.01280457592010498, 0.012908543586730957, 0.012733887672424317, 0.012858079910278321, 0.012943167686462403, 0.01307852840423584, 0.013098976135253907, 0.013037631988525391, 0.01319321632385254, 0.012864831924438477, 0.012878463745117188, 0.012896320343017579, 0.012832768440246582, 0.012716192245483399, 0.013014656066894531, 0.012931232452392577, 0.01284115219116211, 0.012756832122802734, 0.012810272216796875, 0.012742655754089355, 0.012625408172607423, 0.012322815895080566, 0.012776576042175292, 0.012884608268737793, 0.012730655670166015, 0.012660384178161621, 0.012670559883117676, 0.012610591888427735, 0.01263379192352295, 0.012572671890258789, 0.012648351669311523, 0.012719231605529785, 0.01272316837310791, 0.012717823982238769, 0.012675328254699707, 0.012792896270751954, 0.012687552452087402, 0.01267311954498291, 0.012722847938537597, 0.012689248085021972, 0.01256873607635498, 0.012769120216369629, 0.012620287895202637, 0.012511199951171875, 0.012644031524658202, 0.012658687591552734, 0.012586943626403809, 0.0128155517578125, 0.012766016006469727, 0.012675040245056153, 0.012632160186767579, 0.01261353588104248, 0.012717823982238769, 0.012720607757568359, 0.012740480422973632, 0.012785663604736328, 0.01297599983215332, 0.01275260829925537, 0.012845439910888672, 0.012751168251037597, 0.012737728118896485, 0.01273737621307373, 0.012613311767578125, 0.01258512020111084, 0.01249075222015381, 0.012439647674560546, 0.012779423713684082, 0.01263599967956543, 0.01273804759979248, 0.01262559986114502, 0.012925760269165039, 0.012978176116943359, 0.013014399528503418, 0.012888704299926758, 0.01273151969909668, 0.012706975936889648, 0.012610943794250488, 0.012625823974609375, 0.012674752235412598, 0.012681983947753906, 0.013000288009643556, 0.012916704177856446, 0.01304319953918457, 0.01296070384979248, 0.012628543853759765, 0.012935168266296386, 0.012838912010192872, 0.012999936103820801, 0.012841312408447266, 0.013019392013549805, 0.013218239784240722, 0.01302892780303955, 0.01297663974761963, 0.01319491195678711, 0.012937408447265625, 0.013172608375549316, 0.012912192344665528, 0.012994463920593263, 0.012990464210510254, 
0.012925408363342284, 0.012942399978637695, 0.012629183769226074, 0.012568544387817383, 0.012643808364868165, 0.01262822437286377, 0.012703167915344237, 0.012485247611999511, 0.012406463623046876, 0.012558655738830566, 0.01256668758392334, 0.012522656440734864, 0.012505215644836426, 0.012637920379638672, 0.013423104286193848, 0.012699999809265138, 0.012537856101989747, 0.01264844799041748, 0.012584959983825684, 0.012521087646484375, 0.012588479995727538, 0.012609824180603028, 0.012628383636474609, 0.012551456451416016, 0.012590335845947266, 0.012654303550720215, 0.012562432289123536, 0.012529472351074218, 0.012521663665771484, 0.01271110439300537, 0.012652959823608399, 0.012743071556091308, 0.01285491180419922, 0.012889632225036621, 0.013095456123352051, 0.012865856170654296, 0.012734047889709473, 0.012761343955993652, 0.012805472373962403, 0.012671808242797851, 0.012578816413879394, 0.012567999839782715, 0.0124781436920166, 0.012572704315185547, 0.012505951881408692, 0.012836864471435547, 0.012461343765258788, 0.012425727844238281, 0.012195839881896972, 0.012558527946472168, 0.01261353588104248, 0.012801952362060547, 0.01275494384765625, 0.01287782382965088, 0.012783616065979005, 0.012722016334533692, 0.012910752296447755, 0.01277952003479004, 0.013484288215637207, 0.0136779203414917, 0.013418463706970215, 0.012785247802734375, 0.01305891227722168, 0.012662783622741699, 0.012807231903076172, 0.01295251178741455, 0.012734623908996582, 0.012679007530212403, 0.012613344192504882, 0.012713408470153809, 0.012669792175292969, 0.012886015892028809, 0.012830240249633789, 0.012697952270507813, 0.01263424015045166, 0.012668928146362305, 0.01296399974822998, 0.01267091178894043, 0.012679072380065918, 0.012820480346679687, 0.012832832336425782, 0.012953536033630371, 0.012912639617919922, 0.012879167556762695, 0.01293996810913086, 0.012955712318420411, 0.012866975784301758, 0.012859935760498048, 0.012840543746948242, 0.012745280265808106, 0.012748736381530762, 0.012856575965881347, 0.012825247764587402, 0.012780991554260254, 0.012804800033569336, 0.01295961570739746, 0.013014816284179687, 0.012966464042663575, 0.012912287712097167, 0.012952863693237305, 0.012856127738952637, 0.012779392242431641, 0.012771488189697266, 0.013053824424743652, 0.012978272438049316, 0.013024576187133789, 0.01311366367340088, 0.013011039733886719, 0.01291648006439209, 0.012947903633117677, 0.012972288131713867]",tokens/s,77.63191205838645,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,907.362304,559.808512,0.0,157.2864,148.864512,s,1,9.6457763671875,9.6457763671875,0.0,9.6457763671875,9.6457763671875,9.6457763671875,9.6457763671875,[9.6457763671875],,kWh,1.736581533750249e-05,1.9084219268710492e-06,5.3466709439998344e-06,2.4620908208373373e-05,,MB,1340.94848,641.59744,0.0,224.395264,194.897408,s,17,0.7034094657897949,0.0413770273993997,0.0014021114323797515,0.04127715301513672,0.04149244689941406,0.04254590072631835,0.045893295745849605,"[0.04673014450073242, 0.04041139221191406, 0.04066841506958008, 0.04055558395385742, 0.04135107040405273, 0.04149983978271484, 0.04131590270996094, 0.041293758392333985, 0.041052288055419925, 0.04127715301513672, 0.041462272644042966, 0.04003868865966797, 0.040986942291259765, 0.04064223861694336, 0.04127417755126953, 0.041487518310546874, 0.04136207962036133]",tokens/s,6187.008011206578,kWh,1.4162677294296327e-06,1.5618847014344996e-07,9.401949269126267e-07,2.512651126485709e-06,tokens/kWh,101884418.931669,MB,1373.487104,656.277504,0.0,239.075328,194.899968,s,17,9.589910583496094,0.5641123872644761,0.004264007805265783,0.5634992065429687,0.5700108764648437,0.5704887695312499,0.57079716796875,"[0.5590472412109375, 0.5634992065429687, 0.5575225830078125, 0.5697565307617187, 0.5669478759765625, 0.5631503295898438, 0.5602890625, 0.5591054077148437, 0.5602511596679688, 0.570874267578125, 0.5687327270507813, 0.56692529296875, 0.560313232421875, 0.5703923950195312, 0.5613878784179688, 0.5648975219726563, 0.56681787109375]",tokens/s,111.6798734122875,kWh,1.5980695300472074e-05,1.7623458477259301e-06,5.947588361087345e-06,2.369062950928535e-05,tokens/kWh,2659279.2722248123,,s,1071,9.579752827644347,0.00894468051133926,0.000182924171962468,0.008937472343444825,0.009132032394409179,0.00921504020690918,0.009475059032440185,"[0.008812607765197754, 0.009136223793029785, 0.008986528396606446, 0.009154080390930176, 0.009105567932128905, 0.008979807853698731, 0.008899904251098633, 0.008967424392700195, 0.008974944114685059, 0.008941375732421875, 0.008996095657348633, 0.00897100830078125, 0.00902348804473877, 0.008900256156921386, 0.008927392005920411, 0.008855744361877442, 0.008856672286987305, 0.008838399887084961, 0.008875679969787598, 0.008806400299072266, 0.008751104354858399, 0.008725824356079102, 0.008755904197692871, 0.008783871650695801, 0.008759488105773925, 0.008741888046264648, 0.008726847648620606, 0.008785471916198731, 0.00868166446685791, 0.008721247673034668, 0.008709535598754883, 0.0088089599609375, 0.008705727577209473, 0.008651071548461915, 0.008597503662109375, 0.00858521556854248, 0.008664799690246581, 0.008720671653747559, 0.008715488433837891, 0.00884768009185791, 0.008878560066223144, 0.008798208236694336, 0.008804608345031738, 0.008903807640075683, 0.008917632102966309, 0.008964096069335938, 0.008923487663269043, 0.008971263885498047, 0.009239263534545898, 0.00892307186126709, 0.008926560401916504, 0.008888992309570312, 0.008937472343444825, 0.00894502353668213, 0.008882623672485351, 0.009091263771057128, 0.008876064300537109, 0.00875868797302246, 0.00880288028717041, 0.008841216087341308, 0.009080703735351563, 0.008843199729919433, 0.008843456268310547, 0.008630271911621093, 0.008882111549377441, 0.00889452838897705, 0.00891484832763672, 0.008891936302185058, 0.008813119888305665, 0.008776864051818848, 0.008731488227844239, 0.00888646411895752, 0.008834976196289063, 0.008834272384643556, 0.008843487739562988, 0.008792063713073731, 0.00872326374053955, 
0.008816287994384766, 0.008814111709594726, 0.008858079910278321, 0.008983807563781738, 0.008866080284118652, 0.00887177562713623, 0.008995455741882324, 0.008794112205505371, 0.008652447700500488, 0.008629983901977539, 0.008708736419677735, 0.0087326717376709, 0.008753151893615722, 0.008751104354858399, 0.008792063713073731, 0.008851807594299316, 0.008959839820861817, 0.008852767944335937, 0.008892959594726562, 0.008820735931396484, 0.008802304267883301, 0.00885148811340332, 0.008978719711303712, 0.008892095565795899, 0.008925279617309571, 0.008820639610290528, 0.008931327819824218, 0.008799839973449707, 0.00876576042175293, 0.008779871940612792, 0.008885248184204102, 0.008905759811401367, 0.008892383575439453, 0.009356800079345704, 0.009119423866271972, 0.009042880058288575, 0.009099103927612305, 0.009151616096496583, 0.009128255844116211, 0.009029760360717773, 0.00910489559173584, 0.009014368057250977, 0.009073760032653809, 0.008985376358032227, 0.009053728103637696, 0.00911788845062256, 0.010559776306152343, 0.009746432304382324, 0.009232383728027344, 0.008607775688171386, 0.009072256088256837, 0.008899968147277832, 0.008809439659118653, 0.008781824111938476, 0.008767487525939942, 0.008900447845458985, 0.00873078441619873, 0.00875491237640381, 0.008734784126281737, 0.008710047721862794, 0.00866755199432373, 0.008679327964782714, 0.008752287864685059, 0.008881279945373535, 0.008869759559631348, 0.008886112213134765, 0.008822784423828126, 0.008881471633911132, 0.008915648460388184, 0.008790016174316406, 0.008760640144348145, 0.008753855705261231, 0.008706048011779785, 0.008725600242614746, 0.008784799575805665, 0.008857600212097168, 0.008847359657287598, 0.008887359619140625, 0.008960960388183593, 0.008871935844421386, 0.00892751979827881, 0.008885984420776366, 0.008933055877685546, 0.009019840240478515, 0.008836992263793945, 0.009170623779296875, 0.008837023735046386, 0.008802207946777343, 0.008812607765197754, 0.00871673583984375, 0.00903987216949463, 0.008802304267883301, 0.008767487525939942, 0.008932671546936034, 0.008756959915161133, 0.008747136116027832, 0.008741503715515136, 0.008800000190734862, 0.008831456184387207, 0.008796159744262694, 0.008777728080749512, 0.00883238410949707, 0.008714879989624023, 0.008690943717956543, 0.009016063690185547, 0.0088985595703125, 0.008967935562133789, 0.009017600059509277, 0.00892518424987793, 0.008980480194091797, 0.00893945598602295, 0.00891481590270996, 0.008692000389099121, 0.008943584442138672, 0.008892319679260253, 0.009015392303466797, 0.008963456153869629, 0.008933792114257813, 0.008995039939880371, 0.008964096069335938, 0.008939519882202148, 0.008939071655273438, 0.008954303741455078, 0.008888319969177246, 0.009082880020141602, 0.008920703887939453, 0.008832991600036621, 0.008947104454040527, 0.008915743827819823, 0.009031904220581055, 0.009181183815002441, 0.009085087776184083, 0.008968064308166504, 0.009037792205810547, 0.00908463954925537, 0.009068832397460938, 0.009052160263061524, 0.009064127922058105, 0.009042304039001465, 0.009222368240356445, 0.009059455871582031, 0.00899772834777832, 0.008992192268371581, 0.009019712448120117, 0.009092255592346192, 0.00921388816833496, 0.009153440475463867, 0.00913593578338623, 0.009172256469726563, 0.009065407752990723, 0.009175007820129394, 0.008974080085754394, 0.009031552314758301, 0.009089695930480956, 0.009163871765136718, 0.009012895584106445, 0.008983519554138184, 0.008962176322937011, 0.00897811222076416, 0.0090382080078125, 0.009101311683654785, 0.009195327758789062, 
0.009332736015319825, 0.00925603199005127, 0.009131168365478516, 0.009025279998779296, 0.008966143608093263, 0.009142271995544434, 0.00894275188446045, 0.00901580810546875, 0.009068896293640136, 0.009033727645874023, 0.009007391929626464, 0.008984288215637208, 0.008964096069335938, 0.008928511619567871, 0.009059200286865235, 0.009007072448730469, 0.008962047576904298, 0.008936736106872558, 0.009046239852905274, 0.009117216110229491, 0.009048031806945802, 0.009079008102416993, 0.009040672302246094, 0.009023839950561524, 0.008976032257080078, 0.00903769588470459, 0.008964192390441895, 0.008855584144592286, 0.008777728080749512, 0.00873036766052246, 0.008790271759033203, 0.008826751708984375, 0.008720352172851562, 0.008784255981445312, 0.008898336410522461, 0.008943039894104004, 0.008997440338134766, 0.009046336174011231, 0.008934847831726074, 0.00895036792755127, 0.008971136093139648, 0.008966560363769532, 0.008897055625915527, 0.00905395221710205, 0.009250944137573242, 0.009360608100891113, 0.009120223999023437, 0.009115936279296875, 0.009066495895385742, 0.009053695678710937, 0.009056032180786133, 0.009154687881469727, 0.009099807739257813, 0.009034912109375, 0.009026592254638671, 0.009090559959411621, 0.009447775840759277, 0.009111455917358399, 0.009012831687927245, 0.009077280044555665, 0.00913644790649414, 0.009076416015625, 0.009056256294250489, 0.009065631866455079, 0.008934240341186523, 0.008865632057189942, 0.008843104362487792, 0.008795807838439942, 0.008910911560058594, 0.008897120475769044, 0.008865792274475098, 0.008878080368041993, 0.008908608436584472, 0.008949407577514648, 0.008887136459350586, 0.008841216087341308, 0.008723487854003906, 0.00904524803161621, 0.009016192436218262, 0.00901296043395996, 0.009008864402770995, 0.008908639907836914, 0.009052607536315918, 0.009195520401000976, 0.00890015983581543, 0.009167296409606934, 0.009029631614685058, 0.008970399856567383, 0.008996479988098145, 0.009052384376525878, 0.009396191596984863, 0.009007136344909667, 0.008984576225280762, 0.008859392166137695, 0.008811008453369141, 0.008947456359863281, 0.008932767868041992, 0.008892704010009765, 0.008965567588806152, 0.008981535911560059, 0.009037311553955079, 0.008919391632080078, 0.009009152412414552, 0.009040191650390625, 0.0089617280960083, 0.008965536117553711, 0.008993375778198242, 0.008951807975769043, 0.008867839813232421, 0.008796159744262694, 0.008886112213134765, 0.008904864311218261, 0.009083040237426758, 0.00891478443145752, 0.008793951988220215, 0.008756447792053222, 0.008868800163269044, 0.00894553565979004, 0.00881062412261963, 0.008824352264404298, 0.008769280433654785, 0.008800352096557617, 0.008637056350708008, 0.008629695892333984, 0.00863532829284668, 0.008889568328857423, 0.009027999877929687, 0.008867839813232421, 0.00991983985900879, 0.008710816383361816, 0.008799903869628907, 0.008640512466430664, 0.00932259178161621, 0.008722880363464355, 0.008672863960266113, 0.00862217617034912, 0.008634495735168456, 0.009308159828186035, 0.008790143966674805, 0.008578368186950684, 0.008747872352600097, 0.008732512474060058, 0.008822784423828126, 0.008697855949401855, 0.00865449619293213, 0.008577695846557617, 0.008609472274780273, 0.008690784454345703, 0.008823712348937989, 0.008646656036376953, 0.00881049633026123, 0.008828927993774414, 0.008902175903320313, 0.008806143760681152, 0.008657952308654786, 0.008590847969055175, 0.008572928428649903, 0.00852342414855957, 0.008612383842468261, 0.008680480003356934, 0.008596192359924317, 0.008648960113525391, 
0.008845248222351075, 0.008885855674743653, 0.008937952041625976, 0.008990752220153808, 0.009037792205810547, 0.008979904174804687, 0.00913260841369629, 0.009156895637512206, 0.008977760314941406, 0.009042304039001465, 0.009236319541931153, 0.00904364776611328, 0.008882816314697266, 0.008884063720703125, 0.009013471603393554, 0.009084511756896972, 0.00888646411895752, 0.009514880180358887, 0.009080320358276368, 0.0090447998046875, 0.009137248039245606, 0.008966879844665527, 0.009020607948303222, 0.008999744415283202, 0.008916223526000976, 0.008962335586547852, 0.009130751609802246, 0.008941280364990234, 0.00935756778717041, 0.008867584228515624, 0.00894976043701172, 0.009019392013549805, 0.008957951545715333, 0.008849408149719238, 0.008904383659362794, 0.00886729621887207, 0.008778623580932617, 0.008976351737976075, 0.008839167594909669, 0.008808095932006836, 0.008614463806152344, 0.008878080368041993, 0.008839103698730469, 0.008873279571533204, 0.008743712425231934, 0.008700063705444336, 0.008688480377197265, 0.008727007865905762, 0.008886943817138672, 0.008740736007690429, 0.008844863891601563, 0.00882051181793213, 0.00870809555053711, 0.008663711547851562, 0.008765439987182617, 0.008824159622192383, 0.008749728202819823, 0.008757311820983887, 0.008705599784851074, 0.008753536224365234, 0.0087674560546875, 0.008765024185180664, 0.008832511901855468, 0.00908902359008789, 0.008862784385681152, 0.008807456016540528, 0.009003935813903808, 0.009488320350646973, 0.00894694423675537, 0.009411104202270508, 0.009678048133850098, 0.00883743953704834, 0.008907296180725098, 0.009091584205627442, 0.008956895828247071, 0.008931743621826171, 0.008844672203063965, 0.008936351776123046, 0.009266400337219238, 0.00888912010192871, 0.008892319679260253, 0.008867936134338379, 0.008842528343200684, 0.0088951997756958, 0.008833024024963379, 0.008828927993774414, 0.008812543869018554, 0.008788031578063965, 0.008902591705322266, 0.008838751792907714, 0.008788607597351073, 0.008879263877868653, 0.008767680168151855, 0.008697471618652343, 0.008702272415161133, 0.008808192253112792, 0.00883187198638916, 0.00884067153930664, 0.00876585578918457, 0.008828319549560548, 0.008889247894287109, 0.008887999534606934, 0.008779135704040528, 0.008458208084106446, 0.008777695655822754, 0.00871116828918457, 0.00870195198059082, 0.008638400077819824, 0.008697279930114746, 0.00864243221282959, 0.008651519775390625, 0.008622048377990722, 0.00862172794342041, 0.008699392318725586, 0.008653696060180665, 0.008832063674926758, 0.008940447807312011, 0.008942912101745606, 0.008991040229797364, 0.00901366424560547, 0.008884223937988281, 0.00880025577545166, 0.008876031875610351, 0.008822784423828126, 0.008828191757202149, 0.008753567695617677, 0.008861120223999024, 0.008758111953735352, 0.008667167663574218, 0.008732192039489746, 0.008931103706359864, 0.009003904342651367, 0.008928223609924317, 0.008871999740600586, 0.008948736190795899, 0.008908576011657714, 0.008871456146240235, 0.008799712181091308, 0.008774656295776367, 0.008955904006958008, 0.009469375610351563, 0.009084511756896972, 0.00902889633178711, 0.0088853759765625, 0.008743488311767578, 0.008683232307434082, 0.008747167587280273, 0.008845184326171875, 0.008802016258239747, 0.0088919677734375, 0.008882240295410156, 0.008818783760070802, 0.00889516830444336, 0.008847488403320312, 0.00881868839263916, 0.008910847663879394, 0.00880793571472168, 0.008832768440246582, 0.008980319976806641, 0.008858688354492187, 0.008918016433715821, 0.008968480110168457, 0.008996895790100098, 
0.009382464408874512, 0.01005942440032959, 0.009869631767272949, 0.008801919937133789, 0.009061920166015624, 0.009003968238830566, 0.009117664337158203, 0.008972352027893066, 0.008945919990539552, 0.008952735900878906, 0.00893337631225586, 0.009056384086608887, 0.008912575721740723, 0.009120767593383788, 0.008979552268981934, 0.008927647590637206, 0.00894972801208496, 0.008886752128601074, 0.009211711883544922, 0.009149696350097656, 0.009021535873413086, 0.00897321605682373, 0.008934528350830077, 0.008829631805419923, 0.008978303909301757, 0.00895251178741455, 0.009026975631713867, 0.008875904083251953, 0.008911007881164551, 0.008931520462036133, 0.008830816268920898, 0.008949919700622559, 0.008956000328063965, 0.008887616157531739, 0.00913651180267334, 0.009193951606750489, 0.00912940788269043, 0.009056351661682128, 0.008966367721557617, 0.008955583572387696, 0.008898880004882813, 0.009029151916503907, 0.009087360382080079, 0.009046048164367675, 0.0091975040435791, 0.009087200164794923, 0.009148223876953125, 0.00903593635559082, 0.009248255729675293, 0.009723615646362305, 0.00912019157409668, 0.009140031814575195, 0.009272800445556641, 0.009107744216918946, 0.009139039993286132, 0.009337023735046386, 0.009196288108825683, 0.00912281608581543, 0.009052096366882324, 0.009072640419006347, 0.009027584075927735, 0.009280703544616698, 0.009202079772949219, 0.009105695724487304, 0.009057503700256347, 0.009047039985656738, 0.008823583602905274, 0.009111743927001953, 0.00901852798461914, 0.009054880142211914, 0.008984959602355957, 0.008996479988098145, 0.00894156837463379, 0.008824095726013184, 0.00909558391571045, 0.00905247974395752, 0.008835071563720704, 0.009137375831604004, 0.009081215858459472, 0.009107711791992188, 0.00909068775177002, 0.009070591926574707, 0.009056960105895996, 0.009078495979309082, 0.009127615928649902, 0.008956192016601562, 0.008904159545898438, 0.009077280044555665, 0.00904207992553711, 0.009047200202941894, 0.008994976043701171, 0.008920031547546387, 0.008944767951965332, 0.008912608146667481, 0.008962944030761718, 0.008926752090454102, 0.00899888038635254, 0.008916959762573243, 0.008893983840942383, 0.009044447898864745, 0.008978079795837403, 0.009027839660644531, 0.009107392311096191, 0.009043904304504394, 0.009007871627807617, 0.009006784439086915, 0.0091080322265625, 0.009103103637695313, 0.009141599655151367, 0.009164959907531738, 0.009045696258544922, 0.009007935523986816, 0.009132032394409179, 0.009003007888793945, 0.009043968200683594, 0.00915180778503418, 0.009138879776000976, 0.009111552238464356, 0.008996864318847657, 0.00900483226776123, 0.008896736145019532, 0.008945887565612792, 0.008882047653198243, 0.008953439712524413, 0.00908454418182373, 0.009069248199462891, 0.008947839736938476, 0.009082688331604003, 0.008916831970214844, 0.00874608039855957, 0.009053631782531739, 0.008974783897399903, 0.008880127906799316, 0.008820735931396484, 0.008963552474975586, 0.008991264343261718, 0.008932640075683593, 0.00894153594970703, 0.009051136016845703, 0.009000703811645508, 0.00901318359375, 0.009002752304077148, 0.008970208168029785, 0.008964447975158692, 0.008948063850402832, 0.009082528114318847, 0.008978240013122559, 0.009001248359680176, 0.008980575561523438, 0.009068639755249023, 0.009227744102478027, 0.009118240356445312, 0.00907868766784668, 0.009064255714416504, 0.00896339225769043, 0.008938464164733886, 0.009048959732055665, 0.009020447731018066, 0.009011232376098632, 0.009040831565856933, 0.008985440254211425, 0.008977984428405762, 0.00891487979888916, 
0.008903552055358886, 0.009350784301757812, 0.008997023582458496, 0.00898646354675293, 0.008953375816345215, 0.00905673599243164, 0.008983807563781738, 0.00897715187072754, 0.008855551719665527, 0.008855263710021972, 0.008819040298461913, 0.008783647537231445, 0.008990528106689454, 0.008877984046936035, 0.008855999946594239, 0.008801535606384277, 0.008902432441711426, 0.009011936187744141, 0.008998144149780273, 0.00896457576751709, 0.008960479736328126, 0.009027456283569336, 0.00933625602722168, 0.009014016151428222, 0.009013248443603515, 0.009287903785705567, 0.009016480445861817, 0.008980480194091797, 0.008980863571166993, 0.008628191947937012, 0.008935551643371582, 0.009129983901977539, 0.009033727645874023, 0.008839167594909669, 0.008816639900207519, 0.008831199645996093, 0.00887558364868164, 0.008673503875732421, 0.008949088096618653, 0.008786591529846191, 0.008769375801086426, 0.008687359809875488, 0.008763808250427246, 0.008753120422363281, 0.008794431686401366, 0.008785471916198731, 0.008577183723449707, 0.008469568252563477, 0.008539423942565919, 0.008549183845520019, 0.00859222412109375, 0.008768896102905273, 0.008778688430786132, 0.00886956787109375, 0.009012319564819337, 0.009014528274536132, 0.008898207664489746, 0.008856703758239746, 0.008772031784057617, 0.008974911689758301, 0.008880000114440918, 0.009113439559936523, 0.008861856460571289, 0.00893337631225586, 0.008840703964233398, 0.008894463539123536, 0.008786304473876953, 0.008844767570495606, 0.008849887847900391, 0.00886409568786621, 0.008895392417907716, 0.00876966381072998, 0.008866175651550293, 0.010121408462524415, 0.008921343803405762, 0.008834464073181152, 0.008849120140075684, 0.008774720191955566, 0.00893455982208252, 0.008979104042053222, 0.008994112014770508, 0.008954560279846192, 0.009197888374328613, 0.009014431953430175, 0.008957535743713378, 0.008911935806274415, 0.008988063812255859, 0.00892966365814209, 0.008831007957458496, 0.009218111991882324, 0.009043968200683594, 0.009104736328125, 0.008770303726196289, 0.00925385570526123, 0.009065471649169921, 0.009152352333068848, 0.009150464057922364, 0.009164799690246582, 0.009207807540893554, 0.009064448356628419, 0.009172991752624511, 0.00902143955230713, 0.009078399658203125, 0.009179519653320313, 0.009125887870788574, 0.00913599967956543, 0.009125439643859864, 0.009283935546875, 0.009113727569580078, 0.009109120368957519, 0.009168576240539551, 0.009169376373291016, 0.00909113597869873, 0.009070560455322266, 0.009047679901123047, 0.009095040321350098, 0.009196096420288086, 0.009091584205627442, 0.008982239723205566, 0.008824288368225098, 0.008872480392456054, 0.008921055793762208, 0.009033056259155273, 0.008946368217468262, 0.008946016311645507, 0.008945311546325684, 0.008855168342590331, 0.008868224143981933, 0.008927040100097657, 0.008900704383850098, 0.008906463623046874, 0.00901363182067871, 0.009004544258117676, 0.009413439750671387, 0.009135168075561523, 0.009075103759765625, 0.009005279541015624, 0.009008383750915528, 0.009092000007629395, 0.00901296043395996, 0.009060383796691895, 0.009024800300598145, 0.009222559928894043, 0.008962464332580567, 0.009070879936218261, 0.008916159629821778, 0.008896191596984864, 0.009390463829040527, 0.009015775680541993, 0.00896992015838623, 0.008878399848937988, 0.008874079704284669, 0.00897219181060791, 0.008937472343444825, 0.008830528259277343, 0.008754655838012696, 0.009035360336303712, 0.008958911895751953, 0.00904531192779541, 0.008970111846923827, 0.00906937599182129, 0.008989824295043945, 
0.009130304336547851, 0.008987199783325196, 0.008959775924682617, 0.009027135848999024, 0.00933135986328125, 0.008962335586547852, 0.00898044776916504, 0.009108320236206054, 0.008971167564392089, 0.008996864318847657, 0.009093119621276855, 0.009093279838562012, 0.009027711868286133, 0.008854304313659668, 0.008803263664245605, 0.008876031875610351, 0.008844448089599609, 0.009367839813232422, 0.008827391624450684, 0.008785216331481934, 0.008739520072937011, 0.008685728073120118, 0.009059712409973145, 0.008780672073364257, 0.008638303756713867, 0.008580703735351563, 0.008533760070800782, 0.008569151878356933, 0.008556735992431641, 0.00857692813873291, 0.00857913589477539, 0.00870195198059082, 0.008869888305664063, 0.009041888236999512, 0.009065535545349121, 0.008768159866333008, 0.008761664390563965, 0.008756256103515625, 0.008855999946594239, 0.008928031921386718, 0.008836864471435547, 0.008902655601501466, 0.00899897575378418, 0.008871359825134277, 0.008849920272827149, 0.00897433567047119, 0.00962668800354004, 0.008870719909667969, 0.008908896446228028, 0.008869919776916504, 0.008865983963012695, 0.008845120429992677, 0.008865792274475098, 0.008953856468200684, 0.008818783760070802, 0.008832927703857421, 0.008732576370239258, 0.009081631660461427, 0.008990367889404298, 0.009035136222839356, 0.008935487747192383, 0.00886844825744629, 0.008816960334777833, 0.00881868839263916, 0.008972352027893066, 0.008976192474365234, 0.008893823623657227, 0.008997440338134766, 0.00887827205657959, 0.008867615699768067, 0.00883340835571289, 0.00881817626953125, 0.008888832092285156, 0.008959903717041015, 0.00888316822052002, 0.00889568042755127, 0.008901823997497558, 0.008849568367004394, 0.008806783676147461, 0.008908448219299316, 0.008932000160217285, 0.009017151832580566, 0.008937727928161622, 0.008922816276550293, 0.0088985595703125, 0.008912704467773437, 0.008947808265686035, 0.008910880088806153, 0.009021504402160645, 0.009073792457580566, 0.009190272331237793, 0.008910847663879394, 0.00893337631225586, 0.00899283218383789, 0.008978367805480957, 0.008890368461608887, 0.009107808113098144, 0.008930720329284669, 0.009078944206237793, 0.009214112281799317, 0.008957823753356934, 0.008975872039794922, 0.008858176231384277, 0.008815744400024413, 0.008944512367248535, 0.0089169921875, 0.009240223884582519, 0.00887782382965088, 0.00890060806274414, 0.008823391914367675, 0.00881049633026123, 0.008918784141540527, 0.009343232154846191, 0.008947487831115722, 0.009068639755249023, 0.008968640327453613, 0.009123519897460937, 0.009312479972839356, 0.009060128211975098, 0.008775679588317872, 0.009013248443603515, 0.009062399864196777, 0.009078783988952637, 0.008972576141357422, 0.008963808059692383, 0.009158783912658692, 0.00895372772216797, 0.009093119621276855, 0.008880127906799316, 0.00899071979522705, 0.008978431701660156, 0.008855423927307128, 0.008877311706542969, 0.008841792106628417, 0.008918560028076172, 0.008882975578308106, 0.008982175827026368, 0.008900256156921386, 0.008921792030334472, 0.0088985595703125, 0.008927583694458008, 0.009012895584106445, 0.009045151710510254, 0.009204575538635254, 0.009324543952941895, 0.009248767852783203, 0.009149663925170899, 0.009259455680847168, 0.00939891242980957, 0.009281279563903809, 0.009215968132019043, 0.009204863548278808, 0.009056639671325684, 0.00903219223022461, 0.009072095870971679, 0.009023712158203126, 0.008902815818786622, 0.008886431694030762, 0.008847455978393554, 0.008937376022338867, 0.00897811222076416, 0.00905247974395752, 0.008988672256469727, 
0.008839296340942384, 0.00899232006072998, 0.008804672241210938, 0.008939231872558594, 0.008878368377685547, 0.008981792449951172, 0.008968928337097167, 0.009058527946472168, 0.008805791854858398, 0.008722816467285156, 0.008701120376586914, 0.008791135787963868, 0.008912704467773437, 0.008920767784118652, 0.008847583770751953, 0.00887980842590332, 0.008942208290100097, 0.009033408164978027, 0.009125439643859864]",tokens/s,111.79829159155433,,, 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4333.416448,4569.563136,0.0,4183.81824,4182.069248,s,1,10.1635,10.1635,0.0,10.1635,10.1635,10.1635,10.1635,[10.1635],,kWh,8.576651527498597e-05,9.4534771035184e-06,2.8487245011990514e-05,0.00012370723739049488,,MB,1622.482944,4722.655232,0.0,4307.550208,4281.174016,s,10,3.7266936035156246,0.3726693603515625,0.0024222125909151987,0.37304234313964846,0.37480166320800784,0.3749669937133789,0.37509925811767575,"[0.3660775756835937, 0.3715085144042969, 0.37245123291015625, 0.3730545654296875, 0.3741449279785156, 0.37476492309570314, 0.37294683837890624, 0.37303012084960935, 0.37513232421875, 0.37358258056640625]",tokens/s,686.9360007447327,kWh,1.0775990644791968e-05,1.188270927801436e-06,7.193696231142759e-06,1.915795780373616e-05,tokens/kWh,13362593.373604529,MB,1626.632192,4737.335296,0.0,4322.230272,4281.176576,s,10,19.315797607421874,1.9315797607421874,0.0066651199980240754,1.9291423950195312,1.93842744140625,1.9424410766601563,1.9456519848632812,"[1.9222420654296875, 1.9250628662109375, 1.9284111328125, 1.9285787353515624, 1.9464547119140625, 1.935525390625, 1.9342493896484374, 1.92803173828125, 1.9375355224609374, 1.9297060546875]",tokens/s,32.61579008044326,kWh,5.655815386687361e-05,6.238402661912035e-06,3.733964495105717e-05,0.00010013620147984282,tokens/kWh,629143.0977904804,,s,630,19.309970489501964,0.03065074680873326,0.000522372786755224,0.03055992031097412,0.03098632049560547,0.03127637300491333,0.032555961799621586,"[0.031021055221557618, 0.03045542335510254, 0.030398847579956055, 0.030321695327758788, 0.030151647567749025, 0.030316511154174806, 0.03051728057861328, 0.03043849563598633, 0.030515743255615235, 0.030531967163085937, 0.030287103652954103, 0.03036969566345215, 0.030323551177978515, 0.030559263229370116, 0.030749664306640626, 0.03081999969482422, 0.031061920166015625, 0.03077801513671875, 0.03065014457702637, 0.030438655853271483, 0.030503679275512695, 0.03037753677368164, 0.030246719360351563, 0.030218751907348632, 0.03012006378173828, 0.030053600311279297, 0.030183712005615235, 0.03011743927001953, 0.030530303955078126, 0.030660415649414064, 0.030411136627197265, 0.030468095779418947, 0.030652416229248046, 0.03241164779663086, 0.030598432540893554, 0.030628576278686523, 0.030484672546386718, 0.0304597110748291, 0.03032268714904785, 0.030468095779418947, 0.03031667137145996, 0.030180896759033203, 0.030222688674926758, 0.030629888534545898, 0.0305930233001709, 0.030424352645874023, 0.030132959365844727, 0.03038924789428711, 0.030442495346069336, 0.03055961608886719, 0.030395008087158202, 0.030298112869262695, 
0.030792863845825195, 0.03172438430786133, 0.030463104248046876, 0.030573440551757813, 0.03046540832519531, 0.030324672698974608, 0.030276287078857423, 0.030719999313354493, 0.03041279983520508, 0.03054102325439453, 0.03041360092163086, 0.031069087982177734, 0.030596288681030273, 0.030122751235961913, 0.030067935943603515, 0.030057344436645508, 0.030482431411743165, 0.030983680725097655, 0.030788095474243164, 0.030664703369140626, 0.03056230354309082, 0.03040870475769043, 0.030472095489501954, 0.03044771194458008, 0.030365695953369142, 0.030281152725219727, 0.03027609634399414, 0.03028175926208496, 0.030641183853149415, 0.030468671798706055, 0.030967584609985353, 0.030528160095214845, 0.03047603225708008, 0.03053113555908203, 0.030401439666748048, 0.030609344482421873, 0.030762847900390626, 0.030479679107666014, 0.030429471969604494, 0.03039664077758789, 0.03042064094543457, 0.03038703918457031, 0.030635295867919923, 0.03040812873840332, 0.03041974449157715, 0.03269660949707031, 0.03127631950378418, 0.03048512077331543, 0.03033497619628906, 0.030414848327636718, 0.030177375793457032, 0.030242176055908204, 0.030463552474975585, 0.030288000106811524, 0.030475103378295898, 0.031805471420288084, 0.030432832717895507, 0.030515008926391602, 0.030610015869140625, 0.030394367218017578, 0.030353408813476562, 0.030502912521362304, 0.03079782485961914, 0.030529119491577147, 0.030376031875610353, 0.030572864532470705, 0.03040604782104492, 0.030384767532348634, 0.03048192024230957, 0.030400768280029297, 0.030611679077148436, 0.030612543106079103, 0.030921375274658203, 0.03030659294128418, 0.031733760833740236, 0.030655872344970702, 0.03061619186401367, 0.030638080596923828, 0.030504928588867188, 0.030481536865234374, 0.030837600708007812, 0.030570560455322266, 0.03058687973022461, 0.030914623260498045, 0.030965696334838866, 0.030431232452392577, 0.0302957763671875, 0.030223840713500975, 0.030202335357666015, 0.03057289505004883, 0.030345216751098632, 0.030459455490112305, 0.030906816482543946, 0.030478080749511718, 0.030443775177001954, 0.03039244842529297, 0.03023164749145508, 0.030197887420654296, 0.03040083122253418, 0.03060361671447754, 0.03055161666870117, 0.030621408462524414, 0.03068796730041504, 0.03068339157104492, 0.030709503173828125, 0.03060121536254883, 0.030846624374389647, 0.030682655334472658, 0.03060223960876465, 0.03073823928833008, 0.03063596725463867, 0.030668863296508787, 0.030480575561523438, 0.030380895614624023, 0.032118911743164065, 0.030731103897094728, 0.030670848846435547, 0.030650367736816408, 0.03068435287475586, 0.030473024368286132, 0.03031449508666992, 0.03127910423278808, 0.030215551376342773, 0.03027827262878418, 0.030248960494995116, 0.030612768173217772, 0.030279424667358397, 0.030305248260498047, 0.03015065574645996, 0.03023699188232422, 0.030082752227783203, 0.030736127853393556, 0.03087945556640625, 0.030822399139404297, 0.030805984497070314, 0.03073052787780762, 0.031166336059570313, 0.031281951904296876, 0.030715456008911134, 0.030683616638183593, 0.03044550323486328, 0.03046777534484863, 0.03076323127746582, 0.030369855880737304, 0.0304169921875, 0.030548223495483397, 0.030670591354370117, 0.03080339241027832, 0.030570592880249024, 0.03046403121948242, 0.030863807678222655, 0.030472192764282226, 0.030456064224243164, 0.03095305633544922, 0.030587039947509765, 0.030670848846435547, 0.03143260765075684, 0.030525632858276367, 0.030982048034667968, 0.03068441581726074, 0.030399232864379882, 0.03075676727294922, 0.03045590400695801, 0.030584543228149415, 
0.030585344314575196, 0.030468095779418947, 0.03040233612060547, 0.030365695953369142, 0.030431232452392577, 0.03057459259033203, 0.030520639419555663, 0.030544576644897462, 0.03076675224304199, 0.030660959243774415, 0.03053878402709961, 0.03087027168273926, 0.030669023513793945, 0.030394367218017578, 0.030445119857788087, 0.030503456115722655, 0.030940959930419922, 0.0308175048828125, 0.030892959594726564, 0.030498815536499024, 0.030561471939086916, 0.03054060745239258, 0.030533632278442382, 0.030224384307861327, 0.030611455917358397, 0.030866527557373048, 0.03086739158630371, 0.03068115234375, 0.030620479583740236, 0.030445663452148438, 0.030455167770385743, 0.030324607849121093, 0.030543840408325196, 0.03036240005493164, 0.030277023315429686, 0.030412736892700194, 0.03195699119567871, 0.031194976806640625, 0.03116048049926758, 0.03075017547607422, 0.03238729476928711, 0.030558528900146483, 0.030680479049682616, 0.030564319610595702, 0.03084556770324707, 0.030801919937133788, 0.030620735168457033, 0.03152787208557129, 0.03820460891723633, 0.030531551361083983, 0.030426080703735352, 0.030400384902954103, 0.03038412857055664, 0.030514240264892578, 0.030390432357788086, 0.030143232345581056, 0.0302838077545166, 0.0305677433013916, 0.030691551208496093, 0.030400384902954103, 0.030658624649047853, 0.030460447311401368, 0.03031228828430176, 0.030200288772583007, 0.030487232208251953, 0.030325504302978517, 0.030218496322631835, 0.030404031753540037, 0.030488767623901368, 0.030617984771728515, 0.03058892822265625, 0.03077939224243164, 0.03068511962890625, 0.031031360626220705, 0.030774463653564454, 0.03071673583984375, 0.0315733757019043, 0.03093772888183594, 0.03127004814147949, 0.03126761627197266, 0.030832704544067384, 0.030871551513671876, 0.030693376541137695, 0.030639263153076173, 0.030671968460083007, 0.030676511764526366, 0.030594688415527344, 0.030614112854003905, 0.03052329635620117, 0.030509248733520507, 0.030873504638671875, 0.030690399169921875, 0.030640256881713866, 0.030666847229003907, 0.0311662712097168, 0.03238185501098633, 0.030922719955444336, 0.031444480895996094, 0.030554048538208006, 0.03138636779785156, 0.030760959625244142, 0.030619295120239257, 0.03054627227783203, 0.030297183990478517, 0.030227136611938477, 0.030494943618774414, 0.030629888534545898, 0.030648448944091796, 0.03070751953125, 0.0305928955078125, 0.03069875144958496, 0.03026790428161621, 0.030249120712280274, 0.030298112869262695, 0.03034499168395996, 0.030312959671020507, 0.0302589111328125, 0.030220159530639647, 0.030568256378173828, 0.03092131233215332, 0.03054582405090332, 0.030361087799072265, 0.0305948486328125, 0.030373760223388672, 0.030542783737182617, 0.030477920532226564, 0.030974367141723632, 0.030910463333129884, 0.030736383438110353, 0.030652416229248046, 0.030817344665527345, 0.03085817527770996, 0.030721920013427734, 0.030526912689208985, 0.03068998336791992, 0.030517248153686522, 0.030431232452392577, 0.030379392623901366, 0.030379871368408203, 0.030434080123901367, 0.030392160415649416, 0.03029395294189453, 0.030217472076416015, 0.030761663436889648, 0.030478208541870118, 0.03067286491394043, 0.030713567733764647, 0.03052207946777344, 0.03395356750488281, 0.03073865509033203, 0.031395328521728515, 0.030597408294677733, 0.030471712112426757, 0.030503616333007813, 0.030535680770874023, 0.030431232452392577, 0.030705663681030275, 0.030514656066894533, 0.03265385437011719, 0.033898494720458985, 0.030949151992797852, 0.030828863143920898, 0.031121088027954102, 0.03099849510192871, 
0.030607711791992186, 0.030504159927368164, 0.03056924819946289, 0.03055615997314453, 0.030373023986816405, 0.030376127243041992, 0.030448543548583985, 0.03103241539001465, 0.03049888038635254, 0.03059772872924805, 0.0306680965423584, 0.030542112350463866, 0.030493087768554687, 0.030764863967895507, 0.030502176284790038, 0.031265216827392577, 0.030632415771484376, 0.030570655822753905, 0.031237632751464843, 0.03079347229003906, 0.031236703872680665, 0.031072256088256835, 0.03061350440979004, 0.03099852752685547, 0.030525440216064452, 0.030379743576049806, 0.03044380760192871, 0.030517023086547853, 0.03029209518432617, 0.030269535064697265, 0.03013350486755371, 0.03026799964904785, 0.03087171173095703, 0.030482048034667968, 0.030490463256835937, 0.030585248947143553, 0.030776479721069335, 0.030701728820800783, 0.030517663955688477, 0.030615615844726562, 0.03068079948425293, 0.0305546875, 0.030451776504516602, 0.030842880249023437, 0.0305598087310791, 0.030517696380615234, 0.03055615997314453, 0.03075481605529785, 0.03127641677856445, 0.030638719558715822, 0.03057459259033203, 0.03098159980773926, 0.031113759994506836, 0.03086947250366211, 0.03132009506225586, 0.030814207077026368, 0.030781503677368163, 0.030686527252197265, 0.030701631546020507, 0.03077894401550293, 0.030542848587036132, 0.03131391906738281, 0.030553535461425783, 0.030589504241943358, 0.0305947208404541, 0.030461824417114258, 0.03025721549987793, 0.030267808914184572, 0.030672895431518556, 0.030390079498291016, 0.030294208526611327, 0.030357152938842773, 0.030349088668823243, 0.031021408081054688, 0.03100067138671875, 0.031021280288696287, 0.030508960723876953, 0.030486528396606444, 0.03045756721496582, 0.030667039871215822, 0.03082582473754883, 0.03048841667175293, 0.030330783843994142, 0.030560960769653322, 0.030619871139526366, 0.030900224685668946, 0.03038003158569336, 0.03040870475769043, 0.030764896392822264, 0.030820512771606447, 0.030689279556274415, 0.03056844711303711, 0.03058687973022461, 0.030627840042114256, 0.030600736618041992, 0.030431583404541017, 0.030681215286254882, 0.030479936599731444, 0.03094163131713867, 0.03057004737854004, 0.03049443244934082, 0.030695167541503907, 0.030366399765014648, 0.03023286437988281, 0.030305984497070313, 0.0305316162109375, 0.03106435203552246, 0.030834688186645507, 0.030521343231201172, 0.030809951782226563, 0.03065657615661621, 0.03064227294921875, 0.030694400787353516, 0.030640159606933594, 0.03058572769165039, 0.030468191146850586, 0.03049798393249512, 0.030407487869262697, 0.030988191604614256, 0.03031769561767578, 0.030732736587524415, 0.030556735992431642, 0.030560224533081055, 0.03056003189086914, 0.03180108833312988, 0.03080201530456543, 0.031199167251586914, 0.03177638435363769, 0.03074723243713379, 0.03060121536254883, 0.030649696350097656, 0.030542495727539063, 0.03060326385498047, 0.03078144073486328, 0.030438528060913086, 0.0303768310546875, 0.030476287841796876, 0.030490591049194337, 0.030603168487548828, 0.031528703689575194, 0.03422860717773438, 0.031014911651611327, 0.030851072311401367, 0.030637983322143555, 0.030570592880249024, 0.03051510429382324, 0.030665119171142577, 0.032444225311279294, 0.03067686462402344, 0.030714879989624022, 0.030534656524658203, 0.030596160888671876, 0.030358463287353515, 0.030474239349365235, 0.03055183982849121, 0.030410112380981444, 0.030590911865234376, 0.03064531135559082, 0.03052547264099121, 0.030465791702270508, 0.03049679946899414, 0.03237212753295898, 0.031931072235107424, 0.03068921661376953, 0.030415071487426757, 
0.030545536041259765, 0.030419296264648437, 0.030629695892333983, 0.030418943405151368, 0.030672384262084962, 0.03056435203552246, 0.030585535049438478, 0.03033888053894043, 0.03059916877746582, 0.030420736312866212, 0.03050489616394043, 0.030533727645874024, 0.030873823165893554, 0.03047372817993164, 0.030205984115600586, 0.030315103530883788, 0.030327775955200195, 0.030186399459838868, 0.030141504287719726, 0.030188480377197267, 0.03010870361328125, 0.030163936614990235, 0.031225824356079103, 0.030662784576416014, 0.03061782455444336, 0.030644224166870116, 0.03057369613647461, 0.03011849594116211, 0.030136064529418947, 0.030470495223999024, 0.03260160064697266, 0.031261375427246094, 0.03062486457824707, 0.0304586238861084, 0.030986112594604494, 0.030410688400268556, 0.03037628746032715, 0.030668800354003906, 0.03057049560546875, 0.030991552352905273, 0.030391359329223634, 0.030494464874267577, 0.030659839630126952, 0.030562047958374024, 0.03036262321472168, 0.030545503616333007, 0.030701120376586913, 0.030548831939697266, 0.030644224166870116, 0.03060531234741211, 0.031065792083740235, 0.030564064025878905, 0.030687135696411134, 0.03077190399169922, 0.03055753517150879, 0.03042576026916504, 0.030451711654663087, 0.030514976501464845, 0.03041302490234375, 0.03056230354309082, 0.030524608612060546, 0.030538368225097656, 0.030653663635253906, 0.030648992538452147, 0.0304704647064209, 0.03055615997314453, 0.03060105514526367, 0.030523551940917968, 0.03036470413208008, 0.030288864135742187, 0.03060326385498047, 0.03075257682800293, 0.030503103256225586, 0.030423040390014647, 0.03048588752746582, 0.030716543197631837, 0.030461952209472655, 0.030490623474121094, 0.030513151168823242, 0.030922752380371094, 0.03094118309020996, 0.030615680694580077, 0.030580608367919922, 0.030640127182006836, 0.03061555290222168]",tokens/s,32.625632459796115,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4384.518144,5362.286592,0.0,4959.76448,4769.731072,s,1,11.5832275390625,11.5832275390625,0.0,11.5832275390625,11.5832275390625,11.5832275390625,11.5832275390625,[11.5832275390625],,kWh,0.00012836795212497996,1.4128874333218193e-05,5.673865650200616e-05,0.0001992354829602043,,MB,1629.351936,5383.258112,0.0,4966.055936,4251.027456,s,10,32.4425234375,3.24425234375,0.007091350838878713,3.2430179443359375,3.2522633544921877,3.253871423339844,3.2551578784179687,"[3.230861083984375, 3.237745849609375, 3.23954541015625, 3.242152099609375, 3.241451171875, 3.2438837890625, 3.248853759765625, 3.251906005859375, 3.250644775390625, 3.2554794921875]",tokens/s,78.90878170842039,kWh,9.482586464749754e-05,1.0458262269819845e-05,6.308718935860158e-05,0.00016837131627591898,tokens/kWh,1520449.0032047932,MB,1633.517568,5397.938176,0.0,4980.736,4251.030016,s,10,18.360163818359375,1.8360163818359374,0.004735661966507554,1.835945068359375,1.8403024169921873,1.8429421020507812,1.845053850097656,"[1.8397158203125, 1.8337735595703124, 1.845581787109375, 1.826554443359375, 1.834335205078125, 1.837554931640625, 1.8335498046875, 1.8380169677734375, 1.833444091796875, 1.83763720703125]",tokens/s,34.31341932635846,kWh,5.3813646982082446e-05,5.937070096647661e-06,3.557516734899874e-05,9.532588442772881e-05,tokens/kWh,660890.7997886279,,s,630,18.356937038421638,0.029137995299081954,0.00047515384139428104,0.02906135940551758,0.029453203201293944,0.029671596908569336,0.03032571828842164,"[0.030102272033691407, 0.029155328750610353, 0.028983295440673826, 0.028872703552246092, 0.028831743240356447, 0.029357343673706054, 0.029098623275756835, 0.029097856521606444, 0.02891004753112793, 0.029047903060913087, 0.028883615493774415, 0.028919040679931642, 0.02883456039428711, 0.0291409912109375, 0.028880895614624022, 0.02879692840576172, 0.02879283142089844, 0.028917760848999025, 0.028849376678466796, 0.030393119812011718, 0.031196319580078125, 0.029612672805786132, 0.02915247917175293, 0.02932124710083008, 0.02961440086364746, 0.029170207977294922, 0.02922835159301758, 0.02913363265991211, 0.02902252769470215, 0.029007551193237304, 0.028886079788208008, 0.029191104888916016, 0.029284351348876952, 0.029663232803344725, 0.02916099166870117, 0.029159679412841796, 0.028901599884033204, 0.029216768264770508, 0.02943180847167969, 0.029632255554199217, 0.029531679153442382, 0.02952675247192383, 0.02933558464050293, 0.029463743209838866, 0.02931177520751953, 0.029263296127319337, 0.029035072326660156, 0.029119808197021483, 0.029053632736206054, 0.02916761589050293, 0.028940288543701172, 0.02901718330383301, 0.02905999946594238, 0.02911427116394043, 0.029050975799560546, 0.02903654479980469, 0.028901376724243165, 0.028992639541625977, 0.029347871780395506, 0.029041824340820314, 0.029109952926635742, 0.029195808410644532, 0.02890390396118164, 0.029769920349121095, 0.029274112701416017, 0.029161279678344726, 0.029276351928710937, 0.029057024002075195, 0.02918809509277344, 0.029054624557495117, 0.029726655960083007, 0.029218784332275392, 0.029114816665649416, 0.028999679565429686, 0.02897100830078125, 0.028993087768554686, 0.02890787124633789, 0.028851295471191408, 0.028810239791870116, 0.029083648681640626, 0.028807167053222657, 0.028932096481323243, 0.028997663497924805, 0.0290098876953125, 0.02921228790283203, 0.02911680030822754, 0.02897443199157715, 0.029116352081298827, 0.029035232543945313, 0.02893619155883789, 0.029054975509643553, 0.02891366386413574, 
0.028960639953613282, 0.028825151443481446, 0.029286975860595702, 0.029403135299682616, 0.02913068771362305, 0.029099647521972655, 0.029082048416137696, 0.029089664459228514, 0.02961401557922363, 0.029746431350708008, 0.029330207824707032, 0.02931318473815918, 0.029340768814086916, 0.029100160598754882, 0.029233951568603516, 0.029759296417236326, 0.029169952392578125, 0.02907526397705078, 0.029263935089111327, 0.02890140724182129, 0.028732927322387695, 0.028822015762329102, 0.028872095108032226, 0.028905088424682618, 0.029143199920654297, 0.028902175903320313, 0.02893449592590332, 0.029167295455932617, 0.028913728713989256, 0.028986751556396486, 0.02924595260620117, 0.028905536651611326, 0.028940288543701172, 0.02871833610534668, 0.03012076759338379, 0.029169248580932616, 0.02893779182434082, 0.02904764747619629, 0.028964864730834962, 0.029222911834716796, 0.02896076774597168, 0.028985183715820314, 0.02904489517211914, 0.029069311141967775, 0.029140447616577147, 0.029384639739990233, 0.029182559967041017, 0.029337600708007814, 0.03062278366088867, 0.03862828826904297, 0.03015452766418457, 0.028971200942993165, 0.028946464538574218, 0.028688383102416993, 0.02919628715515137, 0.02869772720336914, 0.028715776443481444, 0.028956672668457032, 0.028889215469360352, 0.028867712020874025, 0.028969600677490236, 0.029010175704956054, 0.028945856094360352, 0.02905084800720215, 0.029219711303710937, 0.029168703079223632, 0.02938719940185547, 0.02921478462219238, 0.02928451156616211, 0.029359392166137695, 0.029249792098999024, 0.029149663925170897, 0.029042688369750977, 0.029837312698364257, 0.028868608474731446, 0.029085248947143556, 0.029036991119384764, 0.02902016067504883, 0.0288536319732666, 0.029082239151000975, 0.02933964729309082, 0.029539424896240233, 0.02914192008972168, 0.029155071258544923, 0.02895193672180176, 0.02894732856750488, 0.029243392944335936, 0.029106176376342774, 0.029106176376342774, 0.029054975509643553, 0.029154624938964844, 0.028949184417724608, 0.029007871627807616, 0.028996864318847657, 0.02901068878173828, 0.028893152236938478, 0.02891983985900879, 0.02952406311035156, 0.029009920120239258, 0.02892367935180664, 0.02914531135559082, 0.02895462417602539, 0.029001087188720704, 0.028741952896118163, 0.028940256118774415, 0.028897184371948242, 0.02895712089538574, 0.028896671295166015, 0.028998239517211914, 0.028964191436767577, 0.02916009521484375, 0.029296640396118165, 0.02929254341125488, 0.029013696670532226, 0.028905792236328123, 0.02880512046813965, 0.02899126434326172, 0.02891779136657715, 0.02885036849975586, 0.029310976028442383, 0.028892608642578126, 0.029039167404174806, 0.029097984313964844, 0.029001728057861328, 0.029017759323120118, 0.028792512893676757, 0.028873376846313477, 0.02894857597351074, 0.029190208435058592, 0.029107648849487303, 0.028829696655273438, 0.028610784530639647, 0.028776256561279297, 0.0290512638092041, 0.029187231063842772, 0.0289780158996582, 0.029210336685180666, 0.028952863693237303, 0.02900982475280762, 0.02873353576660156, 0.028824607849121095, 0.02901091194152832, 0.029056543350219725, 0.029507968902587892, 0.02894857597351074, 0.02900521659851074, 0.029153888702392577, 0.029282304763793947, 0.029181951522827147, 0.02896076774597168, 0.028827648162841796, 0.028850175857543944, 0.02877187156677246, 0.02884566307067871, 0.028908416748046874, 0.028917760848999025, 0.0287825927734375, 0.02893775939941406, 0.028834272384643554, 0.028837472915649413, 0.030118783950805663, 0.02899305534362793, 0.02892464065551758, 0.02883564758300781, 
0.028837888717651368, 0.02896905517578125, 0.02913270378112793, 0.02943132781982422, 0.02906159973144531, 0.0301977596282959, 0.03037798309326172, 0.029114368438720704, 0.02897862434387207, 0.02949580764770508, 0.029104192733764647, 0.028856319427490236, 0.028881216049194337, 0.029248544692993164, 0.029020864486694335, 0.029153247833251954, 0.028911615371704103, 0.02894816017150879, 0.028857919692993166, 0.02921548843383789, 0.029679616928100585, 0.029216768264770508, 0.02915283203125, 0.029354400634765625, 0.02925775909423828, 0.02928166389465332, 0.029234975814819337, 0.028981695175170897, 0.02910588836669922, 0.02898761558532715, 0.029194719314575197, 0.029058143615722655, 0.029124607086181642, 0.029043615341186522, 0.029191583633422852, 0.02912704086303711, 0.02893027114868164, 0.028878400802612305, 0.02896735954284668, 0.028794815063476562, 0.028790847778320312, 0.028676095962524413, 0.028673215866088866, 0.02860102462768555, 0.028847488403320312, 0.028664575576782227, 0.028653568267822265, 0.029001728057861328, 0.029046783447265623, 0.029340927124023437, 0.028935935974121092, 0.029060096740722657, 0.02886204719543457, 0.029509855270385743, 0.029173343658447266, 0.02912908744812012, 0.029091968536376953, 0.02939094352722168, 0.029294591903686523, 0.029686368942260743, 0.02911017608642578, 0.028974592208862306, 0.029093664169311525, 0.029039136886596678, 0.029511680603027345, 0.029185695648193358, 0.029305471420288085, 0.029272064208984375, 0.029345407485961914, 0.02929497528076172, 0.029398591995239257, 0.02913324737548828, 0.02914508819580078, 0.029050399780273437, 0.029000160217285156, 0.028863487243652345, 0.0289149112701416, 0.02899283218383789, 0.029055456161499023, 0.02933350372314453, 0.029419071197509767, 0.029090240478515626, 0.028855712890625, 0.029078079223632813, 0.029183168411254883, 0.029622528076171876, 0.02953481674194336, 0.029562368392944335, 0.02932912063598633, 0.02986614418029785, 0.029421695709228514, 0.029677183151245116, 0.02916646385192871, 0.028949920654296874, 0.029012575149536132, 0.029175743103027344, 0.02925164794921875, 0.028959808349609376, 0.029526079177856445, 0.02899648094177246, 0.028822847366333008, 0.028824256896972655, 0.028956672668457032, 0.02892799949645996, 0.029683071136474608, 0.029223552703857424, 0.02911840057373047, 0.028956703186035156, 0.029020191192626953, 0.029491199493408202, 0.028963071823120117, 0.028854015350341797, 0.028985183715820314, 0.029032608032226563, 0.029026111602783202, 0.029118207931518553, 0.02891107177734375, 0.028992223739624023, 0.02885862350463867, 0.028901472091674804, 0.028694431304931642, 0.029506752014160156, 0.029825727462768556, 0.02911027145385742, 0.029855743408203125, 0.029668991088867187, 0.030502559661865235, 0.02909667205810547, 0.029011552810668945, 0.02918828773498535, 0.029083967208862305, 0.029171615600585937, 0.02903654479980469, 0.029050880432128907, 0.028878847122192384, 0.028884992599487305, 0.0289434871673584, 0.028903648376464842, 0.028830591201782226, 0.028959583282470704, 0.028999935150146483, 0.02902016067504883, 0.028917856216430664, 0.029114463806152343, 0.029303295135498047, 0.02931670379638672, 0.028852287292480468, 0.02912086486816406, 0.028828832626342775, 0.02905094337463379, 0.029145376205444336, 0.02907583999633789, 0.028897407531738282, 0.02933468818664551, 0.029178943634033203, 0.02895644760131836, 0.028881919860839843, 0.029049856185913086, 0.029061023712158202, 0.02902230453491211, 0.029054176330566405, 0.028959520339965822, 0.028790655136108397, 0.02891993522644043, 
0.028937248229980467, 0.028928991317749023, 0.028896608352661134, 0.029043359756469725, 0.02891366386413574, 0.028953855514526367, 0.028750591278076172, 0.02889894485473633, 0.028791168212890624, 0.028980640411376952, 0.02898748779296875, 0.029125280380249023, 0.029118303298950196, 0.029001728057861328, 0.029067264556884766, 0.029120512008666992, 0.029344863891601562, 0.029673728942871094, 0.02927881622314453, 0.02926188850402832, 0.029282304763793947, 0.029632640838623048, 0.029136768341064455, 0.02905523109436035, 0.02891366386413574, 0.02893379211425781, 0.029038944244384766, 0.028901376724243165, 0.029084768295288086, 0.029145952224731445, 0.029234367370605467, 0.02912656021118164, 0.029342559814453124, 0.029424863815307616, 0.029253536224365235, 0.029254175186157225, 0.02946505546569824, 0.02933350372314453, 0.029253631591796874, 0.029157375335693358, 0.029304607391357422, 0.02936444854736328, 0.02936422348022461, 0.02985536003112793, 0.02885055923461914, 0.02920243263244629, 0.029361631393432615, 0.029452032089233398, 0.02907200050354004, 0.029014175415039062, 0.02901398468017578, 0.02950556755065918, 0.02916966438293457, 0.029066719055175782, 0.02894492721557617, 0.02891526412963867, 0.029079744338989258, 0.029053184509277345, 0.02903606414794922, 0.028951007843017577, 0.02896281623840332, 0.028810848236083986, 0.028928255081176756, 0.02903875160217285, 0.02908527946472168, 0.029049087524414062, 0.028887199401855468, 0.029054975509643553, 0.028940288543701172, 0.029030399322509767, 0.02925472068786621, 0.029072256088256837, 0.028930112838745116, 0.029212671279907225, 0.029313024520874024, 0.02923868751525879, 0.029157663345336916, 0.029094079971313476, 0.029149311065673828, 0.0292044792175293, 0.02946767997741699, 0.029274816513061522, 0.029499679565429687, 0.02977177619934082, 0.02985078430175781, 0.029053600311279296, 0.029186239242553712, 0.02892185592651367, 0.02876736068725586, 0.02892915153503418, 0.02874665641784668, 0.028799840927124024, 0.028685663223266603, 0.02879350471496582, 0.028719104766845704, 0.028923839569091798, 0.02897926330566406, 0.028970272064208984, 0.028944480895996095, 0.02887539291381836, 0.02923334312438965, 0.02908140754699707, 0.029247488021850586, 0.029513376235961914, 0.029122943878173827, 0.02902012825012207, 0.029165567398071288, 0.030484479904174806, 0.02933145523071289, 0.02918806457519531, 0.02930076789855957, 0.029000831604003907, 0.02916339111328125, 0.029145536422729493, 0.029223487854003905, 0.02877440071105957, 0.02906332778930664, 0.030017023086547853, 0.02892608070373535, 0.029006048202514647, 0.028899328231811523, 0.028919328689575197, 0.02890540885925293, 0.02914137649536133, 0.029340831756591797, 0.02914201545715332, 0.02896281623840332, 0.028943904876708983, 0.02899715232849121, 0.028824512481689452, 0.028919807434082033, 0.029061119079589845, 0.02894643211364746, 0.029063167572021483, 0.028855680465698242, 0.02903718376159668, 0.02962403106689453, 0.02946828842163086, 0.029076128005981444, 0.029146656036376953, 0.02895715141296387, 0.029056703567504883, 0.029305152893066407, 0.029162687301635744, 0.029086528778076173, 0.029142911911010743, 0.028981376647949218, 0.029758623123168945, 0.028969823837280275, 0.028989280700683594, 0.028944543838500977, 0.028933280944824218, 0.028896095275878907, 0.028921407699584963, 0.02904275131225586, 0.02920694351196289, 0.029114336013793946, 0.029157375335693358, 0.02913484764099121, 0.029156959533691407, 0.029125024795532226, 0.029580959320068358, 0.029243743896484375, 0.029288448333740235, 
0.029230560302734375, 0.029583904266357423, 0.0291507511138916, 0.028888927459716798, 0.029018720626831054, 0.028850208282470702, 0.028870655059814454, 0.029083072662353517, 0.02901046371459961, 0.02874985694885254, 0.028970367431640626, 0.028929855346679686, 0.029143871307373045, 0.029282304763793947, 0.029970432281494142, 0.029329408645629884, 0.029099807739257813, 0.029072671890258788, 0.029116672515869142, 0.029018815994262696, 0.029134368896484374, 0.02910406494140625, 0.02929337692260742, 0.0290948486328125, 0.02918841552734375, 0.029118656158447265, 0.02910032081604004, 0.02938265609741211, 0.029208127975463866, 0.02932681655883789, 0.02925257682800293, 0.02923520088195801, 0.029376224517822267, 0.02922854423522949, 0.029440799713134767, 0.029296640396118165, 0.029293632507324217, 0.029051839828491213, 0.029319168090820313, 0.028817407608032225, 0.02894643211364746, 0.029378143310546875, 0.02969228744506836, 0.029054784774780275, 0.029124191284179687, 0.029016704559326173]",tokens/s,34.31945093461893,,, 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained 
return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", 
line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in 
resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Deci/DeciCoder-1b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Deci/DeciCoder-1b. Please pass the argument `trust_remote_code=True` to allow custom code to be run. " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run 
self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Deci/DeciLM-7B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Deci/DeciLM-7B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = 
self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Deci/DeciCoder-1b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Deci/DeciCoder-1b. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4331.208704,4569.563136,0.0,4183.81824,4182.069248,s,1,10.1197431640625,10.1197431640625,0.0,10.1197431640625,10.1197431640625,10.1197431640625,10.1197431640625,[10.1197431640625],,kWh,8.742598153335165e-05,9.636129412522372e-06,2.857502285999458e-05,0.0001256371338058686,,MB,1618.939904,4722.655232,0.0,4307.550208,4281.174016,s,10,3.7228604125976568,0.3722860412597656,0.0018237517890552016,0.3727941436767578,0.3734767120361328,0.37372251739501955,0.3739191616821289,"[0.3672055358886719, 0.3721265563964844, 0.37396832275390623, 0.3720126647949219, 0.3733023681640625, 0.37341845703125, 0.37249758911132813, 0.3730906982421875, 0.3734220886230469, 0.3718161315917969]",tokens/s,687.643294746509,kWh,1.0784125441667432e-05,1.1892962494295612e-06,7.131374752714478e-06,1.910479644381147e-05,tokens/kWh,13399776.373064939,MB,1618.939904,4737.335296,0.0,4322.230272,4281.176576,s,10,19.28531591796875,1.9285315917968748,0.012212739388282762,1.9242637939453124,1.9445255126953125,1.9479230346679688,1.9506410522460937,"[1.9197684326171875, 1.9138419189453124, 1.917401123046875, 1.9232813720703126, 1.9252462158203125, 1.9211885986328125, 1.9259764404296875, 1.943520751953125, 1.951320556640625, 1.9437705078125]",tokens/s,32.66734144671225,kWh,5.645293681374821e-05,6.226736320163312e-06,3.745274424788546e-05,0.000100132417381797,tokens/kWh,629166.8736987142,,s,630,19.282060108184798,0.030606444616166375,0.0005237074585037335,0.03051587200164795,0.031021064949035646,0.03125826320648193,0.031805548210144044,"[0.031642047882080075, 0.030666816711425782, 0.03045721626281738, 0.030566848754882813, 0.030511104583740234, 0.03060860824584961, 0.030702560424804688, 0.0305513916015625, 0.030435935974121094, 0.030509056091308592, 0.030399999618530273, 0.030286048889160155, 0.03029609680175781, 0.03013657569885254, 0.030244575500488282, 0.030211872100830078, 0.03012563133239746, 0.03010655975341797, 0.030633983612060548, 0.03038982391357422, 0.030478784561157227, 0.030479360580444335, 0.0305262393951416, 0.030512863159179688, 0.030345727920532226, 0.03012819290161133, 0.030119392395019533, 0.030332544326782226, 0.030384544372558595, 0.030659008026123046, 0.030440671920776367, 0.030328704833984376, 
0.030264223098754883, 0.03078144073486328, 0.030324735641479493, 0.03027507209777832, 0.030332672119140626, 0.030382623672485353, 0.030238943099975588, 0.030437376022338865, 0.030709728240966797, 0.030394399642944336, 0.030516799926757814, 0.03035385513305664, 0.030484447479248045, 0.030592031478881836, 0.03016089630126953, 0.0303687686920166, 0.030410751342773438, 0.030932575225830077, 0.03083305549621582, 0.030857215881347655, 0.03058483123779297, 0.03062278366088867, 0.030649280548095702, 0.030649568557739256, 0.030753568649291994, 0.030397663116455077, 0.03019148826599121, 0.030380224227905272, 0.030451904296875, 0.030302751541137696, 0.030605215072631836, 0.03121670341491699, 0.03040057563781738, 0.03075161552429199, 0.03029715156555176, 0.030259679794311524, 0.03029859161376953, 0.03013587188720703, 0.03027571105957031, 0.030449600219726564, 0.03031644821166992, 0.03053615951538086, 0.03012601661682129, 0.030396480560302735, 0.030432256698608398, 0.031045856475830077, 0.030416927337646484, 0.030268159866333008, 0.030389631271362304, 0.03028236770629883, 0.03040483283996582, 0.03034294319152832, 0.03021004867553711, 0.030310400009155275, 0.03016294479370117, 0.030379840850830078, 0.030621728897094726, 0.030234207153320314, 0.030192031860351562, 0.030230400085449218, 0.030172544479370116, 0.030157407760620116, 0.030054719924926757, 0.030021631240844726, 0.030264991760253906, 0.03048089599609375, 0.03065430450439453, 0.030465152740478514, 0.030301055908203124, 0.030233951568603517, 0.030206623077392577, 0.03099875259399414, 0.030660383224487303, 0.03041689682006836, 0.03039356803894043, 0.03013916778564453, 0.030277120590209962, 0.030419519424438476, 0.030850208282470704, 0.030268192291259766, 0.030197088241577148, 0.03001206398010254, 0.03000022315979004, 0.03033180809020996, 0.030328832626342773, 0.030326656341552734, 0.030853248596191405, 0.030468095779418947, 0.03047772789001465, 0.0304237117767334, 0.03029395294189453, 0.030271263122558595, 0.030314464569091797, 0.03040614318847656, 0.03122585678100586, 0.030578624725341796, 0.03046406364440918, 0.030500576019287108, 0.030650495529174804, 0.03055619239807129, 0.030363775253295897, 0.03036774444580078, 0.03042118453979492, 0.030558015823364256, 0.03056025505065918, 0.030306304931640625, 0.030229568481445312, 0.030430015563964845, 0.030264928817749025, 0.03050111961364746, 0.030304256439208983, 0.03023641586303711, 0.03015100860595703, 0.030345247268676757, 0.030187360763549803, 0.030366016387939454, 0.030209951400756836, 0.03024496078491211, 0.030423040390014647, 0.03132371139526367, 0.030912960052490234, 0.03058425521850586, 0.030290496826171874, 0.030432960510253907, 0.030740800857543944, 0.03077939224243164, 0.030672895431518556, 0.030472192764282226, 0.030246879577636717, 0.030277664184570313, 0.030264575958251952, 0.030669055938720702, 0.030467744827270507, 0.030459999084472656, 0.03041971206665039, 0.03061721611022949, 0.03041459274291992, 0.030440128326416016, 0.03028937530517578, 0.030208480834960937, 0.030070783615112305, 0.03001753616333008, 0.030006816864013672, 0.029981151580810547, 0.029916479110717775, 0.03000595283508301, 0.030285375595092773, 0.03036614418029785, 0.030188575744628906, 0.03046499252319336, 0.030341119766235353, 0.030400384902954103, 0.030435455322265624, 0.03140403175354004, 0.030750783920288086, 0.03046188735961914, 0.030527488708496094, 0.031461408615112305, 0.030553823471069337, 0.0303470401763916, 0.030220096588134765, 0.03039299201965332, 0.030402559280395508, 0.030428192138671876, 
0.030417888641357423, 0.030418943405151368, 0.03033817672729492, 0.03045840072631836, 0.030243455886840822, 0.03028963279724121, 0.03048886489868164, 0.030456863403320312, 0.0301964168548584, 0.03099852752685547, 0.03077939224243164, 0.030813215255737304, 0.030559200286865235, 0.030552064895629883, 0.031084447860717773, 0.03102115249633789, 0.030646272659301758, 0.03054755210876465, 0.030299840927124025, 0.030249696731567383, 0.030405664443969728, 0.03069753646850586, 0.030423967361450196, 0.030287872314453124, 0.030453760147094725, 0.03037798309326172, 0.030406656265258788, 0.030467519760131834, 0.030555936813354494, 0.03032963180541992, 0.030152416229248045, 0.03025129508972168, 0.030336544036865233, 0.0302902717590332, 0.030312576293945313, 0.03054755210876465, 0.030562719345092772, 0.030441471099853516, 0.03039641571044922, 0.03043328094482422, 0.030492671966552733, 0.030243967056274416, 0.030561376571655273, 0.030373664855957033, 0.031123455047607423, 0.030705663681030275, 0.030848384857177735, 0.030787744522094727, 0.031066015243530275, 0.03071766471862793, 0.0306997127532959, 0.03079644775390625, 0.03049785614013672, 0.030257312774658204, 0.030573280334472656, 0.03044175910949707, 0.03208233642578125, 0.030468095779418947, 0.03033091163635254, 0.030539743423461913, 0.03053308868408203, 0.03032486343383789, 0.03106857681274414, 0.030920703887939452, 0.03042918395996094, 0.030381696701049805, 0.030378368377685545, 0.030922143936157227, 0.030792287826538086, 0.03044099235534668, 0.03034115219116211, 0.030261056900024414, 0.030601184844970705, 0.030556095123291015, 0.030372159957885742, 0.03050332832336426, 0.030554399490356446, 0.03020470428466797, 0.030554655075073243, 0.030805984497070314, 0.03079007911682129, 0.03241984176635742, 0.030935359954833985, 0.030338752746582032, 0.030486207962036133, 0.030466367721557617, 0.030441471099853516, 0.030328832626342773, 0.030244863510131836, 0.03023388862609863, 0.03010348892211914, 0.030218143463134766, 0.030294591903686524, 0.03023084831237793, 0.030208000183105467, 0.030121984481811522, 0.030279680252075194, 0.030296064376831053, 0.03055411148071289, 0.03053670310974121, 0.03083776092529297, 0.03061759948730469, 0.03080396842956543, 0.030562240600585936, 0.030551584243774414, 0.030341663360595704, 0.03035545539855957, 0.030303424835205078, 0.030426944732666016, 0.03113465690612793, 0.030848480224609374, 0.03045846366882324, 0.030652416229248046, 0.030341119766235353, 0.030512800216674806, 0.030490848541259767, 0.03051532745361328, 0.03080966377258301, 0.030447967529296877, 0.03133216094970703, 0.031033536911010743, 0.03060940742492676, 0.030627264022827148, 0.030370367050170897, 0.030184671401977538, 0.030260000228881836, 0.030382080078125, 0.030488576889038086, 0.030443103790283203, 0.030336896896362306, 0.0303536319732666, 0.030572864532470705, 0.030478336334228515, 0.030119935989379884, 0.03033497619628906, 0.03028278350830078, 0.030482784271240234, 0.03272150421142578, 0.030572351455688478, 0.030170368194580077, 0.03040764808654785, 0.03021571159362793, 0.030251232147216797, 0.030388032913208008, 0.030335424423217773, 0.030258432388305664, 0.03040947151184082, 0.03038617515563965, 0.030443231582641603, 0.03034476852416992, 0.03051798439025879, 0.030326143264770507, 0.03041481590270996, 0.030466720581054686, 0.030457855224609375, 0.03041689682006836, 0.03059916877746582, 0.03061555290222168, 0.03061667251586914, 0.03031091117858887, 0.030334751129150392, 0.030359584808349608, 0.03027619171142578, 0.03034752082824707, 0.03062348747253418, 
0.03035955238342285, 0.030341119766235353, 0.030397727966308595, 0.030354143142700196, 0.030135295867919923, 0.03025168037414551, 0.03018351936340332, 0.03014681625366211, 0.030395519256591796, 0.030367807388305666, 0.03027337646484375, 0.03122425651550293, 0.030876127243041993, 0.031168575286865233, 0.03060121536254883, 0.030869247436523438, 0.030626047134399415, 0.031570047378540037, 0.03089459228515625, 0.030579935073852538, 0.030530336380004883, 0.030535680770874023, 0.030410751342773438, 0.030395904541015626, 0.0303374080657959, 0.030355775833129883, 0.03050886344909668, 0.03042835235595703, 0.03045881652832031, 0.03032041549682617, 0.030275680541992187, 0.030089216232299806, 0.03031449508666992, 0.030195711135864257, 0.0302379207611084, 0.030206560134887695, 0.031123647689819334, 0.03138150405883789, 0.030879743576049806, 0.030652416229248046, 0.030527488708496094, 0.030443071365356445, 0.030516960144042968, 0.03055996894836426, 0.030758975982666015, 0.03081648063659668, 0.030788320541381836, 0.030844287872314455, 0.030732927322387697, 0.0305101432800293, 0.03072435188293457, 0.031066816329956056, 0.030918655395507814, 0.03083241653442383, 0.03075222396850586, 0.0308272647857666, 0.03073843193054199, 0.030565664291381835, 0.03045235252380371, 0.030505056381225585, 0.030447616577148437, 0.030466047286987305, 0.030527488708496094, 0.030427135467529298, 0.030371423721313476, 0.03031491279602051, 0.030279008865356446, 0.03038684844970703, 0.030385215759277343, 0.030366655349731445, 0.03033497619628906, 0.030338272094726563, 0.030556512832641602, 0.030411455154418947, 0.03084649658203125, 0.030902496337890627, 0.030453760147094725, 0.03057254409790039, 0.03032678413391113, 0.030389951705932616, 0.0315795841217041, 0.031005247116088867, 0.030867456436157226, 0.03096575927734375, 0.030826496124267577, 0.0307423038482666, 0.03057036781311035, 0.030496959686279298, 0.03073628807067871, 0.030717439651489258, 0.030807807922363283, 0.030722816467285155, 0.030796031951904296, 0.030939136505126953, 0.031006240844726564, 0.03113007926940918, 0.03124838447570801, 0.031062015533447264, 0.03094528007507324, 0.031172191619873047, 0.030845184326171875, 0.030721376419067383, 0.03100262451171875, 0.0319964485168457, 0.030918943405151368, 0.030990079879760744, 0.03089980888366699, 0.030812032699584962, 0.030780191421508788, 0.03056844711303711, 0.030453760147094725, 0.030494688034057617, 0.030440576553344728, 0.030339136123657226, 0.030307167053222655, 0.030470144271850585, 0.03080179214477539, 0.030691455841064454, 0.03075424003601074, 0.030517824172973634, 0.030445568084716795, 0.030766847610473633, 0.0305850887298584, 0.030996000289916992, 0.030796031951904296, 0.0306649284362793, 0.0306441593170166, 0.03114112091064453, 0.031953727722167966, 0.030849119186401368, 0.030869407653808592, 0.0310864315032959, 0.030652223587036134, 0.030632287979125976, 0.030650175094604493, 0.030787071228027343, 0.030735040664672853, 0.03180748748779297, 0.031021055221557618, 0.030915744781494142, 0.030610271453857422, 0.030724096298217773, 0.030674943923950194, 0.031533184051513674, 0.031585567474365236, 0.030577375411987306, 0.03042416000366211, 0.030529727935791017, 0.031044288635253905, 0.030603456497192382, 0.03048431968688965, 0.030749727249145507, 0.030589344024658204, 0.030413248062133788, 0.030535808563232424, 0.03037148857116699, 0.030284128189086913, 0.03140812873840332, 0.030756799697875977, 0.03075059127807617, 0.031800800323486325, 0.0403298225402832, 0.03097203254699707, 0.030516416549682616, 
0.030874687194824217, 0.03078937530517578, 0.030506464004516603, 0.03139638328552246, 0.030693376541137695, 0.030728191375732423, 0.030760959625244142, 0.0306658878326416, 0.030616416931152343, 0.030457855224609375, 0.030988288879394532, 0.030619647979736327, 0.031102432250976562, 0.030734880447387695, 0.030650367736816408, 0.031297536849975584, 0.031531007766723636, 0.031324159622192385, 0.030899391174316407, 0.030607423782348632, 0.030757631301879883, 0.030429088592529296, 0.03063599967956543, 0.030356767654418946, 0.0304484806060791, 0.030209440231323242, 0.03065897560119629, 0.030549503326416014, 0.030628543853759765, 0.03155148887634277, 0.031666175842285156, 0.030779167175292967, 0.03081167984008789, 0.030841535568237304, 0.031171968460083008, 0.031334175109863284, 0.03085807991027832, 0.030818016052246093, 0.030869375228881835, 0.030668960571289063, 0.030755136489868166, 0.03068511962890625, 0.03130156707763672, 0.030757408142089843, 0.030650367736816408, 0.030543743133544923, 0.030425216674804686, 0.03082035255432129, 0.03161702346801758, 0.03126467132568359, 0.031139936447143555, 0.031033344268798828, 0.03094924736022949, 0.030961984634399413, 0.030914239883422852, 0.030977184295654298, 0.030641120910644533, 0.03068079948425293, 0.030666816711425782, 0.030540000915527343, 0.030639488220214842, 0.030716800689697267, 0.030512031555175782, 0.030562240600585936, 0.030597984313964845, 0.03094108772277832, 0.030890144348144532, 0.03100467109680176, 0.03115827178955078, 0.03125043106079101, 0.031029247283935548, 0.031033344268798828, 0.030814207077026368, 0.030956863403320312, 0.030806304931640625, 0.030724512100219727, 0.030620704650878905, 0.030634624481201172, 0.03058927917480469, 0.030740480422973632, 0.030838783264160157, 0.030842880249023437, 0.030701568603515625, 0.030602975845336913, 0.031394079208374025, 0.03162521553039551, 0.030863071441650392, 0.030814495086669922, 0.030576223373413085, 0.030613920211791993, 0.030678783416748047, 0.030565664291381835, 0.030503904342651367, 0.030668800354003906, 0.030652416229248046, 0.030956768035888673, 0.030587392807006834, 0.030587167739868165, 0.03083673667907715, 0.03177267265319824, 0.03120742416381836, 0.031023103713989256, 0.030930559158325197, 0.030781824111938475, 0.030719776153564454]",tokens/s,32.67285738480707,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Deci/DeciCoder-1b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Deci/DeciCoder-1b. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Deci/DeciLM-7B contains custom code which must be 
executed to correctly load the model. You can inspect the repository content at https://hf.co/Deci/DeciLM-7B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = 
nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4331.495424,4569.563136,0.0,4183.81824,4182.069248,s,1,10.07206640625,10.07206640625,0.0,10.07206640625,10.07206640625,10.07206640625,10.07206640625,[10.07206640625],,kWh,8.540670322916715e-05,9.413509459538998e-06,2.774029996999161e-05,0.00012256051265869774,,MB,1606.991872,4722.655232,0.0,4307.550208,4281.174016,s,10,4.169278350830077,0.4169278350830078,0.006605972195987207,0.41766082763671875,0.42467273864746097,0.42640326385498045,0.4277876840209961,"[0.40506536865234377, 0.4220838623046875, 0.40950286865234375, 0.41818838500976563, 0.4182585144042969, 0.4117859191894531, 0.4242881774902344, 0.41713327026367186, 0.41483819580078124, 0.4281337890625]",tokens/s,614.015132736417,kWh,1.216607661232558e-05,1.341369290578669e-06,8.083930078250113e-06,2.1591375981154363e-05,tokens/kWh,11856585.713825969,MB,1611.124736,4737.335296,0.0,4322.230272,4281.176576,s,10,25.232928955078126,2.5232928955078124,0.00899024057003971,2.5236262207031253,2.5315850585937496,2.5333125488281247,2.5346945410156247,"[2.529151123046875, 2.503118408203125, 2.513357666015625, 2.5207587890625, 2.523291015625, 2.52396142578125, 2.522057373046875, 2.5350400390625, 2.531201171875, 2.530991943359375]",tokens/s,24.9673750170494,kWh,7.348133542142193e-05,8.104414932254817e-06,4.8379503981350304e-05,0.00012996525433502706,tokens/kWh,484744.94450337725,,s,630,25.229623096466085,0.04004702078804137,0.0006547288893240537,0.03993707275390625,0.04054216156005859,0.04087392978668213,0.04257120166778565,"[0.04081798553466797, 0.04006121444702149, 0.03995644760131836, 0.03979471969604492, 0.03971955108642578, 0.03986774444580078, 0.04606224060058594, 0.04782291030883789, 0.04010700988769531, 0.03979564666748047, 0.03952025604248047, 0.03953049468994141, 0.04060105514526367, 0.04236956787109375, 0.03961587142944336, 0.039492225646972655, 0.03955916976928711, 0.039619838714599606, 0.03950998306274414, 0.0409505615234375, 0.04001792144775391, 0.04005887985229492, 0.03998515319824219, 0.039994464874267575, 0.03998608016967774, 0.039970367431640626, 0.04001574325561524, 0.03974364852905273, 
0.039752094268798825, 0.03954687881469727, 0.0404664306640625, 0.039531871795654296, 0.03992851257324219, 0.03946847915649414, 0.03936044692993164, 0.03962223815917969, 0.039762943267822266, 0.03944268798828125, 0.039618305206298825, 0.03953868865966797, 0.03983769607543945, 0.03939328002929687, 0.03949894332885742, 0.039502655029296875, 0.03945657730102539, 0.039769729614257815, 0.04048310470581055, 0.039980670928955075, 0.03986934280395508, 0.039726848602294924, 0.0397127685546875, 0.03973734283447266, 0.0397209587097168, 0.039723007202148435, 0.03979257583618164, 0.04050950241088867, 0.03989503860473633, 0.039792640686035156, 0.04059036636352539, 0.041487327575683595, 0.04006707382202149, 0.03987865447998047, 0.03982096099853515, 0.0401921272277832, 0.03940358352661133, 0.03944822311401367, 0.039620960235595706, 0.04009369659423828, 0.039564735412597654, 0.03941961669921875, 0.039529281616210936, 0.03923276901245117, 0.03917216110229492, 0.03919500732421875, 0.039129566192626954, 0.03921088027954102, 0.03997817611694336, 0.039422752380371094, 0.03925814437866211, 0.03968806457519531, 0.039408863067626955, 0.03955190277099609, 0.03957555389404297, 0.039518207550048826, 0.03955686569213867, 0.04023116683959961, 0.039913471221923826, 0.03966511917114258, 0.03991606521606445, 0.03963859176635742, 0.03954118347167969, 0.03939932632446289, 0.03933193588256836, 0.03942604827880859, 0.03982147216796875, 0.040050529479980466, 0.03961228942871094, 0.03939923095703125, 0.03951174545288086, 0.03951257705688477, 0.03990131378173828, 0.03969843292236328, 0.039362560272216796, 0.03971059036254883, 0.03939136123657227, 0.039387134552001955, 0.04011008071899414, 0.042278911590576174, 0.04016128158569336, 0.03961139297485351, 0.0400799674987793, 0.04012278366088867, 0.039806495666503905, 0.03993648147583008, 0.039782398223876955, 0.039792640686035156, 0.039930015563964846, 0.039685985565185544, 0.03967606353759766, 0.03989897537231445, 0.04141260910034179, 0.039741439819335936, 0.039632671356201174, 0.03972528076171875, 0.03999260711669922, 0.03981785583496094, 0.04043523025512695, 0.04004332733154297, 0.04087347030639649, 0.0396596794128418, 0.0394153938293457, 0.03950223922729492, 0.039362560272216796, 0.039172096252441405, 0.03915750503540039, 0.039903743743896485, 0.0396607666015625, 0.039618335723876956, 0.039959583282470706, 0.03995414352416992, 0.039827648162841796, 0.0402691535949707, 0.03984431838989258, 0.040027809143066403, 0.03999164962768555, 0.039799007415771484, 0.03969843292236328, 0.039865184783935546, 0.03980179214477539, 0.03999129486083984, 0.03991263961791992, 0.040180545806884765, 0.040202239990234374, 0.04046438217163086, 0.039897087097167966, 0.04011008071899414, 0.04017158508300781, 0.03993766403198242, 0.040499809265136716, 0.03998659133911133, 0.04032755279541016, 0.039849502563476566, 0.039859840393066406, 0.039815391540527344, 0.03969081497192383, 0.039858177185058595, 0.039858177185058595, 0.03995238494873047, 0.039986881256103515, 0.03986054229736328, 0.039731201171875, 0.039897087097167966, 0.03962060928344727, 0.03961161422729492, 0.03950057601928711, 0.039927806854248044, 0.03986636734008789, 0.03983673477172851, 0.03987756729125977, 0.039927806854248044, 0.04004249572753906, 0.03989299011230469, 0.039624702453613284, 0.039577598571777346, 0.04038041687011719, 0.040210430145263674, 0.03957670211791992, 0.03971775817871094, 0.03989113616943359, 0.041455230712890624, 0.040874305725097655, 0.040275135040283204, 0.03937567901611328, 0.03924991989135742, 0.03940556716918946, 
0.039476638793945314, 0.03954684829711914, 0.039361152648925785, 0.0396492805480957, 0.03967385482788086, 0.03992156982421875, 0.039626399993896486, 0.03948112106323242, 0.03952051162719727, 0.03955283355712891, 0.039436897277832034, 0.039894462585449215, 0.03960070419311523, 0.039616512298583983, 0.03984147262573242, 0.03982953643798828, 0.03954512023925781, 0.039772159576416014, 0.03966566467285156, 0.03971014404296875, 0.040574718475341796, 0.04073068618774414, 0.03978521728515625, 0.039617729187011716, 0.03961324691772461, 0.03973324966430664, 0.03967795181274414, 0.03964518356323242, 0.039566688537597657, 0.03970729446411133, 0.04009164810180664, 0.04228515243530274, 0.04002515029907226, 0.040036865234375, 0.03988515090942383, 0.039790016174316406, 0.04031737518310547, 0.039938175201416015, 0.04362854385375976, 0.03984611129760742, 0.04051651382446289, 0.03992361450195313, 0.040569854736328126, 0.04022473526000977, 0.040377376556396484, 0.03986057662963867, 0.04082956695556641, 0.04014899063110351, 0.04018175888061523, 0.03982460784912109, 0.039891742706298826, 0.03998310470581055, 0.03992505645751953, 0.040096446990966796, 0.040118270874023435, 0.04000153732299805, 0.04011734390258789, 0.040930206298828126, 0.04039449691772461, 0.04065100860595703, 0.04085843276977539, 0.04088886260986328, 0.040124832153320314, 0.03985612869262695, 0.0400906867980957, 0.04048287963867187, 0.04002656173706055, 0.0398664321899414, 0.04004800033569336, 0.040008705139160154, 0.03989913558959961, 0.039569408416748046, 0.04252467346191406, 0.04024851226806641, 0.0398671989440918, 0.039981281280517575, 0.03973823928833008, 0.03985465621948242, 0.03957347106933594, 0.039826816558837894, 0.04001193618774414, 0.04014985656738281, 0.03986636734008789, 0.03973324966430664, 0.040153087615966795, 0.039964672088623046, 0.04043948745727539, 0.039903553009033206, 0.039802497863769534, 0.03982368087768555, 0.04000979232788086, 0.039651329040527344, 0.03972259140014649, 0.03969820785522461, 0.03993452835083008, 0.03972262573242188, 0.03991392135620117, 0.03962054443359375, 0.03958099365234375, 0.03969260787963867, 0.039983551025390626, 0.039839614868164064, 0.03993203353881836, 0.040032257080078126, 0.04024745559692383, 0.03989673614501953, 0.04007113647460937, 0.04001308822631836, 0.03987529754638672, 0.040280288696289065, 0.04050937652587891, 0.04052588653564453, 0.039970497131347656, 0.039962718963623044, 0.03989254379272461, 0.04010675048828125, 0.03992361450195313, 0.03989404678344727, 0.03961135864257812, 0.039704479217529294, 0.043920543670654295, 0.04128409576416016, 0.04003881454467773, 0.04094489669799805, 0.04002217483520508, 0.03966204833984375, 0.03965331268310547, 0.039753921508789064, 0.03978979110717774, 0.03982614517211914, 0.039660575866699216, 0.03979267120361328, 0.03976208114624023, 0.039817184448242185, 0.03970233535766601, 0.039588863372802735, 0.03983359909057617, 0.03960422515869141, 0.039711776733398436, 0.03962371063232422, 0.039863262176513675, 0.0399659194946289, 0.039939487457275394, 0.0397213134765625, 0.039921791076660156, 0.03978022384643555, 0.040035774230957034, 0.03993244934082031, 0.040075294494628905, 0.040597503662109374, 0.040320159912109375, 0.04032406234741211, 0.0405022087097168, 0.04038332748413086, 0.03993404769897461, 0.0399189453125, 0.03991414260864258, 0.039995391845703124, 0.040050495147705076, 0.03997100830078125, 0.03986022567749024, 0.039825408935546876, 0.03989913558959961, 0.03988479995727539, 0.039618560791015625, 0.03976192092895508, 0.04058931350708008, 
0.03990937423706055, 0.03960422515869141, 0.04003839874267578, 0.03995625686645508, 0.03997923278808594, 0.03963289642333984, 0.040048641204833986, 0.04026163101196289, 0.04009072113037109, 0.03988102340698242, 0.040408702850341795, 0.041136318206787106, 0.0399483528137207, 0.03995926284790039, 0.0396038703918457, 0.04063891220092773, 0.04076406478881836, 0.039809120178222655, 0.03996451187133789, 0.039963840484619144, 0.03974038314819336, 0.040130561828613284, 0.04007526397705078, 0.04099238586425781, 0.04039215850830078, 0.04027840042114258, 0.039769695281982424, 0.039857086181640626, 0.0396060791015625, 0.03976732635498047, 0.03961539077758789, 0.040097793579101565, 0.03956934356689453, 0.03972512054443359, 0.03968204879760742, 0.04004975891113281, 0.039908222198486325, 0.04005072021484375, 0.03987865447998047, 0.03974553680419922, 0.03958966445922851, 0.03978672027587891, 0.039904705047607424, 0.03989766311645508, 0.03989254379272461, 0.03995046234130859, 0.03998342514038086, 0.04024524688720703, 0.04051763153076172, 0.04009574508666992, 0.040567935943603514, 0.04072499084472656, 0.04001011276245117, 0.03990323257446289, 0.040005985260009765, 0.0401033935546875, 0.040366016387939456, 0.039887008666992185, 0.0402248649597168, 0.03991756820678711, 0.03993395233154297, 0.039809024810791016, 0.0397059211730957, 0.03999609756469726, 0.03975139236450195, 0.03960588836669922, 0.04003087997436523, 0.03996636962890625, 0.04017414474487305, 0.04026537704467773, 0.03985388946533203, 0.04042169570922852, 0.04083097457885742, 0.03974758529663086, 0.0400830078125, 0.04048121643066406, 0.0400032958984375, 0.040083744049072265, 0.03998310470581055, 0.04361423873901367, 0.042590206146240234, 0.04016742324829101, 0.04016320037841797, 0.04087315368652344, 0.040137664794921875, 0.040199871063232424, 0.03997932815551758, 0.03986415863037109, 0.04017596817016601, 0.0403966064453125, 0.04076323318481445, 0.040166656494140626, 0.04028636932373047, 0.041159423828125, 0.03991756820678711, 0.03998099136352539, 0.03990099334716797, 0.039659038543701175, 0.03970089721679688, 0.03990300750732422, 0.03975408172607422, 0.03985641479492188, 0.03992566299438476, 0.03986431884765625, 0.040417247772216794, 0.03987196731567383, 0.04005331039428711, 0.03984316635131836, 0.03994252777099609, 0.0398809928894043, 0.040153087615966795, 0.04091904067993164, 0.04044800186157226, 0.040863231658935545, 0.040372417449951174, 0.04093369674682617, 0.04033251190185547, 0.04001667022705078, 0.040120319366455076, 0.040158432006835935, 0.040114177703857425, 0.04004739379882812, 0.03968153762817383, 0.04061439895629883, 0.04133027267456055, 0.03984835052490234, 0.040002880096435545, 0.04025724792480469, 0.040145694732666014, 0.040611297607421874, 0.03977289581298828, 0.039661758422851565, 0.03954048156738281, 0.03956934356689453, 0.03943436813354492, 0.039506206512451174, 0.03983331298828125, 0.039790592193603515, 0.03996012878417969, 0.040013248443603516, 0.040873214721679686, 0.0406935043334961, 0.040753150939941404, 0.040202239990234374, 0.040753150939941404, 0.0400261116027832, 0.0397946891784668, 0.039976417541503904, 0.040133152008056644, 0.04004044723510742, 0.040403167724609376, 0.040202014923095705, 0.039998752593994144, 0.03967049789428711, 0.04009996795654297, 0.04086495971679688, 0.0405285758972168, 0.03988457489013672, 0.04023289489746094, 0.03997110366821289, 0.04018380737304687, 0.0398410873413086, 0.03997673416137695, 0.04015731048583984, 0.04021891021728516, 0.040095966339111326, 0.04024086380004883, 0.03981574249267578, 
0.0400546875, 0.04069718551635742, 0.04106844711303711, 0.043209568023681644, 0.04044300842285156, 0.04029449462890625, 0.03998764801025391, 0.04021209716796875, 0.04019683074951172, 0.04018380737304687, 0.040174591064453126, 0.040145984649658205, 0.04095174407958985, 0.040218624114990234, 0.040065025329589846, 0.0400313606262207, 0.04012099075317383, 0.04008777618408203, 0.03996192169189453, 0.03970111846923828, 0.03972288131713867, 0.039672000885009766, 0.039626304626464846, 0.0401514892578125, 0.03952025604248047, 0.03973734283447266, 0.04026483154296875, 0.03974233627319336, 0.03985408020019531, 0.04005043029785156, 0.03970822525024414, 0.03998543930053711, 0.04019241714477539, 0.040568416595458984, 0.04033990478515625, 0.03992172622680664, 0.03996662521362305, 0.04092655944824219, 0.04032579040527344, 0.04030054473876953, 0.04104217529296875, 0.04001545715332031, 0.04013475036621094, 0.03994630432128906, 0.040235008239746094, 0.039874561309814455, 0.04051968002319336, 0.03995238494873047, 0.04019219207763672, 0.04022886276245117, 0.040109886169433596, 0.04029439926147461, 0.04052787017822266, 0.04007113647460937, 0.04170959854125977, 0.04024883270263672, 0.04015359878540039, 0.040101886749267575, 0.03982332611083984, 0.039921310424804686, 0.03988268661499023, 0.0399771842956543, 0.03994851303100586, 0.0400827522277832, 0.040096446990966796, 0.04061929702758789, 0.0397790412902832, 0.03994771194458008, 0.03995296096801758, 0.039876609802246096, 0.04004800033569336, 0.040540382385253905, 0.04161167907714844, 0.04033126449584961, 0.039883808135986326, 0.04001481628417969, 0.039973918914794924, 0.039989471435546875, 0.03999023818969726, 0.04014422225952148, 0.03996102523803711, 0.040101886749267575, 0.03984089660644531, 0.0397619514465332, 0.039561054229736325, 0.03992870330810547, 0.04044800186157226, 0.04019404983520508, 0.03995046234130859, 0.03990105438232422, 0.04055817413330078, 0.03983004760742188, 0.04004249572753906, 0.04004422378540039, 0.04004617691040039, 0.04033001708984375, 0.04011363220214844, 0.040448478698730465, 0.039967777252197266, 0.040290496826171876]",tokens/s,24.970646513076318,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4387.147776,4569.563136,0.0,4183.81824,4182.069248,s,1,10.1598896484375,10.1598896484375,0.0,10.1598896484375,10.1598896484375,10.1598896484375,10.1598896484375,[10.1598896484375],,kWh,8.764358846251525e-05,9.660354693846028e-06,2.8710856302002763e-05,0.00012601479945836404,,MB,1685.245952,4722.655232,0.0,4307.550208,4281.174016,s,10,4.3282680969238285,0.43282680969238285,0.007127998597099705,0.43284698486328127,0.4414816864013672,0.44278959503173826,0.44383592193603516,"[0.4158833618164062, 0.43176458740234375, 0.4332077331542969, 0.44409750366210937, 0.4317353515625, 0.43464556884765626, 0.43248623657226565, 0.4343899841308594, 0.4411910400390625, 
0.4288667297363281]",tokens/s,591.4605894721342,kWh,1.2459473111458881e-05,1.374057215383713e-06,8.24573576324999e-06,2.2079266090092587e-05,tokens/kWh,11594588.287283352,MB,1693.478912,4737.335296,0.0,4322.230272,4281.176576,s,10,25.583512451171874,2.5583512451171875,0.005381164697254462,2.5567269287109378,2.5653928466796874,2.566444348144531,2.567285549316406,"[2.55424560546875, 2.556730712890625, 2.556445068359375, 2.5651591796875, 2.55672314453125, 2.558515625, 2.56359130859375, 2.567495849609375, 2.556376220703125, 2.548229736328125]",tokens/s,24.6252347562675,kWh,7.496460237437625e-05,8.26873113086108e-06,4.967128279254896e-05,0.00013290461629778628,tokens/kWh,474024.16676665394,,s,630,25.580276203155535,0.04060361302088178,0.0008307481584151202,0.04048006439208984,0.0410968074798584,0.04138100109100342,0.04262874324798585,"[0.041377601623535154, 0.040423103332519535, 0.040264190673828124, 0.04027926254272461, 0.040850208282470706, 0.040130561828613284, 0.0399769287109375, 0.0401446418762207, 0.04034793472290039, 0.040119617462158204, 0.04034755325317383, 0.04081244659423828, 0.04115750503540039, 0.04137984085083008, 0.03992166519165039, 0.04029030227661133, 0.040890335083007816, 0.04029033660888672, 0.04033740615844727, 0.04015923309326172, 0.04024115371704102, 0.04013631820678711, 0.04024563217163086, 0.04015004730224609, 0.04029743957519531, 0.04146176147460937, 0.04270489501953125, 0.04107263946533203, 0.04149657440185547, 0.0408138542175293, 0.04038115310668945, 0.04038860702514648, 0.040164352416992184, 0.03999641418457031, 0.03974332809448242, 0.040077472686767576, 0.04091622543334961, 0.0404405746459961, 0.04043155288696289, 0.0407281608581543, 0.04047100830078125, 0.04031283187866211, 0.04040703964233398, 0.04036608123779297, 0.04029030227661133, 0.04039884948730469, 0.040521278381347656, 0.04024099349975586, 0.04037078475952149, 0.040546302795410154, 0.04040217590332031, 0.04047542572021484, 0.040320991516113285, 0.04129622268676758, 0.04079990386962891, 0.040531871795654296, 0.04075513458251953, 0.040576255798339844, 0.04055542373657227, 0.04067663955688477, 0.040739391326904295, 0.041050273895263674, 0.040427520751953126, 0.04120371246337891, 0.04066019058227539, 0.04043843078613281, 0.041324287414550784, 0.04301059341430664, 0.040453983306884767, 0.04008755111694336, 0.04043683242797851, 0.04007004928588867, 0.04034527969360351, 0.039919902801513675, 0.04053129577636719, 0.04034630584716797, 0.04062815856933594, 0.04054636764526367, 0.04065075302124024, 0.04032716751098633, 0.04053952026367187, 0.040325759887695316, 0.04086783981323242, 0.04032627105712891, 0.04064508819580078, 0.04032524871826172, 0.04056905746459961, 0.040544319152832034, 0.040374526977539064, 0.04014668655395508, 0.040376190185546876, 0.040175167083740235, 0.04109072113037109, 0.04011100769042969, 0.04038860702514648, 0.040525184631347654, 0.04046707153320313, 0.04029849624633789, 0.04060383987426758, 0.040543872833251955, 0.040531169891357424, 0.04048380661010742, 0.04028736114501953, 0.040471424102783204, 0.04060160064697266, 0.04035948944091797, 0.04028425598144531, 0.04053833770751953, 0.040410751342773436, 0.040530433654785154, 0.040316703796386716, 0.04042300796508789, 0.04046057510375976, 0.04012803268432617, 0.04009452819824219, 0.04024028778076172, 0.040260448455810546, 0.04099046325683594, 0.040005760192871095, 0.040077438354492186, 0.04068950271606445, 0.04002751922607422, 0.04012112045288086, 0.04006681442260742, 0.04221977615356445, 0.04555091094970703, 0.041388511657714844, 
0.04039811325073242, 0.04028483200073242, 0.04053987121582031, 0.040366687774658204, 0.0408985595703125, 0.04047257614135742, 0.04065862274169922, 0.04027423858642578, 0.040642047882080076, 0.04038896179199219, 0.040577121734619144, 0.04049107360839844, 0.04068297576904297, 0.04047721481323242, 0.04020633697509766, 0.04091628646850586, 0.04167340850830078, 0.04015024185180664, 0.04040758514404297, 0.04022294235229492, 0.04041116714477539, 0.04030028915405273, 0.04048931121826172, 0.03988195037841797, 0.04064531326293945, 0.040432865142822266, 0.04011702346801758, 0.0403394546508789, 0.04100447845458984, 0.040370750427246097, 0.040269054412841794, 0.04051225662231445, 0.0405852165222168, 0.04038406372070313, 0.040272319793701175, 0.04079328155517578, 0.04078409576416016, 0.04066979217529297, 0.0407283821105957, 0.040788158416748044, 0.04056598281860352, 0.040777793884277345, 0.04064729690551758, 0.040452415466308594, 0.04053737640380859, 0.040331775665283204, 0.04171366500854492, 0.040681472778320314, 0.04038633728027344, 0.04053014373779297, 0.040687614440917966, 0.04051279830932617, 0.04048559951782227, 0.04131651306152344, 0.04047228622436523, 0.040673408508300785, 0.04059465789794922, 0.04056134414672852, 0.04060918426513672, 0.04058182525634765, 0.040542209625244144, 0.040569984436035156, 0.04122832107543945, 0.04060729598999024, 0.04028041458129883, 0.040504894256591796, 0.040440319061279296, 0.04097241592407227, 0.040091327667236325, 0.04040531158447266, 0.040697601318359374, 0.0405670394897461, 0.04006092834472656, 0.04021424102783203, 0.04023324966430664, 0.040183040618896486, 0.040272640228271483, 0.04038860702514648, 0.05467318344116211, 0.04083734512329101, 0.040196094512939456, 0.041111553192138675, 0.03980633544921875, 0.0401578254699707, 0.04011977767944336, 0.040153377532958986, 0.040597759246826175, 0.04016742324829101, 0.040223968505859374, 0.0403996467590332, 0.040529918670654294, 0.04208812713623047, 0.04343225479125976, 0.0404398078918457, 0.040376319885253906, 0.04023910522460938, 0.04039884948730469, 0.04020019149780273, 0.04003180694580078, 0.03993993759155273, 0.04048041534423828, 0.04029564666748047, 0.04065456008911133, 0.040451969146728516, 0.040038528442382815, 0.0401162223815918, 0.04010569763183594, 0.040191455841064455, 0.04045497512817383, 0.040307937622070314, 0.04032742309570313, 0.04002595138549805, 0.04047123336791992, 0.04159030532836914, 0.04092095947265625, 0.04058319854736328, 0.04143775939941406, 0.040409088134765625, 0.040441856384277344, 0.04068556976318359, 0.04043148803710937, 0.04033078384399414, 0.040419200897216796, 0.04014153671264648, 0.04026166534423828, 0.04118387222290039, 0.040699905395507815, 0.04069580841064453, 0.04048838424682617, 0.04011065673828125, 0.041355262756347655, 0.04050739288330078, 0.04055855941772461, 0.040795326232910156, 0.04044630432128906, 0.04047324752807617, 0.04017075347900391, 0.04030524826049805, 0.04032508850097656, 0.0402309455871582, 0.04048031997680664, 0.04118713760375976, 0.04213590240478516, 0.040766849517822265, 0.0407213134765625, 0.04074649429321289, 0.04059708786010742, 0.04035881423950195, 0.04024220657348633, 0.0404337272644043, 0.04053084945678711, 0.04024729537963867, 0.04069910430908203, 0.04119161605834961, 0.04203785705566406, 0.04130031967163086, 0.040602272033691406, 0.04068044662475586, 0.04050268936157227, 0.040194656372070314, 0.040525825500488284, 0.040273822784423825, 0.041385440826416015, 0.040371936798095705, 0.04048988723754883, 0.040548351287841795, 0.04058319854736328, 
0.04040496063232422, 0.04051724624633789, 0.040479103088378904, 0.040177440643310545, 0.04053424072265625, 0.04038995361328125, 0.04038447952270508, 0.040352481842041016, 0.04006707382202149, 0.040118656158447265, 0.040218238830566404, 0.0400873908996582, 0.04091100692749024, 0.04034560012817383, 0.040804351806640625, 0.040683231353759765, 0.04043395233154297, 0.040527198791503904, 0.04021315383911133, 0.0404664306640625, 0.040079360961914064, 0.04111743927001953, 0.04048681640625, 0.04060140609741211, 0.04047670364379883, 0.04054233551025391, 0.04120412826538086, 0.04063843154907226, 0.0405852165222168, 0.04118518447875977, 0.040550495147705076, 0.0403353271484375, 0.04007491302490234, 0.040753150939941404, 0.04131264114379883, 0.04085670471191406, 0.04070694351196289, 0.04095590209960937, 0.040656383514404294, 0.04045257568359375, 0.04013628768920898, 0.040231361389160156, 0.04038655853271484, 0.04045209503173828, 0.04017766571044922, 0.04050435256958008, 0.04045260620117187, 0.04024163055419922, 0.04046867370605469, 0.040273727416992186, 0.041095169067382815, 0.041183231353759765, 0.04041139221191406, 0.040597183227539066, 0.04054841613769531, 0.04065280151367188, 0.040521728515625, 0.04055036926269531, 0.040581153869628905, 0.04043161773681641, 0.040427520751953126, 0.040531871795654296, 0.04097443389892578, 0.040769729614257816, 0.040521537780761716, 0.04063827133178711, 0.040523551940917966, 0.04061225509643555, 0.04033536148071289, 0.040613311767578125, 0.04040703964233398, 0.041336383819580075, 0.04066313552856445, 0.04045843124389648, 0.04036886215209961, 0.04018787384033203, 0.04132992172241211, 0.040469150543212894, 0.04062604904174805, 0.04061004638671875, 0.04065203094482422, 0.04047251129150391, 0.04058707046508789, 0.04068864059448242, 0.041277633666992185, 0.040855777740478515, 0.040521503448486325, 0.04025958251953125, 0.0398864631652832, 0.04006937789916992, 0.04085158538818359, 0.04060979080200195, 0.04053401565551758, 0.040599552154541016, 0.04050675201416016, 0.04068415832519531, 0.04065280151367188, 0.040693729400634766, 0.040374080657958986, 0.040558815002441406, 0.040799392700195315, 0.040548702239990235, 0.04048108673095703, 0.040155326843261716, 0.04046604919433594, 0.040307071685791014, 0.040292350769042966, 0.040431167602539064, 0.04036022567749024, 0.04038057708740234, 0.04077568054199219, 0.040417278289794925, 0.04042252731323242, 0.04032198333740234, 0.040296382904052734, 0.040992767333984374, 0.04040867233276367, 0.04045852661132812, 0.04039276885986328, 0.04042704010009766, 0.04070265579223633, 0.040471519470214844, 0.042199745178222656, 0.040786304473876954, 0.040374080657958986, 0.040476768493652344, 0.0502391357421875, 0.04064057540893555, 0.040297950744628906, 0.04028185653686524, 0.04032521438598633, 0.040141120910644534, 0.040321601867675784, 0.04070604705810547, 0.04069792175292969, 0.04052371215820313, 0.040398880004882814, 0.04027593612670898, 0.04049817657470703, 0.04103606414794922, 0.04031561660766601, 0.04041459274291992, 0.040467041015625, 0.040794143676757814, 0.0406231689453125, 0.04049097442626953, 0.04069798278808594, 0.04146252822875977, 0.04138195037841797, 0.0418078727722168, 0.041267200469970705, 0.04042956924438477, 0.040581119537353515, 0.04123839950561523, 0.040408512115478516, 0.04049785614013672, 0.0407982063293457, 0.04055244827270508, 0.04024086380004883, 0.040407329559326174, 0.04073267364501953, 0.04098457717895508, 0.04051324844360352, 0.04093366241455078, 0.04082688140869141, 0.04072447967529297, 0.0405810546875, 
0.040697280883789065, 0.04047526550292969, 0.04043775939941406, 0.04022272109985352, 0.04060124969482422, 0.04056304168701172, 0.04033510589599609, 0.04066672134399414, 0.040735393524169924, 0.04062412643432617, 0.04033846282958985, 0.040555488586425784, 0.040321025848388675, 0.040599552154541016, 0.040474624633789064, 0.040302593231201174, 0.041062110900878905, 0.04047257614135742, 0.04081081771850586, 0.04112758255004883, 0.04068294525146485, 0.04122880172729492, 0.041328929901123045, 0.04106003189086914, 0.041310752868652344, 0.04086771011352539, 0.040697856903076174, 0.040671230316162106, 0.04046438217163086, 0.04061183929443359, 0.04043571090698242, 0.04050688171386719, 0.040423583984375, 0.0420294075012207, 0.04189798355102539, 0.040564735412597655, 0.04059552001953125, 0.04047980880737305, 0.04045616149902344, 0.040727169036865234, 0.040769824981689455, 0.041094398498535155, 0.04046902465820312, 0.04115763092041016, 0.04066579055786133, 0.040548126220703126, 0.040599552154541016, 0.04066332626342774, 0.04031622314453125, 0.040742977142333985, 0.04048313522338867, 0.040503711700439454, 0.04043775939941406, 0.04033740615844727, 0.039996608734130856, 0.03997699356079101, 0.040510238647460936, 0.03990883255004883, 0.0400819206237793, 0.04100912094116211, 0.04066883087158203, 0.04055843353271484, 0.04059807968139648, 0.03993113708496094, 0.04025347137451172, 0.04030860900878906, 0.04029526519775391, 0.04016742324829101, 0.04009104156494141, 0.04042387390136719, 0.04051564788818359, 0.0406976318359375, 0.040558910369873045, 0.04170751953125, 0.041621246337890626, 0.04061824035644531, 0.04057100677490234, 0.04026764678955078, 0.0405667839050293, 0.040674591064453126, 0.04338147354125976, 0.04138582229614258, 0.04044179153442383, 0.04050140762329101, 0.04086793518066406, 0.04057699203491211, 0.04059503936767578, 0.04055696105957031, 0.04037744140625, 0.04034172821044922, 0.04011868667602539, 0.04025328063964844, 0.0404832649230957, 0.04087923049926758, 0.04034969711303711, 0.04039974212646484, 0.040755199432373046, 0.04074444961547852, 0.040505855560302735, 0.04093267059326172, 0.0406220474243164, 0.04067750549316406, 0.04060220718383789, 0.04036947250366211, 0.04010259246826172, 0.040218624114990234, 0.04129049682617188, 0.04049100875854492, 0.04048896026611328, 0.04058480072021484, 0.04000582504272461, 0.03998537445068359, 0.040187904357910156, 0.04044780731201172, 0.040122112274169924, 0.040437824249267576, 0.04053644943237305, 0.0402083854675293, 0.04039212799072266, 0.04042195129394531, 0.04031488037109375, 0.040226814270019534, 0.04060569763183594, 0.04058723068237305, 0.042442302703857425, 0.04152569580078125, 0.040513568878173825, 0.040460289001464846, 0.04074684906005859, 0.0418776626586914, 0.04023091125488281, 0.040361248016357425, 0.04038041687011719, 0.0407231674194336, 0.04052799987792969, 0.040377376556396484, 0.04003427124023438, 0.04013350296020508, 0.04017356872558594, 0.0402083854675293, 0.04045321655273437, 0.04026460647583008, 0.04039884948730469, 0.04025539016723633, 0.04021871948242187, 0.040199169158935545, 0.040051071166992185, 0.04064265441894531, 0.04151555252075195, 0.04126924896240235, 0.04064051055908203, 0.04053606414794922, 0.04023091125488281, 0.04047257614135742, 0.040153087615966795, 0.04015923309326172, 0.04013071823120117, 0.04013859176635742, 0.040183265686035155, 0.03998582458496094, 0.0398682861328125, 0.04018380737304687, 0.03987865447998047, 0.040323070526123043, 0.040089599609375, 0.040532096862792966, 0.04041862487792969, 0.040271713256835935, 
0.04037910461425781]",tokens/s,24.62835017873203,,, 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4385.01376,5362.286592,0.0,4959.76448,4769.731072,s,1,11.4836142578125,11.4836142578125,0.0,11.4836142578125,11.4836142578125,11.4836142578125,11.4836142578125,[11.4836142578125],,kWh,0.00012774722721252751,1.40841229156193e-05,5.549921106599853e-05,0.00019733056119414536,,MB,1621.938176,5383.258112,0.0,4966.055936,4251.027456,s,10,32.44054467773438,3.244054467773437,0.005505840929588435,3.24374609375,3.2494002685546874,3.250417053222656,3.251230480957031,"[3.231277099609375, 3.24019091796875, 3.241441650390625, 3.24313720703125, 3.243220947265625, 3.24917431640625, 3.244271240234375, 3.249072509765625, 3.247324951171875, 3.251433837890625]",tokens/s,78.91359486812381,kWh,9.462355926333354e-05,1.043595304958617e-05,6.284074471700069e-05,0.0001679002570299204,tokens/kWh,1524714.7593965859,MB,1626.075136,5397.938176,0.0,4980.736,4251.030016,s,10,18.234695434570312,1.8234695434570312,0.006640154487602359,1.8237943725585937,1.8302659667968748,1.8337518188476563,1.8365405004882813,"[1.8264407958984374, 1.817359619140625, 1.8294913330078124, 1.8188624267578124, 1.8372376708984375, 1.811933349609375, 1.8202750244140624, 1.8239666748046874, 1.8255064697265626, 1.8236220703125]",tokens/s,34.549521392367886,kWh,5.3198538779167075e-05,5.8691900113962665e-06,3.519955593739993e-05,9.426728472796328e-05,tokens/kWh,668312.4498791444,,s,630,18.23121586227417,0.028938437876625665,0.0007313507583159402,0.028790800094604493,0.029311736869812012,0.029542240333557127,0.03214239925384524,"[0.029621471405029298, 0.028855072021484376, 0.02878054428100586, 0.028672000885009766, 0.02858393669128418, 0.028667903900146483, 0.028633087158203126, 0.02858723258972168, 0.028495744705200197, 0.0317490234375, 0.03061759948730469, 0.028968320846557618, 0.028802879333496095, 0.02878704071044922, 0.028750303268432618, 0.02875507164001465, 0.028862592697143554, 0.028725311279296874, 0.028985504150390626, 0.030220640182495116, 0.028944032669067383, 0.02888547134399414, 0.028955999374389647, 0.028797664642333985, 0.029050336837768555, 0.028766752243041992, 0.028774208068847656, 0.029110464096069336, 0.029107776641845703, 0.029045183181762694, 0.02894803237915039, 0.029187583923339845, 0.029137855529785157, 0.02876006317138672, 0.02906924819946289, 0.028964223861694335, 0.028676799774169922, 0.02857721519470215, 0.028561983108520508, 0.02882054328918457, 0.028752511978149414, 0.029009279251098634, 0.02888595199584961, 0.029173696517944336, 0.028659776687622072, 0.029265920639038087, 0.02876006317138672, 0.03133440017700195, 0.028917760848999025, 0.028867679595947264, 0.028672319412231445, 0.028835552215576172, 0.028723648071289062, 0.028875200271606446, 0.02933350372314453, 0.02893824005126953, 0.02875587272644043, 0.02879497528076172, 0.028645151138305663, 
0.028659296035766602, 0.02864508819580078, 0.028711296081542968, 0.02863567924499512, 0.029534496307373048, 0.028865184783935547, 0.028795936584472655, 0.028629791259765624, 0.028698816299438476, 0.028631040573120117, 0.02874777603149414, 0.028602367401123048, 0.028589664459228517, 0.028612735748291016, 0.02881155204772949, 0.028691648483276367, 0.028668735504150392, 0.028655616760253907, 0.028708192825317384, 0.028637407302856445, 0.028875200271606446, 0.028692480087280273, 0.028630912780761717, 0.029182079315185547, 0.028603616714477538, 0.02868508720397949, 0.028866559982299804, 0.028819456100463867, 0.028785728454589845, 0.02929145622253418, 0.028874752044677734, 0.029469951629638672, 0.02939776039123535, 0.02910207939147949, 0.02894825553894043, 0.029481184005737304, 0.028785823822021484, 0.028868448257446288, 0.02866796875, 0.028792863845825196, 0.02876464080810547, 0.028793279647827148, 0.028821504592895508, 0.028758047103881836, 0.0287805118560791, 0.0289300479888916, 0.028685823440551757, 0.028776159286499025, 0.028786880493164062, 0.028787296295166017, 0.028677120208740234, 0.028668928146362304, 0.028728864669799806, 0.028832223892211913, 0.028739328384399413, 0.028838144302368165, 0.02877235221862793, 0.028832799911499025, 0.02867692756652832, 0.028729503631591796, 0.02871500778198242, 0.028835840225219726, 0.02916966438293457, 0.02904473686218262, 0.02896691131591797, 0.029148160934448244, 0.029026880264282226, 0.029519872665405275, 0.028833791732788085, 0.02877644729614258, 0.028767999649047853, 0.028710559844970705, 0.030564319610595702, 0.030025856018066406, 0.02881177520751953, 0.028908639907836913, 0.028648351669311522, 0.028704767227172853, 0.02870681571960449, 0.02869862365722656, 0.0287825927734375, 0.028848127365112306, 0.028800960540771484, 0.028696672439575195, 0.028698591232299803, 0.028717023849487305, 0.02867344093322754, 0.028502368927001955, 0.028557119369506837, 0.02855331230163574, 0.028616832733154296, 0.0284715518951416, 0.028577600479125977, 0.028903615951538085, 0.028520448684692383, 0.028604415893554686, 0.02925935935974121, 0.028873119354248047, 0.028817407608032225, 0.028651519775390624, 0.028688383102416993, 0.028699775695800782, 0.028846975326538085, 0.029321216583251954, 0.02874163246154785, 0.028726495742797852, 0.028740383148193358, 0.02879897689819336, 0.02880441665649414, 0.02876691246032715, 0.028635135650634767, 0.04077568054199219, 0.029953407287597655, 0.02876380729675293, 0.028639392852783205, 0.028603200912475587, 0.029501056671142577, 0.028563520431518555, 0.028681568145751953, 0.028588703155517578, 0.028538175582885742, 0.02857062339782715, 0.02859791946411133, 0.028682592391967774, 0.02867977523803711, 0.02881372833251953, 0.029140544891357423, 0.029292991638183594, 0.029130495071411133, 0.029042943954467774, 0.02951535987854004, 0.02900655937194824, 0.02874176025390625, 0.028604415893554686, 0.02859212875366211, 0.028657279968261718, 0.028649856567382812, 0.028479488372802734, 0.028464672088623046, 0.028572128295898436, 0.02858393669128418, 0.028710592269897462, 0.02940345573425293, 0.028637184143066406, 0.028528640747070313, 0.028606399536132813, 0.028704832077026367, 0.028640352249145507, 0.02868931198120117, 0.028708864212036132, 0.02851798439025879, 0.02898371124267578, 0.028915519714355468, 0.02929180717468262, 0.028986047744750977, 0.029051103591918946, 0.0289300479888916, 0.030166688919067382, 0.029054367065429687, 0.02891667175292969, 0.02876825523376465, 0.029046239852905272, 0.028659616470336914, 0.028885631561279296, 
0.02891507148742676, 0.02878060722351074, 0.028751487731933593, 0.02890028762817383, 0.028782527923583986, 0.029030399322509767, 0.028852287292480468, 0.028983295440673826, 0.029087743759155273, 0.02889727973937988, 0.028899328231811523, 0.028923904418945313, 0.02893414306640625, 0.029058624267578125, 0.028942623138427735, 0.02891484832763672, 0.028703744888305665, 0.02880512046813965, 0.02890547180175781, 0.028632352828979492, 0.02866044807434082, 0.02895030403137207, 0.029622495651245116, 0.028825599670410155, 0.02883785629272461, 0.028686368942260742, 0.02876620864868164, 0.028769983291625976, 0.029015552520751952, 0.030713855743408205, 0.03257753753662109, 0.029022207260131837, 0.028811264038085937, 0.028671680450439455, 0.028717376708984374, 0.028737535476684572, 0.028507232666015625, 0.028668832778930665, 0.02898944091796875, 0.02897920036315918, 0.028919807434082033, 0.028895231246948243, 0.028730432510375978, 0.028832704544067382, 0.029095327377319336, 0.028887487411499022, 0.02889743995666504, 0.02913484764099121, 0.028917760848999025, 0.02873958396911621, 0.028779935836791993, 0.02889788818359375, 0.028966880798339843, 0.0290665283203125, 0.02924825668334961, 0.02898044776916504, 0.02903081512451172, 0.029045120239257812, 0.028963903427124023, 0.02868934440612793, 0.028593664169311524, 0.028874431610107422, 0.028944543838500977, 0.02937868881225586, 0.029512096405029296, 0.0289486083984375, 0.028952032089233398, 0.028914207458496093, 0.029173759460449217, 0.030904064178466795, 0.03044931221008301, 0.029172319412231446, 0.029032447814941405, 0.028859424591064452, 0.02915836715698242, 0.02909110450744629, 0.028764896392822266, 0.0289354248046875, 0.029056800842285156, 0.028896223068237303, 0.029007871627807616, 0.02896895980834961, 0.029114368438720704, 0.02961750411987305, 0.02933622360229492, 0.029494815826416016, 0.029153472900390626, 0.029124895095825196, 0.029415008544921874, 0.030220703125, 0.02935807991027832, 0.029319168090820313, 0.029416576385498047, 0.028738431930541992, 0.028431520462036133, 0.02850003242492676, 0.02844044876098633, 0.02849065589904785, 0.028428287506103517, 0.02863030433654785, 0.028545759201049806, 0.028743104934692384, 0.028650047302246094, 0.02877644729614258, 0.02879692840576172, 0.028804096221923828, 0.02876108741760254, 0.028557024002075194, 0.02865328025817871, 0.028803647994995116, 0.02861644744873047, 0.02873779106140137, 0.028440351486206054, 0.028817632675170898, 0.028873823165893556, 0.028504608154296875, 0.028724895477294923, 0.028668191909790038, 0.02877225685119629, 0.028623071670532227, 0.028522815704345703, 0.028637184143066406, 0.028524192810058593, 0.028651424407958984, 0.02849590492248535, 0.028516672134399415, 0.029061216354370117, 0.03301123046875, 0.028717376708984374, 0.02875372886657715, 0.028694208145141602, 0.02859894371032715, 0.02852659225463867, 0.028522079467773437, 0.028581567764282226, 0.02860233688354492, 0.028586751937866212, 0.028785760879516602, 0.02862774467468262, 0.02872947120666504, 0.02863849639892578, 0.028577600479125977, 0.028690784454345704, 0.028785215377807618, 0.028665855407714845, 0.028917760848999025, 0.02889727973937988, 0.02873139190673828, 0.028719104766845704, 0.028868608474731446, 0.02875094413757324, 0.02871183967590332, 0.02867158317565918, 0.02877027130126953, 0.029120031356811522, 0.02942188835144043, 0.028753919601440428, 0.03139555168151856, 0.030898303985595704, 0.028771520614624024, 0.028865440368652344, 0.028710559844970705, 0.02842665672302246, 0.028472320556640625, 0.028678943634033203, 
0.028670175552368164, 0.028438528060913085, 0.028528640747070313, 0.028403520584106445, 0.02914726448059082, 0.028694591522216796, 0.028778495788574218, 0.028823551177978517, 0.028704767227172853, 0.028753919601440428, 0.028612287521362304, 0.028633407592773438, 0.02877440071105957, 0.02869862365722656, 0.02895257568359375, 0.028606464385986328, 0.028661760330200195, 0.028682239532470705, 0.028665855407714845, 0.028727296829223634, 0.028513664245605468, 0.02857187271118164, 0.02838979148864746, 0.02866703987121582, 0.02846384048461914, 0.028612640380859374, 0.029492799758911132, 0.03230307388305664, 0.032516414642333985, 0.0290897274017334, 0.02865184020996094, 0.0286167049407959, 0.028481536865234375, 0.02858598327636719, 0.02849510383605957, 0.028660480499267577, 0.028504064559936523, 0.028542047500610353, 0.028353439331054688, 0.028735488891601563, 0.028983295440673826, 0.028978464126586913, 0.028748512268066406, 0.0285614070892334, 0.02854092788696289, 0.028733440399169922, 0.028614656448364258, 0.028613983154296876, 0.028731136322021483, 0.02868111991882324, 0.028637184143066406, 0.028841567993164063, 0.028668319702148438, 0.029310911178588868, 0.028711328506469725, 0.028730783462524414, 0.029721183776855467, 0.02857097625732422, 0.02941391944885254, 0.03306099319458008, 0.02876416015625, 0.028542911529541016, 0.028581951141357424, 0.02857779121398926, 0.028480703353881837, 0.028582719802856444, 0.028878080368041993, 0.02861747169494629, 0.028356288909912108, 0.02841801643371582, 0.02936400032043457, 0.028833568572998045, 0.02875267219543457, 0.02847267150878906, 0.028738208770751953, 0.029394208908081056, 0.028585695266723634, 0.028863136291503905, 0.028536415100097655, 0.0289036808013916, 0.028609024047851563, 0.028664831161499024, 0.02850099182128906, 0.028473344802856446, 0.028491775512695314, 0.02850947189331055, 0.028575519561767578, 0.028576704025268556, 0.02872684860229492, 0.029376480102539064, 0.028534624099731447, 0.02865011215209961, 0.02865740776062012, 0.028544639587402342, 0.028827327728271485, 0.03350624084472656, 0.028841535568237306, 0.028719551086425783, 0.02873958396911621, 0.028794879913330077, 0.028819456100463867, 0.028845727920532225, 0.02873923110961914, 0.02877004814147949, 0.02935897636413574, 0.028729408264160157, 0.029067264556884766, 0.028809215545654295, 0.028753919601440428, 0.028821504592895508, 0.028682239532470705, 0.028753759384155274, 0.028767520904541016, 0.03073695945739746, 0.028868032455444337, 0.029006208419799805, 0.0298417911529541, 0.029251295089721678, 0.02892380714416504, 0.028856704711914063, 0.028667903900146483, 0.029069311141967775, 0.029052896499633787, 0.029001087188720704, 0.028889759063720703, 0.02918809509277344, 0.029257728576660157, 0.028897247314453124, 0.02906883239746094, 0.02878006362915039, 0.02882454490661621, 0.028990911483764648, 0.028806816101074217, 0.028529567718505858, 0.02873139190673828, 0.028983295440673826, 0.028442623138427735, 0.02886787223815918, 0.02885910415649414, 0.028869951248168945, 0.028813472747802736, 0.028819999694824218, 0.029021856307983398, 0.028739295959472656, 0.02891827201843262, 0.028862592697143554, 0.02892185592651367, 0.02878873634338379, 0.02868396759033203, 0.03172080039978027, 0.0288941764831543, 0.029929088592529296, 0.028913951873779296, 0.028910720825195312, 0.02862998390197754, 0.02874777603149414, 0.02871673583984375, 0.02894879913330078, 0.02876006317138672, 0.02892153549194336, 0.028934463500976563, 0.029485055923461914, 0.029495071411132813, 0.02898918342590332, 
0.02868592071533203, 0.02876710319519043, 0.028818815231323243, 0.028811103820800783, 0.02892470359802246, 0.028794879913330077, 0.028964096069335938, 0.028672767639160157, 0.028804128646850585, 0.028683231353759765, 0.02875142478942871, 0.029005472183227538, 0.028967039108276367, 0.028787071228027344, 0.02950377655029297, 0.029673471450805664, 0.029013856887817383, 0.028909727096557616, 0.02888035202026367, 0.028860063552856446, 0.028883840560913084, 0.02904473686218262, 0.02880860710144043, 0.02938105583190918, 0.02954857635498047, 0.02917184066772461, 0.028965951919555664, 0.028941247940063478, 0.02916873550415039, 0.028996511459350584, 0.028839935302734376, 0.02894175910949707, 0.02894495964050293, 0.028968576431274415, 0.028884960174560548, 0.02927791976928711, 0.02899628829956055, 0.028737535476684572, 0.02858937644958496, 0.028966623306274412, 0.02895692825317383, 0.02891644859313965, 0.028895231246948243, 0.028785888671875, 0.028846879959106446, 0.028814367294311524, 0.02865660858154297, 0.028679872512817384, 0.028684608459472655, 0.029382207870483398, 0.028815807342529295, 0.02877964782714844, 0.02862323188781738, 0.028895584106445313, 0.028870815277099608, 0.028835840225219726, 0.02914508819580078, 0.02920857620239258, 0.029077503204345705, 0.028851680755615235, 0.028860960006713867, 0.028710912704467774, 0.02845676803588867, 0.028879039764404296, 0.029010976791381836, 0.02865974426269531, 0.02847123146057129, 0.028484607696533205, 0.028653568267822265, 0.02856959915161133, 0.030852256774902345, 0.028840063095092773, 0.028750175476074218, 0.02867852783203125, 0.029052352905273436, 0.02883564758300781, 0.029516063690185546, 0.02888547134399414]",tokens/s,34.556115442835505,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Deci/DeciLM-7B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Deci/DeciLM-7B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in 
load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in 
run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report 
= Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 
976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4357.210112,4562.28864,0.0,4183.81824,4182.069248,s,1,10.031142578125,10.031142578125,0.0,10.031142578125,10.031142578125,10.031142578125,10.031142578125,[10.031142578125],,kWh,8.981634780416621e-05,9.900313537311436e-06,3.0006690671999584e-05,0.00012972335201347722,,MB,1635.110912,4715.380736,0.0,4307.550208,4281.174016,s,10,3.7298596801757813,0.3729859680175781,0.00239330107419596,0.3737974243164063,0.3747413604736328,0.37496203460693356,0.3751385739135742,"[0.3665159912109375, 
0.373134521484375, 0.37361480712890627, 0.37194921875, 0.3744526062011719, 0.37398004150390624, 0.3743899230957031, 0.37194754028320315, 0.37518270874023435, 0.37469232177734374]",tokens/s,686.3528978332375,kWh,1.0785647206249867e-05,1.1891202052040404e-06,7.142733888785716e-06,1.9117501300239624e-05,tokens/kWh,13390871.32672465,MB,1641.34912,4730.0608,0.0,4322.230272,4281.176576,s,10,19.28433227539062,1.928433227539062,0.011992368182706773,1.9251874999999998,1.9393903198242188,1.9469975280761718,1.9530832946777343,"[1.9149078369140624, 1.92698876953125, 1.92338623046875, 1.9225906982421874, 1.954604736328125, 1.9357330322265625, 1.936275634765625, 1.917160888671875, 1.914984619140625, 1.9376998291015626]",tokens/s,32.66900772104845,kWh,5.615197611333324e-05,6.192956502939655e-06,3.712638485981421e-05,9.947131747608708e-05,tokens/kWh,633348.4023185398,,s,630,19.2810475769043,0.030604837423657612,0.00047886753596131467,0.030571392059326172,0.03109004878997803,0.03141498889923096,0.032441209793090835,"[0.030980224609375, 0.03203251266479492, 0.030491968154907227, 0.03043529510498047, 0.030155744552612305, 0.029895967483520507, 0.029991647720336915, 0.02984511947631836, 0.02998828887939453, 0.02998281669616699, 0.030333791732788086, 0.03010918426513672, 0.029904767990112303, 0.029932159423828125, 0.02993561553955078, 0.029888511657714844, 0.029808895111083984, 0.029858720779418944, 0.029913471221923827, 0.029925823211669922, 0.02991619110107422, 0.03002060890197754, 0.02979248046875, 0.029812223434448244, 0.03008336067199707, 0.029960031509399413, 0.030494688034057617, 0.030851264953613282, 0.030752767562866212, 0.029865983963012696, 0.029716320037841797, 0.029941375732421876, 0.029991455078125, 0.030666751861572264, 0.03087123107910156, 0.030623296737670898, 0.030740480422973632, 0.030692159652709963, 0.030547903060913085, 0.03059916877746582, 0.030748544692993166, 0.03073766326904297, 0.030724992752075197, 0.031192672729492187, 0.03087919998168945, 0.03069228744506836, 0.030691328048706053, 0.03086774444580078, 0.030506399154663084, 0.03044528007507324, 0.031002687454223632, 0.03039481544494629, 0.030324832916259765, 0.030405920028686525, 0.03050150489807129, 0.03039836883544922, 0.03145948791503906, 0.0306297607421875, 0.030623903274536134, 0.030642175674438478, 0.030765056610107422, 0.03038198471069336, 0.030250879287719728, 0.031472896575927736, 0.03066080093383789, 0.03020038414001465, 0.030064640045166017, 0.029894176483154296, 0.02993814468383789, 0.02995609664916992, 0.030144512176513674, 0.030275423049926756, 0.030224544525146484, 0.03330227279663086, 0.031366559982299806, 0.030351936340332033, 0.030357791900634767, 0.030521343231201172, 0.03013382339477539, 0.030124479293823243, 0.030926847457885744, 0.03182796859741211, 0.03020796775817871, 0.03027961540222168, 0.03036169624328613, 0.03032268714904785, 0.030871456146240234, 0.03058492851257324, 0.030527488708496094, 0.03046944046020508, 0.03063043212890625, 0.030556320190429687, 0.030488576889038086, 0.03047644805908203, 0.030477312088012694, 0.030415775299072266, 0.030492607116699218, 0.030623743057250977, 0.030308351516723633, 0.030211551666259766, 0.03023721694946289, 0.030224384307861327, 0.030126079559326172, 0.030107295989990235, 0.03030575942993164, 0.0307077751159668, 0.030720832824707032, 0.030958688735961915, 0.030741151809692384, 0.030822656631469728, 0.031106271743774415, 0.030659360885620116, 0.030672895431518556, 0.03079270362854004, 0.030775583267211915, 0.030697248458862306, 0.030645408630371095, 
0.030664127349853517, 0.0307159366607666, 0.030533344268798827, 0.030816640853881836, 0.030671072006225587, 0.030465120315551757, 0.030569055557250976, 0.030961984634399413, 0.03091823959350586, 0.03139583969116211, 0.030961023330688477, 0.030832767486572266, 0.030923456192016602, 0.030781248092651366, 0.030819616317749023, 0.0307412166595459, 0.03116646385192871, 0.031282623291015624, 0.031146591186523437, 0.030942495346069337, 0.030919328689575195, 0.031174688339233397, 0.03079756736755371, 0.030621952056884765, 0.030519264221191406, 0.03012201690673828, 0.030345216751098632, 0.030650367736816408, 0.030514432907104493, 0.03059174346923828, 0.03064793586730957, 0.030500608444213866, 0.030572511672973632, 0.030760896682739257, 0.030728128433227538, 0.030620704650878905, 0.030571456909179687, 0.03068377685546875, 0.030576831817626954, 0.0303636474609375, 0.030338815689086914, 0.03027484893798828, 0.030423871994018553, 0.0304682559967041, 0.03020595169067383, 0.030212287902832032, 0.030537567138671874, 0.030271455764770507, 0.030252479553222657, 0.030217023849487306, 0.03018832015991211, 0.03001852798461914, 0.030306304931640625, 0.03065251159667969, 0.030684064865112305, 0.030301183700561524, 0.030056447982788087, 0.031082496643066407, 0.030319904327392577, 0.030570463180541994, 0.030544639587402344, 0.03018137550354004, 0.03020595169067383, 0.03023072052001953, 0.030229568481445312, 0.03028156852722168, 0.03020879936218262, 0.03007663917541504, 0.03012998390197754, 0.03012668800354004, 0.030116960525512694, 0.0300982723236084, 0.030994592666625978, 0.030203744888305663, 0.03014240074157715, 0.03010361671447754, 0.030300159454345704, 0.030287872314453124, 0.030330751419067385, 0.030406784057617188, 0.03057868766784668, 0.03057459259033203, 0.03054591941833496, 0.030612543106079103, 0.030519615173339842, 0.030345344543457033, 0.030310176849365236, 0.030374624252319335, 0.03025916862487793, 0.030263328552246095, 0.030088287353515625, 0.02989967918395996, 0.030024927139282228, 0.029929983139038087, 0.0300437126159668, 0.030179840087890625, 0.03015817642211914, 0.030156671524047853, 0.030063488006591796, 0.030627967834472657, 0.03082444763183594, 0.030676000595092772, 0.03101795196533203, 0.031362720489501957, 0.03086128044128418, 0.030634368896484375, 0.030638080596923828, 0.03074608039855957, 0.030751167297363283, 0.03071996879577637, 0.030844863891601564, 0.030663904190063478, 0.03050204849243164, 0.030801088333129882, 0.030702144622802734, 0.03069958305358887, 0.030482431411743165, 0.030631616592407228, 0.030527904510498048, 0.031088544845581056, 0.030871551513671876, 0.030928127288818358, 0.030746784210205078, 0.03092336082458496, 0.031916032791137694, 0.030916608810424805, 0.03051728057861328, 0.03045577621459961, 0.030139551162719727, 0.030161760330200196, 0.03016089630126953, 0.030466047286987305, 0.030416864395141602, 0.03008515167236328, 0.030105215072631836, 0.031170495986938475, 0.03076710319519043, 0.03077529525756836, 0.03081827163696289, 0.030838367462158203, 0.03057913589477539, 0.03080303955078125, 0.030837055206298827, 0.03072800064086914, 0.030980064392089845, 0.031043840408325196, 0.032731712341308596, 0.03123404884338379, 0.031024288177490235, 0.030978591918945312, 0.030910783767700196, 0.030942623138427734, 0.03110767936706543, 0.03132371139526367, 0.03171286392211914, 0.03108540725708008, 0.03105996894836426, 0.03127673530578613, 0.031053312301635744, 0.03102131271362305, 0.031013439178466797, 0.030910463333129884, 0.030801088333129882, 0.030795936584472657, 
0.03078620719909668, 0.030770208358764647, 0.03063088035583496, 0.030627744674682617, 0.030799615859985353, 0.030781791687011718, 0.030994400024414063, 0.03106768035888672, 0.030800384521484377, 0.030631263732910155, 0.03054560089111328, 0.030546911239624025, 0.030600383758544923, 0.03056108856201172, 0.03061555290222168, 0.030843040466308595, 0.031022943496704102, 0.03102019119262695, 0.031019456863403322, 0.030871551513671876, 0.0308874568939209, 0.03128358459472656, 0.031135808944702147, 0.03143519973754883, 0.032239295959472655, 0.031116992950439452, 0.031194816589355467, 0.031220544815063478, 0.031295616149902346, 0.03125148773193359, 0.031181631088256837, 0.031105184555053712, 0.03197660827636719, 0.031029312133789063, 0.030978784561157227, 0.030494464874267577, 0.030472415924072266, 0.030656288146972657, 0.03041689682006836, 0.03039232063293457, 0.030427135467529298, 0.03063590431213379, 0.030719167709350587, 0.03091961669921875, 0.030453760147094725, 0.03058483123779297, 0.030402528762817384, 0.03029327964782715, 0.030042272567749023, 0.030124639511108397, 0.032833534240722655, 0.03083263969421387, 0.030471391677856445, 0.030605663299560548, 0.03054537582397461, 0.03154646492004395, 0.03168243217468262, 0.030641567230224608, 0.030558656692504883, 0.03061065673828125, 0.030635072708129884, 0.030545215606689453, 0.030517375946044922, 0.030693023681640626, 0.0307857608795166, 0.030691455841064454, 0.030443391799926757, 0.03058131217956543, 0.03043328094482422, 0.031942655563354495, 0.03143065643310547, 0.030620864868164063, 0.030548864364624024, 0.030631263732910155, 0.031103584289550783, 0.030691328048706053, 0.03073843193054199, 0.03077238464355469, 0.0337017936706543, 0.030593984603881835, 0.030398080825805664, 0.03027142333984375, 0.03059119987487793, 0.030714080810546874, 0.03059721565246582, 0.030358623504638672, 0.030136415481567383, 0.030253791809082033, 0.030490495681762694, 0.030519424438476564, 0.03056025505065918, 0.03057244873046875, 0.03064227294921875, 0.030718048095703124, 0.030790815353393553, 0.03065727996826172, 0.030684864044189453, 0.03122364807128906, 0.031167104721069337, 0.03077337646484375, 0.03077529525756836, 0.030670047760009766, 0.030870304107666016, 0.030658559799194338, 0.030770975112915037, 0.0306845760345459, 0.03146198463439941, 0.03080022430419922, 0.03063360023498535, 0.030839040756225587, 0.030678112030029295, 0.03072038459777832, 0.030702112197875976, 0.030697023391723633, 0.03058086395263672, 0.030512672424316406, 0.03046188735961914, 0.030376800537109373, 0.03060531234741211, 0.030498815536499024, 0.030453760147094725, 0.03054182434082031, 0.03054182434082031, 0.03062076759338379, 0.03071788787841797, 0.030667743682861327, 0.030646272659301758, 0.03150569534301758, 0.03084707260131836, 0.03153772735595703, 0.03095961570739746, 0.03073849678039551, 0.03075856018066406, 0.030723487854003906, 0.031075263977050783, 0.03075071907043457, 0.030801919937133788, 0.03083673667907715, 0.030914560317993164, 0.030727935791015626, 0.030748607635498047, 0.030746335983276366, 0.030972320556640624, 0.03082575988769531, 0.0307281608581543, 0.030546239852905274, 0.030521984100341796, 0.030633983612060548, 0.03076848030090332, 0.030519424438476564, 0.030351232528686524, 0.030360223770141602, 0.030220287322998047, 0.030271488189697264, 0.03039459228515625, 0.030418815612792968, 0.03034511947631836, 0.030275487899780275, 0.03159622383117676, 0.03112739181518555, 0.030824480056762697, 0.030556127548217772, 0.03055615997314453, 0.030608415603637695, 
0.03067580795288086, 0.030517375946044922, 0.030348608016967774, 0.030470848083496094, 0.03037948799133301, 0.03028438377380371, 0.0300882568359375, 0.030098304748535157, 0.02998886489868164, 0.030192703247070313, 0.030415807723999023, 0.03039823913574219, 0.03045583915710449, 0.030552255630493165, 0.030603296279907228, 0.030604768753051757, 0.030877504348754883, 0.030679136276245116, 0.03176508712768555, 0.030993696212768554, 0.031185216903686523, 0.030517824172973634, 0.03043280029296875, 0.030218559265136717, 0.030212095260620117, 0.030477312088012694, 0.03021670341491699, 0.03003670310974121, 0.030039840698242188, 0.03007600021362305, 0.030019519805908203, 0.030216928482055663, 0.030044416427612304, 0.030072383880615235, 0.029931968688964843, 0.03051456069946289, 0.030558656692504883, 0.03048076820373535, 0.030469600677490234, 0.030828575134277343, 0.03043155288696289, 0.030508800506591795, 0.030555871963500975, 0.030550559997558593, 0.030552064895629883, 0.030537727355957032, 0.030390272140502928, 0.03032268714904785, 0.03057663917541504, 0.030561344146728515, 0.03049158477783203, 0.030462175369262694, 0.030314271926879882, 0.0301711368560791, 0.030209823608398436, 0.030123296737670897, 0.030192575454711913, 0.03015475273132324, 0.03027519989013672, 0.03087545585632324, 0.03027084732055664, 0.029981504440307616, 0.02986934471130371, 0.029797088623046874, 0.02985100746154785, 0.029880960464477538, 0.02996019172668457, 0.02980361557006836, 0.030181663513183594, 0.03006118392944336, 0.03022652816772461, 0.02992083168029785, 0.03081660842895508, 0.03001468849182129, 0.02992617607116699, 0.031170560836791993, 0.030115840911865234, 0.030031871795654298, 0.03057868766784668, 0.03061075210571289, 0.030519744873046876, 0.030380319595336915, 0.030486495971679687, 0.03159449577331543, 0.030564031600952148, 0.030669120788574217, 0.030537727355957032, 0.03047340774536133, 0.030571327209472657, 0.030204032897949217, 0.030365568161010742, 0.03048374366760254, 0.030567136764526368, 0.03041279983520508, 0.03032428741455078, 0.03153359985351562, 0.032523681640625, 0.03040086364746094, 0.030177536010742186, 0.02995574378967285, 0.030112064361572266, 0.030631744384765625, 0.03067046356201172, 0.03059939193725586, 0.030646560668945312, 0.030484479904174806, 0.0303636474609375, 0.03014575958251953, 0.030103519439697267, 0.030015775680541992, 0.03004070472717285, 0.029927488327026366, 0.02993062400817871, 0.03008905601501465, 0.030112640380859375, 0.030130176544189452, 0.03095759963989258, 0.030275360107421875, 0.030231039047241212, 0.030652095794677734, 0.031197183609008788, 0.03065376091003418, 0.03155795288085937, 0.031054304122924804, 0.030932159423828126, 0.03196182441711426, 0.031049728393554688, 0.030865535736083985, 0.030818336486816405, 0.03087753677368164, 0.030697471618652345, 0.03055820846557617, 0.03042323112487793, 0.03035308837890625, 0.030295551300048826, 0.03046022415161133, 0.030277280807495116, 0.030130847930908203, 0.030157983779907225, 0.030444095611572266, 0.030056032180786132, 0.030369983673095704, 0.030379743576049806, 0.030300960540771485, 0.030168767929077148, 0.03053984069824219, 0.030723840713500976, 0.03147120094299317, 0.03092777633666992, 0.03092889595031738, 0.030661855697631836, 0.030636415481567383, 0.03064259147644043, 0.030626848220825197, 0.03075584030151367, 0.030917695999145508, 0.030899103164672852, 0.03040787124633789, 0.03058070373535156, 0.03041267204284668, 0.03041302490234375, 0.030421152114868164, 0.030288480758666993, 0.030117055892944337, 0.030200639724731446, 
0.033230846405029296, 0.03119923210144043, 0.03018956756591797, 0.030416223526000978, 0.030712480545043944, 0.03193459129333496, 0.030858207702636718, 0.03102943992614746, 0.030624479293823243, 0.030634143829345702, 0.030635871887207032, 0.03060326385498047, 0.03055820846557617, 0.03062579154968262, 0.030509023666381835, 0.030640159606933594, 0.0306889591217041, 0.03042950439453125, 0.03285606384277344, 0.03216790390014648]",tokens/s,32.67457317799694,,, 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 
66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp8iya7579/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1174, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 894, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 507, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 436, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 
71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 4.12 MiB is free. Process 114447 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 242.96 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpy9gngpwv/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained 
self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in 
from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 96.12 MiB is free. Process 294223 has 14.64 GiB memory in use. Of the allocated memory 14.23 GiB is allocated by PyTorch, and 323.24 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3894, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 85, in _process_model_before_weight_loading model, has_been_replaced = replace_with_awq_linear( File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear _, has_been_replaced = replace_with_awq_linear( File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear _, has_been_replaced = replace_with_awq_linear( File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear _, has_been_replaced = replace_with_awq_linear( [Previous line repeated 1 more time] File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 166, in replace_with_awq_linear model._modules[name] = target_cls( File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 42, in __init__ assert out_features % (32 // self.w_bit) == 0 AssertionError " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = 
scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in 
_flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", 
line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 
854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 30081 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp0ceiiseq/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpxsoh4v0b/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, 
in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report 
= scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpcj27r7tj/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = 
scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 
67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in 
launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, 
**kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1039, in forward outputs = self.model( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 816, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 551, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 384, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File 
""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, 
q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1069, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 881, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 479, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1069, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 881, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 479, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # 
type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl 
return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 3 more times] File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 805, in _apply param_applied = fn(param) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.55 GiB is free. Process 225571 has 13.19 GiB memory in use. Of the allocated memory 13.09 GiB is allocated by PyTorch, and 880.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in 
_call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File 
""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp7m94xslu/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,4351.619072,6115.16416,0.0,5729.419264,5727.871488,s,1,12.784453125,12.784453125,0.0,12.784453125,12.784453125,12.784453125,12.784453125,[12.784453125],,kWh,0.00010473343346666866,1.1545179846964897e-05,3.1678914232002864e-05,0.00014795752754563643,,MB,1638.027264,6295.519232,0.0,5880.414208,5851.877888,s,10,3.6359690551757815,0.3635969055175782,0.0009591400576781859,0.36364782714843746,0.36452489013671874,0.3647996307373047,0.3650194232177734,"[0.3612745056152344, 0.36412796020507815, 0.363581787109375, 0.36335818481445314, 0.36297561645507814, 0.3644638366699219, 0.36346331787109376, 0.3650743713378906, 0.3639356079101562, 
0.3637138671875]",tokens/s,704.0763992080334,kWh,1.0616817495833193e-05,1.1708389359269992e-06,7.039142536071342e-06,1.8826798967831537e-05,tokens/kWh,13597638.155982604,MB,1645.912064,6308.102144,0.0,5892.99712,5851.880448,s,10,21.8140576171875,2.1814057617187497,0.009010288332354146,2.1822803955078127,2.1912506591796874,2.1924764038085938,2.193456999511719,"[2.18161962890625, 2.190080810546875, 2.1791875, 2.186933837890625, 2.1694658203125, 2.173657470703125, 2.165490966796875, 2.182941162109375, 2.190978271484375, 2.1937021484375]",tokens/s,28.880459154174837,kWh,6.357674689666728e-05,7.01252332252988e-06,4.215628570912886e-05,0.000112745555928326,tokens/kWh,558780.3393337297,,s,630,21.81077776336671,0.034620282164074125,0.0005396768342796798,0.03452499198913574,0.03499715957641602,0.03520392341613769,0.03637264377593995,"[0.03546588897705078, 0.03502284622192383, 0.034821441650390625, 0.034996929168701174, 0.0359681282043457, 0.03454601669311524, 0.034611614227294925, 0.03473420715332031, 0.034489921569824215, 0.034451904296875, 0.034528385162353514, 0.034671489715576174, 0.03449375915527344, 0.03436771011352539, 0.03457260894775391, 0.03454991912841797, 0.03442406463623047, 0.0344453125, 0.03458329772949219, 0.034790431976318356, 0.03463993453979492, 0.034394687652587894, 0.03446409606933594, 0.03457183837890625, 0.034560447692871095, 0.03449446487426758, 0.03451203155517578, 0.035199840545654296, 0.034683937072753905, 0.034791393280029295, 0.034621601104736326, 0.03508070373535156, 0.03460335922241211, 0.03444271850585937, 0.03440809631347656, 0.034833278656005856, 0.034514942169189454, 0.03444710540771485, 0.03437007904052734, 0.034294593811035154, 0.0342762565612793, 0.034389823913574216, 0.034446945190429686, 0.034450016021728515, 0.034583839416503906, 0.03645513534545899, 0.03437286376953125, 0.034554622650146485, 0.03434700775146484, 0.034493633270263675, 0.0343513298034668, 0.034259552001953124, 0.03446783828735352, 0.03436675262451172, 0.03430886459350586, 0.03431625747680664, 0.0349277458190918, 0.03458483123779297, 0.03459340667724609, 0.03454886245727539, 0.034659198760986325, 0.034477214813232425, 0.03457465744018555, 0.035251232147216795, 0.0348702392578125, 0.035049152374267575, 0.03457478332519531, 0.03441865539550781, 0.03489545440673828, 0.03454393768310547, 0.03451660919189453, 0.03476518249511719, 0.035004383087158204, 0.03495702362060547, 0.03452700805664063, 0.035923648834228515, 0.03487811279296875, 0.034737953186035155, 0.034735584259033205, 0.034538463592529295, 0.03466460800170899, 0.03469820785522461, 0.03599856185913086, 0.03454947280883789, 0.03451523208618164, 0.03473177719116211, 0.034293983459472654, 0.03471897506713867, 0.03473667144775391, 0.034681087493896486, 0.03458358383178711, 0.034757183074951174, 0.03460752105712891, 0.03464969635009765, 0.03479132843017578, 0.03447654342651367, 0.03474332809448242, 0.03505635070800781, 0.03457257461547852, 0.03471948623657226, 0.03462166213989258, 0.03476435089111328, 0.03465465545654297, 0.034625537872314455, 0.03545907211303711, 0.03494297790527344, 0.03484223937988281, 0.034662784576416014, 0.034832096099853514, 0.0345582389831543, 0.03444636917114258, 0.03465631866455078, 0.03441964721679688, 0.03531695938110352, 0.03508028793334961, 0.03455862426757812, 0.03448012924194336, 0.03460655975341797, 0.03448460769653321, 0.034461055755615234, 0.03449046325683594, 0.03466310501098633, 0.03474806213378906, 0.03525888061523438, 0.034546688079833986, 0.034826080322265626, 0.03578262329101563, 0.034850528717041016, 
0.03458076858520508, 0.03471750259399414, 0.03448809432983398, 0.03444367980957031, 0.034786624908447264, 0.034789215087890624, 0.0344359359741211, 0.03457215881347656, 0.03447001647949219, 0.03439411163330078, 0.034541568756103515, 0.034391616821289064, 0.03449081420898437, 0.034762176513671875, 0.03444601440429688, 0.0349977912902832, 0.03463407897949219, 0.034356224060058595, 0.034716670989990234, 0.0346822395324707, 0.03447475051879883, 0.034403839111328126, 0.03443750381469726, 0.0344002571105957, 0.03496345520019531, 0.03457747268676758, 0.03496441650390625, 0.03480937576293945, 0.03452156829833984, 0.03451023864746094, 0.034599040985107424, 0.034623966217041016, 0.03424051284790039, 0.034247936248779295, 0.03422079849243164, 0.034287841796875, 0.03438143920898438, 0.03428793716430664, 0.034151870727539065, 0.03435935974121094, 0.03416099166870117, 0.034444766998291014, 0.03466499328613281, 0.034301952362060545, 0.0344156494140625, 0.03445654296875, 0.034506752014160154, 0.03444102478027344, 0.03470969772338867, 0.034944320678710936, 0.034519744873046876, 0.03469529724121094, 0.03473404693603516, 0.035077537536621094, 0.03477964782714844, 0.034688705444335936, 0.034608673095703125, 0.03459958267211914, 0.03467891311645508, 0.03514575958251953, 0.03449568176269531, 0.035192832946777344, 0.03435520172119141, 0.034557823181152345, 0.034687103271484374, 0.0344002571105957, 0.03460611343383789, 0.03428780746459961, 0.034460254669189457, 0.03427257537841797, 0.03466944122314453, 0.034457599639892575, 0.03751283264160156, 0.03490659332275391, 0.034764705657958986, 0.034713600158691404, 0.034551807403564457, 0.03439616012573242, 0.036208641052246096, 0.03423878479003906, 0.03437740707397461, 0.03440380859375, 0.0342525749206543, 0.0346929931640625, 0.03444755172729492, 0.03431804656982422, 0.03440329742431641, 0.034426433563232425, 0.03431878280639648, 0.03443619155883789, 0.03432540893554688, 0.03433990478515625, 0.034433982849121095, 0.03424480056762695, 0.03423385620117188, 0.03419366455078125, 0.03425209426879883, 0.034210559844970706, 0.0341374397277832, 0.034421409606933594, 0.03417878341674805, 0.0362125129699707, 0.03447273635864258, 0.03439791870117188, 0.035536895751953124, 0.034923744201660153, 0.03430889511108398, 0.03609395217895508, 0.043020286560058595, 0.03434931182861328, 0.03484441757202148, 0.03442448043823242, 0.034253150939941406, 0.034328575134277346, 0.03432243347167969, 0.03420159912109375, 0.03436908721923828, 0.03415494537353515, 0.034549758911132815, 0.03432243347167969, 0.03463539123535156, 0.03424028778076172, 0.03425491333007812, 0.03411203384399414, 0.034719646453857424, 0.03440499114990234, 0.03426643371582031, 0.03446227264404297, 0.03478131103515625, 0.03422822570800781, 0.034133056640625, 0.034209823608398436, 0.03419638442993164, 0.03408012771606445, 0.034073055267333986, 0.03407676696777344, 0.0343551025390625, 0.03417103958129883, 0.03403366470336914, 0.03406441497802734, 0.03440547180175781, 0.03410326385498047, 0.03402617645263672, 0.03402896118164062, 0.034181793212890624, 0.03415456008911133, 0.03431792068481445, 0.03438966369628906, 0.03446166229248047, 0.034587520599365235, 0.03441360092163086, 0.034447681427001955, 0.034457889556884766, 0.03422246551513672, 0.03458028793334961, 0.03419152069091797, 0.034170879364013675, 0.03425811386108398, 0.03499708938598633, 0.03426713562011719, 0.03414147186279297, 0.034242977142333986, 0.0343223991394043, 0.034326881408691404, 0.03443711853027344, 0.03788800048828125, 0.03481190490722656, 0.0344202880859375, 
0.034781536102294924, 0.034410591125488284, 0.034408447265625, 0.03447916793823242, 0.03451180648803711, 0.034301952362060545, 0.03437977600097656, 0.03468288040161133, 0.03441664123535156, 0.0343818244934082, 0.03439571380615234, 0.03461536026000977, 0.03470761489868164, 0.034420352935791015, 0.0345483512878418, 0.03478729629516602, 0.03468902587890625, 0.034317409515380856, 0.03439228820800781, 0.03505075073242187, 0.034417022705078126, 0.03439875030517578, 0.03446361541748047, 0.03451289749145508, 0.034632896423339846, 0.03445024108886719, 0.03452297592163086, 0.03438608169555664, 0.03448627090454102, 0.034506656646728515, 0.0352072639465332, 0.03496345520019531, 0.03471769714355469, 0.03471974563598633, 0.034408447265625, 0.03424870300292969, 0.034529121398925784, 0.034469375610351564, 0.03443164825439453, 0.03421567916870117, 0.03457664108276367, 0.034401695251464845, 0.03447663879394531, 0.034252799987792966, 0.034506752014160154, 0.03480387115478516, 0.03461103820800781, 0.034767135620117184, 0.03441635131835938, 0.03431219100952149, 0.034455520629882816, 0.034412574768066403, 0.034285568237304685, 0.03437158584594727, 0.034320606231689456, 0.03450143814086914, 0.03493891143798828, 0.034314655303955076, 0.0349349136352539, 0.034638240814208986, 0.03455337524414062, 0.0345195198059082, 0.03441379165649414, 0.03433552169799805, 0.03430179214477539, 0.034272830963134764, 0.03430051040649414, 0.03444736099243164, 0.03594035339355469, 0.03446076965332031, 0.03446876907348633, 0.03425689697265625, 0.03440435028076172, 0.03419302368164062, 0.034619808197021484, 0.03418489456176758, 0.03414448165893555, 0.03432223892211914, 0.034302207946777345, 0.034260990142822266, 0.034238208770751954, 0.03435737609863281, 0.034998783111572264, 0.034236415863037106, 0.0342891845703125, 0.03425532913208008, 0.034282943725585935, 0.03457891082763672, 0.0349655990600586, 0.03428134536743164, 0.03416896057128906, 0.03425667190551758, 0.034404510498046874, 0.034504768371582034, 0.03410739135742188, 0.0345984001159668, 0.034251262664794925, 0.034361343383789066, 0.034362880706787106, 0.03427993774414063, 0.03414790344238281, 0.034368961334228516, 0.034232734680175785, 0.03440291213989258, 0.03467193603515625, 0.0346385269165039, 0.03458841705322266, 0.034495742797851565, 0.03421392059326172, 0.03424150466918945, 0.03462348937988281, 0.034321662902832034, 0.03430009460449219, 0.03429974365234375, 0.034220382690429686, 0.03440268707275391, 0.0344200325012207, 0.034312030792236325, 0.034374622344970705, 0.03425369644165039, 0.03473100662231445, 0.034293758392333985, 0.03446783828735352, 0.03428966522216797, 0.03439155197143555, 0.034406463623046876, 0.03426553726196289, 0.03437158584594727, 0.034406078338623046, 0.034231937408447266, 0.03427203369140625, 0.03411753463745117, 0.03440748977661133, 0.03433567810058594, 0.03439411163330078, 0.034203647613525394, 0.034345184326171875, 0.03420499038696289, 0.03429833602905273, 0.03448128128051758, 0.034315135955810545, 0.03429692840576172, 0.0342762565612793, 0.03422617721557617, 0.03444326400756836, 0.03500041580200195, 0.034157249450683595, 0.03406383895874023, 0.03417756652832031, 0.03409622573852539, 0.03416147232055664, 0.03481939315795898, 0.035828510284423826, 0.03484463882446289, 0.034447391510009764, 0.034528480529785154, 0.03448451232910156, 0.034402400970458984, 0.03538985443115234, 0.03437884902954102, 0.03431926345825195, 0.03433881759643555, 0.03420979309082031, 0.03423231887817383, 0.03427532958984375, 0.034227519989013674, 0.0343590087890625, 
0.034292224884033204, 0.035910110473632816, 0.03443471908569336, 0.03425491333007812, 0.03429404830932617, 0.03416390228271484, 0.03434783935546875, 0.03447951889038086, 0.034382625579833986, 0.034144065856933595, 0.0344535026550293, 0.03441388702392578, 0.03452755355834961, 0.03501612854003906, 0.03643804931640625, 0.03487958526611328, 0.035174560546875, 0.035002273559570314, 0.035865249633789065, 0.03452716827392578, 0.034883743286132814, 0.03462348937988281, 0.03462348937988281, 0.034514942169189454, 0.034802879333496094, 0.03456288146972656, 0.03463711929321289, 0.03445980834960938, 0.03472851181030273, 0.034575904846191406, 0.0348922233581543, 0.034968639373779295, 0.03446851348876953, 0.03491459274291992, 0.03455721664428711, 0.03438665771484375, 0.03465830230712891, 0.03451084899902344, 0.03438576126098633, 0.03487311935424805, 0.03583628845214844, 0.035023006439208984, 0.03426595306396484, 0.034404193878173825, 0.03429935836791992, 0.034190017700195315, 0.034447200775146486, 0.034232479095458984, 0.03435308837890625, 0.03480793762207031, 0.034602943420410155, 0.03504742431640625, 0.034360607147216796, 0.03412662506103516, 0.03422307205200195, 0.03416976165771484, 0.034267200469970706, 0.03431423950195313, 0.034310142517089845, 0.03503513717651367, 0.0346313591003418, 0.03468460845947265, 0.03470751953125, 0.03487136077880859, 0.03458940887451172, 0.034767967224121094, 0.03459897613525391, 0.035199615478515626, 0.03470099258422851, 0.03465043258666992, 0.03453721618652344, 0.03483078384399414, 0.03474208068847656, 0.0347867202758789, 0.03482684707641601, 0.034912254333496096, 0.034977790832519534, 0.034645824432373046, 0.037808319091796876, 0.03477199935913086, 0.03651414489746094, 0.03470710372924805, 0.034571041107177736, 0.034551998138427735, 0.0347770881652832, 0.034748031616210935, 0.03457251358032227, 0.03472563171386719, 0.03487353515625, 0.03482998275756836, 0.03480838394165039, 0.03481804656982422, 0.03489177703857422, 0.03497942352294922, 0.03508060836791992, 0.0348216323852539, 0.035463680267333986, 0.03506995010375977, 0.034799617767333986, 0.0347852783203125, 0.03466239929199219, 0.03592559814453125, 0.03458908843994141, 0.03435347366333008, 0.03555750274658203, 0.03505180740356445, 0.035111488342285155, 0.03481315231323242, 0.03453363037109375, 0.034762401580810544, 0.03514662551879883, 0.034729984283447264, 0.03497788619995117, 0.034784736633300783, 0.03513740921020508, 0.034855487823486325, 0.03469100952148438, 0.03487088012695312, 0.03493024063110352, 0.03489203262329101, 0.03468912124633789, 0.03496607971191406, 0.034955009460449216, 0.03499033737182617, 0.034854911804199216, 0.03501379013061524, 0.03470627212524414, 0.03500566482543945, 0.03473030471801758, 0.03455801773071289, 0.03452944183349609, 0.03468233489990234, 0.03474716949462891, 0.034598560333251954, 0.03462793731689453, 0.03481187057495117, 0.03504684829711914, 0.036014686584472655, 0.03475276947021484, 0.03485465621948242, 0.034664447784423826, 0.03454771041870117, 0.03473612976074219, 0.03489382553100586, 0.034492416381835936, 0.03459065628051758, 0.0347026252746582, 0.03464476776123047, 0.03461734390258789, 0.034782718658447266, 0.034619903564453124, 0.034768703460693356, 0.03464211273193359, 0.034829631805419925, 0.03471417617797851, 0.034584705352783206, 0.03456169509887695, 0.035356704711914065, 0.03501907348632813, 0.034654144287109376, 0.03477123260498047, 0.034514015197753906, 0.034638145446777346, 0.03461772918701172, 0.03495135879516602, 0.03491980743408203, 
0.03454611206054688]",tokens/s,28.884802130171884,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,4354.584576,6115.16416,0.0,5729.419264,5727.871488,s,1,13.070501953125,13.070501953125,0.0,13.070501953125,13.070501953125,13.070501953125,13.070501953125,[13.070501953125],,kWh,0.0001114377793666667,1.2284647596642621e-05,3.2653637234000485e-05,0.0001563760641973098,,MB,1659.953152,6295.519232,0.0,5880.414208,5852.00896,s,10,3.967869079589844,0.3967869079589844,0.0024580178634641165,0.396612548828125,0.3993159240722656,0.3999868041992187,0.40052350830078126,"[0.3967500915527344, 0.39206121826171875, 0.3933772277832031, 0.39620687866210935, 0.39916683959960936, 0.39640408325195314, 0.3986339111328125, 0.3981361389160156, 0.4006576843261719, 0.3964750061035156]",tokens/s,645.1825775120144,kWh,1.1860505335000426e-05,1.3076495363754693e-06,7.877384079680028e-06,2.1045538951055923e-05,tokens/kWh,12164098.08251338,MB,1669.050368,6310.199296,0.0,5892.99712,5852.01152,s,10,30.396866455078122,3.039686645507813,0.011030230702675812,3.039021240234375,3.0532013916015623,3.053948352050781,3.054545920410156,"[3.038012451171875, 3.033771728515625, 3.0546953125, 3.049496826171875, 3.0360859375, 3.053035400390625, 3.035428955078125, 3.041975341796875, 3.01433447265625, 3.040030029296875]",tokens/s,20.72582056874325,kWh,8.930063229499908e-05,9.850308183153724e-06,5.4957805077320086e-05,0.00015410874555547285,tokens/kWh,408802.2374909448,,s,630,30.393536464691177,0.04824370867411295,0.0006655925785173463,0.04813851165771485,0.048814261627197264,0.04918793601989746,0.051127403259277344,"[0.048492446899414066, 0.04805231857299805, 0.048187168121337894, 0.047874271392822264, 0.048020481109619144, 0.04814742279052735, 0.04790480041503906, 0.04842412948608398, 0.04793632125854492, 0.0482426872253418, 0.0489881591796875, 0.047935489654541016, 0.04733747100830078, 0.04750723266601563, 0.04776675033569336, 0.048282142639160155, 0.04892041778564453, 0.048433792114257815, 0.048500030517578126, 0.04868166351318359, 0.04845772933959961, 0.0481607666015625, 0.048525310516357424, 0.048180286407470706, 0.048048385620117186, 0.04792176055908203, 0.0479167366027832, 0.0478616943359375, 0.04818991851806641, 0.04795596694946289, 0.04806655883789063, 0.052337886810302735, 0.04813929748535156, 0.04796326446533203, 0.047465087890625, 0.047892478942871096, 0.04840000152587891, 0.04848268890380859, 0.04804403305053711, 0.04794678497314453, 0.047782047271728516, 0.04828220748901367, 0.04814041519165039, 0.048167137145996096, 0.04808691024780273, 0.04793958282470703, 0.04783491134643555, 0.04802076721191406, 0.04788524627685547, 0.04800291061401367, 0.04872003173828125, 0.049065792083740234, 0.04921567916870117, 0.04863983917236328, 0.048144542694091796, 0.048080448150634766, 0.048347583770751955, 0.04841072082519531, 0.048212928771972655, 0.0478721923828125, 
0.04769257736206055, 0.04763238525390625, 0.047941631317138675, 0.04901244735717773, 0.04835971069335938, 0.047982593536376954, 0.048207679748535154, 0.048352928161621095, 0.048289726257324216, 0.04810569763183594, 0.048228736877441405, 0.04762214279174805, 0.047760929107666016, 0.04820598220825195, 0.04813443374633789, 0.04800889587402344, 0.04776787185668945, 0.04750543975830078, 0.04765817642211914, 0.04768764877319336, 0.047629150390625, 0.04782080078125, 0.04781260681152344, 0.04753519821166992, 0.047465633392333985, 0.04860287857055664, 0.04820991897583008, 0.04772784042358398, 0.047618526458740235, 0.0478682861328125, 0.047895553588867185, 0.04769887924194336, 0.04774883270263672, 0.04766134262084961, 0.04763852691650391, 0.0482979850769043, 0.04770611190795898, 0.04797849655151367, 0.047664958953857424, 0.04822035217285156, 0.047940704345703126, 0.048013824462890625, 0.04795619201660156, 0.048885536193847653, 0.04953539276123047, 0.04803939056396484, 0.04803984069824219, 0.048053985595703126, 0.04829715347290039, 0.04768707275390625, 0.048083263397216795, 0.04762432098388672, 0.047804351806640624, 0.04831635284423828, 0.04785174560546875, 0.04792092895507812, 0.04852230453491211, 0.04806908798217773, 0.048023422241210936, 0.047698814392089846, 0.04836732864379883, 0.04825811386108399, 0.048632766723632814, 0.048807937622070315, 0.05425667190551758, 0.049062015533447266, 0.04960255813598633, 0.04941823959350586, 0.04883987045288086, 0.04871456146240234, 0.04924940872192383, 0.048425857543945315, 0.04869276809692383, 0.048930816650390625, 0.04945353698730469, 0.051732479095458986, 0.0485294075012207, 0.048488288879394534, 0.04795808029174805, 0.048158817291259766, 0.04857388687133789, 0.04893548965454102, 0.04812799835205078, 0.04861731338500976, 0.04797251129150391, 0.0478939208984375, 0.04804191970825195, 0.048005599975585934, 0.0481033935546875, 0.04825094223022461, 0.04809641647338867, 0.04806300735473633, 0.04837823867797852, 0.04850697708129883, 0.048545726776123045, 0.04881350326538086, 0.04846041488647461, 0.04875619125366211, 0.04815264129638672, 0.04795644760131836, 0.04737638473510742, 0.04870963287353516, 0.04790476989746094, 0.04768767929077149, 0.04769177627563476, 0.047874046325683595, 0.047908863067626956, 0.048330753326416016, 0.04841676712036133, 0.049419742584228515, 0.048538143157958985, 0.04884390258789063, 0.04849078369140625, 0.048648799896240234, 0.04855836868286133, 0.0479716796875, 0.04793203353881836, 0.04786764907836914, 0.04797158432006836, 0.04848009490966797, 0.04851948928833008, 0.04880192184448242, 0.04887801742553711, 0.04830384063720703, 0.049473407745361325, 0.04849225616455078, 0.04822294235229492, 0.04818710327148437, 0.048400672912597656, 0.04909734344482422, 0.04837078475952149, 0.048503711700439454, 0.04898393630981445, 0.0488732795715332, 0.048445758819580076, 0.048360958099365234, 0.04809983825683594, 0.048667713165283205, 0.04816787338256836, 0.04823859024047852, 0.04806860733032227, 0.048263168334960936, 0.04820377731323242, 0.048436607360839844, 0.04918966293334961, 0.04845654296875, 0.04826393508911133, 0.04860908889770508, 0.048519168853759766, 0.04862953567504883, 0.048821086883544924, 0.048467391967773436, 0.04979545593261719, 0.04815043258666992, 0.047794143676757814, 0.04825920104980469, 0.048123905181884766, 0.04838399887084961, 0.048312320709228515, 0.04835311889648437, 0.048670913696289064, 0.04888982391357422, 0.0484136962890625, 0.048661502838134765, 0.048107521057128906, 0.04852044677734375, 0.04848054504394531, 
0.0482902717590332, 0.04854374313354492, 0.04817510223388672, 0.049099998474121095, 0.04826396942138672, 0.048334625244140624, 0.048735488891601564, 0.04850080108642578, 0.04849126434326172, 0.04817964935302734, 0.04842876815795898, 0.04868473434448242, 0.04812569427490234, 0.047952449798583985, 0.048024993896484375, 0.04780831909179688, 0.047690528869628906, 0.04767331314086914, 0.04803353500366211, 0.048159008026123044, 0.04793958282470703, 0.0482979850769043, 0.04847158432006836, 0.048435680389404295, 0.048185726165771485, 0.04830783843994141, 0.04785939025878906, 0.047548511505126956, 0.04791567993164063, 0.04837340927124024, 0.047435455322265625, 0.04766582489013672, 0.0479027214050293, 0.048025440216064454, 0.048430782318115234, 0.04758371353149414, 0.047406463623046874, 0.04805475234985351, 0.048027809143066404, 0.047585407257080076, 0.047715423583984375, 0.047788833618164064, 0.047482368469238284, 0.04834764862060547, 0.04824582290649414, 0.04831123352050781, 0.04887295913696289, 0.04836937713623047, 0.048078655242919925, 0.04784835052490234, 0.048654239654541014, 0.047601825714111326, 0.04865568161010742, 0.048275135040283204, 0.048380096435546874, 0.04813024139404297, 0.0477599983215332, 0.047542526245117185, 0.047742591857910154, 0.051138687133789065, 0.04764057540893555, 0.04731903839111328, 0.04734566497802734, 0.04745935821533203, 0.04753302383422851, 0.04940390396118164, 0.04764076614379883, 0.04751094436645508, 0.0478087043762207, 0.04765840148925781, 0.04808294296264649, 0.04836240005493164, 0.047931297302246094, 0.04773827362060547, 0.047670047760009764, 0.047928192138671874, 0.048038143157958985, 0.04822281646728516, 0.0482591667175293, 0.049583614349365236, 0.053625343322753906, 0.04894294357299805, 0.04804214477539062, 0.047855422973632815, 0.04945734405517578, 0.04863910293579102, 0.04852988815307617, 0.04845833587646484, 0.049225761413574216, 0.04900902557373047, 0.04952540969848633, 0.04814627075195312, 0.048130046844482424, 0.04799078369140625, 0.04929740905761719, 0.04802764892578125, 0.047987712860107425, 0.04804095840454101, 0.048261119842529294, 0.047857406616210935, 0.04797289657592774, 0.048084705352783204, 0.04872806549072266, 0.048670143127441404, 0.048366241455078125, 0.048533409118652344, 0.048888065338134765, 0.04923980712890625, 0.04841577529907227, 0.048503711700439454, 0.049804481506347656, 0.048572479248046876, 0.048444351196289065, 0.0484637451171875, 0.04901433563232422, 0.048650463104248046, 0.04868527984619141, 0.048686782836914064, 0.04957747268676758, 0.04949395370483398, 0.04811801528930664, 0.04804262542724609, 0.04812169647216797, 0.04807846450805664, 0.04774147033691406, 0.04787798309326172, 0.048468128204345706, 0.04888300704956055, 0.04819833755493164, 0.0483430061340332, 0.04851279830932617, 0.04848457717895508, 0.04842704010009766, 0.04907155227661133, 0.04864659118652344, 0.04825632095336914, 0.04830083084106445, 0.048585983276367185, 0.04844319915771484, 0.049730369567871094, 0.04792540740966797, 0.047742977142333984, 0.04825516891479492, 0.04757385635375976, 0.04800140762329102, 0.047618751525878904, 0.04795935821533203, 0.048080577850341796, 0.04808710479736328, 0.048138496398925784, 0.04866486358642578, 0.048427135467529296, 0.04806883239746094, 0.04783257675170898, 0.047576641082763674, 0.04739775848388672, 0.047474750518798826, 0.04766646575927734, 0.04750185775756836, 0.048416961669921876, 0.048123775482177736, 0.04811945724487305, 0.047634559631347655, 0.048068801879882814, 0.048153823852539065, 0.04846278381347656, 
0.04864543914794922, 0.048446144104003906, 0.04837376022338867, 0.04807884979248047, 0.04826931381225586, 0.04768767929077149, 0.0475994873046875, 0.04783670425415039, 0.04814499282836914, 0.04775737762451172, 0.047892608642578126, 0.04778307342529297, 0.047943359375, 0.04823958587646485, 0.048508384704589846, 0.048529953002929685, 0.0485294075012207, 0.0486932487487793, 0.049209342956542966, 0.048535552978515625, 0.049050846099853516, 0.04918582534790039, 0.04860464096069336, 0.04792160034179688, 0.0478001594543457, 0.047906814575195314, 0.04784656143188477, 0.04788860702514648, 0.04779056167602539, 0.04832457733154297, 0.04812790298461914, 0.048240608215332034, 0.04841072082519531, 0.04831999969482422, 0.04810006332397461, 0.04826713562011719, 0.04801753616333008, 0.04792729568481445, 0.04826726531982422, 0.04822748947143555, 0.0483532485961914, 0.04836806488037109, 0.04820012664794922, 0.049141441345214844, 0.04840460968017578, 0.047959903717041015, 0.04847443389892578, 0.048341022491455075, 0.04866326522827148, 0.04780160140991211, 0.0480296630859375, 0.048426048278808594, 0.048454463958740236, 0.04813043212890625, 0.048656543731689456, 0.04852159881591797, 0.04853891372680664, 0.048280288696289066, 0.048465633392333986, 0.048263168334960936, 0.04829622268676758, 0.04830368041992188, 0.04854729461669922, 0.04839865493774414, 0.04804636764526367, 0.04813852691650391, 0.04791417694091797, 0.04784777450561523, 0.04950646209716797, 0.047894943237304685, 0.0478304328918457, 0.04787638473510742, 0.047855072021484375, 0.047698654174804685, 0.04781478500366211, 0.053147647857666014, 0.04873779296875, 0.04827983856201172, 0.04827481460571289, 0.04818415832519531, 0.048353279113769534, 0.048016895294189454, 0.04788275146484375, 0.048117950439453126, 0.048209632873535156, 0.047911006927490236, 0.047860031127929685, 0.04785097503662109, 0.047640544891357425, 0.047898433685302735, 0.04775980758666992, 0.04803481674194336, 0.048804702758789065, 0.047640735626220704, 0.04804608154296875, 0.04865020751953125, 0.04975004959106445, 0.05178777694702148, 0.04810566329956055, 0.04803529739379883, 0.04823014450073242, 0.047637054443359375, 0.047755294799804685, 0.04737033462524414, 0.04751305770874024, 0.047645118713378905, 0.04765695953369141, 0.04747468948364258, 0.047884288787841796, 0.048371585845947265, 0.04891046524047851, 0.048742401123046876, 0.04810956954956055, 0.048009056091308594, 0.04810736083984375, 0.047605728149414064, 0.04784777450561523, 0.04804198455810547, 0.04797756958007812, 0.04784016036987305, 0.04761804962158203, 0.04814438247680664, 0.04746828842163086, 0.047325439453125, 0.047441505432128904, 0.04762636947631836, 0.04792758560180664, 0.048064510345458986, 0.04774911880493164, 0.04758870315551758, 0.0475120964050293, 0.04766524887084961, 0.04782892990112304, 0.047734878540039063, 0.048037696838378906, 0.04756700897216797, 0.04746819305419922, 0.04765497589111328, 0.04747296142578125, 0.04787209701538086, 0.047306655883789066, 0.04795388793945313, 0.047253536224365233, 0.04758723068237305, 0.04740310287475586, 0.04748476791381836, 0.048035999298095704, 0.04798054504394531, 0.04767318344116211, 0.04734080123901367, 0.047635040283203124, 0.047737152099609374, 0.04793139266967773, 0.04817510223388672, 0.04826931381225586, 0.04844271850585938, 0.04898204803466797, 0.04771903991699219, 0.04802150344848633, 0.04809846496582031, 0.04790563201904297, 0.04763852691650391, 0.04749337768554687, 0.047674976348876956, 0.04768988800048828, 0.047925247192382815, 0.047912990570068356, 
0.048162368774414065, 0.04828566360473633, 0.047965953826904294, 0.04801196670532227, 0.047839073181152346, 0.04813840103149414, 0.048269214630126955, 0.048676353454589844, 0.04800921630859375, 0.04832672119140625, 0.048406944274902344, 0.04807382583618164, 0.04772137451171875, 0.04815840148925781, 0.04805459213256836, 0.04830822372436523, 0.04882406234741211, 0.04800742340087891, 0.05109977722167969, 0.04792102432250977, 0.04788009643554687, 0.04800726318359375, 0.04861084747314453, 0.047759265899658204, 0.04789427185058594, 0.04786259078979492, 0.0477573127746582, 0.047966209411621094, 0.047783935546875, 0.048002880096435545, 0.04837526321411133, 0.04809417724609375, 0.04803763198852539, 0.0486130256652832, 0.04821648025512695, 0.04828153610229492, 0.04830822372436523, 0.04823859024047852, 0.04847328186035156, 0.04826559829711914, 0.047960479736328124, 0.04852124786376953, 0.048743553161621093, 0.04858143997192383, 0.04805023956298828, 0.04885913467407227, 0.04798585510253906, 0.048315200805664066, 0.04801059341430664, 0.047994815826416015, 0.04787273788452148, 0.04826131057739258, 0.048123905181884766, 0.04808591842651367, 0.047920032501220705, 0.04830646514892578, 0.04817068862915039, 0.04827958297729492, 0.048345088958740234, 0.04788838577270508, 0.04776345443725586, 0.047699520111083984, 0.04821855926513672, 0.048297695159912106, 0.04861711883544922, 0.048657024383544925, 0.04880998229980469, 0.04858607864379883, 0.048182174682617186, 0.0485742073059082]",tokens/s,20.728091340469213,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,4351.91808,6905.790464,0.0,6503.268352,6315.533312,s,1,14.1625263671875,14.1625263671875,0.0,14.1625263671875,14.1625263671875,14.1625263671875,14.1625263671875,[14.1625263671875],,kWh,0.000148619097579162,1.638665445124423e-05,6.05403262099978e-05,0.00022554607824040404,,MB,1651.720192,6954.02496,0.0,6538.919936,5800.497664,s,10,31.78375610351562,3.1783756103515626,0.0032166710523816424,3.1793826904296876,3.1814366943359373,3.1817198120117185,3.181946306152344,"[3.170912109375, 3.175130126953125, 3.178578125, 3.1770830078125, 3.180187255859375, 3.17744287109375, 3.18053369140625, 3.181373779296875, 3.1820029296875, 3.18051220703125]",tokens/s,80.54428783251443,kWh,9.266692922666703e-05,1.0221092941694315e-05,6.154252145619959e-05,0.00016443054362456094,tokens/kWh,1556888.3636638501,MB,1651.720192,6968.705024,0.0,6551.502848,5800.500224,s,10,20.644426025390622,2.0644426025390628,0.014009031966466632,2.0685155029296878,2.077953857421875,2.0824088134765626,2.0859727783203126,"[2.08686376953125, 2.0769638671875, 2.06803515625, 2.070611572265625, 2.06546240234375, 2.069162109375, 2.068995849609375, 2.053822509765625, 2.049691162109375, 2.034817626953125]",tokens/s,30.516711834233682,kWh,6.0173826769582736e-05,6.635618011694815e-06,3.981069851520073e-05,0.00010662014329647831,tokens/kWh,590882.7173943679,,s,630,20.640998304367056,0.03276348937201121,0.0005932216704791336,0.032730607986450194,0.033096273040771486,0.0332940185546875,0.034235863990783696,"[0.033329151153564454, 0.032884735107421875, 0.032887840270996095, 0.0329343376159668, 0.03331536102294922, 0.0336814079284668, 0.04213350296020508, 0.03321212768554688, 0.03244675064086914, 0.03254272079467774, 0.032605823516845704, 0.03246716690063477, 0.03254288101196289, 0.03267283248901367, 0.032755775451660155, 0.0328139533996582, 0.03256921768188477, 0.032735359191894534, 0.03287795257568359, 0.0329119987487793, 0.03281884765625, 0.03255945587158203, 
0.03289440155029297, 0.032667713165283205, 0.03285452651977539, 0.032677406311035155, 0.032803295135498046, 0.0328908805847168, 0.03279788970947266, 0.032776447296142576, 0.034412158966064456, 0.038290367126464844, 0.03298630523681641, 0.03314361572265625, 0.03297280120849609, 0.03311552047729492, 0.03307379150390625, 0.03278438568115234, 0.03264921569824219, 0.03263692855834961, 0.03272016143798828, 0.03278307342529297, 0.03299103927612305, 0.03263315200805664, 0.03278995132446289, 0.03256159973144531, 0.03269984054565429, 0.03284025573730469, 0.0332729606628418, 0.032889728546142576, 0.0326956787109375, 0.03279731369018555, 0.03303833770751953, 0.033275169372558595, 0.033044193267822264, 0.032979583740234374, 0.032771617889404296, 0.03277686309814453, 0.03280096054077149, 0.032583168029785156, 0.03307283020019531, 0.0329317741394043, 0.033452510833740234, 0.03344918441772461, 0.03337612915039063, 0.03282620620727539, 0.033046592712402345, 0.0331776008605957, 0.03284576034545898, 0.03283769607543945, 0.032941951751708984, 0.03298726272583008, 0.0329969596862793, 0.03283804702758789, 0.032770305633544924, 0.032769790649414064, 0.032882686614990234, 0.033073150634765625, 0.032947551727294924, 0.033151649475097654, 0.033105857849121095, 0.03297695922851562, 0.03282636642456055, 0.03292787170410156, 0.03267264175415039, 0.03292684936523437, 0.0328590087890625, 0.03293145751953125, 0.032973182678222655, 0.03270169448852539, 0.03309235382080078, 0.03300352096557617, 0.0327823371887207, 0.032851615905761716, 0.03292399978637695, 0.03287606430053711, 0.03263321685791016, 0.03279830551147461, 0.032760063171386716, 0.032665184020996094, 0.03261711883544922, 0.032780288696289066, 0.032937694549560546, 0.03285615921020508, 0.03301299285888672, 0.03304044723510742, 0.032922496795654295, 0.03306496047973633, 0.03266969680786133, 0.03352755355834961, 0.03292185592651367, 0.033160320281982424, 0.03277299118041992, 0.032833534240722655, 0.03353190231323242, 0.03289654541015625, 0.033117919921875, 0.033022720336914065, 0.032671745300292966, 0.03273318481445313, 0.03414425659179687, 0.03281011199951172, 0.03311500930786133, 0.03301990509033203, 0.033005119323730465, 0.03319852828979492, 0.033329151153564454, 0.032847198486328125, 0.032647838592529295, 0.032911231994628906, 0.032778369903564454, 0.032745471954345705, 0.033354942321777346, 0.03271113586425781, 0.03304278564453125, 0.03298303985595703, 0.03270041656494141, 0.03263283157348633, 0.03290719985961914, 0.032745216369628904, 0.032920127868652345, 0.03406377410888672, 0.03369200134277344, 0.032718849182128903, 0.032927745819091796, 0.03390054321289063, 0.032686080932617184, 0.032671615600585936, 0.032451873779296876, 0.03267465591430664, 0.03248332977294922, 0.03263046264648437, 0.03311443328857422, 0.03297004699707031, 0.03303664016723633, 0.03290556716918945, 0.03290841674804688, 0.03253337478637695, 0.032674945831298825, 0.032734081268310546, 0.03264483261108399, 0.03268227386474609, 0.03283718490600586, 0.0325103988647461, 0.03262591934204102, 0.03277695846557617, 0.03296460723876953, 0.03263078308105469, 0.03267513656616211, 0.03275846481323242, 0.03255068969726563, 0.032559326171875, 0.03247705459594727, 0.03287228775024414, 0.032766239166259765, 0.03280281448364258, 0.032777759552001955, 0.03270915222167969, 0.03273926544189453, 0.03259187316894531, 0.03266096115112305, 0.032758304595947266, 0.03281913757324219, 0.03292166519165039, 0.03287449645996094, 0.03269225692749023, 0.03269590377807617, 0.03260563278198242, 0.03268703842163086, 
0.033122303009033204, 0.032845088958740234, 0.03273596954345703, 0.03260211181640625, 0.03261849594116211, 0.03264281463623047, 0.03285334396362305, 0.032879520416259765, 0.03267168045043945, 0.03287366485595703, 0.03276809692382812, 0.03298384094238281, 0.03317689514160156, 0.03309958267211914, 0.03302809524536133, 0.03281804656982422, 0.032835582733154296, 0.03276796722412109, 0.032796897888183595, 0.0327841911315918, 0.03283494567871094, 0.03260383987426758, 0.03256198501586914, 0.03277568054199219, 0.03265155029296875, 0.032823009490966795, 0.03301030349731445, 0.03262464141845703, 0.03256115341186523, 0.03291910552978516, 0.03266604614257813, 0.032906848907470705, 0.03287696075439453, 0.03263897705078125, 0.03260345458984375, 0.03265811157226563, 0.032774398803710934, 0.03279420852661133, 0.032823455810546874, 0.03275964736938477, 0.033054878234863284, 0.033056766510009765, 0.03311001586914063, 0.03278444671630859, 0.033038272857666015, 0.03271059036254883, 0.03274703979492188, 0.03305731201171875, 0.03287843322753906, 0.03288694381713867, 0.033183231353759765, 0.033507614135742186, 0.03304489517211914, 0.032948032379150394, 0.03303628921508789, 0.032710174560546874, 0.03299164962768555, 0.033129825592041015, 0.0330511360168457, 0.03309590530395508, 0.03281100845336914, 0.03279872131347656, 0.032847713470458985, 0.03427328109741211, 0.03343113708496094, 0.032889247894287106, 0.03324694442749023, 0.03308758544921875, 0.03301395034790039, 0.032876544952392575, 0.03280281448364258, 0.03266336059570313, 0.03256899261474609, 0.03255350494384766, 0.03268163299560547, 0.03267164611816406, 0.03246124649047852, 0.03262790298461914, 0.03254745483398438, 0.0324686393737793, 0.03234611129760742, 0.03263558578491211, 0.03296444702148438, 0.03242598342895508, 0.032763904571533206, 0.03245852661132813, 0.03265311813354492, 0.03276764678955078, 0.03259673690795899, 0.03250995254516602, 0.032564865112304685, 0.03278681564331055, 0.03264102554321289, 0.03282944107055664, 0.03275062561035156, 0.0326577262878418, 0.032813632965087894, 0.03261654281616211, 0.03265068817138672, 0.03269279861450195, 0.0331141128540039, 0.03263804626464844, 0.03287542343139648, 0.03287014389038086, 0.0326822395324707, 0.032672927856445315, 0.03271971130371094, 0.03264838409423828, 0.03270124816894531, 0.0325785903930664, 0.032717792510986325, 0.03274038314819336, 0.03331391906738281, 0.03302588653564453, 0.03289702224731445, 0.0328353271484375, 0.03264128112792969, 0.033029857635498046, 0.032855648040771485, 0.03275430297851562, 0.03275996780395508, 0.03278950500488281, 0.032729663848876954, 0.032823104858398434, 0.03278902435302734, 0.03289206314086914, 0.03322473526000977, 0.0328164176940918, 0.033067745208740236, 0.032710655212402344, 0.03252348709106445, 0.032822048187255856, 0.03276748657226562, 0.03278639984130859, 0.03271120071411133, 0.032903167724609376, 0.0328007698059082, 0.032712703704833986, 0.033175552368164066, 0.032927745819091796, 0.03277619171142578, 0.03294412612915039, 0.03343564987182617, 0.03299033737182617, 0.033091808319091795, 0.03290745544433594, 0.03269836807250977, 0.032827392578125, 0.032836063385009766, 0.0328908805847168, 0.03283148956298828, 0.03279174423217773, 0.03299411010742188, 0.03302310562133789, 0.03284431838989258, 0.032913471221923826, 0.03281948852539063, 0.03275980758666992, 0.032778240203857424, 0.03282124710083008, 0.032804737091064455, 0.0326063346862793, 0.0326943359375, 0.032849857330322266, 0.032942081451416014, 0.03302726364135742, 0.03275859069824219, 0.03271820831298828, 
0.03274611282348633, 0.03269945526123047, 0.03283158493041992, 0.03266547012329102, 0.03255599975585938, 0.03273513412475586, 0.03269830322265625, 0.03274956893920898, 0.03297500610351563, 0.032614273071289064, 0.03257292938232422, 0.032973438262939456, 0.03240262222290039, 0.03244243240356445, 0.03266227340698242, 0.03319807815551758, 0.03307468795776367, 0.0332334098815918, 0.0330931510925293, 0.032755905151367185, 0.03279481506347656, 0.03342598342895508, 0.03265267181396484, 0.0329222412109375, 0.032903167724609376, 0.032911361694335936, 0.03280223846435547, 0.032949920654296874, 0.032913600921630856, 0.032772289276123044, 0.032766494750976566, 0.03239116668701172, 0.03228208160400391, 0.03238479995727539, 0.032385345458984374, 0.03241209411621094, 0.032742462158203124, 0.0327894401550293, 0.032610111236572266, 0.03260435104370117, 0.03245795059204101, 0.03240748977661133, 0.03253971099853516, 0.032486495971679685, 0.03256800079345703, 0.03251795196533203, 0.032680286407470706, 0.0325076789855957, 0.03253664016723633, 0.032395263671875, 0.032626686096191404, 0.03288883209228516, 0.032642177581787106, 0.03242483139038086, 0.032513439178466795, 0.03258224105834961, 0.033175552368164066, 0.032519649505615235, 0.032592063903808595, 0.03268438339233398, 0.0331673583984375, 0.032612350463867186, 0.032673503875732424, 0.032610591888427735, 0.03260211181640625, 0.03263459014892578, 0.03250204849243164, 0.03257958221435547, 0.032817150115966795, 0.032849918365478514, 0.03279417419433594, 0.033202014923095706, 0.0330145263671875, 0.03288572692871094, 0.03286700820922851, 0.032726943969726564, 0.03286454391479492, 0.03722412872314453, 0.03315539169311523, 0.03294003295898437, 0.03277747344970703, 0.03305753707885742, 0.03496550369262695, 0.033734657287597655, 0.033397697448730466, 0.032925216674804685, 0.03253830337524414, 0.03259471893310547, 0.03247923278808594, 0.03254457473754883, 0.032576736450195314, 0.03268502426147461, 0.032677886962890625, 0.03244134521484375, 0.03235644912719727, 0.032457855224609374, 0.03269404983520508, 0.03232153701782227, 0.03227852630615234, 0.03254211044311523, 0.032815040588378905, 0.0323263053894043, 0.032385025024414066, 0.03281100845336914, 0.03306496047973633, 0.033165313720703124, 0.03283280181884766, 0.03269843292236328, 0.032507999420166016, 0.032715328216552736, 0.03261849594116211, 0.03261439895629883, 0.03274956893920898, 0.03273932647705078, 0.032554431915283205, 0.032614974975585936, 0.032705825805664064, 0.0325618896484375, 0.032688129425048826, 0.032468223571777345, 0.032305919647216796, 0.032499713897705076, 0.03260006332397461, 0.032395263671875, 0.032814910888671875, 0.032800670623779296, 0.03274166488647461, 0.0326649284362793, 0.032518817901611326, 0.03241321563720703, 0.032589824676513675, 0.03247766494750977, 0.032487422943115234, 0.032421886444091795, 0.03249484634399414, 0.032303871154785155, 0.032317440032958986, 0.03247513580322266, 0.03235635375976562, 0.0323768310546875, 0.03228377532958984, 0.0323818244934082, 0.032696414947509765, 0.03238092803955078, 0.03337801742553711, 0.03247753524780273, 0.03270163345336914, 0.03305267333984375, 0.03254230499267578, 0.032731552124023434, 0.032451969146728515, 0.03245529556274414, 0.03234406280517578, 0.03241743850708008, 0.032532833099365235, 0.032489471435546875, 0.03235420989990234, 0.03250291061401367, 0.03244950485229492, 0.03237424087524414, 0.032475231170654296, 0.032606655120849606, 0.03256134414672852, 0.03244524765014648, 0.03229388809204101, 0.032522239685058595, 0.03258921432495117, 
0.03343420791625976, 0.03270655822753906, 0.032712703704833986, 0.032312671661376954, 0.0325843505859375, 0.032865985870361325, 0.03242015838623047, 0.03249151992797852, 0.03233145523071289, 0.03235052871704101, 0.032438270568847655, 0.03259187316894531, 0.03228464126586914, 0.03238694381713867, 0.032428192138671874, 0.03248332977294922, 0.0323870735168457, 0.03242185592651367, 0.0329400634765625, 0.03243731307983398, 0.03234912109375, 0.03231948852539063, 0.03593865585327148, 0.03330944061279297, 0.032295841217041016, 0.03286966323852539, 0.032375007629394534, 0.03370774459838867, 0.03208476638793945, 0.03236460876464844, 0.03226208114624023, 0.03271680068969727, 0.032242752075195315, 0.03236662292480469, 0.03202345657348633, 0.03280691146850586, 0.032118785858154295, 0.03220889663696289, 0.03206553649902344, 0.03186627197265625, 0.0319903678894043, 0.031971168518066403, 0.031938720703125, 0.03306684875488281, 0.0323524169921875, 0.032314464569091796, 0.03259423828125, 0.03233033752441406, 0.032368640899658206, 0.032464897155761716, 0.03242598342895508, 0.03226432037353515, 0.03243750381469727, 0.03231721496582031, 0.032232288360595704, 0.032726112365722655, 0.032234176635742184, 0.03219843292236328, 0.0321019515991211, 0.03203513717651367, 0.03204703903198242, 0.03212563323974609, 0.03207120132446289, 0.03202703857421875, 0.03203792190551758, 0.03227676773071289, 0.03208217620849609, 0.032118751525878904, 0.03211056137084961, 0.032078689575195315, 0.03206924819946289, 0.032416961669921876, 0.032316257476806644, 0.032352256774902347, 0.0321638412475586, 0.03217132949829102, 0.03207766342163086, 0.03207798385620117, 0.032035518646240234, 0.03210847854614258, 0.032267585754394534, 0.032164417266845706, 0.0319489917755127, 0.032247264862060546, 0.032055839538574216, 0.03215151977539062, 0.03225398254394531, 0.032290817260742184, 0.03249334335327148, 0.032360191345214846, 0.0324180793762207, 0.0326247673034668, 0.03240719985961914, 0.0322256965637207, 0.03216588973999023, 0.03242393493652344, 0.03256300735473633, 0.03232342529296875, 0.03207379150390625, 0.032126590728759764, 0.032244384765625, 0.03277568054199219, 0.0331412467956543, 0.03256659317016602, 0.032637630462646484, 0.032321025848388675]",tokens/s,30.521779553012678,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise 
RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,4350.517248,6115.16416,0.0,5729.419264,5727.871488,s,1,12.8476171875,12.8476171875,0.0,12.8476171875,12.8476171875,12.8476171875,12.8476171875,[12.8476171875],,kWh,0.00010448241509999246,1.1517917021384387e-05,3.1653358656000274e-05,0.00014765369077737712,,MB,1648.57856,6295.519232,0.0,5880.414208,5852.00896,s,10,3.708763336181641,0.3708763336181641,0.0014360524337705838,0.3712967987060547,0.3720836517333984,0.37222992095947266,0.372346936340332,"[0.36716839599609374, 0.3701123962402344, 0.37097503662109377, 0.3710345458984375, 0.37155905151367186, 0.37173382568359375, 0.37172021484375, 0.37237619018554685, 0.37003253173828127, 0.3720511474609375]",tokens/s,690.2570393277371,kWh,1.0826963009722204e-05,1.194006894525409e-06,7.2068987696296645e-06,1.9227868673877277e-05,tokens/kWh,13314008.137979336,MB,1656.446976,6310.199296,0.0,5892.99712,5852.01152,s,10,24.77078125,2.477078125,0.011624509843624389,2.4799544677734375,2.4905661865234374,2.490648693847656,2.490714699707031,"[2.487522705078125, 2.483306396484375, 2.46897998046875, 2.456047119140625, 2.467914794921875, 2.463820556640625, 2.490731201171875, 2.4905478515625, 2.4766025390625, 2.48530810546875]",tokens/s,25.433190566002033,kWh,7.165861275319553e-05,7.90402052685537e-06,4.735942266117036e-05,0.0001269220559412213,tokens/kWh,496367.6291942186,,s,630,24.767360958099353,0.03931327136206249,0.0004837065439281051,0.039222272872924804,0.03978804168701172,0.04002165470123291,0.04141244171142578,"[0.03996368026733398, 0.03915055847167969, 0.03925196838378906, 0.039136257171630856, 0.0400250244140625, 0.04018767929077149, 0.03904131317138672, 0.03952409744262695, 0.03909247970581055, 0.03953593444824219, 0.040032958984375, 0.03927395248413086, 0.039490081787109374, 0.039223297119140625, 0.039680286407470705, 0.04006883239746094, 0.03988275146484375, 0.039646240234375, 0.040511905670166014, 0.03964108657836914, 0.03943423843383789, 0.03933171081542969, 0.039601951599121096, 0.039512992858886715, 0.03946086502075195, 0.04129177474975586, 0.03969171142578125, 0.03946460723876953, 0.03939215850830078, 0.039651329040527344, 0.0400591049194336, 0.03910332870483398, 0.03952870559692383, 0.03917689514160156, 0.03934112167358399, 0.03916054534912109, 0.039043296813964845, 0.039444480895996094, 0.039395328521728515, 0.03964518356323242, 0.03941299057006836, 0.039629566192626954, 0.039577598571777346, 0.039221248626708984, 0.039806049346923826, 0.03974646377563477, 0.03925180816650391, 0.03940572738647461, 0.038972766876220706, 0.03880822372436524, 0.03907894515991211, 0.03921148681640625, 0.038982177734375, 0.03912262344360352, 0.03910102462768555, 0.0389505615234375, 0.04007324981689453, 
0.03907993698120117, 0.03913292694091797, 0.0389040641784668, 0.03998515319824219, 0.03925299072265625, 0.0393875503540039, 0.04018399810791016, 0.039425857543945314, 0.03931238555908203, 0.03967692947387695, 0.039468448638916014, 0.04154224014282227, 0.03951142501831055, 0.039584095001220704, 0.03927888107299805, 0.04001753616333008, 0.03925849533081055, 0.039593982696533206, 0.039798782348632815, 0.03965577697753906, 0.03931024169921875, 0.03970694351196289, 0.03942854309082031, 0.03947110366821289, 0.039634944915771485, 0.039436286926269534, 0.039272449493408204, 0.03972915267944336, 0.03995872116088867, 0.039548126220703125, 0.03941392135620117, 0.039504318237304686, 0.039423583984375, 0.039385505676269535, 0.03952230453491211, 0.03932160186767578, 0.03920848083496094, 0.03940966415405273, 0.03937673568725586, 0.039312000274658206, 0.03991116714477539, 0.039329441070556644, 0.03943260955810547, 0.03909836959838867, 0.03945900726318359, 0.03924991989135742, 0.03950384140014648, 0.039349281311035156, 0.03924070358276367, 0.03902659225463867, 0.039297119140625, 0.03893161773681641, 0.038951263427734376, 0.03925862503051758, 0.039815166473388675, 0.03938614273071289, 0.039029247283935545, 0.039182815551757816, 0.03910412979125977, 0.03903276824951172, 0.03903257751464844, 0.038730430603027347, 0.038975486755371096, 0.03901603317260742, 0.03894723129272461, 0.039174144744873046, 0.039198112487792966, 0.03953724670410156, 0.03909836959838867, 0.03957126235961914, 0.03904966354370117, 0.038748001098632814, 0.03893648147583008, 0.038908161163330075, 0.03889328002929687, 0.038940929412841795, 0.03933385467529297, 0.039019905090332034, 0.03901305770874024, 0.039136383056640626, 0.039129985809326175, 0.039201793670654295, 0.03973836898803711, 0.038959102630615236, 0.039314624786376956, 0.03919094467163086, 0.039048992156982425, 0.039209598541259764, 0.038952705383300784, 0.03879129409790039, 0.03884454345703125, 0.03890585708618164, 0.03892144012451172, 0.03886569595336914, 0.04144537734985351, 0.03978147125244141, 0.03945974349975586, 0.03919388961791992, 0.0386973762512207, 0.03916767883300781, 0.03924851226806641, 0.03917619323730469, 0.03902259063720703, 0.038870014190673825, 0.03889664077758789, 0.0391596794128418, 0.039188606262207035, 0.03976396942138672, 0.03914547348022461, 0.03911430358886719, 0.038828384399414065, 0.03887728118896484, 0.038954273223876956, 0.039029502868652345, 0.03888739013671875, 0.03885260772705078, 0.03893008041381836, 0.03911715316772461, 0.03902668762207031, 0.03957785415649414, 0.04164988708496094, 0.0395120964050293, 0.03914137649536133, 0.03894611358642578, 0.03898847961425781, 0.039139328002929685, 0.03903897476196289, 0.039065601348876954, 0.03894243240356445, 0.03894672012329101, 0.03917043304443359, 0.03997695922851562, 0.039896640777587894, 0.03957923126220703, 0.038982368469238284, 0.03881177520751953, 0.03893180847167969, 0.039400096893310546, 0.03898767852783203, 0.03909436798095703, 0.03899801635742187, 0.03895500946044922, 0.038813278198242186, 0.03881027221679687, 0.038885120391845704, 0.03881574249267578, 0.0388587532043457, 0.038655136108398436, 0.03892127990722656, 0.03996633529663086, 0.03897328186035156, 0.038771007537841795, 0.038809600830078124, 0.03881942367553711, 0.03879158401489258, 0.03882505416870117, 0.038627712249755856, 0.038838817596435544, 0.03868467330932617, 0.039231487274169925, 0.03933152008056641, 0.03885846328735352, 0.03891465759277344, 0.039112510681152346, 0.03899577713012695, 0.03901792144775391, 0.039023681640625, 
0.03934716796875, 0.03873270416259766, 0.038809600830078124, 0.03885465621948242, 0.03911824035644531, 0.03904982376098633, 0.038817440032958984, 0.03880384063720703, 0.03943135833740234, 0.03869776153564453, 0.03898767852783203, 0.03900630569458008, 0.03890176010131836, 0.039091968536376955, 0.03885081481933594, 0.03869900894165039, 0.0389315185546875, 0.03869728088378906, 0.03904166412353516, 0.03892633438110352, 0.03902809524536133, 0.039457408905029294, 0.03909798431396484, 0.038751903533935546, 0.03889599990844726, 0.03881814575195312, 0.039106559753417966, 0.03877830505371094, 0.03976403045654297, 0.03908937454223633, 0.03897622299194336, 0.03891404724121094, 0.03903078460693359, 0.03890784072875977, 0.04018592071533203, 0.03942604827880859, 0.03894428634643555, 0.03903036880493164, 0.03928134536743164, 0.0390002555847168, 0.03881363296508789, 0.03878070449829102, 0.039292736053466795, 0.03925433731079102, 0.039030208587646484, 0.03940425491333008, 0.039825313568115236, 0.0388702392578125, 0.038863742828369144, 0.03910041427612305, 0.039101696014404295, 0.039295135498046876, 0.03897564697265625, 0.03917212677001953, 0.03911718368530273, 0.03907155227661133, 0.03926367950439453, 0.039168800354003906, 0.038983680725097655, 0.03913046264648438, 0.03902326583862305, 0.03887104034423828, 0.04257382583618164, 0.0392437744140625, 0.039043201446533206, 0.038948734283447264, 0.03875635147094727, 0.03892633438110352, 0.03887212753295898, 0.03904604721069336, 0.03909225463867187, 0.038950912475585936, 0.03907369613647461, 0.03902089691162109, 0.038890239715576175, 0.03996979141235352, 0.039214271545410156, 0.03890464019775391, 0.03900643157958984, 0.03901212692260742, 0.039139328002929685, 0.0388298225402832, 0.03861721420288086, 0.03872576141357422, 0.03902057647705078, 0.03954889678955078, 0.03899596786499023, 0.03902838516235352, 0.03947760009765625, 0.039553024291992187, 0.03915135955810547, 0.0404552001953125, 0.03888022232055664, 0.039056705474853515, 0.03903750228881836, 0.03902233505249023, 0.03907417678833008, 0.03907788848876953, 0.03912691116333008, 0.039865856170654294, 0.039330368041992185, 0.03911475372314453, 0.03926611328125, 0.03930444717407226, 0.0390513916015625, 0.03934912109375, 0.03900115203857422, 0.0389189453125, 0.03892031860351562, 0.039150718688964845, 0.039148094177246094, 0.0392523193359375, 0.03949772644042969, 0.0393256950378418, 0.03890790557861328, 0.03902787017822266, 0.03895996856689453, 0.03942399978637695, 0.038950912475585936, 0.038774368286132815, 0.038617504119873046, 0.038801406860351564, 0.03888896179199219, 0.038760608673095706, 0.03880790328979492, 0.03888281631469727, 0.038983551025390625, 0.03916864013671875, 0.038919422149658205, 0.038946624755859374, 0.03915385437011719, 0.039353057861328124, 0.038980831146240236, 0.03872774505615234, 0.03904383850097656, 0.03896275329589844, 0.03898207855224609, 0.038967296600341796, 0.038989822387695314, 0.039122943878173826, 0.03878092956542969, 0.038936321258544924, 0.03900643157958984, 0.03934003067016602, 0.03969971084594726, 0.03906844711303711, 0.03952022552490234, 0.03907279968261719, 0.03917881774902344, 0.03889337539672852, 0.03894870376586914, 0.039184928894042965, 0.03911910247802734, 0.039373855590820316, 0.03992268753051758, 0.039182334899902346, 0.03926214218139648, 0.03919468688964844, 0.03907356643676758, 0.03998742294311523, 0.04014899063110351, 0.03926425552368164, 0.039137279510498044, 0.039199073791503905, 0.039145118713378904, 0.039024639129638675, 0.03912499237060547, 0.039231456756591794, 
0.039024478912353514, 0.039376449584960935, 0.040645248413085935, 0.039524478912353514, 0.04015091323852539, 0.03950495910644531, 0.0394268798828125, 0.03926406478881836, 0.03929721450805664, 0.03918656158447266, 0.03927859115600586, 0.039294975280761715, 0.03942105484008789, 0.03924671936035156, 0.03987865447998047, 0.043597824096679685, 0.039855648040771484, 0.03936454391479492, 0.03891664123535156, 0.03986016082763672, 0.03926432037353516, 0.03917728042602539, 0.03939219284057617, 0.03896867370605469, 0.039858272552490234, 0.042676799774169924, 0.03947315216064453, 0.039616512298583983, 0.03962265777587891, 0.03975987243652344, 0.039341793060302735, 0.03942019271850586, 0.03903897476196289, 0.0391756477355957, 0.03918083190917969, 0.03934560012817383, 0.0391317138671875, 0.03911679840087891, 0.039215103149414066, 0.03941948699951172, 0.03947766494750977, 0.03915776062011719, 0.03969577789306641, 0.039868480682373045, 0.039555614471435546, 0.03951615905761719, 0.03932521438598633, 0.03934051132202149, 0.039266143798828125, 0.039963840484619144, 0.03920339202880859, 0.039379199981689456, 0.03947110366821289, 0.039172096252441405, 0.03949513626098633, 0.03946121597290039, 0.03963308715820312, 0.03944777679443359, 0.03972585678100586, 0.03950387191772461, 0.03949488067626953, 0.039483551025390626, 0.03967859268188476, 0.03954403305053711, 0.03954668807983398, 0.039586334228515624, 0.03954528045654297, 0.039411712646484375, 0.039718753814697264, 0.039650718688964845, 0.04004735946655273, 0.03971398544311523, 0.03952892684936524, 0.03951241683959961, 0.039616382598876954, 0.039370880126953126, 0.04159897613525391, 0.03992086410522461, 0.039787296295166016, 0.039403518676757815, 0.0395016975402832, 0.039526527404785156, 0.03957939147949219, 0.03968844985961914, 0.039299072265625, 0.039378559112548825, 0.03931379318237305, 0.03936988830566406, 0.03957763290405274, 0.039401985168457034, 0.03945033645629883, 0.039127105712890624, 0.03927094268798828, 0.03914342498779297, 0.03953481674194336, 0.03934595108032227, 0.03967692947387695, 0.03934268951416016, 0.03944416046142578, 0.039463264465332035, 0.03937305450439453, 0.039424064636230466, 0.03979475021362305, 0.040035648345947264, 0.0391168327331543, 0.03911337661743164, 0.03938431930541992, 0.039182750701904294, 0.039069408416748046, 0.039484031677246095, 0.0396492805480957, 0.03951948928833008, 0.04047958374023437, 0.0392540168762207, 0.03936665725708008, 0.039427135467529295, 0.03985276794433594, 0.03976214218139648, 0.03974668884277344, 0.039562110900878904, 0.03952844619750977, 0.03943833541870117, 0.03929292678833008, 0.03920896148681641, 0.03915161514282227, 0.03958784103393555, 0.039671615600585936, 0.039604415893554686, 0.039182174682617185, 0.03942780685424805, 0.039297470092773436, 0.03968159866333008, 0.03903036880493164, 0.03900668716430664, 0.03915200042724609, 0.03952384185791016, 0.039354881286621096, 0.03957273483276367, 0.0387960319519043, 0.03895856094360352, 0.038779422760009764, 0.0388403205871582, 0.038907585144042967, 0.03898809432983399, 0.038897598266601566, 0.0387786865234375, 0.03901871871948242, 0.03903286361694336, 0.038868896484375, 0.038970623016357425, 0.03910332870483398, 0.03890995025634766, 0.03931299209594727, 0.03920035171508789, 0.0390684814453125, 0.039112865447998045, 0.03913654327392578, 0.03920159912109375, 0.03898249435424805, 0.03911999893188477, 0.03899280166625976, 0.03941846466064453, 0.03923545455932617, 0.04016988754272461, 0.03929420852661133, 0.03931622314453125, 0.03961967849731445, 
0.03981939315795899, 0.03935638427734375, 0.039213886260986326, 0.03913094329833984, 0.03977987289428711, 0.03982198333740234, 0.03964108657836914, 0.03931955337524414, 0.04000473785400391, 0.039352222442626955, 0.039498401641845704, 0.03941817474365234, 0.039722782135009765, 0.039133407592773437, 0.04025244903564453, 0.039820255279541014, 0.039090175628662106, 0.03940966415405273, 0.039032833099365234, 0.039231487274169925, 0.03928678512573242, 0.039357440948486325, 0.039261184692382815, 0.039376094818115236, 0.03931206512451172, 0.04014214324951172, 0.03931011199951172, 0.039366016387939455, 0.039392959594726565, 0.039379646301269534, 0.039349857330322265, 0.039158432006835935, 0.039362560272216796, 0.039357566833496095, 0.03938393783569336, 0.040738815307617186, 0.040207679748535154, 0.03989369583129883, 0.03942399978637695, 0.03966566467285156, 0.03942575836181641, 0.03936284637451172, 0.038849536895751956, 0.039070720672607424, 0.03921913528442383, 0.03923974227905273, 0.039132320404052734, 0.0393054084777832, 0.03928742218017578, 0.039346046447753906, 0.03926646423339844, 0.03915776062011719, 0.03938304138183594, 0.03934537506103516, 0.039099166870117184, 0.039204864501953124, 0.039147518157958985, 0.039110145568847655, 0.03903539276123047, 0.03917004776000976, 0.04059340667724609, 0.04133180618286133, 0.04079868698120117, 0.03948361587524414, 0.039448768615722656, 0.03904924774169922, 0.03907379150390625, 0.03911379241943359, 0.03901462554931641, 0.039113441467285154, 0.03909132766723633]",tokens/s,25.436702806803435,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,4354.02752,7027.42528,0.0,6624.903168,6437.168128,s,1,14.5382021484375,14.5382021484375,0.0,14.5382021484375,14.5382021484375,14.5382021484375,14.5382021484375,[14.5382021484375],,kWh,0.00015485326389167727,1.7063304250151812e-05,6.316838386799528e-05,0.00023508495200982437,,MB,1670.397952,7077.756928,0.0,6660.554752,5922.263552,s,10,2.188634948730469,0.21886349487304688,0.00026759605077926376,0.21889209747314453,0.2191195999145508,0.21914830856323242,0.21917127548217774,"[0.21891229248046876, 0.2188719024658203, 0.21869830322265624, 0.21868240356445312, 0.21881779479980468, 0.21911322021484375, 0.2191096649169922, 0.2182277069091797, 0.21902464294433593, 0.21917701721191407]",tokens/s,1169.678845476238,kWh,6.443672132155927e-06,7.106190228384847e-07,4.267376602304272e-06,1.1421667757298684e-05,tokens/kWh,22413539.374441236,MB,1677.934592,7090.33984,0.0,6673.137664,5922.266112,s,10,23.75267529296875,2.375267529296875,0.005042575736122446,2.3763852539062498,2.380043798828125,2.3820426513671875,2.3836417333984374,"[2.3745693359375, 2.37618603515625, 2.377553466796875, 2.3684287109375, 2.37658447265625, 2.373656982421875, 2.379599609375, 2.365330322265625, 2.376724853515625, 2.38404150390625]",tokens/s,26.523328097971863,kWh,6.911335203576039e-05,7.623132965591423e-06,4.097704848209554e-05,0.00011771353348344739,tokens/kWh,535197.594836102,,s,630,23.749357933044426,0.03769739354451498,0.000622454123953602,0.03759353446960449,0.037983586883544924,0.03825288734436035,0.039769835662841806,"[0.037978111267089845, 0.03774604797363281, 0.03766057586669922, 0.03790921783447266, 0.037648384094238284, 0.03769295883178711, 0.0377305908203125, 0.03751916885375976, 0.037740734100341795, 0.03753564834594727, 0.037855232238769534, 0.037785022735595704, 0.03788092803955078, 0.03784223937988281, 0.0374664306640625, 0.03752569580078125, 0.03739756774902344, 0.03754892730712891, 0.03778355026245117, 0.03761542510986328, 0.0377591667175293, 0.037827903747558594, 0.037751327514648436, 0.03771564865112305, 0.037493217468261716, 0.03761151885986328, 0.037483585357666015, 0.037701568603515624, 0.03793404769897461, 0.037777439117431644, 0.03786547088623047, 0.03778355026245117, 0.037770561218261715, 0.03784569549560547, 0.037607425689697264, 0.03766211318969727, 0.038160961151123045, 0.03778153610229492, 0.03761904144287109, 0.03770563125610352, 0.03790262222290039, 0.037493217468261716, 0.03770352172851563, 0.037619873046875, 0.037432991027832034, 0.037556255340576175, 0.039508289337158206, 0.037582847595214845, 0.03764223861694336, 0.037564224243164065, 0.03734675216674805, 0.037520126342773436, 0.03724288177490234, 0.03736076736450195, 0.03749539184570312, 0.0374420166015625, 0.03750012969970703, 0.03780448150634766, 0.037496990203857425, 0.03789209747314453, 0.03736576080322265, 0.03740671920776367, 0.03762790298461914, 0.03795145416259765, 0.03762031936645508, 0.037910526275634765, 0.0381214714050293, 0.04018707275390625, 0.037749568939208986, 0.03752959823608398, 0.03742924880981445, 0.03736361694335937, 0.03734649658203125, 0.03736054229736328, 0.03736576080322265, 0.037321823120117184, 0.037454753875732424, 0.037427200317382815, 0.03749683380126953, 0.037548030853271484, 0.037558273315429686, 0.03740262222290039, 0.03752499389648437, 0.03837164688110352, 0.037372062683105466, 0.03784489440917969, 0.03803459167480469, 
0.03749679946899414, 0.03731763076782227, 0.03723168182373047, 0.03803622436523438, 0.039465152740478515, 0.03759308624267578, 0.03784908676147461, 0.03768924713134766, 0.037789791107177735, 0.03761113739013672, 0.03734771347045898, 0.03723263931274414, 0.037469886779785154, 0.037628097534179686, 0.03747395324707031, 0.03740105438232422, 0.03735756683349609, 0.03770582580566406, 0.03763504028320312, 0.03765283203125, 0.037587455749511715, 0.03762185668945312, 0.03783065414428711, 0.037539840698242184, 0.038981632232666014, 0.03907564926147461, 0.03772230529785156, 0.03763302230834961, 0.03768940734863281, 0.037714881896972655, 0.03786896133422851, 0.037542495727539066, 0.037425151824951174, 0.03779993438720703, 0.03769343948364258, 0.037615615844726565, 0.03753139114379883, 0.03739878463745117, 0.037322113037109375, 0.03827299118041992, 0.037848960876464846, 0.03759823989868164, 0.0374450569152832, 0.03769990539550781, 0.037566463470458986, 0.03789385604858399, 0.03778524780273437, 0.03761011123657226, 0.03761356735229492, 0.03748659133911133, 0.03751116943359375, 0.03765248107910156, 0.0375313606262207, 0.03777769470214844, 0.03766239929199219, 0.03780435180664062, 0.03783795166015625, 0.03761008071899414, 0.03748483276367188, 0.03765593719482422, 0.03755891036987305, 0.038809600830078124, 0.037591136932373044, 0.03921295928955078, 0.03739804840087891, 0.03755465698242187, 0.03767295837402344, 0.037722110748291016, 0.03730249786376953, 0.03755395126342773, 0.03738623809814453, 0.03771577453613281, 0.03784473419189453, 0.0376693115234375, 0.03738790512084961, 0.037388671875, 0.03748992156982422, 0.037485599517822266, 0.03756166458129883, 0.03772252655029297, 0.037795486450195315, 0.03742278289794922, 0.03777193450927734, 0.03768220901489258, 0.03918044662475586, 0.03746899032592774, 0.03763014221191406, 0.037728065490722655, 0.03741439819335937, 0.037655040740966796, 0.03914281463623047, 0.03763056182861328, 0.037580799102783204, 0.037577983856201175, 0.03759206390380859, 0.037789440155029295, 0.03791667175292969, 0.03777468872070312, 0.03808937454223633, 0.037646110534667966, 0.037781726837158205, 0.03755811309814453, 0.03843161773681641, 0.03818867111206055, 0.037572254180908204, 0.037869823455810546, 0.04087356948852539, 0.03778649520874024, 0.03742310333251953, 0.0373633918762207, 0.03749919891357422, 0.037816287994384766, 0.0376049919128418, 0.03762588882446289, 0.03752412796020508, 0.037824222564697266, 0.037879199981689454, 0.037496864318847654, 0.037504798889160154, 0.03746067047119141, 0.03821340942382812, 0.037545665740966794, 0.037322944641113284, 0.03745801544189453, 0.03747427368164063, 0.037445568084716795, 0.03736774444580078, 0.037556289672851566, 0.037822208404541015, 0.03769209671020508, 0.03737155151367187, 0.03742755126953125, 0.03751731109619141, 0.0377239990234375, 0.03771926498413086, 0.03756515121459961, 0.03707516860961914, 0.037379070281982424, 0.037550209045410156, 0.03765132904052734, 0.037556224822998044, 0.037698719024658205, 0.037344097137451175, 0.03768320083618164, 0.037556224822998044, 0.03784908676147461, 0.03727561569213867, 0.03759516906738281, 0.037254207611083986, 0.03717327880859375, 0.03723491287231445, 0.03711404800415039, 0.037162528991699216, 0.037219295501708986, 0.03737961578369141, 0.037437023162841795, 0.037239360809326175, 0.037213695526123046, 0.037520030975341796, 0.03734134292602539, 0.03770982360839844, 0.037588062286376955, 0.0375489616394043, 0.03733299255371094, 0.037459583282470704, 0.03814691162109375, 0.03764828872680664, 
0.03751536178588867, 0.038053855895996094, 0.037521438598632814, 0.037804031372070314, 0.03760332870483398, 0.03749478530883789, 0.0375412483215332, 0.03751590347290039, 0.037738494873046875, 0.03767500686645508, 0.038158206939697265, 0.03789836883544922, 0.037722110748291016, 0.03765862274169922, 0.03775897598266602, 0.037692832946777347, 0.03769318389892578, 0.03761612701416016, 0.0378388786315918, 0.03747238540649414, 0.03762400054931641, 0.03763520050048828, 0.037593982696533204, 0.037817695617675784, 0.037510879516601564, 0.03777369689941406, 0.03745439910888672, 0.03777536010742188, 0.037648384094238284, 0.03722991943359375, 0.03754665756225586, 0.03738623809814453, 0.0372158088684082, 0.03756588745117188, 0.03928332901000976, 0.03883590316772461, 0.037913280487060545, 0.037709182739257815, 0.037523681640625, 0.037556640625, 0.03787571334838867, 0.037601280212402347, 0.03783475112915039, 0.037689342498779296, 0.03741030502319336, 0.03736012649536133, 0.03749689483642578, 0.037652416229248045, 0.03762374496459961, 0.037851200103759766, 0.038776126861572266, 0.037681854248046875, 0.037486400604248044, 0.03745196914672851, 0.0374950065612793, 0.03797318267822265, 0.037638751983642575, 0.03776054382324219, 0.03749923324584961, 0.03766899108886719, 0.0380313606262207, 0.03798303985595703, 0.037375297546386715, 0.03730502319335938, 0.03753574371337891, 0.037313888549804684, 0.037356193542480466, 0.03779993438720703, 0.03779993438720703, 0.037350849151611326, 0.03773321533203125, 0.03772515106201172, 0.037230655670166014, 0.03764643096923828, 0.0372578239440918, 0.03755417633056641, 0.03729587173461914, 0.03758310317993164, 0.03783603286743164, 0.04566281509399414, 0.03733110427856445, 0.03752447891235351, 0.03745663833618164, 0.03738035202026367, 0.03735251235961914, 0.03741196823120117, 0.037357376098632815, 0.03751321411132812, 0.037354976654052734, 0.03718796920776367, 0.03740182495117188, 0.03747116851806641, 0.03729817581176758, 0.037294078826904296, 0.037203968048095705, 0.037160961151123044, 0.03735724639892578, 0.03738399887084961, 0.037274112701416014, 0.037491870880126954, 0.038253089904785154, 0.04120979309082031, 0.03742758560180664, 0.03732787322998047, 0.03758524703979492, 0.03739529418945312, 0.03784796905517578, 0.03777833557128906, 0.03762790298461914, 0.03769494247436524, 0.03760550308227539, 0.037359424591064457, 0.037507583618164066, 0.037634143829345705, 0.03768320083618164, 0.03760883331298828, 0.03723123168945312, 0.03751116943359375, 0.037474174499511716, 0.037417087554931644, 0.03739215850830078, 0.03745606231689453, 0.037354656219482425, 0.037407806396484375, 0.03827302551269531, 0.03776099014282226, 0.03757468795776367, 0.037367809295654295, 0.03776889419555664, 0.03768147277832031, 0.038252639770507815, 0.03782851028442383, 0.03767295837402344, 0.03781222534179687, 0.03783475112915039, 0.037649951934814456, 0.03794172668457031, 0.03758860778808594, 0.03868710327148438, 0.03746166229248047, 0.03752175903320312, 0.03792812728881836, 0.03811616134643555, 0.03784854507446289, 0.038139934539794924, 0.03817740631103515, 0.037835872650146485, 0.037792545318603515, 0.03794944000244141, 0.037967872619628903, 0.03767846298217774, 0.03779443359375, 0.03784908676147461, 0.03783679962158203, 0.03780176162719726, 0.037754528045654295, 0.03796025466918945, 0.03800064086914062, 0.03759209442138672, 0.03769443130493164, 0.03811939239501953, 0.03792057418823242, 0.03833059310913086, 0.0377446403503418, 0.038580223083496096, 0.037988510131835934, 0.03762969589233398, 
0.03754198455810547, 0.03785318374633789, 0.03750297546386719, 0.037493759155273435, 0.03790070343017578, 0.037269985198974606, 0.0381091194152832, 0.03739593505859375, 0.037757633209228515, 0.03735286331176758, 0.037311134338378904, 0.037357120513916015, 0.037222816467285154, 0.03751692962646484, 0.037669246673583984, 0.03725948715209961, 0.03788982391357422, 0.03748044967651367, 0.0373309440612793, 0.037357120513916015, 0.03806294250488281, 0.03751321411132812, 0.037891201019287106, 0.03787865447998047, 0.03809075164794922, 0.03749683380126953, 0.03724883270263672, 0.03750931167602539, 0.03738159942626953, 0.037505569458007815, 0.0375392951965332, 0.03781276702880859, 0.0372589111328125, 0.03747635269165039, 0.03710806274414063, 0.0375684814453125, 0.03747638320922852, 0.037781246185302736, 0.03739468765258789, 0.03724288177490234, 0.03747430419921875, 0.03742115020751953, 0.03727705764770508, 0.0371912956237793, 0.038047969818115236, 0.03767788696289062, 0.03774579238891602, 0.037506015777587894, 0.037572383880615234, 0.03752777481079102, 0.037442462921142575, 0.03749568176269531, 0.03740467071533203, 0.03756784057617187, 0.037274272918701175, 0.03760537719726562, 0.03739801788330078, 0.037496768951416015, 0.037195423126220706, 0.037569534301757815, 0.037498783111572266, 0.03766899108886719, 0.037658496856689455, 0.037967872619628903, 0.03772598266601562, 0.037531646728515625, 0.037471710205078126, 0.03741772842407227, 0.03762995147705078, 0.03745491027832031, 0.03745859146118164, 0.03728617477416992, 0.037459712982177734, 0.037445438385009765, 0.037636032104492186, 0.03745228958129883, 0.03772415924072266, 0.03732275390625, 0.03758185577392578, 0.03760022354125977, 0.03750611114501953, 0.037450687408447265, 0.03791846466064453, 0.03836438369750977, 0.0376894416809082, 0.037411006927490234, 0.03758950424194336, 0.037384353637695315, 0.03749001693725586, 0.03738880157470703, 0.037389888763427734, 0.037587646484375, 0.03727536010742188, 0.0374436149597168, 0.03761151885986328, 0.03766998291015625, 0.03787587356567383, 0.03783891296386719, 0.03755222320556641, 0.037945438385009765, 0.038189567565917966, 0.038144001007080076, 0.03760483169555664, 0.03756496047973633, 0.037510623931884764, 0.03763443374633789, 0.03762371063232422, 0.03754150390625, 0.03725932693481445, 0.03889132690429688, 0.03778432083129883, 0.037297409057617185, 0.03763846588134766, 0.03731296157836914, 0.03726934432983398, 0.03772022247314453, 0.037539840698242184, 0.037531646728515625, 0.03762790298461914, 0.037353473663330077, 0.037472511291503904, 0.03744905471801758, 0.03778342437744141, 0.03804415893554688, 0.037431327819824216, 0.03751321411132812, 0.037722110748291016, 0.03757379150390625, 0.037518177032470706, 0.038479297637939454, 0.037916606903076175, 0.03740755081176758, 0.03774755096435547, 0.03785801696777344, 0.037690750122070314, 0.03740777587890625, 0.03983087921142578, 0.03781033706665039, 0.04024931335449219, 0.03800921630859375, 0.037418624877929685, 0.037437503814697265, 0.03734457778930664, 0.03759017562866211, 0.03762979125976563, 0.037494110107421874, 0.03842038345336914, 0.03787148666381836, 0.03805971145629883, 0.037738719940185544, 0.03786735916137695, 0.037579391479492186, 0.03759833526611328, 0.037951904296875, 0.046198368072509766, 0.03777171325683594, 0.03778604888916016, 0.03751900863647461, 0.037419361114501955, 0.03757056045532227, 0.03742310333251953, 0.03722051239013672, 0.03736764907836914, 0.03753574371337891, 0.037528831481933596, 0.03758975982666016, 0.03750227355957031, 
0.037657279968261716, 0.03745177459716797, 0.03757846450805664, 0.03759747314453125, 0.03762966537475586, 0.03786166381835938, 0.03803049468994141, 0.03766908645629883, 0.037720703125, 0.03962038421630859, 0.03892355346679687, 0.03769235229492188, 0.037682945251464844, 0.03772441482543945, 0.038090911865234375, 0.037851200103759766, 0.037797664642333986, 0.03775897598266602, 0.03753910446166992, 0.037507808685302735, 0.037591041564941405, 0.037533695220947266, 0.037529022216796874, 0.03802339172363281, 0.0380167350769043, 0.03768384170532227, 0.03762176132202148, 0.037623809814453124, 0.0375376968383789, 0.03752102279663086, 0.03753827285766602, 0.037558273315429686, 0.03763600158691406, 0.03731670379638672, 0.03743743896484375, 0.03729817581176758, 0.03776102447509765, 0.037384193420410154, 0.03754707336425781, 0.03764028930664062, 0.03741987228393555, 0.037580799102783204]",tokens/s,26.527032931843145,,, 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,4355.211264,7027.42528,0.0,6624.903168,6437.168128,s,1,15.764060546875,15.764060546875,0.0,15.764060546875,15.764060546875,15.764060546875,15.764060546875,[15.764060546875],,kWh,0.00015788489505834112,1.7397091278056832e-05,6.398394007599978e-05,0.00023926592641239773,,MB,1657.48736,7077.756928,0.0,6660.554752,5922.13248,s,10,2.1110002593994137,0.21110002593994143,0.0005360906515346191,0.21096794891357423,0.21178743743896483,0.21202196121215822,0.2122095802307129,"[0.2106103973388672, 0.21074954223632814, 0.2105292205810547, 0.21060438537597656, 0.21173532104492188, 0.21079852294921875, 0.21142137145996093, 0.2111373748779297, 0.21115763854980468, 0.21225648498535157]",tokens/s,1212.6952560054765,kWh,6.217747427748044e-06,6.85554196521589e-07,4.105422906085262e-06,1.1008724530354893e-05,tokens/kWh,23254283.390788708,MB,1660.346368,7090.33984,0.0,6673.137664,5922.13504,s,10,20.983874755859375,2.0983874755859375,0.010415086299225745,2.094615478515625,2.1125453125,2.1135874755859376,2.114421206054687,"[2.095390869140625, 2.084998046875, 2.090502685546875, 2.093840087890625, 2.114629638671875, 2.08720556640625, 2.090158447265625, 2.112313720703125, 2.107558837890625, 2.10727685546875]",tokens/s,30.023053765324427,kWh,6.298725443183557e-05,6.946891850732433e-06,3.90016393573146e-05,0.00010893578563988259,tokens/kWh,578322.3541276321,,s,630,20.98059271240234,0.03330252811492435,0.00047460868042727784,0.03322974395751953,0.033656649017333985,0.03385975399017334,0.035050760192871094,"[0.03341030502319336, 0.03287011337280273, 0.03266038513183594, 0.032958591461181644, 0.032825119018554685, 0.03288278579711914, 0.0328326416015625, 0.033305217742919925, 0.03306316757202148, 0.03387305450439453, 0.033557342529296874, 0.03294822311401367, 0.033024032592773436, 0.03291315078735352, 0.03299760055541992, 0.03312553787231445, 0.03301792144775391, 0.032860958099365234, 0.03285334396362305, 0.03280553436279297, 0.033019519805908205, 0.03286463928222656, 0.03469247817993164, 0.03326559829711914, 0.033344127655029296, 0.0332020492553711, 0.03311840057373047, 0.03345993423461914, 0.03333763122558594, 0.03410124969482422, 0.03323289489746094, 0.033173023223876955, 0.03453590393066406, 0.03330851364135742, 0.03439174270629883, 0.03310230255126953, 0.03342457580566406, 0.03338236618041992, 0.03300966262817383, 0.03347747039794922, 0.033554367065429684, 0.03352313613891601, 0.0336226577758789, 0.033345535278320314, 0.03319807815551758, 0.03313654327392578, 0.03321660614013672, 0.03334143829345703, 0.03313049697875976, 0.03322470474243164, 0.03401932907104492, 0.03286377716064453, 0.033100223541259764, 0.033089569091796875, 0.0331960334777832, 0.033137889862060545, 0.03298998260498047, 0.0332042236328125, 0.033051807403564455, 0.03353238296508789, 0.03318185424804688, 0.033267200469970705, 
0.03293654251098633, 0.03341471862792969, 0.033071041107177734, 0.033412063598632816, 0.03297280120849609, 0.03300966262817383, 0.03290832138061523, 0.03304751968383789, 0.03312543869018555, 0.03464492797851562, 0.03355161666870117, 0.032989791870117184, 0.03291561508178711, 0.03321651077270508, 0.03303449630737305, 0.03280035018920899, 0.03297872161865235, 0.0328620491027832, 0.03306550216674805, 0.03298454284667969, 0.03310383987426758, 0.03309625625610352, 0.03299299240112305, 0.032908737182617186, 0.03283257675170898, 0.032967967987060545, 0.032784896850585936, 0.032882686614990234, 0.032935935974121096, 0.03304451370239258, 0.03298822402954102, 0.03301193618774414, 0.033264671325683594, 0.03318259048461914, 0.0328845443725586, 0.033046592712402345, 0.03305923080444336, 0.03299993515014649, 0.03291324615478516, 0.033232192993164066, 0.03302691268920899, 0.03314041519165039, 0.03296819305419922, 0.03280160140991211, 0.0328721923828125, 0.03283379364013672, 0.033105918884277344, 0.03291852951049805, 0.03300982284545898, 0.033249984741210936, 0.03318991851806641, 0.03339891052246094, 0.033113662719726565, 0.033272254943847654, 0.03307299041748047, 0.03293609619140625, 0.03343337631225586, 0.032925918579101564, 0.0330579833984375, 0.03310211181640625, 0.03306713485717774, 0.03331318283081055, 0.03347455978393555, 0.0332303352355957, 0.03343510437011719, 0.03323958587646485, 0.03294003295898437, 0.032942081451416014, 0.0331646728515625, 0.03308127975463867, 0.032953025817871094, 0.03302566528320312, 0.033294719696044923, 0.03382460784912109, 0.03298643112182617, 0.033147743225097656, 0.03293312072753906, 0.033170177459716794, 0.03323494338989258, 0.033312767028808594, 0.03348070526123047, 0.0328680305480957, 0.03292975997924805, 0.03289654541015625, 0.03298371124267578, 0.03365903854370117, 0.0329536018371582, 0.033253662109375, 0.033026271820068356, 0.0330728645324707, 0.033102367401123045, 0.03299327850341797, 0.033293407440185545, 0.03299638366699219, 0.033479873657226565, 0.03319801712036133, 0.033153472900390626, 0.03295673751831055, 0.03303014373779297, 0.03290288162231445, 0.03321036911010742, 0.03377129745483398, 0.032943904876708986, 0.03328483200073242, 0.033476608276367184, 0.03383222579956055, 0.032938720703125, 0.033028030395507814, 0.03303430557250977, 0.03306496047973633, 0.032851966857910156, 0.03301500701904297, 0.03295926284790039, 0.0331141128540039, 0.03328156661987305, 0.03494345474243164, 0.033699840545654294, 0.033230846405029296, 0.033105918884277344, 0.0331363525390625, 0.03309164810180664, 0.03301398468017578, 0.033138240814208984, 0.03280883026123047, 0.03300409698486328, 0.03307263946533203, 0.033221118927001955, 0.033390270233154294, 0.033184062957763674, 0.032927745819091796, 0.03309568023681641, 0.033166366577148436, 0.03296150588989258, 0.033073150634765625, 0.033310657501220704, 0.03306924819946289, 0.0331098861694336, 0.03298451232910156, 0.03333792114257812, 0.03470131301879883, 0.03343564987182617, 0.032990432739257815, 0.03313334274291992, 0.033365760803222656, 0.033129886627197264, 0.03302691268920899, 0.03304447937011719, 0.03300966262817383, 0.032917503356933595, 0.032925697326660154, 0.03285583877563476, 0.03287225723266601, 0.03311004638671875, 0.03312799835205078, 0.03274563217163086, 0.03286463928222656, 0.032798782348632816, 0.03303446578979492, 0.03277753448486328, 0.03307788848876953, 0.03278444671630859, 0.033159168243408206, 0.03323904037475586, 0.03298713684082031, 0.03311001586914063, 0.03356902313232422, 0.03377078247070313, 
0.03335830307006836, 0.03336191940307617, 0.03331654357910156, 0.033882431030273434, 0.03356671905517578, 0.033123905181884766, 0.033623615264892576, 0.033248321533203125, 0.033259326934814454, 0.03343155288696289, 0.03342681503295898, 0.03328268814086914, 0.03305267333984375, 0.03318483352661133, 0.03344275283813476, 0.03347251129150391, 0.03374899291992187, 0.03335782241821289, 0.033318336486816404, 0.033485057830810544, 0.033737022399902346, 0.03360553741455078, 0.03305257415771484, 0.03361027145385742, 0.033378688812255856, 0.03328716659545899, 0.03323801422119141, 0.03309936141967774, 0.03314934539794922, 0.033355201721191406, 0.03339039993286133, 0.0332152328491211, 0.03378995132446289, 0.03316326522827148, 0.03298035049438477, 0.03299391937255859, 0.03343468856811523, 0.033254112243652344, 0.03349935913085938, 0.03352166366577149, 0.03342102432250976, 0.03364278411865235, 0.033467582702636715, 0.03357974243164062, 0.03343107223510742, 0.03357247924804688, 0.0332545280456543, 0.03316511917114258, 0.03330252838134766, 0.03343491363525391, 0.03358319854736328, 0.03419814300537109, 0.033554431915283206, 0.03363177490234375, 0.03354876708984375, 0.03365478515625, 0.03406358337402344, 0.03387472152709961, 0.0332217903137207, 0.03354864120483399, 0.03346255874633789, 0.033582561492919924, 0.03357350540161133, 0.034154624938964845, 0.03376537704467773, 0.0338671989440918, 0.033750816345214846, 0.03349379348754883, 0.03505152130126953, 0.033380001068115235, 0.03347491073608398, 0.0336234245300293, 0.033861568450927734, 0.0335346565246582, 0.03344179153442383, 0.033642494201660156, 0.03367731094360352, 0.033279552459716796, 0.033329055786132815, 0.03553878402709961, 0.03358176040649414, 0.033413120269775394, 0.03329951858520508, 0.033635265350341795, 0.03381619262695312, 0.03356710433959961, 0.033795680999755856, 0.03340617752075195, 0.033397663116455076, 0.03305276870727539, 0.033482688903808594, 0.03338694381713867, 0.03334905624389648, 0.033266239166259766, 0.03309571075439453, 0.03313081741333008, 0.03321651077270508, 0.03319136047363281, 0.03304710388183594, 0.03323699188232422, 0.03322000122070313, 0.032901599884033204, 0.033173633575439454, 0.03298057556152344, 0.033027809143066404, 0.033077953338623046, 0.03295199966430664, 0.033086910247802734, 0.03323788833618164, 0.03322812652587891, 0.03325609588623047, 0.03295808029174805, 0.03311824035644531, 0.033047073364257815, 0.032866111755371095, 0.03297075271606445, 0.033067008972167966, 0.03349094390869141, 0.0329543685913086, 0.03320979309082031, 0.033108543395996094, 0.03300556945800781, 0.03303241729736328, 0.03283126449584961, 0.03300172805786133, 0.03282614517211914, 0.03301193618774414, 0.03300553512573242, 0.032822048187255856, 0.03330620956420898, 0.033115550994873046, 0.03297500610351563, 0.0329859504699707, 0.033814208984375, 0.03298540878295898, 0.03333513641357422, 0.03313644790649414, 0.03347286224365234, 0.03304240036010742, 0.03310793685913086, 0.0334266242980957, 0.03318787384033203, 0.03294214248657226, 0.03298953628540039, 0.03287094497680664, 0.032906272888183596, 0.03287539291381836, 0.03275775909423828, 0.033142398834228516, 0.03316940689086914, 0.03304985427856445, 0.0328460807800293, 0.032997440338134766, 0.03319446563720703, 0.03314479827880859, 0.03289401626586914, 0.03289334487915039, 0.03319043350219727, 0.03286220932006836, 0.03305472183227539, 0.03295606231689453, 0.03307555389404297, 0.03298419189453125, 0.032766849517822265, 0.03326723098754883, 0.03308367919921875, 0.032947551727294924, 
0.03283849716186524, 0.03277974319458008, 0.03301020812988281, 0.03313379287719727, 0.03298793411254883, 0.03314614486694336, 0.03331760025024414, 0.03357404708862305, 0.033420127868652345, 0.03347977447509766, 0.03334419250488281, 0.03325299072265625, 0.03323503875732422, 0.03326617431640625, 0.033197376251220705, 0.03303097534179687, 0.03306422424316406, 0.0333625602722168, 0.03290108871459961, 0.03318527984619141, 0.0329317741394043, 0.033100353240966794, 0.033199230194091794, 0.03312524795532226, 0.03303753662109375, 0.032935775756835935, 0.03321952056884766, 0.033097633361816405, 0.03278652954101562, 0.03329433441162109, 0.033030078887939456, 0.03302201461791992, 0.03345612716674805, 0.03324515151977539, 0.03321654510498047, 0.0335022087097168, 0.03346121597290039, 0.03323497772216797, 0.033388320922851565, 0.033544158935546874, 0.033656383514404295, 0.03360428619384766, 0.03342969512939453, 0.03346412658691406, 0.033928577423095706, 0.03344384002685547, 0.03311820983886719, 0.033043617248535155, 0.03287126541137695, 0.03338393783569336, 0.03450444793701172, 0.033714687347412106, 0.033304481506347655, 0.03397990417480469, 0.03342387390136719, 0.03322447967529297, 0.03355295944213867, 0.03335721588134766, 0.03344240188598633, 0.03344704055786133, 0.03330767822265625, 0.033635711669921874, 0.03354982376098633, 0.03325641632080078, 0.03323904037475586, 0.03303219223022461, 0.03305472183227539, 0.03283967971801758, 0.033046207427978515, 0.03322297668457031, 0.03311206436157227, 0.033174816131591796, 0.03299993515014649, 0.033060352325439454, 0.03292233657836914, 0.03321446228027344, 0.033495040893554685, 0.033277950286865234, 0.03335987091064453, 0.03823616027832031, 0.03855974578857422, 0.033476608276367184, 0.03321855926513672, 0.03340259170532227, 0.033134750366210934, 0.03320611190795898, 0.03349856185913086, 0.03303263854980469, 0.033302558898925784, 0.033116161346435545, 0.03323235321044922, 0.033048831939697265, 0.033425697326660155, 0.03327132797241211, 0.032981857299804684, 0.03307929611206055, 0.03297280120849609, 0.03329433441162109, 0.03313564682006836, 0.03338060760498047, 0.03340566253662109, 0.03351055908203125, 0.03316617584228516, 0.03335523223876953, 0.03356931304931641, 0.03402252960205078, 0.035256351470947266, 0.035060577392578125, 0.03391945648193359, 0.033857536315917966, 0.03373056030273437, 0.03384524917602539, 0.033683456420898435, 0.03380950546264649, 0.03367561721801758, 0.03378028869628906, 0.03372032165527344, 0.03342131042480469, 0.033285118103027346, 0.033423809051513674, 0.033384960174560545, 0.033564735412597656, 0.033758399963378906, 0.03350374221801758, 0.03369817733764648, 0.033682880401611326, 0.033435775756835935, 0.03332134246826172, 0.03327590560913086, 0.033605918884277344, 0.03336163330078125, 0.034197086334228514, 0.03328444671630859, 0.033194049835205075, 0.03305043029785156, 0.033592735290527344, 0.03345283126831055, 0.03307110214233398, 0.0331236801147461, 0.03320489501953125, 0.03329363250732422, 0.033364673614501954, 0.03342457580566406, 0.03322499084472656, 0.03323283386230469, 0.03329244613647461, 0.03330297470092773, 0.03391692733764649, 0.033157024383544925, 0.033140830993652344, 0.03310579299926758, 0.0333620491027832, 0.03297420883178711, 0.03330313491821289, 0.033185726165771486, 0.03340707015991211, 0.033244991302490236, 0.033435935974121096, 0.03385129547119141, 0.03383500671386719, 0.03328409576416016, 0.033570430755615235, 0.03335379028320312, 0.03350284957885742, 0.033667774200439454, 0.03324662399291992, 0.03353865432739258, 
0.033330944061279295, 0.03318399810791016, 0.03330047988891602, 0.03325939178466797, 0.033743423461914064, 0.03326723098754883, 0.03320060729980469, 0.03335984039306641, 0.03341519927978515, 0.03327590560913086, 0.0331297607421875, 0.03312521743774414, 0.03338636779785156, 0.0332196159362793, 0.03353670501708984, 0.03655023956298828, 0.03350751876831055, 0.03319968032836914, 0.033131454467773436, 0.03320003128051758, 0.03328790283203125, 0.03322915267944336, 0.03354956817626953, 0.03339139175415039, 0.033234657287597655, 0.03341766357421875, 0.033347232818603516, 0.03326380920410156, 0.03323862457275391, 0.0334315185546875, 0.0332845458984375, 0.03333270263671875, 0.03355497741699219, 0.03344998550415039, 0.033585151672363284, 0.03339836883544922, 0.03322262573242187, 0.03344169616699219, 0.03338703918457031, 0.0335107536315918, 0.03317155075073242, 0.03351724624633789, 0.033393535614013675, 0.03326700973510742, 0.03321311950683594, 0.03321811294555664, 0.03322515106201172, 0.03325494384765625, 0.03323923110961914, 0.03346255874633789, 0.03346022415161133, 0.03340854263305664, 0.0333419189453125, 0.033398303985595706, 0.03328780746459961, 0.03336057662963867, 0.0331739501953125, 0.03336479949951172, 0.03328707122802734, 0.033320960998535154, 0.03504889678955078, 0.034722305297851565, 0.03337836837768555, 0.03332495880126953, 0.03359958267211914, 0.03345318222045898, 0.033224800109863284]",tokens/s,30.027750342228682,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,4350.672896,6905.790464,0.0,6503.268352,6315.533312,s,1,14.4175380859375,14.4175380859375,0.0,14.4175380859375,14.4175380859375,14.4175380859375,14.4175380859375,[14.4175380859375],,kWh,0.00014996415466666613,1.6532370213032282e-05,6.142810469800253e-05,0.00022792462957770093,,MB,1653.448704,6956.122112,0.0,6538.919936,5800.628736,s,10,31.805233642578123,3.1805233642578123,0.003963072166665218,3.1806835937500004,3.1844994628906247,3.184654150390625,3.184777900390625,"[3.171090576171875, 3.177844970703125, 3.179498291015625, 3.179603515625, 3.178853515625, 3.182878662109375, 3.181763671875, 3.184426513671875, 3.184465087890625, 3.184808837890625]",tokens/s,80.48989763033501,kWh,9.268551276833269e-05,1.0223159351782206e-05,6.151821588120041e-05,0.0001644268880013153,tokens/kWh,1556922.9772076705,MB,1656.68864,6968.705024,0.0,6551.502848,5800.631296,s,10,23.354171142578128,2.3354171142578126,0.00945854502399356,2.3333594970703126,2.347409912109375,2.3488732910156247,2.350043994140625,"[2.343912841796875, 2.34224853515625, 2.350336669921875, 2.331380615234375, 2.333701171875, 2.322040283203125, 2.333017822265625, 2.326859130859375, 2.32358935546875, 
2.347084716796875]",tokens/s,26.975909192144968,kWh,6.792044186083426e-05,7.490706100683975e-06,4.50103415637993e-05,0.00012042148952531748,tokens/kWh,523162.4376042521,,s,630,23.35081994628905,0.0370647935655382,0.0005735179980587423,0.036957473754882814,0.0374524242401123,0.037706271553039544,0.03915885784149171,"[0.03924044799804687, 0.03702950286865234, 0.03699359893798828, 0.03692940902709961, 0.03674310302734375, 0.03681465530395508, 0.03689849472045898, 0.037106369018554686, 0.03720627212524414, 0.036797664642333985, 0.036977184295654296, 0.03683942413330078, 0.03730771255493164, 0.03725177764892578, 0.036986305236816404, 0.03707920074462891, 0.03701945495605469, 0.037205726623535156, 0.037423999786376956, 0.03745177459716797, 0.037427200317382815, 0.03708256149291992, 0.0374400634765625, 0.03738972854614258, 0.038494815826416014, 0.03867587280273437, 0.037458271026611326, 0.04087744140625, 0.03753868865966797, 0.03727360153198242, 0.037168800354003904, 0.037056865692138674, 0.037373504638671874, 0.03702214431762695, 0.03705974578857422, 0.03701651382446289, 0.037273502349853514, 0.036947967529296875, 0.037253120422363284, 0.03703126525878906, 0.03683926391601562, 0.03704915237426758, 0.03705401611328125, 0.03700576019287109, 0.03720191955566406, 0.036939777374267575, 0.03697436904907227, 0.03689699172973633, 0.0373230094909668, 0.03685923385620117, 0.03689718246459961, 0.03706070327758789, 0.037218208312988284, 0.03696047973632813, 0.03696003341674805, 0.03679164886474609, 0.036866718292236325, 0.036746753692626956, 0.03671295928955078, 0.03671449661254883, 0.03680374526977539, 0.036743648529052736, 0.03681919860839844, 0.037713920593261716, 0.03702774429321289, 0.03695420837402344, 0.037062911987304686, 0.0370393295288086, 0.03739497756958008, 0.03709747314453125, 0.03723247909545899, 0.03730425643920898, 0.03703539276123047, 0.03706540679931641, 0.03731881713867188, 0.03722444915771484, 0.03727360153198242, 0.03771596908569336, 0.03767647933959961, 0.037211776733398434, 0.03716806411743164, 0.03726457595825195, 0.03725600051879883, 0.03751321411132812, 0.037378047943115236, 0.03745177459716797, 0.03742211151123047, 0.03719430541992187, 0.03710774230957031, 0.03712211227416992, 0.037224769592285156, 0.037427200317382815, 0.03720998382568359, 0.03706800079345703, 0.03741171264648437, 0.03687427139282227, 0.03672063827514648, 0.03689267349243164, 0.03718348693847656, 0.03723219299316406, 0.037171646118164064, 0.036945919036865234, 0.03716521453857422, 0.03705001449584961, 0.037803775787353514, 0.037487041473388674, 0.037193313598632816, 0.037212417602539065, 0.03720207977294922, 0.036947967529296875, 0.03708681488037109, 0.03694838333129883, 0.03726540756225586, 0.037572608947753904, 0.03691110229492187, 0.03725056076049805, 0.03706047821044922, 0.0370035514831543, 0.0368623046875, 0.03693708801269531, 0.03703257751464844, 0.036853759765625, 0.03683737564086914, 0.0369233283996582, 0.036687934875488284, 0.037043552398681644, 0.04211574554443359, 0.037338977813720704, 0.03708544158935547, 0.0371607666015625, 0.03716524887084961, 0.03688857650756836, 0.03694182586669922, 0.03718377685546875, 0.03749017715454102, 0.037365665435791014, 0.03731919860839844, 0.03728303909301758, 0.03765887832641602, 0.03723823928833008, 0.0370307502746582, 0.03708636856079101, 0.03713296127319336, 0.03727788925170898, 0.03720326232910156, 0.037327552795410154, 0.03729612731933594, 0.03752755355834961, 0.03738735961914062, 0.03783292770385742, 0.037736831665039064, 0.03728825759887695, 0.03747430419921875, 
0.03732854461669922, 0.03805382537841797, 0.03728416061401367, 0.03759030532836914, 0.03719580841064453, 0.03728025436401367, 0.0370404167175293, 0.03725516891479492, 0.0370252799987793, 0.037390846252441406, 0.03741491317749023, 0.03735551834106445, 0.03704012680053711, 0.03708067321777344, 0.03748291015625, 0.037235870361328124, 0.03748540878295899, 0.037160545349121096, 0.03725936126708984, 0.036884799957275394, 0.03721337509155274, 0.037, 0.03732275390625, 0.03709900665283203, 0.03697657775878906, 0.03722252655029297, 0.03690131378173828, 0.03684473419189453, 0.03694675064086914, 0.03690086364746094, 0.03722639846801758, 0.0368636474609375, 0.03705276870727539, 0.03671868896484375, 0.03746524810791016, 0.036551521301269534, 0.037711742401123044, 0.03725120162963867, 0.03719286346435547, 0.03706166458129883, 0.037033790588378905, 0.037556224822998044, 0.03704012680053711, 0.03705651092529297, 0.03707814407348633, 0.037165950775146485, 0.03696230316162109, 0.036945247650146486, 0.03732547378540039, 0.036863998413085936, 0.03727360153198242, 0.03689267349243164, 0.03702374267578125, 0.036892513275146484, 0.03695017623901367, 0.036972545623779295, 0.03686105728149414, 0.037006366729736326, 0.03705356979370117, 0.036719390869140625, 0.036689857482910156, 0.03674230575561523, 0.036829120635986326, 0.03688288116455078, 0.03689257431030273, 0.03666793441772461, 0.03668585586547852, 0.03675868988037109, 0.03684543991088867, 0.03708150482177734, 0.03691372680664062, 0.03685078430175781, 0.03668252944946289, 0.03683135986328125, 0.0369147834777832, 0.037052799224853515, 0.03681024169921875, 0.037043903350830076, 0.03692937469482422, 0.036700641632080075, 0.036721183776855466, 0.037041759490966795, 0.03684924697875976, 0.03707129669189453, 0.037160545349121096, 0.03751318359375, 0.037403457641601565, 0.037276737213134764, 0.03689091110229492, 0.03698755264282227, 0.037163009643554686, 0.03735756683349609, 0.037141761779785155, 0.03691939163208008, 0.036811424255371095, 0.037013504028320314, 0.03702169418334961, 0.03704975891113281, 0.03696700668334961, 0.03792233657836914, 0.03681123352050781, 0.03710976028442383, 0.036953407287597655, 0.036866752624511716, 0.037104736328125, 0.036959136962890625, 0.03711948776245117, 0.03726496124267578, 0.03682118225097656, 0.0369543342590332, 0.037013408660888675, 0.03698137664794922, 0.03733913421630859, 0.036947967529296875, 0.03686809539794922, 0.03761283111572265, 0.036671905517578124, 0.037181758880615236, 0.036749313354492184, 0.03683737564086914, 0.03717059326171875, 0.03681340789794922, 0.03759718322753906, 0.03688857650756836, 0.03710976028442383, 0.036999008178710935, 0.036779617309570314, 0.03672470474243164, 0.03681289672851563, 0.03709183883666992, 0.037222400665283206, 0.0380447998046875, 0.03694992065429688, 0.03792284774780273, 0.03711011123657226, 0.03682160186767578, 0.036826496124267576, 0.03682777786254883, 0.03679641723632812, 0.037119937896728517, 0.03699513626098633, 0.03678617477416992, 0.0369312629699707, 0.03675100708007813, 0.036993824005126956, 0.036800384521484375, 0.03697840118408203, 0.03711011123657226, 0.036927425384521484, 0.03766681671142578, 0.036781982421875, 0.03711801528930664, 0.03681254577636719, 0.036999454498291014, 0.03671039962768555, 0.03676729583740234, 0.03687990570068359, 0.036754302978515625, 0.03680377578735351, 0.036952926635742185, 0.03675286483764648, 0.03838390350341797, 0.03756524658203125, 0.03676927947998047, 0.03665318298339844, 0.036655487060546874, 0.036566368103027345, 0.03669225692749024, 
0.036534656524658204, 0.036679710388183594, 0.036841438293457034, 0.037539840698242184, 0.03701145553588867, 0.03667967987060547, 0.036549983978271486, 0.036595584869384766, 0.036659679412841796, 0.03653459167480469, 0.0365794563293457, 0.03657305526733398, 0.03658083343505859, 0.037314144134521485, 0.03645945739746094, 0.03667283248901367, 0.03667219161987305, 0.03667148971557617, 0.03743743896484375, 0.03664271926879883, 0.036961856842041015, 0.03766140747070312, 0.03677983856201172, 0.037055614471435544, 0.036735870361328124, 0.0369475212097168, 0.03677433776855469, 0.03680195236206055, 0.03666713714599609, 0.03674915313720703, 0.03689574432373047, 0.0371671028137207, 0.0384266242980957, 0.0376995849609375, 0.036720256805419925, 0.036673919677734375, 0.03674848175048828, 0.036940608978271484, 0.036767230987548825, 0.036821502685546875, 0.03685171127319336, 0.03663455963134766, 0.03670243072509766, 0.03676348876953125, 0.036706302642822264, 0.03802316665649414, 0.036843265533447266, 0.03668812942504883, 0.036913150787353514, 0.036595680236816405, 0.03680646514892578, 0.03668195343017578, 0.0366075210571289, 0.03659619140625, 0.036783134460449216, 0.036641761779785155, 0.03675545501708984, 0.038004638671875, 0.03692380905151367, 0.036796703338623046, 0.03669811248779297, 0.03663225555419922, 0.03675081634521484, 0.03937366485595703, 0.037053630828857424, 0.036956321716308596, 0.03688924789428711, 0.03707289505004883, 0.036894081115722656, 0.03679910278320313, 0.03692537689208984, 0.03681248092651367, 0.03690460968017578, 0.03683606338500976, 0.03689059066772461, 0.03708505630493164, 0.03694732666015625, 0.03708393478393555, 0.037156223297119144, 0.03695884704589844, 0.037564414978027344, 0.03683036804199219, 0.03678294372558594, 0.0368823356628418, 0.03689072036743164, 0.036934814453125, 0.03686422348022461, 0.036930400848388674, 0.0369881591796875, 0.03723462295532227, 0.03694982528686523, 0.03689756774902344, 0.03677951812744141, 0.03682963180541992, 0.036851776123046874, 0.036964351654052735, 0.03690441513061524, 0.036832801818847655, 0.03701763153076172, 0.03715343856811523, 0.03699539184570313, 0.036857856750488284, 0.03683942413330078, 0.03679641723632812, 0.036963871002197266, 0.036986465454101565, 0.03697321701049805, 0.03793683242797852, 0.03725296020507812, 0.037058494567871095, 0.03694873428344726, 0.036939777374267575, 0.03697609710693359, 0.03689936065673828, 0.03694086456298828, 0.03695305633544922, 0.03693155288696289, 0.03718143844604492, 0.03725107192993164, 0.0374128646850586, 0.03780156707763672, 0.03727167892456055, 0.03691548919677735, 0.03723052978515625, 0.03682419204711914, 0.036967166900634764, 0.0368416633605957, 0.036818431854248046, 0.0368441276550293, 0.03688438415527344, 0.038959102630615236, 0.03719718551635742, 0.03672332763671875, 0.036880382537841795, 0.036677024841308595, 0.03670924758911133, 0.03724415969848633, 0.03677990341186523, 0.03664918518066406, 0.03680499267578125, 0.03679641723632812, 0.03686809539794922, 0.036841567993164064, 0.03707027053833008, 0.036593921661376955, 0.036648639678955076, 0.03688079833984375, 0.03666342544555664, 0.03669913482666016, 0.03687641525268555, 0.036606849670410155, 0.03641139221191406, 0.03646464157104492, 0.036364383697509765, 0.03647478485107422, 0.036345855712890625, 0.03672063827514648, 0.038250495910644534, 0.03752140808105469, 0.037318656921386716, 0.03706060791015625, 0.03724697494506836, 0.038225921630859375, 0.03814748764038086, 0.03667209625244141, 0.03659676742553711, 0.03646358489990234, 
0.03641254425048828, 0.03668876647949219, 0.036748512268066406, 0.03655667114257812, 0.03656105422973633, 0.03658623886108398, 0.03646054458618164, 0.036625633239746096, 0.036596511840820314, 0.03648102569580078, 0.037648193359375, 0.03647507095336914, 0.03630012893676758, 0.036383392333984375, 0.03771555328369141, 0.038392383575439455, 0.03743129730224609, 0.03661619186401367, 0.037308448791503905, 0.037152896881103514, 0.03721769714355469, 0.03673481750488281, 0.036890495300292966, 0.036864734649658205, 0.03687366485595703, 0.03654099273681641, 0.03672016143798828, 0.036676063537597656, 0.03659366226196289, 0.03670425415039062, 0.036633663177490235, 0.036829822540283205, 0.036557182312011716, 0.036798015594482425, 0.03677632141113281, 0.036708351135253905, 0.037070846557617186, 0.036978015899658205, 0.037067424774169924, 0.03720601654052735, 0.036568641662597656, 0.0368642578125, 0.03672812652587891, 0.03657612609863281, 0.03681075286865235, 0.036808704376220705, 0.0372490234375, 0.03702169418334961, 0.036945919036865234, 0.036708351135253905, 0.036915199279785156, 0.03678726577758789, 0.03666630554199219, 0.036853759765625, 0.03661641693115234, 0.03659491348266602, 0.03658992004394531, 0.03661641693115234, 0.036902976989746095, 0.03681235122680664, 0.03659404754638672, 0.036917121887207034, 0.03661350250244141, 0.03710550308227539, 0.03706351852416992, 0.037060638427734376, 0.03688246536254883, 0.03671859359741211, 0.037160961151123044, 0.03752124786376953, 0.036690078735351565, 0.03670220947265625, 0.03695516967773437, 0.03717833709716797, 0.0371539192199707, 0.03728883361816406, 0.03720780944824219, 0.03697689437866211, 0.03689046478271484, 0.037637344360351564, 0.037114078521728516, 0.037327423095703124, 0.03714048004150391, 0.036877792358398435, 0.037140670776367186, 0.03678243255615234, 0.03694160079956055, 0.0370546875, 0.03728908920288086, 0.03765264129638672, 0.03682358551025391, 0.03695862579345703, 0.03683100891113281, 0.03673907089233398, 0.03669558334350586, 0.0368267822265625, 0.036775840759277346, 0.03724950408935547, 0.036907455444335935, 0.03671551895141602, 0.03684249496459961, 0.03693772888183594, 0.03690291213989258, 0.03656905746459961, 0.03685136032104492, 0.03686848068237305, 0.03667763137817383, 0.03769055938720703, 0.036764511108398436, 0.03686764907836914, 0.03687164688110352, 0.03678508758544922, 0.036711456298828125, 0.04453884887695313, 0.037343231201171875, 0.03671852874755859, 0.03671865463256836, 0.03703305435180664, 0.03798896026611328, 0.036730335235595706, 0.04155887985229492, 0.03964313507080078, 0.03689471817016601, 0.03668726348876953, 0.03756265640258789, 0.03715913772583008, 0.03715411376953125, 0.03730307388305664, 0.03710086441040039, 0.03710543823242188, 0.03708937454223633, 0.037052703857421876, 0.03721376037597656, 0.03706569671630859, 0.03684713745117187, 0.03709497451782227, 0.0368666877746582, 0.03716124725341797, 0.036931583404541016, 0.03711385726928711, 0.037230945587158205, 0.03702851104736328]",tokens/s,26.97978064363946,,, 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, 
Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, 
present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in 
decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,4350.644224,6115.16416,0.0,5729.419264,5727.871488,s,1,12.7381435546875,12.7381435546875,0.0,12.7381435546875,12.7381435546875,12.7381435546875,12.7381435546875,[12.7381435546875],,kWh,0.0001043506513000011,1.1503354960394975e-05,3.160335861600305e-05,0.00014745736487639912,,MB,1641.140224,6295.519232,0.0,5880.414208,5851.877888,s,10,3.9358390197753907,0.39358390197753906,0.0017262474430172218,0.3936583862304688,0.39521044006347655,0.396464762878418,0.3974682211303711,"[0.3922969055175781, 0.3937979736328125, 0.3921661376953125, 0.39218798828125, 0.3937838134765625, 0.3913591003417969, 0.393532958984375, 0.3940633544921875, 0.39493170166015623, 0.3977190856933594]",tokens/s,650.4331064196049,kWh,1.1719996934333418e-05,1.2925028718389548e-06,7.80648402295997e-06,2.081898382913234e-05,tokens/kWh,12296469.515566608,MB,1646.247936,6308.102144,0.0,5892.99712,5851.880448,s,10,28.05309814453125,2.805309814453125,0.015486070944240317,2.8075356445312503,2.825998901367188,2.8269036254882813,2.827627404785156,"[2.8106142578125, 2.7972587890625, 2.7730419921875, 2.809191650390625, 2.8138056640625, 2.827808349609375, 2.8257978515625, 2.798830078125, 2.790869873046875, 2.805879638671875]",tokens/s,22.45741260926697,kWh,8.17418356994171e-05,9.0163100585386e-06,5.2376091900839946e-05,0.00014313423765879564,tokens/kWh,440146.2643073548,,s,630,28.04975001907349,0.044523412728688073,0.0005795985819855202,0.044421119689941405,0.045036482238769535,0.04530864334106445,0.04705224647521974,"[0.045652511596679685, 0.04469318389892578, 0.04476876831054687, 0.04452185440063477, 0.04484921646118164, 0.0441940803527832, 0.044142593383789064, 0.04444185638427734, 0.04434297561645508, 0.044138561248779296, 0.0441343994140625, 0.043993087768554685, 0.04465663909912109, 0.04515020751953125, 0.04422393417358399, 0.04495008087158203, 0.047355903625488284, 0.04456243133544922, 0.04451932907104492, 0.044358943939208986, 0.04464239883422851, 0.044454624176025394, 0.04463407897949219, 0.0446005744934082, 0.044383007049560545, 0.044505088806152344, 0.04476502227783203, 0.044800128936767575, 0.044722206115722654, 0.04463129425048828, 0.04448947143554687, 0.04479180908203125, 0.044690689086914065, 0.044534526824951175, 0.044959136962890625, 0.044103710174560544, 0.046039615631103516, 0.04506579208374024, 0.04462255859375, 0.04440054321289062, 0.04409084701538086, 0.04422060775756836, 0.044542110443115235, 0.045197311401367186, 0.0446396484375, 0.04468143844604492, 0.04446854400634766, 0.04447055816650391, 0.04434102249145508, 0.044314624786376954, 0.044693504333496094, 0.04432076644897461, 0.04439225769042969, 0.0448554573059082, 0.04443539047241211, 0.04442531204223633, 0.044246944427490234, 0.04399728012084961, 0.04408627319335937, 0.044309280395507813, 0.044350753784179686, 0.04450915145874024, 0.04507747268676758, 
0.044926559448242184, 0.04456294250488281, 0.04408531188964844, 0.04391660690307617, 0.04426780700683594, 0.04409929656982422, 0.04423759841918945, 0.044146656036376956, 0.044372001647949216, 0.04403020858764648, 0.044273406982421874, 0.04437161636352539, 0.04481264114379883, 0.044464126586914066, 0.04424844741821289, 0.043985374450683595, 0.0440280647277832, 0.04424703979492187, 0.04430022430419922, 0.04485331344604492, 0.04607209777832031, 0.04414822387695312, 0.04447459030151367, 0.044474334716796876, 0.04460342407226563, 0.04442428970336914, 0.04429056167602539, 0.04410432052612305, 0.04410140609741211, 0.044050430297851564, 0.0441995849609375, 0.04423433685302734, 0.0443583984375, 0.04430611038208008, 0.04444192123413086, 0.044109825134277345, 0.04400969696044922, 0.04529721450805664, 0.04422646331787109, 0.04487200164794922, 0.044421119689941405, 0.0445665283203125, 0.04449087905883789, 0.04443878555297852, 0.04507712173461914, 0.044340736389160154, 0.0438809585571289, 0.04409139251708984, 0.043796478271484376, 0.044148735046386715, 0.04385279846191406, 0.04398825454711914, 0.04380614471435547, 0.04402524948120117, 0.04432726287841797, 0.044181278228759766, 0.04737712097167969, 0.045434913635253905, 0.044382209777832034, 0.0442716178894043, 0.04427772903442383, 0.04425116729736328, 0.04447974395751953, 0.04509423828125, 0.044143295288085936, 0.04417327880859375, 0.0438205451965332, 0.044054496765136716, 0.043684383392333985, 0.043783519744873045, 0.043853790283203124, 0.04406665420532226, 0.04382191848754883, 0.04387334442138672, 0.04371897506713867, 0.04378249740600586, 0.04390668869018555, 0.04394851303100586, 0.04391526412963867, 0.04447865676879883, 0.04426342391967773, 0.04437401580810547, 0.04399923324584961, 0.04415216064453125, 0.04393641662597656, 0.04403142547607422, 0.04401168060302734, 0.04370064163208008, 0.04378428649902344, 0.04387996673583985, 0.0443744010925293, 0.043804672241210936, 0.04436377716064453, 0.043932895660400394, 0.04387100982666016, 0.04366121673583984, 0.04371875381469727, 0.04380057525634766, 0.043663360595703124, 0.04390707015991211, 0.04374867248535156, 0.04381727981567383, 0.04394380950927734, 0.043829761505126956, 0.04411574554443359, 0.044297630310058594, 0.04404089736938477, 0.04391856002807617, 0.044030879974365236, 0.044009471893310545, 0.04389888000488281, 0.04388995361328125, 0.0442907829284668, 0.044183551788330076, 0.044128257751464846, 0.04425718307495117, 0.044095584869384766, 0.044232704162597655, 0.04406867218017578, 0.04401375961303711, 0.044042240142822264, 0.044042240142822264, 0.044423168182373046, 0.04402115249633789, 0.04405904006958008, 0.04397075271606445, 0.04514406585693359, 0.044650142669677734, 0.04478192138671875, 0.04518912124633789, 0.04566796875, 0.04443174362182617, 0.044267520904541016, 0.04429619216918945, 0.04428595352172852, 0.04432809448242187, 0.0444854736328125, 0.04406476974487305, 0.04436323165893555, 0.044647167205810544, 0.04467279815673828, 0.04457183837890625, 0.04456937789916992, 0.045055038452148435, 0.0447803840637207, 0.04440895843505859, 0.044594367980957034, 0.04418848037719727, 0.04452675247192383, 0.04426611328125, 0.0444090576171875, 0.0446607666015625, 0.04889187240600586, 0.044221790313720706, 0.04453433609008789, 0.04438844680786133, 0.04460051345825195, 0.044233535766601564, 0.04405452728271484, 0.04426927947998047, 0.044382209777832034, 0.04403129577636719, 0.04439664077758789, 0.04465692901611328, 0.04463270568847656, 0.04487360000610351, 0.04465024185180664, 0.045314273834228515, 
0.04465641784667969, 0.04431087875366211, 0.04432304000854492, 0.044416065216064456, 0.04582841491699219, 0.04515248107910156, 0.04462768173217774, 0.04502166366577148, 0.04441088104248047, 0.04456592178344727, 0.04445244979858398, 0.04478771209716797, 0.0442347526550293, 0.04414169692993164, 0.044130847930908206, 0.044079456329345706, 0.0440893440246582, 0.04400268936157226, 0.044079456329345706, 0.0440728645324707, 0.04405859375, 0.0449128646850586, 0.04433100891113281, 0.04681523132324219, 0.04415488052368164, 0.04442931365966797, 0.04404611206054688, 0.04438447952270508, 0.04436377716064453, 0.044056640625, 0.04402579116821289, 0.04472627258300781, 0.04413849639892578, 0.04410367965698242, 0.04436787033081055, 0.04520755386352539, 0.04458086395263672, 0.04455014419555664, 0.044335105895996096, 0.04460294342041016, 0.04466118240356445, 0.04459929656982422, 0.04489625549316406, 0.044969120025634766, 0.04483567810058594, 0.04481744003295898, 0.04506051254272461, 0.044687934875488285, 0.044727359771728516, 0.044503166198730466, 0.044835647583007815, 0.04549631881713867, 0.044984321594238284, 0.044916736602783204, 0.04475289535522461, 0.04459465789794922, 0.0446346549987793, 0.044924705505371094, 0.04444745635986328, 0.044947582244873045, 0.044835201263427736, 0.0447487678527832, 0.044364959716796874, 0.044644447326660154, 0.04480182266235352, 0.044663326263427734, 0.04450950241088867, 0.04453609466552735, 0.04460531234741211, 0.044332767486572264, 0.044853534698486325, 0.04462387084960937, 0.04478566360473633, 0.04499251174926758, 0.0445266227722168, 0.04480828857421875, 0.04431523132324219, 0.04450332641601563, 0.044295616149902343, 0.04439507293701172, 0.044967937469482425, 0.04489814376831055, 0.0444683837890625, 0.04457040023803711, 0.0454532470703125, 0.04514239883422851, 0.04469091033935547, 0.04464380645751953, 0.04457292938232422, 0.04451152038574219, 0.044418689727783206, 0.044800254821777345, 0.04457254409790039, 0.044489280700683594, 0.04440668869018555, 0.04474879837036133, 0.04433715057373047, 0.04526668930053711, 0.04480640029907226, 0.04458700942993164, 0.04528742218017578, 0.045286624908447266, 0.04551913452148437, 0.044838401794433595, 0.044706817626953124, 0.044570049285888674, 0.04458758544921875, 0.04498972702026367, 0.045411006927490234, 0.044388385772705076, 0.04471574401855469, 0.044541664123535156, 0.04470636749267578, 0.04443545532226562, 0.045520896911621096, 0.045339775085449216, 0.04550336074829102, 0.045295616149902344, 0.04460745620727539, 0.044738590240478514, 0.04525875091552734, 0.04477337646484375, 0.0446580810546875, 0.04472208023071289, 0.046240478515625, 0.04488803100585938, 0.04499456024169922, 0.04524812698364258, 0.04515475082397461, 0.04489823913574219, 0.04509491348266602, 0.04487372970581055, 0.04508671951293945, 0.044934814453125, 0.044819873809814455, 0.04461395263671875, 0.04518735885620117, 0.04488636779785156, 0.044627967834472655, 0.04477542495727539, 0.04451107025146484, 0.0449087028503418, 0.0447770881652832, 0.044921215057373044, 0.044685310363769534, 0.044870849609375, 0.04463289642333984, 0.04514086532592773, 0.044872928619384765, 0.044745502471923826, 0.04514815902709961, 0.04516659164428711, 0.044622913360595706, 0.044827102661132816, 0.044800254821777345, 0.044466400146484376, 0.04469760131835938, 0.044827903747558594, 0.04491475296020508, 0.04599264144897461, 0.044916736602783204, 0.04470579147338867, 0.04475289535522461, 0.04484710311889648, 0.04459872055053711, 0.04453843307495117, 0.04455395126342773, 0.04446358489990234, 
0.04464467239379883, 0.04462643051147461, 0.04424848175048828, 0.044347999572753906, 0.04458019256591797, 0.044663455963134764, 0.044474079132080076, 0.04414841461181641, 0.04430089569091797, 0.044307521820068356, 0.04411203384399414, 0.04416915130615234, 0.044563297271728516, 0.04714905548095703, 0.04500668716430664, 0.04422057723999023, 0.04548812866210938, 0.04472217559814453, 0.04502230453491211, 0.047698593139648436, 0.05025734329223633, 0.04409222412109375, 0.045575489044189454, 0.04440544128417969, 0.04446783828735352, 0.04458480072021485, 0.04426169586181641, 0.044504383087158206, 0.04478863906860352, 0.04423884963989258, 0.044197887420654294, 0.044578113555908204, 0.04416172790527344, 0.04567990493774414, 0.044671550750732425, 0.04503567886352539, 0.04561721420288086, 0.04449593734741211, 0.04439740753173828, 0.04426959991455078, 0.044742431640625, 0.04436195373535156, 0.04496384048461914, 0.04449043273925781, 0.044382526397705076, 0.044290046691894534, 0.04420608139038086, 0.04432281494140625, 0.044805633544921876, 0.04447488021850586, 0.04441078567504883, 0.04435574340820313, 0.04811945724487305, 0.04557648086547852, 0.044767230987548826, 0.044348926544189454, 0.04416972732543945, 0.04421222305297851, 0.044126209259033204, 0.044126079559326174, 0.04411609649658203, 0.044275680541992185, 0.0444222412109375, 0.044091552734375, 0.044412830352783206, 0.04429913711547852, 0.04430374526977539, 0.0442347526550293, 0.04422310256958008, 0.04440185546875, 0.044275585174560546, 0.04457712173461914, 0.04477308654785156, 0.04454703903198242, 0.04432255935668945, 0.04513808059692383, 0.04423193740844727, 0.043942657470703125, 0.044093441009521485, 0.04436124801635742, 0.04551926422119141, 0.04431267166137695, 0.04426339340209961, 0.04429209518432617, 0.044292030334472654, 0.044062782287597656, 0.04396441650390625, 0.04403763198852539, 0.04407523345947266, 0.044042720794677734, 0.044248897552490236, 0.044085216522216794, 0.0442531852722168, 0.04424502563476562, 0.04424051284790039, 0.04426995086669922, 0.04448444747924805, 0.044421279907226566, 0.0442507209777832, 0.04443724822998047, 0.044195648193359374, 0.044510047912597654, 0.044371967315673826, 0.044295871734619144, 0.04414905548095703, 0.0450437126159668, 0.044523265838623045, 0.04461177444458008, 0.04427324676513672, 0.04416543960571289, 0.04438963317871094, 0.044317600250244144, 0.04404537582397461, 0.04376409530639649, 0.04394678497314453, 0.04394678497314453, 0.044001632690429685, 0.044270240783691406, 0.04398896026611328, 0.0442665901184082, 0.04413740921020508, 0.04443340682983398, 0.044461727142333984, 0.044233055114746095, 0.04438195037841797, 0.04431897735595703, 0.044421119689941405, 0.04441088104248047, 0.04449824142456055, 0.04399747085571289, 0.044466590881347655, 0.044349441528320314, 0.04461939239501953, 0.044139041900634765, 0.04419977569580078, 0.04435670471191406, 0.04447043228149414, 0.04418636703491211, 0.04440233612060547, 0.04417366409301758, 0.04420579147338867, 0.044179744720458984, 0.04398080062866211, 0.04418086242675781, 0.044370433807373044, 0.04439023971557617, 0.044024097442626954, 0.04404019165039062, 0.04410892868041992, 0.044077888488769534, 0.04399673461914062, 0.044152446746826175, 0.044104480743408205, 0.04401059341430664, 0.04453273773193359, 0.04460134506225586, 0.04416307067871094, 0.04455014419555664, 0.04470783996582031, 0.04530176162719726, 0.044593151092529294, 0.04451945495605469, 0.04448863983154297, 0.04427276611328125, 0.044052799224853514, 0.044194206237792966, 0.04428108978271485, 
0.04425542449951172, 0.04515667343139648, 0.0447259521484375, 0.0442691535949707, 0.044389087677001955, 0.04433241653442383, 0.04487260818481445, 0.04438959884643555, 0.04422038269042969, 0.044437278747558595, 0.04443603134155273, 0.04433536148071289, 0.0444304313659668, 0.044287841796875, 0.04477644729614258, 0.04451913452148438, 0.0442657585144043, 0.044488479614257816, 0.044466529846191406, 0.0444189453125, 0.0445849609375, 0.0446382064819336, 0.046358528137207033, 0.04487372970581055, 0.044386302947998044, 0.04444326400756836, 0.04455168151855469, 0.04415900802612305, 0.0443370246887207, 0.043985214233398434, 0.04416115188598633, 0.04433155059814453, 0.04440883255004883, 0.044249183654785154, 0.04414870452880859, 0.04437347030639648, 0.044263713836669924, 0.044284095764160154, 0.04444364929199219, 0.04422860717773437, 0.04441212844848633, 0.044483360290527345, 0.044365825653076174, 0.044505088806152344, 0.046458881378173826, 0.04478771209716797, 0.04483071899414062, 0.04492460632324219, 0.04480156707763672, 0.044939422607421876, 0.044517887115478515, 0.044603519439697266, 0.044242912292480466, 0.044197921752929685, 0.04440883255004883, 0.04431612777709961, 0.04460598373413086, 0.04436352157592773, 0.04426931381225586, 0.04475507354736328, 0.044109439849853514, 0.04464889526367188, 0.044876094818115234, 0.04469964981079102]",tokens/s,22.460093211939775,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = 
launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,3128.410112,4602.134528,0.0,4206.886912,4087.771648,s,1,11.1307451171875,11.1307451171875,0.0,11.1307451171875,11.1307451171875,11.1307451171875,11.1307451171875,[11.1307451171875],,kWh,0.00011638537738329735,1.2828940313779464e-05,4.410392417200648e-05,0.0001733182418690833,,MB,3141.451776,4774.100992,0.0,4366.270464,4273.703936,s,10,14.535161621093751,1.4535161621093748,0.0024993441056309243,1.4539345092773437,1.456260693359375,1.45683505859375,1.45729455078125,"[1.4518798828125, 1.4514942626953125, 1.4502095947265625, 1.4495482177734376, 1.454238037109375, 1.4536309814453126, 1.456133056640625, 1.4554744873046874, 1.4551436767578125, 1.457409423828125]",tokens/s,176.1246325795835,kWh,4.26348872075035e-05,4.7005923584355155e-06,2.844610609019915e-05,7.578158565613817e-05,tokens/kWh,3378129.367226621,MB,3141.451776,4776.198144,0.0,4368.367616,4274.361856,s,10,21.31460498046875,2.131460498046875,0.01040247824753476,2.1338648681640624,2.13996044921875,2.14533935546875,2.14964248046875,"[2.134578369140625, 2.116716796875, 2.113841552734375, 2.13179150390625, 2.15071826171875, 2.13876513671875, 2.1374931640625, 2.13352880859375, 2.134200927734375, 2.122970458984375]",tokens/s,29.55719801409827,kWh,6.293695610124964e-05,6.94365392428867e-06,3.6816446119800065e-05,0.00010669705614533838,tokens/kWh,590456.7780594056,,s,630,21.31210797500611,0.03382874281747,0.0006934402196118866,0.03361788940429687,0.03453700675964356,0.034875053215026854,0.03590497573852539,"[0.03524480056762695, 0.03443711853027344, 0.03434636688232422, 0.03448486328125, 0.03485887908935547, 0.03460905456542969, 0.03467190551757812, 0.03458553695678711, 0.034907520294189455, 0.03875084686279297, 0.034457599639892575, 0.034369537353515625, 0.03438796615600586, 0.03382812881469727, 0.03398294448852539, 0.03461084747314453, 0.03358486557006836, 0.03357990264892578, 0.033707839965820316, 0.033955230712890624, 0.03351836776733398, 0.0337999038696289, 0.03338787078857422, 0.03337311935424805, 0.03334143829345703, 0.033355777740478515, 0.033468414306640625, 0.033535999298095705, 0.033341598510742185, 0.033810272216796874, 0.033699840545654294, 0.03349420928955078, 0.033534782409667965, 0.03382675170898437, 0.03360979080200195, 0.0336097297668457, 0.0346295051574707, 0.03384332656860352, 0.03359539031982422, 0.033404800415039064, 0.03345625686645508, 0.03340284729003906, 0.0334541130065918, 0.03343270492553711, 0.03348889541625977, 0.033409408569335934, 0.03338905715942383, 0.0331673583984375, 0.033296382904052735, 0.033237152099609375, 0.033365856170654296, 0.033552257537841794, 0.03340531158447266, 0.03328351974487305, 0.033497535705566406, 0.03331651306152344, 0.03358451080322265, 0.0349315185546875, 0.034019359588623045, 0.03354009628295898, 0.03351347351074219, 0.03359139251708984, 0.03346601486206055, 0.03485081481933594, 0.0339046401977539, 
0.03373446273803711, 0.03361587142944336, 0.03446393585205078, 0.034231647491455075, 0.033909408569335935, 0.03373875045776367, 0.03372851181030274, 0.033585151672363284, 0.03336806488037109, 0.03338240051269531, 0.03338649749755859, 0.03320627212524414, 0.03334143829345703, 0.033265056610107424, 0.03338915252685547, 0.033445888519287106, 0.033345535278320314, 0.03319804763793945, 0.03346799850463867, 0.03348320007324219, 0.03338560104370117, 0.03343244934082031, 0.033296382904052735, 0.03321596908569336, 0.03344643020629883, 0.03333324813842774, 0.03336294555664063, 0.03417190551757812, 0.03458364868164063, 0.03377388763427734, 0.03366764831542969, 0.03356470489501953, 0.03335987091064453, 0.03318796920776367, 0.03315206527709961, 0.03334428787231445, 0.03331878280639648, 0.03361772918701172, 0.03335120010375976, 0.03369043350219727, 0.03432470321655273, 0.034840545654296874, 0.03355148696899414, 0.03350291061401367, 0.03386617660522461, 0.033546558380126955, 0.03343795013427735, 0.03408662414550781, 0.0336091194152832, 0.0336446418762207, 0.0336596794128418, 0.03345388793945313, 0.033553791046142575, 0.03346720123291016, 0.03337830352783203, 0.03336521530151367, 0.03340073776245117, 0.033260414123535156, 0.03333052825927734, 0.033374881744384764, 0.03351513671875, 0.03515475082397461, 0.03435721588134766, 0.03401859283447266, 0.03362284851074219, 0.03367724609375, 0.03343564987182617, 0.0334516487121582, 0.03359945678710938, 0.03329679870605469, 0.0336732177734375, 0.033255424499511715, 0.03328585433959961, 0.0332720947265625, 0.03329974365234375, 0.03322496032714844, 0.03444070434570313, 0.03347964859008789, 0.033757217407226564, 0.033767391204833984, 0.033468318939208985, 0.03341494369506836, 0.033421630859375, 0.033537406921386716, 0.033321567535400394, 0.033140384674072265, 0.03339209747314453, 0.033197216033935543, 0.0332490234375, 0.033259521484375, 0.03355033493041992, 0.03332499313354492, 0.03354425430297851, 0.03320627212524414, 0.03343769454956055, 0.03337638473510742, 0.033736766815185545, 0.03374675369262695, 0.033519615173339845, 0.03327385711669922, 0.03383091354370117, 0.033492992401123044, 0.03391078567504883, 0.033729663848876955, 0.03352665710449219, 0.03325337600708008, 0.03357491302490234, 0.033388031005859374, 0.03348112106323242, 0.03328435134887695, 0.03335532760620117, 0.033315006256103515, 0.03334537506103516, 0.034027328491210936, 0.03389689636230469, 0.03348611068725586, 0.03356950378417969, 0.033459999084472655, 0.03376668930053711, 0.03325024032592774, 0.03342937469482422, 0.03349843215942383, 0.03356073760986328, 0.033955711364746094, 0.03519667053222656, 0.03428579330444336, 0.03436057662963867, 0.03408972930908203, 0.034457599639892575, 0.03432467269897461, 0.034062145233154296, 0.03401897430419922, 0.034043617248535156, 0.03413033676147461, 0.033884033203125, 0.033998367309570315, 0.03347743988037109, 0.03369574356079102, 0.03390067291259766, 0.03480131149291992, 0.03403571319580078, 0.0338741455078125, 0.03381452941894531, 0.03448585510253906, 0.03376988983154297, 0.03363225555419922, 0.03365801620483398, 0.034005599975585936, 0.03415884780883789, 0.03388415908813477, 0.03377062225341797, 0.03370800018310547, 0.033719200134277344, 0.03371731185913086, 0.033587329864501955, 0.033479103088378905, 0.03345446395874024, 0.03349651336669922, 0.033476158142089846, 0.03359641647338867, 0.035590145111083986, 0.033871295928955075, 0.03372499084472656, 0.03383295822143555, 0.03360153579711914, 0.03356467056274414, 0.033508575439453125, 0.03358595275878906, 
0.03347030258178711, 0.033536128997802735, 0.03361407852172851, 0.03351116943359375, 0.03387334442138672, 0.03387241744995117, 0.033443264007568356, 0.03351030349731445, 0.03365788650512695, 0.03361417770385742, 0.033485313415527344, 0.03354403305053711, 0.033594528198242185, 0.033618785858154296, 0.033562625885009766, 0.033709598541259767, 0.03350147247314453, 0.03360713577270508, 0.03350191879272461, 0.03519452667236328, 0.034626911163330075, 0.03562188720703125, 0.035020801544189455, 0.03497539138793945, 0.03465439987182617, 0.034654369354248045, 0.035818912506103515, 0.036297534942626955, 0.03439574432373047, 0.03470684814453125, 0.03447040176391602, 0.03426947021484375, 0.03389379119873047, 0.03353596878051758, 0.033414878845214845, 0.033436416625976566, 0.033678592681884764, 0.033211135864257814, 0.03340227127075195, 0.033985279083251954, 0.03504435348510742, 0.034374046325683597, 0.03395340728759766, 0.03429475021362305, 0.03396953582763672, 0.03391897583007813, 0.035074302673339844, 0.034111873626708984, 0.03402454376220703, 0.043103134155273434, 0.034207454681396486, 0.034044193267822265, 0.034252799987792966, 0.033756832122802734, 0.033638591766357424, 0.03366057586669922, 0.03353036880493164, 0.03339033508300781, 0.03342361450195312, 0.03408617782592773, 0.03377961730957031, 0.03401612854003906, 0.03394118499755859, 0.03378163146972656, 0.033433982849121094, 0.03334761428833008, 0.033512641906738284, 0.03325763320922852, 0.033277664184570316, 0.03357583999633789, 0.03331404876708984, 0.033640865325927735, 0.033333087921142576, 0.03326822280883789, 0.0334964485168457, 0.03339123153686523, 0.03335372924804687, 0.03343360137939453, 0.03351718521118164, 0.03352630233764648, 0.033640289306640626, 0.03350899124145508, 0.03567241668701172, 0.03443526458740234, 0.034310142517089845, 0.0341399040222168, 0.03424895858764648, 0.033976318359375, 0.03405353546142578, 0.03402812957763672, 0.03418521499633789, 0.03467411041259766, 0.03457430267333984, 0.03488828659057617, 0.034590015411376955, 0.03450697708129883, 0.034762527465820314, 0.034667198181152346, 0.03503308868408203, 0.03483145523071289, 0.03480611038208008, 0.03467337417602539, 0.03451478576660156, 0.03427471923828125, 0.03432422256469726, 0.03459302520751953, 0.033995361328125, 0.03363840103149414, 0.0336814079284668, 0.033428577423095705, 0.03355126571655274, 0.03370598220825195, 0.033503231048583985, 0.03344300842285156, 0.03348748779296875, 0.033414783477783205, 0.033304222106933595, 0.03352054214477539, 0.033306625366210936, 0.03334918212890625, 0.034517120361328125, 0.03396163177490234, 0.03332777786254883, 0.033486846923828126, 0.03319561767578125, 0.033673633575439454, 0.034062335968017575, 0.0337894401550293, 0.0344725456237793, 0.03496656036376953, 0.033364543914794924, 0.033498847961425784, 0.033462879180908206, 0.03334348678588867, 0.03330867385864258, 0.033500511169433596, 0.03355491256713867, 0.033425216674804685, 0.03336435317993164, 0.033223934173583984, 0.03330889511108399, 0.03326211166381836, 0.03336191940307617, 0.03342335891723633, 0.03356854248046875, 0.03532489776611328, 0.03397359848022461, 0.03332163238525391, 0.03356441497802734, 0.03347798538208008, 0.0334672966003418, 0.033480449676513674, 0.03344547271728516, 0.03321513748168945, 0.03361587142944336, 0.033331199645996096, 0.03361587142944336, 0.03362547302246094, 0.037157440185546876, 0.03342063903808594, 0.03365126419067383, 0.03354336166381836, 0.03373878479003906, 0.03394831848144531, 0.0378309440612793, 0.03430569458007812, 0.03421820831298828, 
0.03462908935546875, 0.03406915283203125, 0.03359436798095703, 0.03363849639892578, 0.03364361572265625, 0.033973888397216795, 0.034140350341796875, 0.034045951843261715, 0.03380633544921875, 0.03440639877319336, 0.033533119201660154, 0.03370064163208008, 0.033656734466552735, 0.033584671020507814, 0.03417747116088867, 0.034688320159912106, 0.034143070220947265, 0.03384524917602539, 0.033503009796142576, 0.03350889587402344, 0.03351587295532227, 0.03362627029418945, 0.03337235260009765, 0.03372639846801758, 0.03336198425292969, 0.03382259368896484, 0.03361804962158203, 0.03357299041748047, 0.033372032165527345, 0.03464601516723633, 0.033498462677001954, 0.03359417724609375, 0.033867008209228514, 0.03352579116821289, 0.0334378547668457, 0.033745086669921875, 0.0355002555847168, 0.03367942428588867, 0.03346015930175781, 0.03379609680175781, 0.03493798446655273, 0.03535238265991211, 0.03461542510986328, 0.034722049713134764, 0.034531326293945314, 0.035940128326416014, 0.03454521560668945, 0.034536094665527345, 0.03452108764648437, 0.03430604934692383, 0.03421184158325195, 0.03421593475341797, 0.03409907150268555, 0.03390476989746094, 0.03423385620117188, 0.03418281555175781, 0.0343724479675293, 0.03405590438842773, 0.03394384002685547, 0.033818496704101565, 0.03385152053833008, 0.033742847442626955, 0.033615200042724606, 0.033669696807861325, 0.03353571319580078, 0.03399103927612305, 0.03382681655883789, 0.033688671112060545, 0.033643199920654294, 0.03345363235473633, 0.03342403030395508, 0.03346799850463867, 0.03359580612182617, 0.033527969360351566, 0.03344163131713867, 0.03361324691772461, 0.03358723068237305, 0.03348534393310547, 0.03341721725463867, 0.03341696166992188, 0.03342361450195312, 0.03343360137939453, 0.033576961517333984, 0.03361929702758789, 0.0338704948425293, 0.033406688690185544, 0.03354447937011719, 0.033808383941650394, 0.0335294075012207, 0.03382931137084961, 0.03359539031982422, 0.033439456939697264, 0.03348019027709961, 0.03346249771118164, 0.0335241584777832, 0.03338252639770508, 0.0333675537109375, 0.03339519882202149, 0.03337625503540039, 0.033621726989746095, 0.03382710266113281, 0.03395139312744141, 0.034310497283935544, 0.034370815277099606, 0.035200927734375, 0.034167648315429684, 0.03440214538574219, 0.03393552017211914, 0.033744384765625, 0.03366390228271485, 0.03356835174560547, 0.033879615783691405, 0.033793758392333985, 0.033534687042236326, 0.033476608276367184, 0.03380428695678711, 0.03364659118652344, 0.03426303863525391, 0.03371408081054687, 0.033693790435791016, 0.03501670455932617, 0.03383295822143555, 0.03359280014038086, 0.0335590705871582, 0.033716129302978515, 0.03426108932495117, 0.03395580673217773, 0.03371820831298828, 0.03434710311889649, 0.035796798706054685, 0.033819999694824215, 0.03363516616821289, 0.033546241760253906, 0.03364803314208984, 0.033743038177490234, 0.03375718307495117, 0.03385795211791992, 0.0335843505859375, 0.03339753723144531, 0.03366899108886719, 0.033659008026123045, 0.03422822570800781, 0.033726463317871096, 0.03361177444458008, 0.03351968002319336, 0.033587039947509764, 0.03350742340087891, 0.03378176116943359, 0.033964031219482424, 0.03376451110839844, 0.03370684814453125, 0.03419558334350586, 0.03351055908203125, 0.037013374328613284, 0.033812511444091795, 0.03367814254760742, 0.033642494201660156, 0.03361574554443359, 0.03371446228027344, 0.03426816177368164, 0.03370070266723633, 0.03353148651123047, 0.03346883010864258, 0.033527774810791014, 0.03359052658081055, 0.03331951904296875, 0.03335596847534179, 
0.03531779098510742, 0.034265056610107425, 0.034236415863037106, 0.0341401596069336, 0.033976318359375, 0.03384899139404297, 0.03372067260742188, 0.03388415908813477, 0.0335912971496582, 0.034086910247802735, 0.03349881744384765, 0.03358547210693359, 0.03350502395629883, 0.033360126495361325, 0.03336806488037109, 0.03455292892456055, 0.033549217224121096, 0.03352576065063476, 0.03344384002685547, 0.033288158416748044, 0.03356243133544922, 0.03329158401489258, 0.033391521453857424, 0.033304576873779294, 0.03337798309326172, 0.033242496490478515, 0.03350214385986328, 0.03357049560546875, 0.03400492858886719, 0.03366563034057617, 0.03351472091674805, 0.03350175857543945, 0.03333859252929688, 0.033408798217773435, 0.03334652709960938, 0.03337830352783203, 0.03473392105102539, 0.03347475051879883, 0.0335687370300293, 0.03403369522094726, 0.03417702484130859, 0.034205440521240235, 0.0335546875, 0.03395708847045899, 0.03379894256591797, 0.03371212768554688, 0.0336445426940918, 0.03366092681884766, 0.033557952880859374, 0.03378438568115234, 0.033538047790527346, 0.033630176544189455, 0.033427486419677736, 0.03349094390869141, 0.03359743881225586, 0.033694847106933594, 0.03433110427856445, 0.03344015884399414, 0.033498977661132814, 0.0333067855834961, 0.03361382293701172, 0.03342089462280273, 0.03371987152099609]",tokens/s,29.560661044831235,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch 
benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File 
""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7160.643584,11301.421056,0.0,10898.898944,10500.155392,s,1,15.0760732421875,15.0760732421875,0.0,15.0760732421875,15.0760732421875,15.0760732421875,15.0760732421875,[15.0760732421875],,kWh,0.0002322880344749933,2.5615526863700306e-05,0.00010319674922400213,0.0003611003105626957,,MB,3009.093632,11320.295424,0.0,10903.093248,10048.928256,s,10,58.723462890624994,5.8723462890625004,0.0032497952895616344,5.871911865234376,5.8770778808593755,5.877555786132812,5.877938110351562,"[5.86891748046875, 5.87013818359375, 5.8675283203125, 5.87061083984375, 5.873212890625, 5.87040234375, 5.87406689453125, 5.87358056640625, 5.8769716796875, 5.87803369140625]",tokens/s,43.594159369792465,kWh,0.00017127343585916606,1.889205722104725e-05,0.00011380903549159937,0.0003039745285718127,tokens/kWh,842175.8270430907,MB,3009.093632,11320.295424,0.0,10903.093248,10072.241664,s,10,29.368702880859374,2.9368702880859376,0.0015310096670841527,2.9373739013671876,2.93856103515625,2.938778564453125,2.938952587890625,"[2.93443505859375, 2.93500927734375, 2.934990966796875, 2.936197265625, 2.937611083984375, 2.937291259765625, 2.93745654296875, 2.93899609375, 2.93820263671875, 2.9385126953125]",tokens/s,21.451407049052662,kWh,8.595068042166874e-05,9.481013761268298e-06,5.7408323704401246e-05,0.0001528400178873382,tokens/kWh,412195.7120316402,,s,630,29.365865379333496,0.04661248472910079,0.0003796470001503052,0.04660024070739746,0.04696365661621094,0.047063209915161136,0.04845045204162598,"[0.048330753326416016, 0.046202880859375, 0.04609024047851563, 0.04595302581787109, 0.045829952239990236, 0.046098751068115236, 0.04611468887329102, 0.04604108810424805, 0.04654652786254883, 0.04640553665161133, 0.04644009780883789, 0.046363391876220704, 0.04688409423828125, 0.04650889587402344, 0.04607494354248047, 0.04659296035766602, 0.046491424560546876, 0.04639494323730469, 0.04635100936889648, 0.04652032089233398, 0.04646297454833984, 0.0461578254699707, 0.04638924789428711, 0.04652601623535156, 0.04627654266357422, 0.04626393508911133, 0.046543201446533206, 0.0462993278503418, 0.04633744049072266, 0.046478271484375, 0.046580833435058595, 0.046418846130371096, 0.04675174331665039, 0.046721023559570314, 0.046507999420166014, 0.04687785720825195, 0.046668670654296876, 0.046344192504882815, 0.046868480682373044, 0.046680065155029295, 0.04654694366455078, 0.046663551330566405, 0.046799041748046874, 0.04668204879760742, 0.046937854766845706, 0.04677603149414063, 0.046790592193603514, 0.046400096893310545, 0.04665926361083984, 0.04660870361328125, 
0.04660793685913086, 0.04677471923828125, 0.04683366394042969, 0.046761024475097654, 0.047065025329589845, 0.04704473495483399, 0.04675455856323242, 0.04677369689941406, 0.047061695098876956, 0.04674560165405273, 0.046740959167480466, 0.04688332748413086, 0.04687801742553711, 0.048417598724365234, 0.04627568054199219, 0.045967681884765625, 0.046021217346191405, 0.04604313659667969, 0.046308895111083985, 0.04657404708862305, 0.04636876678466797, 0.04618239974975586, 0.04637081527709961, 0.046383102416992186, 0.04596736145019531, 0.04626198577880859, 0.04658396911621094, 0.04628083038330078, 0.04628092956542969, 0.0466255989074707, 0.04639603042602539, 0.04635388946533203, 0.04691750335693359, 0.04674784088134765, 0.046268577575683596, 0.04635919952392578, 0.046465023040771485, 0.0461578254699707, 0.046340255737304686, 0.04629628753662109, 0.046161537170410154, 0.046166561126708985, 0.046720703125, 0.04655984115600586, 0.046368545532226565, 0.0464686393737793, 0.046922622680664064, 0.046604286193847655, 0.04679679870605469, 0.04664115142822266, 0.046467071533203126, 0.04649539184570312, 0.046887264251708985, 0.046936065673828124, 0.04686000061035156, 0.04677568054199219, 0.046750625610351565, 0.04641350555419922, 0.04663532638549805, 0.04674969482421875, 0.046505760192871094, 0.04650723266601563, 0.046779422760009765, 0.04658377456665039, 0.046524417877197265, 0.046732414245605466, 0.046941055297851565, 0.04674339294433594, 0.047024288177490235, 0.04678860855102539, 0.046944255828857424, 0.046956127166748046, 0.04689142227172852, 0.04701385498046875, 0.047081504821777344, 0.047151103973388675, 0.048540897369384765, 0.04625078582763672, 0.04600831985473633, 0.04597555160522461, 0.04641948699951172, 0.045948734283447264, 0.04620969772338867, 0.046378368377685546, 0.04633459091186523, 0.046275775909423826, 0.04641449737548828, 0.04630876922607422, 0.0460211181640625, 0.04629529571533203, 0.04635033416748047, 0.04623155212402344, 0.0462760009765625, 0.04651068878173828, 0.046333953857421874, 0.046489601135253904, 0.04699135971069336, 0.04676992034912109, 0.046334209442138674, 0.04637033462524414, 0.04648783874511719, 0.046362815856933595, 0.04638508987426758, 0.04668803024291992, 0.046530208587646484, 0.046192543029785156, 0.04647804641723633, 0.0466247673034668, 0.0462391357421875, 0.0465250244140625, 0.04691664123535156, 0.04645926284790039, 0.046397342681884765, 0.0465909423828125, 0.04668182373046875, 0.046575454711914065, 0.04690143966674805, 0.046933792114257813, 0.046583999633789064, 0.04737615966796875, 0.04661884689331055, 0.04655023956298828, 0.04674639892578125, 0.046704639434814454, 0.04648672103881836, 0.046635200500488284, 0.04695862579345703, 0.046795360565185545, 0.046860286712646484, 0.046695873260498046, 0.046828033447265625, 0.04650604629516602, 0.04700166320800781, 0.04684588623046875, 0.04672275161743164, 0.0468823356628418, 0.047005760192871095, 0.04684463882446289, 0.04707942581176758, 0.04874892807006836, 0.04662681579589844, 0.045946880340576174, 0.04601446533203125, 0.0464153938293457, 0.04634268951416016, 0.04606560134887695, 0.0462295036315918, 0.046515743255615236, 0.04622288131713867, 0.0460335693359375, 0.04643664169311523, 0.046214176177978517, 0.04599932861328125, 0.04661769485473633, 0.046453536987304686, 0.04629436874389648, 0.04641231918334961, 0.04666896057128906, 0.04636963272094727, 0.046028190612792966, 0.04665200042724609, 0.04653055953979492, 0.04648755264282227, 0.04673126220703125, 0.04654403305053711, 0.04649382400512695, 0.0464284782409668, 
0.046733024597167966, 0.04648211288452148, 0.04637651062011719, 0.046561729431152346, 0.046461952209472655, 0.046437374114990236, 0.04680089569091797, 0.04674764633178711, 0.04655104064941406, 0.04652851104736328, 0.046721023559570314, 0.046669822692871094, 0.04630527877807617, 0.046878719329833986, 0.04700774383544922, 0.046593280792236326, 0.04697756958007813, 0.04708784103393555, 0.046599456787109375, 0.04649852752685547, 0.04670054244995117, 0.04669440078735351, 0.046936065673828124, 0.04676095962524414, 0.04665446472167969, 0.04697292709350586, 0.046825313568115236, 0.04675804901123047, 0.046639102935791016, 0.04705279922485352, 0.04693116760253906, 0.04665628814697265, 0.046886913299560545, 0.04698316955566406, 0.04693932723999023, 0.04870899200439453, 0.04654348754882812, 0.045948638916015624, 0.0461091194152832, 0.04653039932250977, 0.04631129455566406, 0.04624732971191406, 0.04656611251831055, 0.04650931167602539, 0.046193214416503904, 0.046892608642578125, 0.04648812866210938, 0.04607385635375977, 0.046241310119628905, 0.046272670745849606, 0.04602304077148438, 0.04600128173828125, 0.04663539123535156, 0.046455295562744144, 0.04604927825927734, 0.04681475067138672, 0.047093856811523435, 0.04653094482421875, 0.04636671829223633, 0.046542720794677736, 0.04644467163085937, 0.04610047912597656, 0.04675369644165039, 0.04654703903198242, 0.046473217010498044, 0.04658790588378906, 0.04684799957275391, 0.04648076629638672, 0.04638579177856445, 0.04660374450683594, 0.04659254455566406, 0.04640134429931641, 0.046593246459960935, 0.04686332702636719, 0.046465023040771485, 0.04688246536254883, 0.04673574447631836, 0.0467547836303711, 0.04674457550048828, 0.046781566619873045, 0.0467198715209961, 0.04693196868896484, 0.04690512084960938, 0.046755615234375, 0.04668870544433594, 0.04691558456420898, 0.04685823822021484, 0.04669766235351563, 0.04699990463256836, 0.04697135925292969, 0.04667801666259765, 0.04669619369506836, 0.04674582290649414, 0.046721023559570314, 0.0469290885925293, 0.04695526504516601, 0.046857791900634764, 0.04710454559326172, 0.048640575408935544, 0.046249568939208986, 0.04572934341430664, 0.0461484489440918, 0.046413982391357425, 0.04643414306640625, 0.04603903961181641, 0.04651212692260742, 0.04634534454345703, 0.04607270431518555, 0.046626785278320315, 0.0464730224609375, 0.04635785675048828, 0.0465863037109375, 0.04686073684692383, 0.046465023040771485, 0.04612483215332031, 0.04624816131591797, 0.046360576629638675, 0.0462479362487793, 0.047064449310302736, 0.046889278411865236, 0.04616633605957031, 0.04635193634033203, 0.04637699127197266, 0.04632972717285156, 0.04617475128173828, 0.04675324630737305, 0.046887359619140624, 0.04647331237792969, 0.04651395034790039, 0.04657958221435547, 0.04654127883911133, 0.0469728012084961, 0.04681913757324219, 0.04662700653076172, 0.046862335205078126, 0.04677017593383789, 0.04647116851806641, 0.046252033233642575, 0.04692377471923828, 0.046695968627929685, 0.04651651382446289, 0.046774463653564455, 0.04676803207397461, 0.04680303955078125, 0.04665887832641601, 0.046844478607177734, 0.04674697494506836, 0.04692380905151367, 0.046949153900146486, 0.04675376129150391, 0.04694160079956055, 0.04696329498291016, 0.046813056945800784, 0.046868606567382814, 0.04671001434326172, 0.046883583068847656, 0.04655104064941406, 0.04685619354248047, 0.04689715194702149, 0.046661182403564455, 0.04673318481445313, 0.048535552978515625, 0.04635647964477539, 0.04594483184814453, 0.0462110710144043, 0.046489601135253904, 0.04625612640380859, 
0.04612710571289062, 0.04635033416748047, 0.0461844482421875, 0.04627046585083008, 0.04659404754638672, 0.0462479362487793, 0.04600976181030274, 0.04655718231201172, 0.04628950500488281, 0.04590387344360351, 0.046223201751708985, 0.046683521270751954, 0.04658256149291992, 0.046405441284179685, 0.046962623596191404, 0.04686463928222656, 0.046190399169921875, 0.04637305450439453, 0.0464793586730957, 0.046202880859375, 0.04671078491210937, 0.046566783905029295, 0.04638297653198242, 0.04623846435546875, 0.0466063346862793, 0.04685388946533203, 0.04629939270019531, 0.04698051071166992, 0.04696246337890625, 0.04653039932250977, 0.04645516967773437, 0.046389854431152344, 0.04665494537353516, 0.04696118545532227, 0.04694015884399414, 0.04694015884399414, 0.046903297424316405, 0.04665958404541016, 0.046671871185302735, 0.046895103454589845, 0.04691763305664062, 0.046728416442871096, 0.04682217788696289, 0.0466596794128418, 0.04675369644165039, 0.046516223907470705, 0.04669388961791992, 0.04688886260986328, 0.046778976440429686, 0.04683091354370117, 0.046844608306884764, 0.04664432144165039, 0.04680956649780273, 0.04708806228637695, 0.04687011337280273, 0.04742390441894531, 0.04699545669555664, 0.04846387100219727, 0.04624588775634766, 0.04636262512207031, 0.04614070510864258, 0.04608278274536133, 0.04624588775634766, 0.046312576293945314, 0.046570369720458984, 0.046266368865966793, 0.04631552124023437, 0.04652774429321289, 0.04611475372314453, 0.04660102462768555, 0.04652236938476562, 0.046473217010498044, 0.04635238265991211, 0.04645872116088867, 0.04627872085571289, 0.046063838958740236, 0.04675571060180664, 0.046966911315917965, 0.04649692916870117, 0.046686943054199216, 0.04652588653564453, 0.04630339050292969, 0.04603126525878906, 0.04640467071533203, 0.046506942749023436, 0.04631552124023437, 0.04674886322021484, 0.04674233627319336, 0.04640576171875, 0.04675775909423828, 0.046644798278808595, 0.04664112091064453, 0.04654127883911133, 0.046626049041748045, 0.046752513885498045, 0.04649574279785156, 0.04661376190185547, 0.047180545806884765, 0.04685635375976562, 0.04698700714111328, 0.04676822280883789, 0.046704639434814454, 0.04665865707397461, 0.04687891387939453, 0.04674623870849609, 0.04697507095336914, 0.04688256072998047, 0.046753311157226564, 0.046545631408691404, 0.046986495971679684, 0.046841697692871095, 0.04697958374023437, 0.0470838394165039, 0.04685168075561524, 0.04663929748535156, 0.047039840698242186, 0.04703039932250976, 0.046930721282958984, 0.04707673645019531, 0.04687532806396484, 0.0480785903930664, 0.04637516784667969, 0.0460656623840332, 0.046034942626953124, 0.046288257598876954, 0.046473857879638675, 0.04621209716796875, 0.046031776428222655, 0.046409248352050785, 0.04655571365356445, 0.04630697631835937, 0.046659168243408204, 0.04646080017089844, 0.046313888549804685, 0.04600182342529297, 0.04635087966918945, 0.04666396713256836, 0.04634828948974609, 0.046637054443359374, 0.046772224426269535, 0.04636671829223633, 0.046635009765625, 0.04649760055541992, 0.04627065658569336, 0.04612300872802735, 0.04655104064941406, 0.04632572937011719, 0.046247871398925784, 0.046871841430664064, 0.046633792877197267, 0.046437984466552736, 0.04668662261962891, 0.04660163116455078, 0.046602046966552735, 0.04651830291748047, 0.04685491180419922, 0.046923454284667966, 0.046467391967773435, 0.046583263397216794, 0.047180030822753904, 0.04680323028564453, 0.04683366394042969, 0.04676784133911133, 0.04660444641113281, 0.046721153259277344, 0.04688217544555664, 0.046760574340820316, 
0.04690937423706055, 0.046761150360107424, 0.0465766716003418, 0.04692566299438477, 0.046949951171875, 0.0469728012084961, 0.04697462463378906, 0.04693494415283203, 0.04675539016723633, 0.04690300750732422, 0.046879390716552734, 0.04684806442260742, 0.04666572952270508, 0.04706051254272461, 0.047095870971679686, 0.0468298225402832, 0.04857241439819336, 0.04630521774291992, 0.04590188980102539, 0.045755455017089844, 0.046072769165039065, 0.046018558502197264, 0.04617184066772461, 0.04644672012329101, 0.04639353561401367, 0.04639712142944336, 0.046693729400634765, 0.04644144058227539, 0.04622335815429687, 0.0465546875, 0.046612159729003906, 0.04637772750854492, 0.0466060791015625, 0.046926078796386717, 0.046599201202392575, 0.04603593444824219, 0.04688889694213867, 0.04662486267089844, 0.046368736267089844, 0.046241790771484374, 0.0462213134765625, 0.04631961441040039, 0.04629913711547851, 0.046460033416748044, 0.04657651138305664, 0.04638294219970703, 0.04678876876831055, 0.04679679870605469, 0.04640678405761719, 0.046774559020996094, 0.046621280670166014, 0.04647107315063476, 0.046790752410888675, 0.04694220733642578, 0.04666556930541992, 0.0470852165222168, 0.04709222412109375, 0.046668991088867184, 0.04647200012207031, 0.04653036880493164, 0.04669161605834961, 0.046308063507080076, 0.04647676849365234, 0.04655104064941406, 0.046701278686523434, 0.04664934539794922, 0.04694220733642578, 0.04677017593383789, 0.04698316955566406, 0.046992446899414064, 0.0468551025390625, 0.04707894515991211, 0.04703267288208008, 0.04688019180297852, 0.04704735946655274, 0.04729241561889649, 0.047306751251220705, 0.04714486312866211, 0.04693350219726562]",tokens/s,21.453479809362893,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in 
load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 112973 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained 
self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4088, in from_pretrained hf_quantizer.postprocess_model(model) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model return self._process_model_after_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 456, in post_init_awq_exllama_modules model = exllama_post_init(model) File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 133, in exllama_post_init submodule.post_init() File 
""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 73, in post_init self.q4 = exl_ext.make_q4( RuntimeError: scales and qweight have incompatible shapes " 4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1545.949184,1823.408128,0.0,1428.160512,1322.516992,s,1,8.6151826171875,8.6151826171875,0.0,8.6151826171875,8.6151826171875,8.6151826171875,8.6151826171875,[8.6151826171875],,kWh,4.841402075417136e-05,5.333193352905008e-06,1.8674737161994415e-05,7.242195126907079e-05,,MB,1518.239744,1842.282496,0.0,1434.451968,1322.072064,s,10,5.334609802246093,0.5334609802246093,0.001341341049820948,0.533219970703125,0.5344022033691406,0.5357007049560547,0.536739506225586,"[0.5369992065429687, 0.5320140380859375, 0.533542236328125, 0.5341136474609375, 0.5322193603515625, 0.532897705078125, 0.5327489013671876, 0.5337323608398438, 0.5335829467773437, 0.5327593994140625]",tokens/s,479.88514528693986,kWh,1.5942756853948133e-05,1.7576546860210916e-06,1.0655330161684307e-05,2.8355741701653534e-05,tokens/kWh,9028153.89890054,MB,1523.48672,1842.282496,0.0,1434.451968,1374.923264,s,10,10.940359008789061,1.094035900878906,0.01615004752358528,1.0859066162109374,1.1225823608398438,1.124010968017578,1.1251538537597656,"[1.08465234375, 1.0914818115234375, 1.08587841796875, 1.085934814453125, 1.08191357421875, 1.0795816650390626, 1.1023470458984375, 1.0808648681640625, 1.1254395751953126, 1.122264892578125]",tokens/s,57.58494757748648,kWh,3.2475488793968615e-05,3.5824973199971697e-06,1.7150303193914406e-05,5.3208289307880186e-05,tokens/kWh,1184026.038414087,,s,630,10.93764793968201,0.01736134593600318,0.0004135646128487301,0.017225695610046388,0.017894233131408692,0.01801259527206421,0.018358398532867432,"[0.01812284851074219, 0.017483776092529296, 0.017249311447143555, 0.017118175506591796, 0.017028160095214843, 0.01730636787414551, 0.017556991577148438, 0.01759097671508789, 0.01823311996459961, 0.017380800247192383, 0.017294111251831053, 0.017173856735229493, 0.01725916862487793, 0.017096704483032226, 0.017145856857299805, 0.017160192489624023, 0.01706598472595215, 0.0176507511138916, 0.01714787292480469, 0.017175519943237304, 0.017143808364868163, 0.017043455123901367, 0.017073568344116212, 0.017077856063842774, 0.016966655731201173, 0.016965631484985352, 0.01702707290649414, 0.016947200775146484, 0.017047872543334962, 0.016972576141357422, 0.017046432495117187, 0.01697996711730957, 0.017006591796875, 0.017032255172729494, 0.016996992111206054, 0.017006912231445313, 0.016983200073242187, 0.01702537536621094, 0.017084928512573243, 0.017035263061523438, 0.017145856857299805, 0.017051647186279297, 0.01701683235168457, 0.01700454330444336, 0.017063232421875, 0.017040063858032226, 0.017152000427246093, 0.01700864028930664, 0.0170098876953125, 0.017062463760375977, 0.01715407943725586, 0.017057151794433595, 0.017050432205200194, 
0.019209535598754882, 0.01836716842651367, 0.017218656539916992, 0.017159072875976563, 0.017135616302490234, 0.017172479629516603, 0.017006591796875, 0.017224992752075195, 0.01708720016479492, 0.017307647705078123, 0.01833692741394043, 0.017773408889770508, 0.01769264030456543, 0.01767628860473633, 0.01792140769958496, 0.01785513687133789, 0.017717248916625978, 0.01778278350830078, 0.017794912338256835, 0.017631391525268554, 0.017579679489135743, 0.017453407287597655, 0.017270784378051757, 0.01702707290649414, 0.017131519317626954, 0.017132959365844726, 0.017037792205810545, 0.017334400177001955, 0.017100799560546876, 0.017199199676513673, 0.01723161506652832, 0.017213056564331055, 0.017185184478759767, 0.01722995185852051, 0.01724006462097168, 0.017094655990600584, 0.017358495712280275, 0.017461599349975584, 0.017292352676391603, 0.01710995292663574, 0.017766368865966796, 0.017072160720825194, 0.01704140853881836, 0.017104000091552735, 0.01703001594543457, 0.016955392837524414, 0.01698406410217285, 0.017072128295898437, 0.01721343994140625, 0.01724790382385254, 0.01786899185180664, 0.01728060722351074, 0.017236576080322266, 0.017178592681884767, 0.017159616470336914, 0.017332799911499025, 0.01722572708129883, 0.017262624740600585, 0.01726051139831543, 0.01724006462097168, 0.01737068748474121, 0.017318592071533204, 0.017329919815063478, 0.017242111206054688, 0.01727004814147949, 0.01717692756652832, 0.017249792098999024, 0.017230655670166014, 0.01715558433532715, 0.01707241630554199, 0.01708880043029785, 0.01711427116394043, 0.01720355224609375, 0.018409183502197266, 0.017785120010375976, 0.017479679107666016, 0.01756979179382324, 0.0176680965423584, 0.017510719299316406, 0.01745814323425293, 0.017219423294067383, 0.01713417625427246, 0.017258783340454102, 0.017102848052978514, 0.017423519134521483, 0.017380191802978517, 0.017258495330810548, 0.01781760025024414, 0.017262592315673828, 0.017029184341430664, 0.017031103134155273, 0.01731123161315918, 0.017326143264770506, 0.017048255920410156, 0.017011871337890627, 0.01704377555847168, 0.01710108757019043, 0.017130975723266603, 0.01707676887512207, 0.017283071517944337, 0.017094655990600584, 0.017014720916748046, 0.01696076774597168, 0.017211711883544922, 0.0173941764831543, 0.01763475227355957, 0.017416767120361328, 0.01722163200378418, 0.017063936233520507, 0.01703321647644043, 0.017026208877563478, 0.017216352462768553, 0.01712335968017578, 0.017219551086425783, 0.017161600112915038, 0.017171072006225585, 0.01717452812194824, 0.01716864013671875, 0.01706572723388672, 0.01717862319946289, 0.017014240264892577, 0.016929056167602537, 0.016888063430786134, 0.017035263061523438, 0.016936960220336913, 0.017091615676879883, 0.017036256790161134, 0.017051647186279297, 0.01705369567871094, 0.017074176788330078, 0.01727039909362793, 0.017213823318481446, 0.01731283187866211, 0.01735321617126465, 0.017324480056762695, 0.017338367462158204, 0.021570175170898438, 0.017733631134033204, 0.01795686340332031, 0.017459199905395507, 0.017342464447021484, 0.017377279281616212, 0.017207456588745118, 0.017417119979858398, 0.01730860710144043, 0.01722947120666504, 0.01746774482727051, 0.017252351760864256, 0.01722777557373047, 0.017201152801513672, 0.01724323272705078, 0.017308416366577147, 0.01728937530517578, 0.01817804718017578, 0.017291040420532228, 0.01721571159362793, 0.01710908889770508, 0.01716534423828125, 0.016999103546142577, 0.017014303207397462, 0.017086944580078124, 0.017071935653686525, 0.017179008483886718, 0.017055744171142577, 
0.017104415893554686, 0.017050079345703125, 0.01724569511413574, 0.01704966354370117, 0.01698252868652344, 0.01695737648010254, 0.01700454330444336, 0.016953344345092772, 0.016975616455078123, 0.01699456024169922, 0.017022975921630858, 0.017093727111816406, 0.01710812759399414, 0.017041088104248047, 0.017192703247070312, 0.017135711669921876, 0.01727724838256836, 0.01722972869873047, 0.017113311767578125, 0.01699203109741211, 0.017092191696166992, 0.017023391723632812, 0.01698726463317871, 0.016946048736572267, 0.017045568466186524, 0.016957536697387695, 0.017009599685668945, 0.01705049514770508, 0.01711516761779785, 0.017121280670166016, 0.016995552062988282, 0.01695414352416992, 0.016977920532226562, 0.01700044822692871, 0.01694713592529297, 0.018298112869262695, 0.01759644889831543, 0.01751318359375, 0.017565696716308594, 0.017336320877075196, 0.01738751983642578, 0.017330175399780275, 0.01722777557373047, 0.017392831802368162, 0.01742313575744629, 0.017325151443481446, 0.01734752082824707, 0.01743257522583008, 0.017238016128540038, 0.017370975494384766, 0.017217567443847656, 0.017180671691894533, 0.01700876808166504, 0.01700044822692871, 0.017001855850219728, 0.016933504104614257, 0.01699430465698242, 0.016982015609741212, 0.017063360214233398, 0.017119808197021483, 0.017112064361572265, 0.01705881690979004, 0.016999584197998047, 0.016948095321655275, 0.01690943908691406, 0.016984928131103517, 0.017058944702148436, 0.017131744384765626, 0.01711516761779785, 0.01711372756958008, 0.01707766342163086, 0.01697865676879883, 0.01717571258544922, 0.017005279541015626, 0.01691164779663086, 0.016885856628417968, 0.017007232666015625, 0.017253759384155274, 0.017316255569458008, 0.01741360092163086, 0.017117952346801756, 0.016973823547363282, 0.016910335540771485, 0.016945152282714843, 0.01696371269226074, 0.017000608444213867, 0.016969791412353517, 0.01688972854614258, 0.01686854362487793, 0.017015392303466798, 0.016914432525634765, 0.016924671173095703, 0.017074176788330078, 0.018288415908813478, 0.017115232467651367, 0.017847583770751952, 0.01704431915283203, 0.017067136764526366, 0.018201471328735352, 0.017452224731445313, 0.017306432723999024, 0.017717248916625978, 0.017092607498168946, 0.01701068878173828, 0.017047552108764647, 0.016920576095581053, 0.017178304672241212, 0.016949567794799805, 0.01697996711730957, 0.01710393524169922, 0.017081279754638672, 0.01703321647644043, 0.017121280670166016, 0.017111040115356444, 0.017111263275146484, 0.017043231964111328, 0.017084415435791016, 0.017036767959594728, 0.01701481628417969, 0.017041919708251953, 0.016946239471435545, 0.0169498233795166, 0.017327680587768554, 0.017023807525634767, 0.017006591796875, 0.01715814399719238, 0.017192384719848634, 0.01718329620361328, 0.017311744689941407, 0.01723311996459961, 0.017242528915405272, 0.017257055282592772, 0.017304672241210937, 0.01727110481262207, 0.017250688552856445, 0.01722163200378418, 0.017172479629516603, 0.017233728408813476, 0.0171167049407959, 0.01711497688293457, 0.01698054313659668, 0.016908544540405274, 0.016951295852661134, 0.01711836814880371, 0.016974687576293945, 0.016932863235473633, 0.01708028793334961, 0.017059423446655272, 0.017103296279907226, 0.01705286407470703, 0.016988128662109376, 0.016960351943969727, 0.016959423065185546, 0.01692073631286621, 0.017088415145874024, 0.017235008239746094, 0.017169599533081056, 0.017178367614746094, 0.017303552627563477, 0.017143552780151367, 0.017084672927856447, 0.01813007926940918, 0.01750307273864746, 0.017317888259887695, 
0.01739571189880371, 0.01737113571166992, 0.01758185577392578, 0.017690752029418947, 0.017667871475219726, 0.017805055618286134, 0.01776896095275879, 0.017817567825317383, 0.017747167587280274, 0.01780415916442871, 0.017803232192993165, 0.017678367614746095, 0.017741823196411134, 0.01781475257873535, 0.01777743911743164, 0.01776870346069336, 0.017792768478393554, 0.017741823196411134, 0.01763532829284668, 0.017695903778076172, 0.017642335891723634, 0.017657888412475585, 0.018129024505615234, 0.01802009582519531, 0.017889503479003907, 0.017905120849609376, 0.017615264892578125, 0.017682432174682617, 0.01737113571166992, 0.017383232116699218, 0.017840320587158204, 0.017311744689941407, 0.017192928314208985, 0.017225664138793947, 0.0170897274017334, 0.01700752067565918, 0.016956672668457032, 0.016986047744750977, 0.016982175827026366, 0.017199775695800782, 0.017051807403564455, 0.017047199249267578, 0.017031360626220703, 0.017034591674804686, 0.017154912948608398, 0.017232927322387695, 0.017121248245239258, 0.017238847732543944, 0.017229824066162108, 0.017218656539916992, 0.01734079933166504, 0.017328672409057617, 0.017372671127319335, 0.017408512115478517, 0.017536863327026368, 0.017283103942871095, 0.01755763244628906, 0.01763942337036133, 0.017640607833862305, 0.01747849655151367, 0.018118656158447266, 0.017305503845214842, 0.017150047302246094, 0.017086528778076173, 0.017245664596557617, 0.017340896606445312, 0.017317888259887695, 0.017104576110839844, 0.017135583877563476, 0.01703887939453125, 0.017034048080444335, 0.017121023178100585, 0.017080575942993163, 0.016987520217895506, 0.017076576232910156, 0.016980255126953125, 0.016932863235473633, 0.016973823547363282, 0.017031167984008787, 0.017082368850708008, 0.01703321647644043, 0.016916479110717773, 0.017111040115356444, 0.01720524787902832, 0.01716758346557617, 0.01716281509399414, 0.017239519119262695, 0.017084192276000977, 0.017017631530761718, 0.017008544921875, 0.017053983688354493, 0.01699635124206543, 0.0170250244140625, 0.017203296661376953, 0.017232864379882813, 0.017206207275390625, 0.01707827186584473, 0.01742198371887207, 0.01733580780029297, 0.017378368377685548, 0.017264415740966797, 0.01721343994140625, 0.0172126407623291, 0.017117216110229493, 0.0170927677154541, 0.017192832946777342, 0.0170296630859375, 0.017179935455322266, 0.017070720672607422, 0.017043424606323243, 0.017121599197387694, 0.017342464447021484, 0.017284320831298827, 0.01718966484069824, 0.01720319938659668, 0.017099775314331055, 0.017107967376708985, 0.017250303268432618, 0.017082368850708008, 0.01705187225341797, 0.017102624893188475, 0.017169952392578125, 0.01717910385131836, 0.018556512832641602, 0.01809654426574707, 0.01800601577758789, 0.017975296020507812, 0.01801420783996582, 0.017893375396728514, 0.017985536575317384, 0.017909151077270508, 0.017930816650390625, 0.01794175910949707, 0.01825811195373535, 0.017934207916259767, 0.01795964813232422, 0.01785241508483887, 0.017903615951538086, 0.017845279693603517, 0.017879295349121093, 0.017804128646850586, 0.017950592041015626, 0.01787494468688965, 0.018028543472290038, 0.01796623992919922, 0.01795907211303711, 0.01800262451171875, 0.017886592864990235, 0.017793664932250975, 0.01783193588256836, 0.017868703842163085, 0.017997888565063475, 0.01788870429992676, 0.018012767791748048, 0.017853887557983398, 0.017840703964233397, 0.017977312088012697, 0.01805036735534668, 0.018016992568969728, 0.018045984268188476, 0.017994144439697265, 0.018055423736572266, 0.017959232330322265, 0.017934335708618163, 
0.017737728118896484, 0.017829248428344727, 0.01786534309387207, 0.017835391998291015, 0.017848960876464842, 0.017741823196411134, 0.017679807662963867, 0.01758633613586426, 0.017600128173828125, 0.017599264144897462, 0.017582080841064454, 0.017675680160522463, 0.017559648513793946, 0.017639936447143553, 0.017515615463256837, 0.01755228805541992, 0.01767033576965332, 0.01757574462890625, 0.01759846305847168, 0.017682687759399414, 0.01764246368408203, 0.017578784942626952, 0.01811020851135254, 0.017701568603515624, 0.017364992141723632, 0.017335359573364257, 0.01726969528198242, 0.01728102493286133, 0.017320032119750976, 0.017170112609863283, 0.01725667190551758, 0.01718230438232422, 0.017237535476684572, 0.01722457695007324, 0.017238016128540038, 0.01778396797180176, 0.019261503219604494, 0.01764579200744629, 0.01768227195739746, 0.01780614471435547, 0.017790271759033204, 0.017779232025146485, 0.017788991928100586, 0.01785856056213379, 0.017796192169189453, 0.017875680923461913, 0.017831647872924804, 0.01779964828491211, 0.01777663993835449, 0.017844224929809572, 0.017854463577270507, 0.01778483200073242, 0.017810592651367186, 0.019446239471435547, 0.017901952743530274, 0.01785651206970215, 0.017811456680297853, 0.017954816818237306, 0.017703136444091796, 0.01785385513305664, 0.017858943939208984, 0.017739776611328126, 0.017725439071655275, 0.01772742462158203, 0.017874399185180665, 0.01791756820678711, 0.017943647384643553, 0.017907583236694335, 0.017876991271972655, 0.01786476707458496, 0.017964736938476562, 0.017813760757446288, 0.017817312240600586, 0.017809535980224608, 0.01780137634277344, 0.01787494468688965, 0.017987583160400392, 0.017864704132080078, 0.018001920700073244, 0.018147327423095702, 0.018060543060302733, 0.018118528366088866, 0.01810918426513672, 0.01801228713989258, 0.01801238441467285]",tokens/s,57.599220917903885,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1514.401792,1823.408128,0.0,1428.160512,1322.516992,s,1,8.8410439453125,8.8410439453125,0.0,8.8410439453125,8.8410439453125,8.8410439453125,8.8410439453125,[8.8410439453125],,kWh,4.956789914999717e-05,5.458563006720272e-06,1.883390395601703e-05,7.386036611273448e-05,,MB,1584.095232,1842.282496,0.0,1434.451968,1322.072064,s,10,5.343417846679688,0.5343417846679688,0.002258050791299455,0.5337992248535156,0.5352724182128906,0.5380207305908203,0.540219380493164,"[0.54076904296875, 0.5322550659179688, 0.5334747924804687, 0.5346616821289063, 0.5326629028320312, 0.5337119140625, 0.5343487548828125, 0.5333185424804687, 0.53432861328125, 
0.5338865356445313]",tokens/s,479.0941067037724,kWh,1.5952725873246397e-05,1.759311121475994e-06,1.056739149484027e-05,2.827942848956266e-05,tokens/kWh,9052516.747093534,MB,1588.801536,1842.282496,0.0,1434.451968,1374.923264,s,10,11.133669555664062,1.113366955566406,0.009659703262118879,1.115445068359375,1.123347900390625,1.1244218139648436,1.1252809448242187,"[1.0983587646484374, 1.0961551513671874, 1.107653564453125, 1.1213216552734375, 1.110700439453125, 1.1127877197265625, 1.1231092529296876, 1.1254957275390625, 1.1181024169921876, 1.11998486328125]",tokens/s,56.58511749879432,kWh,3.164441814133704e-05,3.4902849014094375e-06,1.7112519537961604e-05,5.224722258070807e-05,tokens/kWh,1205805.7230254058,,s,630,11.13072278022767,0.017667813936869302,0.0004447551390950777,0.0175817928314209,0.01808372802734375,0.0183081262588501,0.018845265083312992,"[0.01878835105895996, 0.018011871337890624, 0.017658143997192382, 0.017508352279663086, 0.01740390396118164, 0.017478687286376953, 0.017476512908935548, 0.01736300849914551, 0.01733827209472656, 0.017284543991088867, 0.01727964782714844, 0.01719705581665039, 0.017356800079345702, 0.01731551933288574, 0.017320255279541015, 0.01738675117492676, 0.01744883155822754, 0.017396608352661134, 0.017487295150756837, 0.017453632354736327, 0.01738924789428711, 0.01744095993041992, 0.017496192932128906, 0.017438720703125, 0.01759846305847168, 0.017569696426391602, 0.01753878402709961, 0.01751641654968262, 0.017850879669189454, 0.017489919662475584, 0.017528480529785156, 0.017543519973754883, 0.01735820770263672, 0.017386112213134765, 0.017367040634155274, 0.017360895156860352, 0.01726464080810547, 0.017388576507568358, 0.017408992767333983, 0.01743052864074707, 0.01743667221069336, 0.018354175567626953, 0.017368864059448243, 0.017237279891967775, 0.017392000198364257, 0.017271360397338866, 0.017372991561889647, 0.01723516845703125, 0.01722835159301758, 0.01728553581237793, 0.017372831344604493, 0.01723129653930664, 0.017238784790039062, 0.017256128311157228, 0.017270463943481446, 0.017281824111938477, 0.017333856582641603, 0.017297536849975585, 0.017273120880126953, 0.017268287658691407, 0.017277376174926758, 0.017251840591430666, 0.01727743911743164, 0.018479103088378905, 0.01768409538269043, 0.017545600891113282, 0.01746028709411621, 0.01734342384338379, 0.01726438331604004, 0.017314048767089845, 0.01726246452331543, 0.01737238311767578, 0.01722460746765137, 0.01724006462097168, 0.017205120086669922, 0.01735487937927246, 0.017204927444458007, 0.017463167190551757, 0.01727123260498047, 0.017340192794799803, 0.017248031616210937, 0.017273279190063478, 0.017263999938964845, 0.017384063720703124, 0.017928064346313475, 0.017496192932128906, 0.017375232696533204, 0.017340127944946288, 0.017223968505859374, 0.017357919692993166, 0.017245088577270508, 0.017311744689941407, 0.017242111206054688, 0.01739753532409668, 0.017287391662597656, 0.017348608016967772, 0.01738956832885742, 0.017291263580322267, 0.017354751586914064, 0.017391616821289063, 0.017348512649536133, 0.0179117431640625, 0.017449119567871093, 0.01741209602355957, 0.017257728576660157, 0.017251007080078123, 0.017319616317749024, 0.01732851219177246, 0.017274303436279295, 0.017182464599609374, 0.017234432220458985, 0.01723814392089844, 0.017471328735351562, 0.01735043144226074, 0.017407840728759765, 0.01730384063720703, 0.017289663314819338, 0.017358848571777344, 0.017246208190917968, 0.0172728328704834, 0.01739366340637207, 0.017325567245483398, 0.017320255279541015, 0.017379295349121093, 
0.017854175567626952, 0.01852262306213379, 0.01846067237854004, 0.017780096054077148, 0.017719327926635744, 0.017445472717285155, 0.01813020706176758, 0.017293472290039063, 0.017359424591064453, 0.017294719696044923, 0.017451648712158204, 0.01764556884765625, 0.01762719917297363, 0.017901472091674805, 0.018013216018676757, 0.018297855377197265, 0.01790764808654785, 0.017739839553833008, 0.01785651206970215, 0.017682432174682617, 0.01775119972229004, 0.01767100715637207, 0.017647199630737305, 0.017738048553466796, 0.017746015548706053, 0.017686527252197267, 0.01781350326538086, 0.017849760055541994, 0.01775062370300293, 0.01756889533996582, 0.017517152786254882, 0.01746767997741699, 0.01745289611816406, 0.01735491180419922, 0.017451007843017577, 0.01743833541870117, 0.017527135848999023, 0.01754115104675293, 0.01743657684326172, 0.01744495964050293, 0.01739776039123535, 0.01731292724609375, 0.017490943908691405, 0.018607967376708983, 0.018112512588500978, 0.01761075210571289, 0.01758992004394531, 0.017305919647216797, 0.017324064254760744, 0.017305599212646485, 0.017260032653808592, 0.01715782356262207, 0.017199935913085936, 0.017257503509521484, 0.01747862434387207, 0.017529983520507813, 0.017502208709716797, 0.01738630485534668, 0.017372991561889647, 0.017401504516601562, 0.017349216461181642, 0.01726166343688965, 0.017301631927490235, 0.017257247924804688, 0.017192960739135742, 0.018319616317749022, 0.01763711929321289, 0.017663551330566407, 0.017481279373168946, 0.017628000259399413, 0.017666080474853515, 0.017532928466796875, 0.0175861759185791, 0.017669567108154295, 0.01751897621154785, 0.017483232498168945, 0.0175416316986084, 0.01743840026855469, 0.017336864471435547, 0.017366111755371092, 0.01745193672180176, 0.017456832885742186, 0.01747283172607422, 0.017497087478637697, 0.017481695175170897, 0.01756982421875, 0.017584127426147463, 0.017555456161499023, 0.017497152328491212, 0.01817081642150879, 0.01864089584350586, 0.020850143432617187, 0.017783136367797853, 0.01768467140197754, 0.017584127426147463, 0.018764863967895506, 0.017503040313720703, 0.01766531181335449, 0.018856800079345704, 0.017934335708618163, 0.017881088256835938, 0.017524736404418945, 0.017452831268310546, 0.0174553279876709, 0.017356800079345702, 0.01728233528137207, 0.017386207580566405, 0.017666048049926757, 0.017600351333618165, 0.017668256759643553, 0.017541120529174805, 0.017459199905395507, 0.02064793586730957, 0.017683616638183595, 0.017791551589965822, 0.019484960556030273, 0.01758950424194336, 0.017545984268188475, 0.01757151985168457, 0.01751481628417969, 0.017497343063354494, 0.01753487968444824, 0.01750307273864746, 0.018792448043823243, 0.017485824584960938, 0.017532928466796875, 0.017721343994140625, 0.018030303955078125, 0.018448383331298827, 0.017710271835327147, 0.017660736083984375, 0.017528736114501953, 0.017354848861694337, 0.017262592315673828, 0.017338016510009765, 0.017297119140625, 0.017534751892089844, 0.017613279342651368, 0.017352575302124022, 0.01740595245361328, 0.017347072601318358, 0.017285120010375975, 0.017303455352783204, 0.01724835205078125, 0.01724412727355957, 0.01740598487854004, 0.017554943084716796, 0.017432512283325194, 0.017281600952148438, 0.017285120010375975, 0.01724608039855957, 0.017298559188842773, 0.017337343215942384, 0.017334272384643554, 0.017321887969970702, 0.017345792770385744, 0.017347423553466797, 0.01737222480773926, 0.01734137535095215, 0.01724006462097168, 0.01739776039123535, 0.017500160217285156, 0.01752604866027832, 0.017347295761108397, 
0.017377279281616212, 0.017552543640136718, 0.01761142349243164, 0.018053119659423827, 0.017789119720458983, 0.01780531120300293, 0.018266111373901366, 0.018056575775146484, 0.017899871826171875, 0.01824083137512207, 0.017849311828613282, 0.017983488082885742, 0.01794767951965332, 0.01800422477722168, 0.018086624145507813, 0.018144895553588867, 0.0179116153717041, 0.018297407150268556, 0.018163711547851562, 0.017925792694091797, 0.018288991928100587, 0.017884544372558594, 0.017617536544799806, 0.017489919662475584, 0.01762099266052246, 0.017763711929321288, 0.01792473602294922, 0.018488128662109374, 0.018112512588500978, 0.017947872161865233, 0.017898271560668946, 0.017725439071655275, 0.017702911376953127, 0.017612127304077147, 0.01764009666442871, 0.0177040958404541, 0.01771811294555664, 0.017843551635742187, 0.017795167922973632, 0.017840703964233397, 0.017887231826782226, 0.01778611183166504, 0.017732351303100587, 0.017545055389404297, 0.01807375907897949, 0.01761017608642578, 0.018229408264160155, 0.017787071228027345, 0.017682111740112305, 0.01746384048461914, 0.017559551239013673, 0.017468767166137697, 0.017369760513305663, 0.01756979179382324, 0.017386911392211914, 0.018088544845581055, 0.01760051155090332, 0.017864416122436524, 0.017747936248779298, 0.017686368942260743, 0.017504735946655272, 0.01747148895263672, 0.017350656509399414, 0.01737932777404785, 0.017550399780273437, 0.017398719787597657, 0.01740390396118164, 0.017383424758911133, 0.017567743301391603, 0.018093088150024413, 0.01761769676208496, 0.01778054428100586, 0.017404287338256837, 0.017557504653930665, 0.0175817928314209, 0.01752092742919922, 0.017563648223876953, 0.01745715141296387, 0.017508352279663086, 0.01750592041015625, 0.017463264465332032, 0.01768396759033203, 0.017499040603637696, 0.017467391967773437, 0.01754515266418457, 0.017553184509277345, 0.01748524856567383, 0.017666112899780272, 0.017656608581542967, 0.017680351257324218, 0.018354976654052734, 0.01796665573120117, 0.01778118324279785, 0.01756979179382324, 0.01752057647705078, 0.017440832138061524, 0.01745305633544922, 0.017625087738037108, 0.01765376091003418, 0.018054431915283203, 0.017691360473632813, 0.017704959869384765, 0.017547264099121093, 0.01755292892456055, 0.017459680557250976, 0.01759231948852539, 0.017672191619873046, 0.017604608535766602, 0.018309120178222657, 0.017714847564697267, 0.01765782356262207, 0.017674623489379884, 0.01762073516845703, 0.017657472610473634, 0.017641311645507814, 0.017652511596679688, 0.01766508865356445, 0.01765839958190918, 0.017953184127807616, 0.01773155212402344, 0.017827871322631837, 0.017678016662597655, 0.017645503997802733, 0.017580415725708006, 0.017601791381835936, 0.0178920955657959, 0.017743871688842772, 0.017808639526367187, 0.017773311614990236, 0.01821286392211914, 0.017768096923828126, 0.01769487953186035, 0.01770515251159668, 0.018274303436279296, 0.0178606071472168, 0.01769267272949219, 0.017743871688842772, 0.017774463653564453, 0.017909151077270508, 0.017969535827636718, 0.01808345603942871, 0.01809891128540039, 0.01825382423400879, 0.0180633602142334, 0.018050912857055665, 0.01811471939086914, 0.01815724754333496, 0.018306911468505858, 0.018057056427001953, 0.017998464584350587, 0.018089216232299806, 0.018002399444580076, 0.0180861759185791, 0.018462751388549806, 0.017849727630615233, 0.018198495864868165, 0.017670080184936522, 0.017670175552368165, 0.017594783782958985, 0.017602752685546875, 0.0175568962097168, 0.01762067222595215, 0.017494655609130858, 0.017457408905029295, 
0.01767843246459961, 0.017706911087036134, 0.017698240280151368, 0.017660575866699217, 0.01763532829284668, 0.017604576110839844, 0.01756163215637207, 0.017677503585815428, 0.01777542304992676, 0.017929311752319335, 0.01795574378967285, 0.017917503356933595, 0.017956544876098633, 0.018002687454223634, 0.018032352447509767, 0.018022207260131835, 0.01821232032775879, 0.01860710334777832, 0.018094079971313477, 0.01802444839477539, 0.017911808013916015, 0.017995519638061522, 0.017916160583496092, 0.017958911895751953, 0.01789030456542969, 0.017856576919555663, 0.017808319091796875, 0.01784828758239746, 0.017761760711669922, 0.0177956485748291, 0.017921760559082033, 0.0180164794921875, 0.018110111236572267, 0.01795465660095215, 0.01784454345703125, 0.017815584182739257, 0.017692256927490234, 0.017793664932250975, 0.017946624755859376, 0.01790771293640137, 0.017797056198120116, 0.01765996742248535, 0.017721343994140625, 0.017756160736083985, 0.017762304306030274, 0.0180380802154541, 0.01780191993713379, 0.017746015548706053, 0.017812959671020506, 0.018376895904541016, 0.01800217628479004, 0.017993728637695314, 0.01851913642883301, 0.017879968643188478, 0.017750015258789064, 0.017699871063232422, 0.017827871322631837, 0.017736448287963866, 0.017768640518188477, 0.01783193588256836, 0.017923360824584962, 0.01783260726928711, 0.01781907272338867, 0.017756799697875976, 0.018020351409912108, 0.01787676811218262, 0.017932191848754882, 0.0178176326751709, 0.019167520523071288, 0.01787494468688965, 0.018378528594970703, 0.018028799057006835, 0.017749439239501952, 0.01768707275390625, 0.017761503219604492, 0.01768732833862305, 0.017618335723876954, 0.017492576599121092, 0.017451007843017577, 0.01741983985900879, 0.01737740707397461, 0.017360416412353516, 0.017488672256469728, 0.0175817928314209, 0.017584064483642577, 0.017512800216674805, 0.017532928466796875, 0.017618175506591796, 0.017543935775756837, 0.01742438316345215, 0.017477632522583008, 0.017518592834472657, 0.01762409591674805, 0.01743734359741211, 0.01747385597229004, 0.01749740791320801, 0.017547967910766602, 0.01744486427307129, 0.01756368064880371, 0.01817385673522949, 0.017845504760742186, 0.017701696395874024, 0.018321407318115233, 0.01776639938354492, 0.017822912216186523, 0.01771718406677246, 0.017783679962158204, 0.01779916763305664, 0.01782374382019043, 0.01785241508483887, 0.017707008361816406, 0.017736991882324218, 0.017598688125610353, 0.017643552780151367, 0.01762761688232422, 0.018331647872924805, 0.017616159439086915, 0.017582815170288087, 0.017725439071655275, 0.017768447875976562, 0.017489152908325194, 0.017525503158569336, 0.01740764808654785, 0.017512800216674805, 0.01742972755432129, 0.01750614356994629, 0.017427391052246093, 0.01866035270690918, 0.018094783782958986, 0.017454496383666994, 0.017554304122924805, 0.01755958366394043, 0.01742582321166992, 0.017400415420532226, 0.017375232696533204, 0.017354751586914064, 0.017303071975708007, 0.017222112655639648, 0.017169792175292967, 0.017320735931396485, 0.017275999069213867, 0.017332992553710937, 0.017327199935913085, 0.017292192459106445, 0.017926111221313476, 0.022798368453979492, 0.0210882568359375, 0.017911359786987303, 0.0176746883392334, 0.017688255310058593, 0.01761497688293457, 0.017561792373657226, 0.017772544860839845, 0.017733631134033204, 0.017687936782836915, 0.017433216094970703, 0.018817024230957033, 0.017462656021118163, 0.017437311172485353, 0.01744691276550293, 0.017559551239013673, 0.017459199905395507, 0.017408000946044923, 0.017384864807128905, 
0.017406560897827147, 0.01743052864074707, 0.01741619110107422, 0.017481727600097655, 0.017356800079345702, 0.018652671813964843, 0.018743167877197264, 0.017645856857299805, 0.017674591064453123, 0.017723392486572266, 0.01787494468688965, 0.01783193588256836, 0.01817804718017578, 0.017898687362670897]",tokens/s,56.600097984572606,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 28521 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,6625.062912,9171.828736,0.0,8776.58112,8188.314112,s,1,13.9212919921875,13.9212919921875,0.0,13.9212919921875,13.9212919921875,13.9212919921875,13.9212919921875,[13.9212919921875],,kWh,0.0002018349085958486,2.224601848706114e-05,8.983729409201002e-05,0.00031391822117491974,,MB,1475.559424,9184.411648,0.0,8776.58112,7654.291456,s,10,51.66322021484375,5.166322021484374,0.003286428396186504,5.1669511718750005,5.17017724609375,5.1707001953125005,5.1711185546875,"[5.16087841796875, 5.16267724609375, 5.16403466796875, 5.1631455078125, 5.16671630859375, 5.16718603515625, 5.168517578125, 5.1687802734375, 5.17006103515625, 5.17122314453125]",tokens/s,49.551692468145205,kWh,0.00015074363526833317,1.6627404322531275e-05,0.0001002143301713998,0.00026758536976226424,tokens/kWh,956704.0239436213,MB,1479.76192,9184.411648,0.0,8776.58112,7908.775424,s,10,25.089062011718752,2.5089062011718752,0.003301189497220273,2.50890869140625,2.5116713867187497,2.5143839111328123,2.5165539306640623,"[2.506116455078125, 2.50870654296875, 2.511068603515625, 2.509246337890625, 2.504135498046875, 2.506937255859375, 2.509302734375, 2.517096435546875, 2.50911083984375, 2.50734130859375]",tokens/s,25.110544176810425,kWh,7.337681714708651e-05,8.094045056716978e-06,4.901428921140206e-05,0.00013048515141520552,tokens/kWh,482813.556306749,,s,630,25.08557906723024,0.039818379471794015,0.0003776241935428,0.039851808547973634,0.04026335830688477,0.040357406044006346,0.040607508697509766,"[0.040671230316162106, 0.03988601684570312, 0.03957843017578125, 0.03957705688476563, 0.03949622344970703, 0.0393175048828125, 0.03927449417114258, 0.039602176666259765, 0.03947283172607422, 0.03934444808959961, 0.03931039810180664, 0.03926726531982422, 0.03919462585449219, 0.03933161544799805, 0.03931343841552734, 
0.039393184661865234, 0.03930755233764648, 0.039337982177734376, 0.03934003067016602, 0.039577598571777346, 0.03958169555664062, 0.039795936584472655, 0.03973795318603516, 0.03953398513793945, 0.03983849716186524, 0.03976806259155274, 0.03961600112915039, 0.03984371185302735, 0.039841793060302735, 0.03979942321777344, 0.03969638442993164, 0.03966886520385742, 0.03958992004394531, 0.03992867279052734, 0.03998044967651367, 0.039817825317382816, 0.03962611389160156, 0.03969292831420899, 0.039787616729736325, 0.03965020751953125, 0.039769569396972654, 0.039693984985351566, 0.040059680938720706, 0.04009379196166992, 0.04002406311035156, 0.03999948883056641, 0.04016707229614258, 0.04007766342163086, 0.04003184127807617, 0.04007513427734375, 0.04004240036010742, 0.04011894226074219, 0.04015472030639648, 0.04020848083496094, 0.040048736572265625, 0.039960769653320315, 0.03995443344116211, 0.04005177688598633, 0.04013561630249023, 0.040052734375, 0.04020633697509766, 0.04041625595092774, 0.040093952178955075, 0.0407894401550293, 0.0397993278503418, 0.03931545639038086, 0.03915974426269531, 0.03899193572998047, 0.0389815673828125, 0.03900831985473633, 0.039065601348876954, 0.03930112075805664, 0.03945062255859375, 0.03938703918457031, 0.0393891830444336, 0.03932918548583984, 0.03942575836181641, 0.03977238464355469, 0.039844608306884764, 0.039602176666259765, 0.03974515151977539, 0.039944576263427733, 0.0397844467163086, 0.03951385498046875, 0.039507518768310546, 0.039856830596923826, 0.03986431884765625, 0.039739391326904294, 0.03983564758300781, 0.04012851333618164, 0.04006256103515625, 0.039860641479492184, 0.03990249633789063, 0.03976470565795898, 0.03991686248779297, 0.03984764862060547, 0.039752094268798825, 0.03966214370727539, 0.03971072006225586, 0.03991551971435547, 0.04003014373779297, 0.03985987091064453, 0.03964076614379883, 0.03966230392456055, 0.03985391998291016, 0.0400425910949707, 0.04000473785400391, 0.039939006805419924, 0.040191009521484376, 0.04027695846557617, 0.040101886749267575, 0.039901023864746095, 0.040050846099853515, 0.04044134521484375, 0.04020684814453125, 0.040123519897460935, 0.03999337768554687, 0.04030550384521484, 0.04020560073852539, 0.04013312149047851, 0.04006115341186523, 0.04006911849975586, 0.04022886276245117, 0.040081409454345705, 0.04003587341308594, 0.04002819061279297, 0.04058723068237305, 0.03967388916015625, 0.03924582290649414, 0.03915161514282227, 0.03903286361694336, 0.03905503845214844, 0.03923791885375977, 0.03926015853881836, 0.03947689437866211, 0.03952060699462891, 0.039610366821289066, 0.03917824172973633, 0.03915161514282227, 0.03927027130126953, 0.03930928039550781, 0.03920016098022461, 0.039260929107666015, 0.03935574340820312, 0.03950864028930664, 0.03951520156860352, 0.039494590759277345, 0.03984780883789062, 0.03985120010375977, 0.03985049438476562, 0.039839649200439455, 0.0397410888671875, 0.03989184188842773, 0.03975167846679688, 0.039684097290039064, 0.039694271087646484, 0.03968928146362305, 0.040474624633789064, 0.04032614517211914, 0.03995647811889649, 0.04012851333618164, 0.04007673645019531, 0.04006278228759766, 0.03997126388549805, 0.04018207931518555, 0.040101886749267575, 0.040022014617919925, 0.039985088348388674, 0.03983699035644531, 0.03982617568969726, 0.040097793579101565, 0.040263294219970706, 0.040126846313476565, 0.040121631622314455, 0.040051422119140624, 0.03993804931640625, 0.03986191940307617, 0.0402940788269043, 0.04039132690429687, 0.040237056732177735, 0.040196094512939456, 0.04017484664916992, 
0.040051456451416015, 0.04042339324951172, 0.04048073577880859, 0.04040915298461914, 0.04025139236450195, 0.04077977752685547, 0.04069375991821289, 0.040468544006347654, 0.03946316909790039, 0.039139934539794925, 0.03903193664550781, 0.03917708969116211, 0.03936822509765625, 0.03922991943359375, 0.03916912078857422, 0.039347103118896484, 0.03926800155639648, 0.03919036865234375, 0.03927248001098633, 0.039428577423095704, 0.03943219375610352, 0.039396991729736326, 0.03934419250488281, 0.03946937561035156, 0.039734432220458984, 0.039741695404052736, 0.039561824798583986, 0.03935372924804687, 0.03933996963500976, 0.03932640075683594, 0.03951375961303711, 0.03963324737548828, 0.039723007202148435, 0.03964518356323242, 0.039569408416748046, 0.03964313507080078, 0.039809024810791016, 0.03983359909057617, 0.0399911994934082, 0.04002825546264648, 0.040005630493164065, 0.03998847961425781, 0.040069889068603516, 0.039962623596191404, 0.039989246368408206, 0.0398106575012207, 0.03980710220336914, 0.03981545639038086, 0.040236095428466796, 0.04026809692382813, 0.04016550445556641, 0.04007372665405273, 0.040111167907714844, 0.04016624069213867, 0.040198078155517576, 0.040183967590332034, 0.04021014404296875, 0.04025724792480469, 0.04029292678833008, 0.040350784301757814, 0.040285118103027345, 0.04028726577758789, 0.040162273406982425, 0.04017356872558594, 0.040513473510742186, 0.040538177490234376, 0.040398750305175785, 0.04030169677734375, 0.04034659194946289, 0.04026572799682617, 0.040588127136230466, 0.03966300964355469, 0.03942015838623047, 0.03930928039550781, 0.03922982406616211, 0.039163902282714845, 0.03908377456665039, 0.03921065521240234, 0.03928115081787109, 0.039395423889160154, 0.03934207916259766, 0.039223297119140625, 0.039200736999511716, 0.039041057586669925, 0.03915161514282227, 0.03952947235107422, 0.03945510482788086, 0.039434879302978516, 0.03944038391113281, 0.03934822463989258, 0.0395365104675293, 0.03951001739501953, 0.03982553482055664, 0.03981107330322266, 0.039806976318359374, 0.039686145782470705, 0.0398900146484375, 0.040237983703613284, 0.04008262252807617, 0.03971974563598633, 0.039608318328857424, 0.03956844711303711, 0.039701438903808596, 0.039825408935546876, 0.03998432159423828, 0.039983936309814457, 0.03994819259643555, 0.03975177764892578, 0.0398131217956543, 0.03967747116088867, 0.039690719604492185, 0.03976732635498047, 0.03960198211669922, 0.039979263305664064, 0.0400022087097168, 0.03997081756591797, 0.03999110412597656, 0.04000787353515625, 0.03988246536254883, 0.03988918304443359, 0.04005068969726563, 0.0402303352355957, 0.040087329864501954, 0.0399529914855957, 0.03978464126586914, 0.04002540969848633, 0.04009632110595703, 0.04011840057373047, 0.03999440002441406, 0.04006111907958984, 0.04035868835449219, 0.04050675201416016, 0.04028889465332031, 0.040471649169921874, 0.03966191864013672, 0.03924028778076172, 0.03903894424438477, 0.038927486419677734, 0.039048065185546876, 0.039221248626708984, 0.03926985549926758, 0.03936924743652344, 0.03928678512573242, 0.03941785430908203, 0.039325374603271485, 0.03931987380981445, 0.03924582290649414, 0.03922739028930664, 0.03953782272338867, 0.03944739151000977, 0.03946223831176758, 0.039611038208007814, 0.03951001739501953, 0.039329792022705076, 0.03950796890258789, 0.0394911994934082, 0.03965990447998047, 0.03970457458496094, 0.03995238494873047, 0.0397817268371582, 0.039629470825195315, 0.039495681762695314, 0.03952640151977539, 0.039939136505126954, 0.03991033554077148, 0.039767200469970704, 0.0398524169921875, 
0.039704383850097655, 0.03969247817993164, 0.04002838516235351, 0.04007731246948242, 0.039858272552490234, 0.04023721694946289, 0.0401797103881836, 0.040019359588623044, 0.040034912109375, 0.040005630493164065, 0.03992742538452149, 0.04009811019897461, 0.04029241561889649, 0.04030636978149414, 0.04011449432373047, 0.040134654998779294, 0.04017123031616211, 0.0401124153137207, 0.0401162223815918, 0.04011212921142578, 0.04008345413208008, 0.0405667839050293, 0.040502880096435545, 0.04047423934936523, 0.04016563034057617, 0.040042945861816406, 0.040011680603027344, 0.04018339157104492, 0.04018582534790039, 0.040546302795410154, 0.03985408020019531, 0.03958367919921875, 0.04019574356079102, 0.039250335693359374, 0.03991737747192383, 0.03984812927246094, 0.03968000030517578, 0.039489246368408205, 0.03955331039428711, 0.03920809555053711, 0.039218017578125, 0.03918812942504883, 0.03919286346435547, 0.039407646179199216, 0.03940969467163086, 0.03935232162475586, 0.039554080963134765, 0.03938812637329102, 0.039400577545166016, 0.03957440185546875, 0.039524192810058596, 0.039473312377929684, 0.0397720947265625, 0.039798175811767575, 0.03995238494873047, 0.04012099075317383, 0.039790592193603515, 0.039583518981933595, 0.039628543853759766, 0.039561695098876956, 0.03990937423706055, 0.040032257080078126, 0.04001177597045898, 0.03995852661132813, 0.04042502212524414, 0.040042945861816406, 0.039857887268066404, 0.03988508987426758, 0.03987865447998047, 0.03984384155273438, 0.03985408020019531, 0.039809024810791016, 0.039792640686035156, 0.03976764678955078, 0.03979510498046875, 0.03989503860473633, 0.040202239990234374, 0.04024729537963867, 0.040119873046875, 0.03990367889404297, 0.04016332626342774, 0.04007017517089844, 0.039930721282958985, 0.04001804733276367, 0.04024524688720703, 0.03999334335327148, 0.03998012924194336, 0.04023183822631836, 0.04035583877563476, 0.04017059326171875, 0.04017654418945313, 0.04029644775390625, 0.041139774322509766, 0.04004012680053711, 0.039780288696289065, 0.03935315322875976, 0.0390305290222168, 0.03908633422851562, 0.03926835250854492, 0.03936771011352539, 0.03939632034301758, 0.03954687881469727, 0.03957551956176758, 0.039583263397216795, 0.03950643157958984, 0.03966912078857422, 0.03980505752563476, 0.03986073684692383, 0.03967155075073242, 0.03990758514404297, 0.03966515350341797, 0.03971516799926758, 0.039690399169921876, 0.0399788818359375, 0.03975721740722656, 0.03992035293579101, 0.039875968933105466, 0.0398682861328125, 0.04026393508911133, 0.04022937774658203, 0.039822399139404295, 0.03995743942260742, 0.03987225723266601, 0.0398315200805664, 0.03981955337524414, 0.039922752380371095, 0.03981571197509766, 0.04020681762695313, 0.040040382385253905, 0.04007526397705078, 0.0401797103881836, 0.04033331298828125, 0.04033846282958985, 0.04018684768676758, 0.04016864013671875, 0.04027065658569336, 0.040080894470214845, 0.040290817260742184, 0.04031488037109375, 0.04013391876220703, 0.04028079986572266, 0.0401855697631836, 0.04012675094604492, 0.04028211212158203, 0.04016128158569336, 0.04023462295532226, 0.04014937591552734, 0.04016332626342774, 0.040013824462890625, 0.04007731246948242, 0.04040604782104492, 0.040166366577148435, 0.04000153732299805, 0.03996876907348633, 0.04024483108520508, 0.04061542510986328, 0.03952076721191406, 0.03927040100097656, 0.03989465713500977, 0.03923187255859375, 0.039163902282714845, 0.03902012634277344, 0.03904758453369141, 0.039196670532226564, 0.03918822479248047, 0.039205120086669924, 0.03942195129394531, 0.039396991729736326, 
0.03942140960693359, 0.03976204681396484, 0.0397627182006836, 0.03954278564453125, 0.03960566329956055, 0.040185985565185545, 0.040046241760253905, 0.0397217903137207, 0.03947520065307617, 0.039388671875, 0.0397334098815918, 0.03983302307128906, 0.03986115264892578, 0.039792640686035156, 0.03984912109375, 0.0397196159362793, 0.03953065490722656, 0.039362560272216796, 0.039632801055908204, 0.039716064453125, 0.0398889274597168, 0.039887710571289064, 0.039814910888671874, 0.03962905502319336, 0.039782176971435546, 0.04019222259521484, 0.04023849487304688, 0.040039009094238284, 0.04001116943359375, 0.04006947326660156, 0.04032742309570313, 0.04012441635131836, 0.039962623596191404, 0.03984384155273438, 0.04016742324829101, 0.04017334365844726, 0.04012432098388672, 0.04013907241821289, 0.04021440124511719, 0.040402080535888674, 0.04027900695800781, 0.03989904022216797, 0.03986441421508789, 0.040089599609375, 0.04020627212524414, 0.04020844650268555, 0.0402083854675293, 0.04020147323608399, 0.04017023849487305, 0.04048486328125, 0.04073708724975586, 0.0398675537109375, 0.039295841217041015, 0.039229438781738284, 0.039213054656982424, 0.03918643188476562, 0.0393175048828125, 0.039329792022705076, 0.039419902801513675, 0.03922534561157227, 0.03913312149047852, 0.039147552490234376, 0.03956943893432617, 0.0395601921081543, 0.03951308822631836, 0.03950102233886719, 0.03955315017700195, 0.03951001739501953, 0.039440673828125, 0.03932355117797852, 0.03925449752807617, 0.039363838195800784, 0.03939609527587891, 0.039798782348632815, 0.040089599609375, 0.040103008270263675, 0.039842273712158205, 0.03966582489013672, 0.039450912475585936, 0.03949961471557617, 0.039550880432128906, 0.03981881713867187, 0.03986435317993164, 0.039790943145751954, 0.039721057891845706, 0.03976812744140625, 0.039790145874023436, 0.040149089813232425, 0.04009011077880859, 0.039994720458984376, 0.03995212936401367, 0.039981983184814454, 0.03995443344116211, 0.039948287963867186, 0.0402204818725586, 0.04033145523071289, 0.04005683135986328, 0.04019814300537109, 0.04032716751098633, 0.04055654525756836, 0.04022415924072266, 0.040227424621582034, 0.040081409454345705, 0.04009695816040039, 0.04006585693359375, 0.04009267044067383, 0.040010753631591796, 0.04000972747802734, 0.03996876907348633, 0.039927616119384765, 0.04011228942871094, 0.04027395248413086, 0.04027391815185547]",tokens/s,25.114030587517153,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,8197.976064,12133.00736,0.0,11737.759744,11171.24352,s,1,16.15705859375,16.15705859375,0.0,16.15705859375,16.15705859375,16.15705859375,16.15705859375,[16.15705859375],,kWh,0.0002674889275624385,2.9477125894187513e-05,0.00011286620140399384,0.00040983225486061986,,MB,3844.460544,12147.687424,0.0,11739.856896,10924.35968,s,10,58.036142578124995,5.8036142578125,0.0021390588196081445,5.8043203125,5.805863720703125,5.805999731445312,5.806108540039062,"[5.79872802734375, 5.8013388671875, 5.802384765625, 5.80354345703125, 5.80465673828125, 5.80398388671875, 5.8046845703125, 5.80485302734375, 5.8061357421875, 5.80583349609375]",tokens/s,44.11044370417747,kWh,0.0001693208911624985,1.8675229001102623e-05,0.00011251667334660209,0.0003005127935102032,tokens/kWh,851877.209651336,MB,3844.460544,12149.784576,0.0,11741.954048,10924.36224,s,10,29.07179711914063,2.907179711914063,0.0021990716385634295,2.9076781005859376,2.909408544921875,2.9098297607421877,2.910166733398438,"[2.908518310546875, 2.908570556640625, 2.908798095703125, 2.9102509765625, 2.90931494140625, 2.906340087890625, 2.906837890625, 2.90321435546875, 2.905914794921875, 2.904037109375]",tokens/s,21.67048694713177,kWh,8.492498021875388e-05,9.36892409923487e-06,5.66316008607959e-05,0.0001509255051787847,tokens/kWh,417424.4765678994,,s,630,29.06872789764404,0.046140837932768325,0.00039546610824790876,0.046134544372558595,0.04650437202453613,0.04662674808502197,0.047907263565063474,"[0.047685951232910154, 0.045671966552734374, 0.04559734344482422, 0.04569497680664063, 0.04538380813598633, 0.04575600051879883, 0.046360286712646484, 0.04605587387084961, 0.04597481536865235, 0.04598255920410156, 0.04580147171020508, 0.04569497680664063, 0.04575641632080078, 0.04568016052246094, 0.046053855895996094, 0.0458419189453125, 0.0457564811706543, 0.04612908935546875, 0.046100990295410156, 0.04559872055053711, 0.045706783294677734, 0.04615011215209961, 0.045891166687011715, 0.045817569732666014, 0.04613584136962891, 0.0471404800415039, 0.04574057769775391, 0.046276607513427735, 0.04626227188110352, 0.04591820907592774, 0.04631455993652344, 0.04622022247314453, 0.04603887939453125, 0.046028926849365236, 0.046370849609375, 0.04622441482543945, 0.045937599182128905, 0.045948158264160155, 0.04641667175292969, 0.046349727630615234, 0.04619027328491211, 0.04615875244140625, 0.04602262496948242, 0.04618399810791016, 0.046508289337158205, 0.04642019271850586, 0.04618239974975586, 0.046053375244140625, 0.046537952423095705, 0.04658051300048828, 0.04630323028564453, 0.0465715217590332, 0.04640332794189453, 0.0461759033203125, 0.04671958541870117, 0.04661862564086914, 0.04628873443603516, 0.04673961639404297, 0.0466874885559082, 0.04631763076782226, 0.046364959716796876, 
0.04636671829223633, 0.04633203125, 0.04797027206420899, 0.046301185607910154, 0.04573171234130859, 0.04546572875976562, 0.045362945556640624, 0.045827903747558595, 0.046047679901123045, 0.04549369430541992, 0.04565599822998047, 0.04662134552001953, 0.04610403060913086, 0.04573222351074219, 0.04568691253662109, 0.045830142974853515, 0.04561711883544922, 0.04550454330444336, 0.04608988952636719, 0.04595859146118164, 0.0458309440612793, 0.04575859069824219, 0.045946880340576174, 0.04637286376953125, 0.046432254791259765, 0.04611840057373047, 0.04597593688964844, 0.0459218864440918, 0.04634883117675781, 0.04636659240722656, 0.04606140899658203, 0.0459574089050293, 0.0460052490234375, 0.04615248107910156, 0.04637513732910156, 0.04640563201904297, 0.04602470397949219, 0.04631552124023437, 0.046273887634277346, 0.04638172912597656, 0.046274593353271484, 0.04606768035888672, 0.0461036491394043, 0.04602124786376953, 0.04616563034057617, 0.046342494964599606, 0.04674387359619141, 0.0465797119140625, 0.04618598556518555, 0.04669827270507813, 0.046631168365478516, 0.046206527709960935, 0.04613622283935547, 0.04648278427124024, 0.04599862289428711, 0.046225536346435545, 0.04632166290283203, 0.046114398956298826, 0.04626063919067383, 0.046634719848632815, 0.046346527099609375, 0.04645052719116211, 0.04660860824584961, 0.04639328002929687, 0.04619615936279297, 0.04867356872558594, 0.04640361785888672, 0.04590108871459961, 0.0458001594543457, 0.04580556869506836, 0.046233150482177736, 0.045695423126220706, 0.045537281036376956, 0.0462213134765625, 0.04594438552856445, 0.045528831481933596, 0.04609408187866211, 0.045910655975341795, 0.04579155349731445, 0.045914112091064455, 0.04604662322998047, 0.04559222412109375, 0.04549932861328125, 0.045958751678466796, 0.046125473022460936, 0.045969310760498046, 0.046071903228759765, 0.04612108612060547, 0.045873023986816405, 0.046152801513671876, 0.04615171051025391, 0.045933216094970704, 0.04577264022827148, 0.045790752410888674, 0.046196670532226564, 0.046013118743896485, 0.046104801177978515, 0.046243839263916016, 0.04630508804321289, 0.0459747200012207, 0.04605440139770508, 0.04626998519897461, 0.04605795288085938, 0.04594483184814453, 0.04634422302246094, 0.046458847045898435, 0.046247135162353514, 0.04615843200683594, 0.04650358581542969, 0.04660892868041992, 0.046227455139160156, 0.04694012832641602, 0.04652649688720703, 0.046157470703125, 0.04615974426269531, 0.046328289031982425, 0.046045055389404295, 0.04642364883422852, 0.04652291107177734, 0.04630527877807617, 0.04614348983764648, 0.04655926513671875, 0.046268383026123044, 0.046034942626953124, 0.046497791290283204, 0.04655513763427734, 0.04642566299438477, 0.046285247802734374, 0.048330753326416016, 0.0461578254699707, 0.04567606353759766, 0.045634014129638675, 0.045565185546875, 0.04548479843139648, 0.04542233657836914, 0.0456808967590332, 0.046159870147705076, 0.046276222229003905, 0.04629043197631836, 0.04636966323852539, 0.04600831985473633, 0.045926078796386716, 0.0462174072265625, 0.04598729705810547, 0.04562188720703125, 0.0458158073425293, 0.04619203186035156, 0.04594137573242187, 0.045969345092773437, 0.04642412948608399, 0.04625740814208985, 0.045868991851806644, 0.04589241409301758, 0.046233631134033205, 0.04605948638916016, 0.04572137451171875, 0.045891807556152346, 0.045967201232910156, 0.04570854568481445, 0.045969982147216794, 0.04637868881225586, 0.0461910400390625, 0.04601878356933594, 0.046333023071289066, 0.04635465621948242, 0.045873855590820314, 0.046086143493652344, 
0.04635443115234375, 0.046153728485107424, 0.04614963150024414, 0.04690502548217773, 0.046682430267333985, 0.04632371139526367, 0.046635009765625, 0.04655923080444336, 0.0463394889831543, 0.04627475357055664, 0.04621542358398437, 0.04620307159423828, 0.04619260787963867, 0.046473217010498044, 0.04616115188598633, 0.04619545745849609, 0.04665676879882812, 0.04666444778442383, 0.04633190536499023, 0.04621279907226562, 0.046725440979003906, 0.046415870666503906, 0.04640361785888672, 0.0467059211730957, 0.04791497421264648, 0.04597577667236328, 0.04602243041992188, 0.04580988693237305, 0.04555699157714844, 0.04589235305786133, 0.046071807861328126, 0.04588336181640625, 0.04542876815795899, 0.04563558578491211, 0.046208576202392576, 0.04593222427368164, 0.04564249420166016, 0.04581785583496094, 0.04615167999267578, 0.04618035125732422, 0.04585062408447266, 0.04628684616088867, 0.0462437744140625, 0.045822017669677736, 0.046253089904785154, 0.04621001434326172, 0.04604111862182617, 0.04593660736083984, 0.04587519836425781, 0.04573593521118164, 0.04585881423950195, 0.04638003158569336, 0.04607664108276367, 0.045912353515625, 0.04572159957885742, 0.046284801483154295, 0.04611276626586914, 0.04610240173339844, 0.04638864135742188, 0.04638793563842773, 0.04635583877563477, 0.04622403335571289, 0.04661644744873047, 0.046458976745605465, 0.04645238494873047, 0.046333824157714844, 0.04664368057250977, 0.046273887634277346, 0.04640774536132813, 0.046219871520996096, 0.046094337463378904, 0.04614144134521484, 0.04649552154541016, 0.04629731369018555, 0.04615155029296875, 0.04656755065917969, 0.046311134338378905, 0.04595296096801758, 0.04627190399169922, 0.04661446380615234, 0.04634521484375, 0.046186496734619144, 0.046524417877197265, 0.04625411224365234, 0.046215137481689456, 0.04657094573974609, 0.046416446685791014, 0.048107391357421876, 0.04614569473266601, 0.04600035095214844, 0.04551446533203125, 0.04519811248779297, 0.046034175872802736, 0.045975616455078125, 0.04581036758422852, 0.04597350311279297, 0.04611686325073242, 0.04603084945678711, 0.04552499389648437, 0.045520126342773436, 0.04597836685180664, 0.04599529647827148, 0.04567728042602539, 0.04565164947509766, 0.04603110504150391, 0.04588959884643555, 0.04604108810424805, 0.046132926940917966, 0.045951297760009766, 0.04591001510620117, 0.046151103973388674, 0.04623996734619141, 0.04588374328613281, 0.04581990432739258, 0.04585027313232422, 0.046274078369140624, 0.045988670349121095, 0.04624796676635742, 0.04623049545288086, 0.04624281692504883, 0.045709312438964846, 0.04623360061645508, 0.04622335815429687, 0.045956417083740236, 0.046055999755859375, 0.04634339141845703, 0.046137344360351565, 0.04629391860961914, 0.04628275299072265, 0.04603289413452148, 0.04649123382568359, 0.0464101448059082, 0.04637635040283203, 0.046106849670410156, 0.046079486846923826, 0.046330753326416015, 0.04647731018066406, 0.04632758331298828, 0.04665315246582031, 0.0466099853515625, 0.04624246215820312, 0.04616016006469727, 0.046565376281738284, 0.04618035125732422, 0.04613324737548828, 0.046295040130615236, 0.0462213134765625, 0.046137344360351565, 0.04644764709472656, 0.04642505645751953, 0.049053215026855466, 0.046432254791259765, 0.04566860961914063, 0.0454760627746582, 0.04546559906005859, 0.04594073486328125, 0.045675807952880856, 0.04574841690063477, 0.04558287811279297, 0.045708961486816406, 0.045486431121826175, 0.04594879913330078, 0.046159038543701174, 0.04601513671875, 0.046020702362060545, 0.0457852783203125, 0.04549836730957031, 
0.045606910705566404, 0.046034942626953124, 0.046053375244140625, 0.045837535858154296, 0.04637366485595703, 0.04620479965209961, 0.046181632995605466, 0.045918846130371095, 0.04615590286254883, 0.04619641494750976, 0.04590636825561523, 0.045590526580810545, 0.04621078491210937, 0.0461888313293457, 0.04592559814453125, 0.04643900680541992, 0.04642969512939453, 0.04614556884765625, 0.04609500885009766, 0.04645852661132813, 0.04611872100830078, 0.04586953735351563, 0.04627872085571289, 0.04634767913818359, 0.046201438903808595, 0.04641791915893555, 0.04671078491210937, 0.046358528137207033, 0.04605865478515625, 0.046433120727539065, 0.04614070510864258, 0.046035678863525394, 0.04593628692626953, 0.04615615844726562, 0.04632995223999024, 0.0464422721862793, 0.04641392135620117, 0.04637900924682617, 0.046247230529785154, 0.04636127853393555, 0.046456863403320316, 0.04613014221191406, 0.04606057739257813, 0.046452705383300784, 0.04644659042358398, 0.046166015625, 0.048121726989746094, 0.04624873733520508, 0.04612092971801758, 0.045854015350341795, 0.04557590484619141, 0.04574079895019531, 0.046020862579345706, 0.045956382751464846, 0.04579507064819336, 0.04604617691040039, 0.04610022354125977, 0.045660446166992184, 0.045733856201171874, 0.046013694763183594, 0.04583673477172852, 0.04552473449707031, 0.0458155517578125, 0.046166656494140625, 0.04593068695068359, 0.045625343322753906, 0.04583958435058594, 0.046082111358642576, 0.04605001449584961, 0.04570320129394531, 0.045711326599121097, 0.045676544189453126, 0.04612710571289062, 0.04579660797119141, 0.04612921524047851, 0.04612575912475586, 0.04596310424804687, 0.04614364624023438, 0.045963264465332034, 0.045707263946533204, 0.04571136093139649, 0.04603084945678711, 0.046207008361816404, 0.045975521087646486, 0.046209022521972655, 0.046266368865966793, 0.04597350311279297, 0.04578303909301758, 0.04622492980957031, 0.04628323364257812, 0.04613891220092774, 0.0464040641784668, 0.046360576629638675, 0.04631049728393555, 0.04601500701904297, 0.045994369506835935, 0.04642172622680664, 0.04637036895751953, 0.04630559921264649, 0.04642844772338867, 0.046127231597900394, 0.04606547164916992, 0.04640582275390625, 0.046513504028320315, 0.046457313537597654, 0.046591358184814455, 0.04631216049194336, 0.045973247528076175, 0.04615321731567383, 0.04788838577270508, 0.04567574310302734, 0.04563228988647461, 0.046018558502197264, 0.045694465637207034, 0.04562700653076172, 0.04554617691040039, 0.04596128082275391, 0.04604735946655274, 0.046018558502197264, 0.04585062408447266, 0.045742080688476565, 0.04583974456787109, 0.04572134399414062, 0.0459293441772461, 0.04584198379516601, 0.04611731338500977, 0.04588339233398438, 0.04560627365112305, 0.04551948928833008, 0.04606771087646484, 0.046129024505615235, 0.046155265808105465, 0.04623628616333008, 0.04619571304321289, 0.045943809509277345, 0.04578303909301758, 0.045840385437011716, 0.046159870147705076, 0.04605542373657227, 0.04620492935180664, 0.046227455139160156, 0.04605747222900391, 0.04641996765136719, 0.046607681274414066, 0.046459583282470705, 0.04619171142578125, 0.046082977294921876, 0.04643017578125, 0.04610460662841797, 0.04610012817382812, 0.046559585571289065, 0.046220672607421874, 0.04596316909790039, 0.04626710510253906, 0.04653875350952148, 0.04618361663818359, 0.0460173454284668, 0.045928001403808594, 0.04597574234008789, 0.04626457595825195, 0.04619440078735351, 0.04606595230102539, 0.0462110710144043, 0.04602182388305664, 0.04639622497558594, 0.04657561492919922, 0.04650393676757812, 
0.04650300979614258, 0.04654127883911133, 0.046440895080566404, 0.04642816162109375, 0.04624089431762695, 0.047880001068115234, 0.04609452819824219, 0.04592771148681641, 0.04542867279052734, 0.04546160125732422, 0.04594348907470703, 0.04569251251220703, 0.04527145767211914, 0.04559657669067383, 0.046053470611572264, 0.04602675247192383, 0.04567859268188477, 0.04598732757568359, 0.04583033752441406, 0.04581795120239258, 0.04599625778198242, 0.046102142333984374, 0.045986175537109375, 0.04549337768554688, 0.04565900802612305, 0.04610412979125977, 0.04600467300415039, 0.04611686325073242, 0.04613040161132813, 0.0461561279296875, 0.045682113647460935, 0.04587567901611328, 0.04602729415893555, 0.04573507308959961, 0.04572361755371094, 0.04626931381225586, 0.046258174896240234, 0.045963264465332034, 0.04594825744628906, 0.045892127990722655, 0.04602399826049805, 0.04576544189453125, 0.046346176147460935, 0.04641187286376953, 0.046268192291259766, 0.046305473327636716, 0.04620492935180664, 0.04635036849975586, 0.046014430999755856, 0.046353790283203126, 0.046401409149169924, 0.046026878356933594, 0.04627020645141602, 0.046314334869384764, 0.046215198516845704, 0.04663296127319336, 0.04641097640991211, 0.04611334228515625, 0.04589561462402344, 0.0461962890625, 0.04614012908935547, 0.046020606994628906, 0.04637814331054688, 0.046526622772216794, 0.046641857147216796, 0.04665753555297852, 0.04655654525756836, 0.04652300643920899]",tokens/s,21.67277502539284,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File 
""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. 
Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File 
""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4899.647488,7907.24608,0.0,7511.998464,6895.682048,s,1,12.6356240234375,12.6356240234375,0.0,12.6356240234375,12.6356240234375,12.6356240234375,12.6356240234375,[12.6356240234375],,kWh,0.00016642297527916223,1.8348216214458956e-05,6.775477642601002e-05,0.0002525259679196312,,MB,2781.53216,7921.926144,0.0,7514.095616,6822.139904,s,10,29.33606640625,2.9336066406250003,0.0027620805749914213,2.9343319091796873,2.9365601806640624,2.9366376342773437,2.936699597167969,"[2.927570556640625, 2.931563232421875, 2.93044921875, 2.93380517578125, 2.9352333984375, 2.934239013671875, 2.9344248046875, 2.93552294921875, 2.936715087890625, 2.93654296875]",tokens/s,87.26459657367683,kWh,8.558926053708242e-05,9.439313111170493e-06,5.69508511161998e-05,0.00015197942476445273,tokens/kWh,1684438.5376294516,MB,2785.857536,7924.023296,0.0,7516.192768,6822.142464,s,10,18.646573852539063,1.8646573852539063,0.014621137052411227,1.8582719116210937,1.8802652954101562,1.888691973876953,1.8954333166503907,"[1.850527099609375, 1.855883056640625, 1.859441650390625, 1.8488958740234376, 1.8571021728515624, 1.8702486572265624, 1.8531971435546875, 1.8783927001953125, 1.875766845703125, 1.89711865234375]",tokens/s,33.78636767173259,kWh,5.55465222549977e-05,6.127621784968093e-06,3.683152946519774e-05,9.850567350516351e-05,tokens/kWh,639557.070757936,,s,630,18.644053899765005,0.029593736348833357,0.00044405198722840183,0.029464848518371584,0.03013679370880127,0.03030251989364624,0.030792031955718994,"[0.03071254348754883, 0.029541919708251953, 0.02955721664428711, 0.029360128402709962, 0.029306880950927733, 0.029634559631347656, 0.029908992767333983, 0.02945372772216797, 0.02946928024291992, 0.02952396774291992, 0.029399295806884766, 0.029515520095825195, 0.0293021125793457, 0.029321151733398436, 0.029186784744262697, 0.029198335647583007, 0.02927622413635254, 0.029253568649291992, 0.029136064529418946, 0.029186880111694336, 0.029591552734375, 0.02934169578552246, 0.029371648788452148, 0.02945075225830078, 0.02935219192504883, 0.0291778564453125, 0.029337087631225587, 0.02923369598388672, 0.029177824020385743, 0.02927324867248535, 0.029203296661376953, 0.029161088943481444, 0.029220735549926758, 0.029493471145629883, 0.029196575164794923, 0.029216768264770508, 0.029190143585205077, 0.029231103897094726, 0.029251583099365236, 0.029257728576660157, 0.02933964729309082, 0.029360128402709962, 0.029249120712280273, 0.029301151275634766, 0.0293253116607666, 0.029261823654174804, 0.029237247467041014, 0.02935398483276367, 0.02917180824279785, 0.029302688598632814, 0.029535903930664062, 0.02949567985534668, 0.02934988784790039, 0.029505504608154296, 0.02945996856689453, 0.029374975204467774, 0.02934067153930664, 0.029500160217285156, 0.029519296646118163, 0.029403968811035155, 0.02938431930541992, 0.029304800033569337, 0.02924176025390625, 0.030669727325439454, 0.02971174430847168, 0.02929142379760742, 0.030680320739746095, 0.03008310317993164, 0.02933340835571289, 0.029467168807983397, 0.029480960845947264, 0.029282304763793947, 0.029179744720458985, 0.029290464401245116, 0.029220991134643555, 0.029202495574951172, 0.02922700881958008, 0.029241344451904298, 0.02904697608947754, 0.029086847305297852, 0.029428031921386717, 0.029157632827758788, 0.029153408050537108, 0.029778976440429688, 0.029229856491088866, 0.02905516815185547, 0.0290283203125, 0.029209951400756835, 0.029684415817260744, 0.029206527709960937, 0.029100160598754882, 0.02932086372375488, 0.029094112396240233, 
0.029203935623168944, 0.029286943435668945, 0.029403135299682616, 0.02925132751464844, 0.029257984161376954, 0.02914303970336914, 0.02911846351623535, 0.029132799148559572, 0.02918400001525879, 0.02925676727294922, 0.029100992202758788, 0.029458431243896483, 0.02911430358886719, 0.029224992752075196, 0.029204511642456056, 0.029195808410644532, 0.02911894416809082, 0.02998851203918457, 0.02949068832397461, 0.03374371337890625, 0.029265920639038087, 0.029418560028076173, 0.02928531265258789, 0.02918604850769043, 0.02922697639465332, 0.02996227264404297, 0.029130752563476563, 0.029243040084838866, 0.029380224227905274, 0.029438432693481446, 0.029620479583740235, 0.031041536331176758, 0.029624319076538085, 0.03053727912902832, 0.029822399139404297, 0.02981990432739258, 0.030123872756958006, 0.029986719131469726, 0.029599903106689453, 0.029515071868896483, 0.029471391677856444, 0.029599424362182616, 0.029673919677734376, 0.02934979248046875, 0.029194303512573242, 0.029220544815063476, 0.029201791763305663, 0.029344736099243166, 0.02939888000488281, 0.02955446434020996, 0.02947439956665039, 0.029309759140014647, 0.029791807174682616, 0.030344959259033202, 0.029921632766723633, 0.029704095840454102, 0.02973891258239746, 0.029693695068359376, 0.029698816299438477, 0.029925376892089843, 0.029902847290039062, 0.029738239288330078, 0.02970086479187012, 0.029650623321533204, 0.029507904052734374, 0.029325023651123047, 0.029376800537109376, 0.029462528228759766, 0.029619295120239256, 0.029608863830566406, 0.029738176345825196, 0.029552576065063476, 0.02936835289001465, 0.029289024353027344, 0.029423103332519532, 0.029306880950927733, 0.02927043151855469, 0.029180288314819336, 0.02950044822692871, 0.029602815628051758, 0.029435680389404296, 0.029538496017456055, 0.029378559112548826, 0.029210176467895508, 0.029299135208129882, 0.029206527709960937, 0.02919590377807617, 0.02920863914489746, 0.029323232650756835, 0.02931315231323242, 0.029165824890136718, 0.02919808006286621, 0.029110496520996093, 0.029138944625854493, 0.02918400001525879, 0.029151264190673827, 0.030474943161010744, 0.029593791961669922, 0.029539936065673827, 0.02919196891784668, 0.029149824142456055, 0.02916348838806152, 0.029130144119262694, 0.029136800765991212, 0.029338336944580077, 0.029095935821533202, 0.029458431243896483, 0.029310976028442383, 0.029206527709960937, 0.029280256271362305, 0.02935603141784668, 0.029239295959472656, 0.029437408447265626, 0.030534175872802733, 0.03058483123779297, 0.02935753631591797, 0.029340192794799803, 0.02924492835998535, 0.029546848297119142, 0.0292737922668457, 0.02919487953186035, 0.029175552368164062, 0.029231199264526365, 0.029112447738647462, 0.029310848236083983, 0.029138944625854493, 0.029198495864868165, 0.029159263610839845, 0.029148319244384765, 0.029137567520141603, 0.029339712142944337, 0.029177440643310546, 0.029293088912963866, 0.029276159286499022, 0.029212671279907225, 0.029241344451904298, 0.02920038414001465, 0.029211999893188477, 0.029330080032348632, 0.029175487518310547, 0.02939321517944336, 0.02932512092590332, 0.029165760040283203, 0.02915033531188965, 0.02918012809753418, 0.02913267135620117, 0.029246240615844726, 0.029294591903686523, 0.029390815734863282, 0.029324447631835938, 0.029275007247924804, 0.02929420852661133, 0.02930521583557129, 0.029413375854492187, 0.02935603141784668, 0.02930624008178711, 0.029335296630859375, 0.029690752029418944, 0.029834848403930664, 0.030721311569213868, 0.029937824249267577, 0.030032543182373046, 0.029799423217773437, 
0.02957155227661133, 0.02943948745727539, 0.029430719375610353, 0.02955366325378418, 0.029494272232055665, 0.029425664901733397, 0.02948310470581055, 0.02938256072998047, 0.029409151077270507, 0.029450368881225587, 0.02932326316833496, 0.029327360153198243, 0.029351936340332032, 0.029671424865722655, 0.02998476791381836, 0.03020595169067383, 0.02958892822265625, 0.029339967727661134, 0.02936454391479492, 0.029387872695922853, 0.029423711776733398, 0.029518592834472657, 0.029437952041625977, 0.029287647247314454, 0.02928838348388672, 0.029204511642456056, 0.029217344284057617, 0.029395200729370116, 0.029393983840942384, 0.029565887451171877, 0.029929279327392578, 0.03021126365661621, 0.029519968032836914, 0.02934480094909668, 0.029282175064086913, 0.029241344451904298, 0.029734144210815428, 0.02920729637145996, 0.029421567916870117, 0.029243392944335936, 0.02922217559814453, 0.029207263946533203, 0.029199552536010743, 0.029208736419677736, 0.029305503845214843, 0.029378719329833984, 0.029288288116455077, 0.02954185676574707, 0.02959823989868164, 0.029483007431030273, 0.029372415542602538, 0.029304832458496095, 0.029220096588134764, 0.02926464080810547, 0.029279903411865236, 0.02930031967163086, 0.029352703094482423, 0.0293571834564209, 0.029410175323486328, 0.030738048553466797, 0.02966771125793457, 0.0294072322845459, 0.030195711135864257, 0.02919363212585449, 0.029252191543579102, 0.029183744430541992, 0.029293983459472657, 0.029631328582763673, 0.029304832458496095, 0.02941542434692383, 0.02948646354675293, 0.029382272720336913, 0.02944118309020996, 0.029408767700195314, 0.02941302490234375, 0.029481664657592773, 0.02971238327026367, 0.030040063858032227, 0.030084672927856444, 0.03004607963562012, 0.03000172805786133, 0.030136320114135744, 0.03004342460632324, 0.0302271671295166, 0.03028995132446289, 0.030136287689208983, 0.03024844741821289, 0.03030067253112793, 0.030271232604980467, 0.03010367965698242, 0.0305296630859375, 0.0316231689453125, 0.030043296813964844, 0.029959007263183592, 0.029837312698364257, 0.0299234561920166, 0.029363199234008788, 0.029377248764038084, 0.029335712432861327, 0.02946236801147461, 0.0293602237701416, 0.029421632766723632, 0.02938012886047363, 0.029370784759521484, 0.029296703338623047, 0.029373600006103517, 0.029233983993530274, 0.029304864883422852, 0.029310976028442383, 0.02941129684448242, 0.029381664276123046, 0.0293304328918457, 0.029343391418457033, 0.029614431381225586, 0.029376319885253906, 0.029363744735717772, 0.029460575103759764, 0.02976326370239258, 0.02947564888000488, 0.029556543350219726, 0.029427967071533202, 0.029468671798706055, 0.03061199951171875, 0.029850496292114257, 0.029348831176757812, 0.029140960693359374, 0.029380640029907226, 0.029149183273315428, 0.0293621768951416, 0.029222911834716796, 0.02948054313659668, 0.029390239715576173, 0.029723712921142578, 0.02931705665588379, 0.029405183792114258, 0.02932918357849121, 0.029499616622924805, 0.029345792770385744, 0.029586816787719728, 0.029299423217773436, 0.029278175354003906, 0.029398975372314454, 0.02935353660583496, 0.02998931121826172, 0.029310976028442383, 0.029245407104492187, 0.02931100845336914, 0.029302783966064453, 0.029165536880493163, 0.029380640029907226, 0.029193887710571288, 0.029171104431152343, 0.0291409912109375, 0.029336191177368163, 0.02938211250305176, 0.02939334487915039, 0.029276191711425783, 0.029254016876220704, 0.02926348876953125, 0.02926630401611328, 0.029249759674072267, 0.02918806457519531, 0.02921855926513672, 0.029394144058227538, 
0.02952432060241699, 0.02958729553222656, 0.029608287811279298, 0.029345184326171874, 0.029358144760131835, 0.029518400192260742, 0.030132511138916015, 0.02962227249145508, 0.029550592422485353, 0.02929193687438965, 0.029309375762939453, 0.029405344009399415, 0.029894496917724608, 0.029599903106689453, 0.02944144058227539, 0.029447872161865233, 0.029397375106811525, 0.02924527931213379, 0.029253631591796874, 0.029292736053466797, 0.029230880737304687, 0.030592735290527345, 0.029710464477539063, 0.02971664047241211, 0.02952396774291992, 0.029761375427246092, 0.029657344818115234, 0.029403039932250977, 0.02940310478210449, 0.02949081611633301, 0.029411584854125976, 0.029478559494018553, 0.029385215759277345, 0.029294591903686523, 0.029368064880371095, 0.029644319534301758, 0.02951807975769043, 0.029606367111206056, 0.02983526420593262, 0.02953011131286621, 0.029421567916870117, 0.029601791381835937, 0.029792255401611328, 0.029877504348754882, 0.02946735954284668, 0.029454208374023436, 0.02959561538696289, 0.029309247970581053, 0.029450111389160157, 0.029493408203125, 0.0295664005279541, 0.029517248153686525, 0.029534303665161132, 0.029401216506958008, 0.029442815780639647, 0.029576448440551757, 0.02962713623046875, 0.030141504287719726, 0.030131135940551758, 0.03040460777282715, 0.030574880599975585, 0.030158559799194337, 0.03015884780883789, 0.030029823303222656, 0.030103551864624024, 0.02999091148376465, 0.03073548889160156, 0.030911584854125977, 0.030304031372070314, 0.030256895065307616, 0.030145792007446288, 0.030112735748291014, 0.030151775360107422, 0.0302109432220459, 0.029769792556762695, 0.029744800567626954, 0.02976291275024414, 0.029649152755737304, 0.02982374382019043, 0.030008384704589844, 0.030035968780517577, 0.03004921531677246, 0.03024070358276367, 0.030019647598266603, 0.030801952362060545, 0.029956192016601563, 0.029792255401611328, 0.029654464721679687, 0.02974777603149414, 0.02970403289794922, 0.02966534423828125, 0.029595296859741212, 0.029691423416137695, 0.02948793601989746, 0.02970159912109375, 0.029938304901123047, 0.02974300765991211, 0.029611488342285157, 0.0298789119720459, 0.02953398323059082, 0.029583583831787108, 0.02964816093444824, 0.029874912261962892, 0.02962985610961914, 0.02969046401977539, 0.02953625679016113, 0.02971238327026367, 0.029566015243530273, 0.029665695190429688, 0.029766176223754885, 0.029575168609619142, 0.029679616928100585, 0.029859296798706053, 0.02960643196105957, 0.02992915153503418, 0.030304576873779295, 0.02971238327026367, 0.029668384552001954, 0.029842144012451173, 0.02988688087463379, 0.02977916717529297, 0.02964339256286621, 0.02994175910949707, 0.029799936294555664, 0.029732767105102538, 0.0297762565612793, 0.029710559844970702, 0.029621856689453125, 0.029633983612060547, 0.030079008102416992, 0.029864864349365236, 0.02992848014831543, 0.029894784927368166, 0.029808576583862306, 0.02972153663635254, 0.029685760498046877, 0.029638656616210936, 0.030123104095458986, 0.029653919219970702, 0.029587135314941407, 0.02957321548461914, 0.029908512115478517, 0.029604543685913087, 0.0295927677154541, 0.029620159149169923, 0.03045369529724121, 0.02989356803894043, 0.030905855178833007, 0.030767744064331054, 0.030205503463745117, 0.0299935359954834, 0.0330747184753418, 0.030167327880859376, 0.030105632781982423, 0.03005414390563965, 0.030148992538452147, 0.030074911117553713, 0.030242816925048828, 0.02999504089355469, 0.029949920654296875, 0.029949056625366212, 0.029920127868652342, 0.029945472717285156, 0.030054784774780272, 
0.030155839920043944, 0.029997024536132812, 0.029995616912841798, 0.029865888595581053, 0.029923807144165038, 0.029951263427734375, 0.02988924789428711, 0.030100608825683595, 0.029935743331909178, 0.030046720504760743, 0.030060447692871094, 0.030239072799682618, 0.030564096450805663, 0.030438943862915038, 0.03017919921875, 0.03025391960144043, 0.03075481605529785, 0.030466047286987305, 0.030244319915771485, 0.030120512008666993, 0.03024278450012207, 0.030282943725585938, 0.030171968460083007, 0.030023679733276368, 0.029816831588745117, 0.03001475143432617, 0.029989599227905273, 0.02993152046203613, 0.029853696823120116, 0.029988927841186522, 0.029703615188598632, 0.02971446418762207, 0.029639135360717772, 0.029888511657714844, 0.029670751571655274, 0.02958736038208008, 0.02952668762207031, 0.029651039123535155, 0.029499391555786132, 0.029507583618164062, 0.03058086395263672, 0.03027302360534668, 0.03011529541015625, 0.030141056060791014, 0.03005264091491699, 0.030207712173461913]",tokens/s,33.79093427786863,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,11116.38016,14586.6752,0.0,14191.427584,13325.783552,s,1,18.065916015625,18.065916015625,0.0,18.065916015625,18.065916015625,18.065916015625,18.065916015625,[18.065916015625],,kWh,0.0003254964083291725,3.589740193542263e-05,0.00014863761890999971,0.0005100314291745948,,MB,2042.126336,14599.258112,0.0,14191.427584,12582.029312,s,10,93.31914257812501,9.331914257812501,0.0027802590925932497,9.33111669921875,9.3355583984375,9.3362474609375,9.336798710937499,"[9.3316044921875, 9.3291533203125, 
9.328056640625, 9.3303154296875, 9.33062890625, 9.3295869140625, 9.332990234375, 9.33446484375, 9.3354052734375, 9.3369365234375]",tokens/s,27.432742407130633,kWh,0.000272282078387085,3.003405638682655e-05,0.0001809637003264003,0.0004832798351003118,tokens/kWh,529713.8043155959,MB,2046.562304,14599.258112,0.0,14191.427584,12935.916032,s,10,41.51020703125,4.151020703125,0.0021570119003759226,4.150668701171875,4.153448681640625,4.154599829101562,4.155520747070312,"[4.1493603515625, 4.14903662109375, 4.1501279296875, 4.14786572265625, 4.15031494140625, 4.15220068359375, 4.15133447265625, 4.1557509765625, 4.15319287109375, 4.1510224609375]",tokens/s,15.176990072001304,kWh,0.00012134205874583093,1.3384976105635782e-05,8.099598146339845e-05,0.00021572301631486506,tokens/kWh,292041.16035558505,,s,630,41.505993812561016,0.06588252986120799,0.0005906783672297738,0.06589093017578125,0.06665264968872071,0.06687368431091309,0.06720562927246095,"[0.06626099395751953, 0.06524118041992187, 0.06492034912109375, 0.064827392578125, 0.06484786987304687, 0.06484092712402344, 0.06486438751220704, 0.06557571411132812, 0.06565261077880859, 0.0653803482055664, 0.06544515228271484, 0.06521520233154297, 0.06514073944091797, 0.06492105865478516, 0.06538006591796874, 0.0658372802734375, 0.06559180450439453, 0.06537757110595703, 0.06551020812988281, 0.06589641571044921, 0.06540496063232422, 0.06528419494628906, 0.06634639739990235, 0.06582073974609375, 0.06552150726318359, 0.06543830108642579, 0.06618729400634765, 0.06565475463867188, 0.06527999877929687, 0.06510352325439453, 0.06604576110839844, 0.0660684814453125, 0.06600758361816406, 0.06570515441894531, 0.06550406646728515, 0.06623951721191407, 0.06597277069091798, 0.06570409393310547, 0.06588444519042969, 0.06593052673339844, 0.06609379577636719, 0.06683760070800782, 0.06615337371826172, 0.06606752014160157, 0.06584416198730468, 0.06595708465576172, 0.06618927764892578, 0.06643695831298828, 0.06687615966796875, 0.06616633605957031, 0.06595228576660156, 0.06606655883789063, 0.06641868591308593, 0.06641769409179688, 0.06650355529785157, 0.06634915161132812, 0.06702799987792969, 0.06659078216552734, 0.06666336059570313, 0.06713929748535157, 0.06664335632324218, 0.06630287933349609, 0.06644115447998047, 0.06629222106933594, 0.06507231903076172, 0.06442066955566406, 0.06441280364990234, 0.06548953247070312, 0.06542082977294922, 0.06538041687011718, 0.06509980773925782, 0.06530937957763672, 0.06500717163085938, 0.06462838745117187, 0.0649447021484375, 0.06558060455322266, 0.06562876892089843, 0.06589234924316406, 0.06544793701171875, 0.06586473846435546, 0.06593170928955078, 0.06569833374023437, 0.06545817565917969, 0.06566242980957031, 0.06602403259277344, 0.06578377532958984, 0.06558249664306641, 0.06558367919921874, 0.06575103759765626, 0.06524723052978515, 0.06554768371582032, 0.0661673583984375, 0.06617407989501953, 0.06578883361816407, 0.06566297912597656, 0.0661770248413086, 0.06585689544677735, 0.06562060546875, 0.06565888214111328, 0.0655093765258789, 0.06647196960449218, 0.06593328094482422, 0.06592431640625, 0.06606864166259765, 0.0658458251953125, 0.0657347183227539, 0.06591487884521484, 0.06645145416259765, 0.06640156555175782, 0.06629792022705078, 0.06694297790527344, 0.06663382720947265, 0.06608892822265625, 0.06586339569091797, 0.0663315200805664, 0.0658001937866211, 0.0660664291381836, 0.06624463653564452, 0.0665640640258789, 0.06621718597412109, 0.06602428436279296, 0.06647801971435546, 0.06642073822021484, 0.06665113830566406, 0.06729558563232421, 
0.06724787139892578, 0.06553238677978515, 0.06487657928466797, 0.06485542297363281, 0.06471334075927734, 0.06574015808105468, 0.06549565124511719, 0.06512374114990234, 0.06503689575195312, 0.06509954833984374, 0.06488070678710937, 0.06492790222167968, 0.06593897247314454, 0.06559539031982421, 0.06515711975097656, 0.06546685028076171, 0.0652779541015625, 0.06520140838623047, 0.0649324493408203, 0.06576557159423828, 0.06619132995605469, 0.06565888214111328, 0.06532710266113281, 0.06588591766357423, 0.06575337219238281, 0.06548905944824218, 0.06545801544189453, 0.06632653045654296, 0.0660921630859375, 0.06580461120605469, 0.06548127746582032, 0.0662193603515625, 0.06586214447021485, 0.0655013427734375, 0.06512786865234375, 0.06604447937011719, 0.06645670318603515, 0.06587811279296875, 0.06585424041748048, 0.06643228912353516, 0.06586646270751953, 0.06557491302490234, 0.06600498962402344, 0.06623436737060547, 0.06620159912109375, 0.06648627471923828, 0.06696550750732422, 0.06643014526367187, 0.06612665557861327, 0.06580413055419922, 0.06656425476074218, 0.06663168334960938, 0.06616473388671874, 0.066123779296875, 0.06614527893066406, 0.06655446624755859, 0.0663371810913086, 0.066700927734375, 0.0667795181274414, 0.0664815673828125, 0.06647459411621094, 0.06726860809326173, 0.06671155548095703, 0.06664575958251953, 0.06609497833251952, 0.06508803558349609, 0.06462870025634766, 0.06390496063232422, 0.06466854095458985, 0.064901123046875, 0.06564246368408203, 0.06497996520996094, 0.06475682830810547, 0.06573868560791016, 0.06549811553955077, 0.06557571411132812, 0.0658658905029297, 0.06543702697753906, 0.06525727844238281, 0.06545702362060547, 0.06522662353515625, 0.06529427337646485, 0.06475794982910156, 0.06517167663574219, 0.06517727661132812, 0.06561138916015626, 0.06542384338378907, 0.0656214370727539, 0.06545401763916016, 0.06592105865478516, 0.06636399841308593, 0.06572441864013671, 0.06566902160644532, 0.06583273315429687, 0.0656530532836914, 0.06568323516845703, 0.0655383071899414, 0.06576691436767577, 0.06586601257324219, 0.06592111968994141, 0.06610717010498048, 0.06587423706054688, 0.06589190673828126, 0.06572895812988282, 0.06637481689453124, 0.06676156616210938, 0.06644534301757812, 0.06635110473632813, 0.0664060821533203, 0.06620333099365235, 0.06616124725341797, 0.06618019104003907, 0.06607965087890624, 0.06591283416748046, 0.0661053466796875, 0.06583296203613281, 0.0666992645263672, 0.06638784027099609, 0.0661812515258789, 0.06648627471923828, 0.06707539367675781, 0.06690633392333985, 0.06724861145019531, 0.06665001678466796, 0.0665355224609375, 0.06711897277832031, 0.06665158081054688, 0.06610915374755859, 0.06554898834228516, 0.06490940856933594, 0.06470041656494141, 0.06445260620117188, 0.06464300537109376, 0.06483910369873047, 0.06538098907470703, 0.06500057220458984, 0.06501200103759766, 0.06483618927001954, 0.06601519775390625, 0.06618425750732422, 0.06602236938476562, 0.06553190612792968, 0.0659882583618164, 0.06559574127197265, 0.06539254760742187, 0.06500543975830078, 0.06539081573486329, 0.06481078338623047, 0.06480303955078125, 0.06545203399658203, 0.06559673309326172, 0.06553657531738281, 0.06552384185791016, 0.06597122955322265, 0.06633277130126954, 0.06618816375732423, 0.06612892913818359, 0.06629270172119141, 0.06640137481689454, 0.0658658905029297, 0.06587439727783204, 0.06595209503173828, 0.06571987152099609, 0.0655630111694336, 0.06584524536132813, 0.06587187194824219, 0.06576127624511718, 0.06588944244384766, 0.06610755157470703, 0.06677369689941406, 
0.06623232269287109, 0.06665010833740234, 0.06692249298095704, 0.06625279998779297, 0.06590156555175782, 0.06626204681396484, 0.06638368225097656, 0.06623452758789063, 0.06584102630615235, 0.06643315124511719, 0.06618313598632812, 0.06618118286132812, 0.06619315338134765, 0.06646505737304688, 0.0668287353515625, 0.0671278076171875, 0.06718643188476563, 0.06650086212158203, 0.0666398696899414, 0.06671737670898438, 0.06626509094238281, 0.06515507507324218, 0.06491136169433594, 0.0649175033569336, 0.06528614044189453, 0.06595958709716797, 0.06577903747558594, 0.06584217834472657, 0.06535987091064453, 0.06508665466308594, 0.06498796844482421, 0.06522163391113281, 0.06521753692626953, 0.06495846557617188, 0.06546841430664062, 0.06557414245605468, 0.06559616088867187, 0.0652759017944336, 0.06517263793945313, 0.06620355224609376, 0.06595683288574218, 0.06603158569335937, 0.06593497467041015, 0.06601705932617187, 0.06547516632080078, 0.06515711975097656, 0.06521651458740234, 0.06587599945068359, 0.0658205795288086, 0.0657900161743164, 0.06575308990478515, 0.06596198272705078, 0.06596141052246093, 0.06579055786132812, 0.06635926055908203, 0.06631423950195313, 0.0662874526977539, 0.06598451232910156, 0.06677107238769531, 0.066236572265625, 0.06599871826171876, 0.06577766418457032, 0.06570751953125, 0.06599027252197266, 0.06556953430175781, 0.06606156921386719, 0.06619225311279296, 0.06609836578369141, 0.06609388732910157, 0.0663531494140625, 0.06694627380371093, 0.0663210220336914, 0.06721347045898438, 0.06667263793945312, 0.06664390563964843, 0.06640847778320312, 0.06645356750488281, 0.06678115081787109, 0.06640640258789063, 0.06641788482666015, 0.06628224182128906, 0.06607174682617188, 0.06641545867919922, 0.06640406036376953, 0.06514431762695312, 0.06491593933105469, 0.06480928039550782, 0.065185791015625, 0.06517286682128906, 0.0650083236694336, 0.06504166412353515, 0.06505545806884766, 0.06513436889648437, 0.06514412689208984, 0.06595059204101562, 0.06606438446044922, 0.0655440673828125, 0.06563001251220703, 0.06611385345458984, 0.065544189453125, 0.0651071014404297, 0.06483177947998046, 0.06518431854248047, 0.06530867004394532, 0.06538444519042969, 0.06523897552490235, 0.06586579132080078, 0.0659372787475586, 0.0659574432373047, 0.06568402862548828, 0.06574038696289063, 0.06609305572509766, 0.06609712219238281, 0.0659296646118164, 0.06590054321289063, 0.0656337890625, 0.06546482849121094, 0.06580429077148438, 0.0657223663330078, 0.0656009292602539, 0.06568816375732422, 0.06589030456542969, 0.06663782501220702, 0.06629580688476562, 0.06596518707275391, 0.06617996978759766, 0.06676070404052735, 0.06638387298583984, 0.06687065887451171, 0.06631897735595703, 0.06636665344238281, 0.06613478088378906, 0.0665109100341797, 0.06606230163574218, 0.06546025848388672, 0.06594290924072266, 0.06595590209960937, 0.06691836547851562, 0.06640402984619141, 0.06689017486572266, 0.06670912170410156, 0.06650975799560546, 0.06688668823242187, 0.06685785675048828, 0.06688563537597657, 0.06697164916992188, 0.06624665832519532, 0.06557843017578124, 0.06500614166259766, 0.06494316864013672, 0.06473564910888673, 0.06482179260253906, 0.06462611389160157, 0.06548470306396484, 0.06520694732666016, 0.06509363555908203, 0.06559334564208984, 0.06609101104736329, 0.06557494354248047, 0.06537216186523437, 0.0652135009765625, 0.06570054626464844, 0.06569596862792969, 0.06543740844726563, 0.06618669128417969, 0.06582972717285156, 0.06541311645507812, 0.06521036529541016, 0.06518169403076172, 0.06590025329589844, 
0.06547078704833985, 0.06586969757080079, 0.06622118377685547, 0.06624457550048828, 0.06588028717041015, 0.06547065734863282, 0.065325439453125, 0.0656509780883789, 0.06563145446777344, 0.0658392333984375, 0.06612847900390625, 0.06589155578613282, 0.0658644790649414, 0.06595289611816406, 0.06659980773925782, 0.06636540985107423, 0.06623849487304688, 0.06664150238037109, 0.06630448150634766, 0.06620873260498047, 0.06615519714355468, 0.06625411224365234, 0.06647910308837891, 0.06599884796142579, 0.06629513549804687, 0.06681436920166016, 0.06626534271240235, 0.06607622528076172, 0.0670578842163086, 0.06675888061523437, 0.06628352355957032, 0.06639401245117188, 0.06706524658203125, 0.06669376373291015, 0.06676032257080078, 0.06732230377197265, 0.06697357177734375, 0.06680793762207031, 0.06678221130371094, 0.06632012939453125, 0.06544857788085938, 0.06523881530761719, 0.06496505737304688, 0.06505209350585937, 0.06550105285644531, 0.06508386993408204, 0.0657223663330078, 0.06530220794677734, 0.06527235412597657, 0.06477948760986328, 0.0651965103149414, 0.06550054168701172, 0.06551388549804688, 0.06523875427246094, 0.06561465454101563, 0.06575513458251953, 0.06559916687011719, 0.06523526763916015, 0.065830078125, 0.06573321533203125, 0.06583113861083985, 0.06542700958251953, 0.06547452545166016, 0.06575945281982422, 0.06594172668457031, 0.06554825592041015, 0.06578185272216797, 0.06594556427001953, 0.06572201538085938, 0.0655354232788086, 0.06576016235351563, 0.06627449798583984, 0.066233154296875, 0.06574697875976562, 0.06625651550292969, 0.06573296356201172, 0.06588992309570313, 0.06661772918701171, 0.06610739135742187, 0.06583622741699219, 0.06583094024658204, 0.06600953674316407, 0.06625929260253906, 0.06608086395263672, 0.0665077133178711, 0.06622415924072265, 0.06646578979492188, 0.06612220764160157, 0.06590306854248047, 0.06688153839111328, 0.06623232269287109, 0.06624665832519532, 0.06712899017333984, 0.06666226959228516, 0.06636796569824219, 0.06637126159667969, 0.06645791625976563, 0.06668284606933594, 0.06677916717529297, 0.06640636444091796, 0.06643714904785156, 0.0672799072265625, 0.06628553771972656, 0.06522652435302734, 0.0651204833984375, 0.06464508819580078, 0.0646421127319336, 0.06505366516113281, 0.06527385711669922, 0.06524518585205077, 0.06531890869140625, 0.06494617462158203, 0.06493097686767578, 0.06578672027587891, 0.06555648040771485, 0.06525746917724609, 0.0660859832763672, 0.06566595458984376, 0.06545536041259765, 0.06541577911376953, 0.06604345703125, 0.0657086410522461, 0.06542918395996093, 0.06523945617675782, 0.06537734222412109, 0.06566304016113281, 0.06552649688720703, 0.06531072235107421, 0.06576751708984375, 0.06611759948730468, 0.06609705352783203, 0.06594569396972656, 0.06618931579589844, 0.06594102478027344, 0.06593583679199219, 0.06607039642333984, 0.0665145263671875, 0.06590108489990235, 0.0653803482055664, 0.06601478576660157, 0.06617890930175781, 0.06621231842041016, 0.0657570571899414, 0.06587417602539063, 0.0662908172607422, 0.06630694580078125, 0.0666767349243164, 0.06631014251708985, 0.06598614501953125, 0.06570845031738282, 0.06675628662109374, 0.06618924713134766, 0.06589478302001953, 0.06573014068603515, 0.06681231689453125, 0.06645350646972656, 0.0661376953125, 0.06576783752441406, 0.06647193908691407, 0.06702035522460938, 0.06713311767578126, 0.06673075103759765, 0.06670921325683593, 0.066553955078125, 0.06688162994384765]",tokens/s,15.178530668246328,,, 
4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.747328,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,3543.61344,5170.46272,0.0,4775.215104,4427.072512,s,1,11.0327451171875,11.0327451171875,0.0,11.0327451171875,11.0327451171875,11.0327451171875,11.0327451171875,[11.0327451171875],,kWh,0.00011698165616666547,1.288786926369206e-05,4.9650317498000154e-05,0.0001795198429283577,,MB,1464.664064,5201.92,0.0,4794.089472,4101.02016,s,10,24.840545654296875,2.4840545654296875,0.005153085087852347,2.4845689697265625,2.489054345703125,2.4893999755859375,2.4896764794921875,"[2.470856201171875, 2.481852783203125, 2.48210888671875, 
2.482912841796875, 2.485000732421875, 2.48413720703125, 2.486998291015625, 2.48795556640625, 2.4889775390625, 2.48974560546875]",tokens/s,103.05731748518075,kWh,7.23527563062501e-05,7.980278552744877e-06,4.8081927354400034e-05,0.00012841496221339504,tokens/kWh,1993537.167223466,MB,1468.98944,5210.308608,0.0,4802.47808,4101.02272,s,10,16.609656982421875,1.6609656982421874,0.009591729412490228,1.659974853515625,1.6736848510742188,1.676056414794922,1.6779536657714844,"[1.66152978515625, 1.645281982421875, 1.6731578369140625, 1.6579595947265624, 1.6652694091796876, 1.648097900390625, 1.657560302734375, 1.658419921875, 1.6639522705078125, 1.678427978515625]",tokens/s,37.92974175605997,kWh,4.8826849376667114e-05,5.3859862495886415e-06,3.219610909019986e-05,8.640894471645554e-05,tokens/kWh,729091.1861813585,,s,630,16.607083940505998,0.026360450699215842,0.00044283413471687657,0.026275232315063478,0.0267891996383667,0.027089630413055416,0.02799394283294679,"[0.027376960754394532, 0.026368703842163086, 0.02675712013244629, 0.026236640930175782, 0.02658070373535156, 0.02652764892578125, 0.026981023788452147, 0.026679296493530274, 0.026828800201416016, 0.02613817596435547, 0.026312639236450195, 0.02611203193664551, 0.026216928482055663, 0.026341087341308595, 0.026441152572631837, 0.026542943954467775, 0.026488832473754883, 0.026295488357543945, 0.026131263732910158, 0.02632908821105957, 0.02611609649658203, 0.02612633514404297, 0.026101760864257813, 0.026068992614746093, 0.026284032821655274, 0.026497119903564452, 0.026433439254760743, 0.02652067184448242, 0.026581632614135743, 0.026333471298217774, 0.026310752868652344, 0.02627779197692871, 0.026443199157714845, 0.026215200424194337, 0.026232608795166014, 0.02735443115234375, 0.026434175491333006, 0.026458112716674805, 0.026445535659790038, 0.026218399047851563, 0.026506975173950197, 0.02614691162109375, 0.02625190353393555, 0.026293855667114258, 0.026276159286499023, 0.026189311981201172, 0.026765663146972655, 0.026099967956542968, 0.026248512268066407, 0.02612633514404297, 0.026204864501953126, 0.026270912170410155, 0.026274623870849608, 0.02674483108520508, 0.02627993583679199, 0.02613862419128418, 0.026365280151367188, 0.02610633659362793, 0.02603759956359863, 0.02607916831970215, 0.02628291130065918, 0.026199520111083983, 0.02625152015686035, 0.02738444709777832, 0.02653545570373535, 0.02613897514343262, 0.026787967681884767, 0.026032127380371094, 0.026220544815063477, 0.026028032302856444, 0.02615500831604004, 0.02614886474609375, 0.02609152030944824, 0.0261345272064209, 0.026647808074951172, 0.026542783737182617, 0.02639263916015625, 0.026472415924072266, 0.026093055725097656, 0.02595075225830078, 0.025968639373779297, 0.026064895629882814, 0.0258919677734375, 0.02594825553894043, 0.025881343841552735, 0.025950239181518554, 0.025852928161621092, 0.02591862487792969, 0.025816415786743162, 0.025860767364501953, 0.026040159225463867, 0.02589823913574219, 0.025890975952148437, 0.02703011131286621, 0.02599091148376465, 0.025952735900878907, 0.025980703353881834, 0.0262891845703125, 0.026416095733642578, 0.025862144470214843, 0.02593712043762207, 0.025867040634155274, 0.026091167449951172, 0.02597104072570801, 0.026527679443359375, 0.02595382308959961, 0.025868608474731446, 0.02622060775756836, 0.025979040145874023, 0.026107616424560547, 0.02598940849304199, 0.02591948890686035, 0.025837535858154296, 0.026324159622192384, 0.02601055908203125, 0.026016735076904298, 0.02591632080078125, 0.02599907112121582, 0.025913663864135742, 0.02591542434692383, 
0.025829343795776366, 0.0258571834564209, 0.025954687118530273, 0.026548704147338866, 0.02603759956359863, 0.026161823272705078, 0.027317888259887697, 0.026281824111938478, 0.02615350341796875, 0.026267648696899414, 0.026218399047851563, 0.028845375061035156, 0.026424224853515626, 0.026544160842895508, 0.026750816345214843, 0.026462207794189452, 0.026352895736694335, 0.02632150459289551, 0.02627302360534668, 0.026359743118286132, 0.026470975875854494, 0.026880416870117187, 0.026326623916625977, 0.026646303176879882, 0.02620275115966797, 0.02610380744934082, 0.026455263137817382, 0.027392799377441407, 0.026728448867797853, 0.026576288223266603, 0.027406848907470704, 0.026787456512451173, 0.026614240646362305, 0.02631679916381836, 0.026705919265747072, 0.026529792785644532, 0.026421247482299806, 0.026161151885986327, 0.026508575439453126, 0.026304224014282226, 0.026403839111328126, 0.026425344467163086, 0.02690640068054199, 0.026300159454345703, 0.02640470314025879, 0.026459808349609374, 0.026327360153198243, 0.026282655715942384, 0.02632089614868164, 0.02633884811401367, 0.026120672225952147, 0.026300735473632812, 0.026688735961914064, 0.02625174331665039, 0.02619740867614746, 0.02619254493713379, 0.02613408088684082, 0.02627827262878418, 0.026543519973754884, 0.026554208755493164, 0.026323711395263672, 0.026484479904174806, 0.026335008621215822, 0.026310304641723632, 0.026311359405517577, 0.026580831527709962, 0.027404224395751953, 0.028201311111450196, 0.027394048690795897, 0.027249439239501953, 0.02650227165222168, 0.026217344284057618, 0.026578655242919923, 0.026470495223999024, 0.02613596725463867, 0.026179584503173828, 0.026032928466796876, 0.026176576614379884, 0.026001760482788086, 0.026048608779907226, 0.025915903091430666, 0.026013696670532226, 0.02585977554321289, 0.025852224349975587, 0.02593337631225586, 0.02642198371887207, 0.026099424362182617, 0.025972736358642577, 0.026908672332763672, 0.02646735954284668, 0.026254304885864256, 0.025968927383422852, 0.026096639633178712, 0.02630512046813965, 0.025960575103759764, 0.02592758369445801, 0.026042463302612305, 0.026103231430053712, 0.025967071533203125, 0.02645964813232422, 0.02696579170227051, 0.02608777618408203, 0.026270175933837892, 0.02615705680847168, 0.026426847457885743, 0.025864736557006836, 0.025999359130859375, 0.026011648178100585, 0.026089344024658203, 0.025954431533813476, 0.025939968109130858, 0.02671820831298828, 0.02613248062133789, 0.026086816787719725, 0.026157663345336913, 0.02624224090576172, 0.026073183059692383, 0.026133216857910157, 0.025890880584716797, 0.026298303604125977, 0.026275840759277344, 0.030109695434570313, 0.027120927810668945, 0.026553056716918946, 0.025851232528686523, 0.026221216201782225, 0.028276735305786133, 0.026429439544677736, 0.026210304260253905, 0.026195968627929687, 0.02632262420654297, 0.026527999877929687, 0.027556320190429688, 0.026664831161499022, 0.026531423568725586, 0.02668351936340332, 0.02637660789489746, 0.026441728591918946, 0.02623244857788086, 0.026370431900024412, 0.02623049545288086, 0.0262126407623291, 0.026701087951660155, 0.026555103302001955, 0.026351648330688475, 0.027228063583374023, 0.02629151916503906, 0.026620704650878905, 0.026594272613525392, 0.02656972885131836, 0.02689740753173828, 0.02631372833251953, 0.026892192840576173, 0.02761315155029297, 0.02687331199645996, 0.026489503860473634, 0.026351615905761717, 0.026619232177734375, 0.02621072006225586, 0.026206464767456056, 0.028094463348388672, 0.027322463989257813, 0.026348960876464843, 
0.026054880142211915, 0.02613417625427246, 0.026095935821533203, 0.026252927780151366, 0.02653011131286621, 0.026530176162719726, 0.026371807098388673, 0.026876192092895507, 0.026290016174316408, 0.026286239624023437, 0.02613859176635742, 0.026115360260009764, 0.026091392517089845, 0.026364799499511718, 0.026332735061645508, 0.02612678337097168, 0.026211936950683593, 0.026276256561279295, 0.02607513618469238, 0.02587775993347168, 0.02684511947631836, 0.026249887466430664, 0.02595452880859375, 0.025846784591674804, 0.025922496795654296, 0.026392576217651367, 0.0261345272064209, 0.026026079177856445, 0.026277311325073244, 0.026095903396606446, 0.02589321517944336, 0.025917280197143556, 0.027365503311157228, 0.026208351135253907, 0.026167327880859376, 0.025980031967163086, 0.02598182487487793, 0.02587766456604004, 0.02598556709289551, 0.02596611213684082, 0.02650569534301758, 0.026212671279907226, 0.026247167587280275, 0.02614271926879883, 0.02592972755432129, 0.026402816772460938, 0.02596659278869629, 0.025896608352661134, 0.025854303359985353, 0.026066879272460937, 0.025869760513305664, 0.026528383255004884, 0.025900575637817384, 0.026014175415039063, 0.0258272647857666, 0.02604038429260254, 0.026012895584106445, 0.025848608016967773, 0.025869951248168946, 0.025997472763061524, 0.025894144058227538, 0.02685136032104492, 0.02605673599243164, 0.026184192657470705, 0.025946239471435546, 0.026018335342407228, 0.026248224258422853, 0.026272096633911134, 0.02607529640197754, 0.026073312759399413, 0.026267040252685548, 0.02607561683654785, 0.026073055267333986, 0.026052192687988283, 0.026245695114135742, 0.02612019157409668, 0.02611568069458008, 0.026110368728637694, 0.02693734359741211, 0.026187776565551758, 0.026583040237426758, 0.026048511505126954, 0.02607014465332031, 0.02659008026123047, 0.02623423957824707, 0.02620047950744629, 0.026172767639160155, 0.026047359466552733, 0.026086912155151368, 0.02604083251953125, 0.02660905647277832, 0.026251104354858397, 0.026088031768798828, 0.026148704528808592, 0.02618582344055176, 0.02747065544128418, 0.0267357120513916, 0.027333311080932617, 0.026831071853637697, 0.026584192276000975, 0.026600320816040038, 0.026355712890625, 0.026343423843383788, 0.026328800201416015, 0.026534175872802733, 0.026535200119018554, 0.026381023406982423, 0.02647039985656738, 0.026208192825317382, 0.0265947208404541, 0.02591606330871582, 0.02612224006652832, 0.026421375274658203, 0.026875776290893556, 0.026476415634155273, 0.026302175521850588, 0.026446239471435547, 0.026068992614746093, 0.026019615173339845, 0.026246591567993163, 0.026458879470825196, 0.026479967117309572, 0.026213247299194335, 0.02640412712097168, 0.02644432067871094, 0.026623104095458986, 0.026272640228271485, 0.02676531219482422, 0.02613862419128418, 0.026089471817016603, 0.026046464920043946, 0.025994688034057616, 0.026247743606567384, 0.02590105628967285, 0.025800703048706054, 0.02669955253601074, 0.026167520523071287, 0.026491039276123046, 0.02615894317626953, 0.026104000091552733, 0.02651087951660156, 0.02597324752807617, 0.02596019172668457, 0.02591334342956543, 0.026230815887451173, 0.026327039718627928, 0.02633260726928711, 0.026620479583740236, 0.02611404800415039, 0.025821184158325194, 0.026077184677124023, 0.02609766387939453, 0.025906303405761718, 0.02597999954223633, 0.025912832260131836, 0.02601603126525879, 0.025882623672485353, 0.025939968109130858, 0.027062719345092773, 0.026255264282226562, 0.026578592300415038, 0.026130847930908203, 0.02605801582336426, 0.026120927810668944, 
0.026179584503173828, 0.026175071716308593, 0.026100128173828126, 0.026185855865478516, 0.026040128707885742, 0.026142911911010744, 0.025968416213989258, 0.026084703445434572, 0.026062816619873048, 0.026082015991210936, 0.02599065589904785, 0.026241600036621095, 0.02615705680847168, 0.02624620819091797, 0.026139200210571287, 0.02628646469116211, 0.02609516716003418, 0.026214847564697264, 0.026292224884033204, 0.026388479232788087, 0.026340543746948244, 0.026469087600708006, 0.026474592208862304, 0.026413055419921876, 0.02638643264770508, 0.02639023971557617, 0.026549568176269533, 0.027372447967529297, 0.027747840881347657, 0.027523647308349608, 0.026373567581176757, 0.02654879951477051, 0.0262524471282959, 0.026057567596435547, 0.0265229434967041, 0.02623967933654785, 0.02642051124572754, 0.026321632385253906, 0.0264105281829834, 0.026388479232788087, 0.026853599548339845, 0.026196224212646484, 0.0262938232421875, 0.02615750312805176, 0.026398719787597655, 0.02644742393493652, 0.026409343719482423, 0.026234943389892577, 0.02595193672180176, 0.026011327743530273, 0.02623142433166504, 0.02623641586303711, 0.02607155227661133, 0.02619980812072754, 0.026011680603027342, 0.02599776077270508, 0.02599888038635254, 0.02735113525390625, 0.026499296188354494, 0.026197664260864256, 0.026868064880371093, 0.026367488861083983, 0.02645625686645508, 0.026270015716552735, 0.026290176391601562, 0.02609766387939453, 0.02592972755432129, 0.025982208251953125, 0.02604310417175293, 0.026025056838989258, 0.0259880313873291, 0.02589014434814453, 0.02608780860900879, 0.026157344818115234, 0.02604243278503418, 0.02630771255493164, 0.02624764823913574, 0.02611814308166504, 0.02604217529296875, 0.026229280471801758, 0.02631679916381836, 0.026836992263793946, 0.027596799850463868, 0.026967391967773438, 0.02618844795227051, 0.026085376739501953, 0.02698240089416504, 0.02761315155029297, 0.02607516860961914, 0.026561887741088867, 0.02642799949645996, 0.026974271774291993, 0.02627174377441406, 0.026066944122314452, 0.02630451202392578, 0.026247167587280275, 0.02615091133117676, 0.026138143539428713, 0.02665920066833496, 0.026405055999755858, 0.026156896591186522, 0.02625551986694336, 0.026502880096435547, 0.02628348731994629, 0.02627043151855469, 0.026185056686401368, 0.02655302429199219, 0.026467679977416992, 0.026419551849365234, 0.02638876724243164, 0.02673459243774414, 0.027062271118164064, 0.02649616050720215, 0.026530656814575195, 0.026717792510986327, 0.026521600723266602, 0.026487199783325196, 0.02629804801940918, 0.02649648094177246, 0.02645903968811035, 0.027384191513061523, 0.026587295532226562, 0.02675302314758301, 0.030273536682128906, 0.026709600448608397, 0.026595775604248046, 0.026652639389038085, 0.026570816040039063, 0.026863231658935546, 0.02653398323059082, 0.02656073570251465, 0.02624928092956543, 0.02671392059326172, 0.026547903060913085, 0.02652614402770996, 0.026257408142089843, 0.026283103942871092, 0.026256288528442383, 0.026287679672241212, 0.026207904815673828, 0.026676000595092772, 0.026619903564453123, 0.026217472076416014, 0.02701148796081543, 0.02646086311340332, 0.026546079635620116, 0.026782751083374023, 0.026416095733642578, 0.02681875228881836, 0.026521343231201172, 0.026215904235839842, 0.026489280700683595, 0.026515392303466796, 0.026708192825317383, 0.026638111114501952, 0.027111648559570312, 0.026689855575561524, 0.026612831115722657, 0.026468544006347655, 0.02680678367614746, 0.026535488128662108, 0.026767711639404296, 0.02831974411010742, 0.02661974334716797, 
0.026474655151367186, 0.02631385612487793, 0.0261760311126709, 0.026222816467285158, 0.02643891143798828, 0.026911327362060547, 0.026542367935180663, 0.02642723274230957, 0.026706079483032226, 0.02651955223083496, 0.026399904251098633, 0.026321344375610352, 0.026365919113159178, 0.02629033660888672, 0.026933536529541016, 0.026439680099487304, 0.026286176681518555, 0.0268002872467041, 0.026203903198242187]",tokens/s,37.93561845396473,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,5169.401856,6234.701824,0.0,5832.179712,5645.103616,s,1,12.909220703125,12.909220703125,0.0,12.909220703125,12.909220703125,12.909220703125,12.909220703125,[12.909220703125],,kWh,0.0001573443653541517,1.7335612470661948e-05,6.905477746600719e-05,0.00024373475529082083,,MB,1766.281216,6291.324928,0.0,5874.122752,5138.323456,s,10,40.28709252929688,4.028709252929688,0.005671125681380243,4.0302406005859375,4.034338598632812,4.035376696777344,4.036207175292969,"[4.015229248046875, 4.026395751953125, 4.0234462890625, 4.0284091796875, 4.03117822265625, 4.030408203125, 4.030072998046875, 4.031429931640625, 4.036414794921875, 4.03410791015625]",tokens/s,63.54392534378998,kWh,0.00011739396909042094,1.2948705813527599e-05,7.81530347446005e-05,0.00020849570964854902,tokens/kWh,1227843.0114054943,MB,1766.920192,6310.199296,0.0,5892.99712,5138.326016,s,10,26.46400170898437,2.6464001708984375,0.0051657731429594084,2.648267822265625,2.65106181640625,2.6519804931640625,2.6527154345703123,"[2.650857666015625, 2.649217529296875, 2.647318115234375, 2.65020556640625, 2.652899169921875, 2.636313232421875, 2.639505615234375, 2.642402099609375, 2.650204345703125, 2.645078369140625]",tokens/s,23.80592349289785,kWh,0.00010147632789583062,1.119292522494738e-05,6.356818974340012e-05,0.00017623744286417815,tokens/kWh,357472.27703793085,,s,630,26.46138108825681,0.04200219220358228,0.000514603683284179,0.04189184188842773,0.04235059356689453,0.04278089790344238,0.044518069381713904,"[0.043229057312011716, 0.042273601531982424, 0.042123264312744144, 0.041902080535888675, 0.04228300857543945, 0.04324966430664062, 0.04237311935424805, 0.042098464965820315, 0.041853153228759765, 0.04195942306518555, 0.04512675094604492, 0.042848865509033204, 0.041871681213378906, 0.041727649688720704, 0.04150070571899414, 0.041705791473388674, 0.041489822387695316, 0.04145417785644531, 0.041371646881103515, 0.041750526428222655, 0.04209455871582031, 0.04199190521240234, 0.04188191986083984, 0.04182425689697265, 0.04182425689697265, 0.04169318389892578, 0.041842689514160154, 0.041635841369628904, 0.0417770881652832, 0.04214790344238281, 0.04204748916625976, 0.04172547149658203, 0.04213603210449219, 0.04169302368164062, 0.041947296142578125, 0.0417894401550293, 0.04197119903564453, 0.04206800079345703, 0.04185750579833984, 0.04223097610473633, 0.04203807830810547, 0.0421130256652832, 
0.04205145645141602, 0.041774974822998044, 0.042016769409179686, 0.0420211181640625, 0.04205977630615235, 0.04214495849609375, 0.04239238357543945, 0.04216569519042969, 0.0420596809387207, 0.042291393280029295, 0.0421954231262207, 0.0419837760925293, 0.04188716888427734, 0.042179073333740234, 0.04208623886108399, 0.042074558258056644, 0.04218377685546875, 0.04216239929199219, 0.0421926383972168, 0.04226460647583008, 0.041880481719970705, 0.04288595199584961, 0.04200803375244141, 0.04213359832763672, 0.04225235366821289, 0.04192409515380859, 0.04212543869018555, 0.0420401611328125, 0.041816062927246093, 0.04316364669799805, 0.04233577728271484, 0.04255904006958008, 0.04297206497192383, 0.041957374572753905, 0.04185225677490234, 0.0418331184387207, 0.042240001678466796, 0.04176668930053711, 0.041801952362060545, 0.04149391937255859, 0.041620063781738284, 0.04201635360717773, 0.04198851013183594, 0.042231807708740236, 0.04231782531738281, 0.04217971038818359, 0.0417022705078125, 0.041643806457519535, 0.04177942276000977, 0.04163379287719727, 0.04177920150756836, 0.04175990295410156, 0.04168975830078125, 0.0422545280456543, 0.04203724670410156, 0.041903583526611325, 0.04183299255371094, 0.04199628829956055, 0.04167679977416992, 0.04177305603027344, 0.04243036651611328, 0.04184073638916016, 0.04191007995605469, 0.04176614379882813, 0.041630657196044925, 0.04197990417480469, 0.04170902252197266, 0.04358607864379883, 0.042872833251953124, 0.042164222717285156, 0.04163356781005859, 0.04189616012573242, 0.04154332733154297, 0.04152355194091797, 0.04165222549438476, 0.04193801498413086, 0.041673633575439455, 0.041717758178710936, 0.04378009414672852, 0.0421231689453125, 0.04208809661865234, 0.042247806549072266, 0.042277694702148434, 0.041998336791992184, 0.04346265411376953, 0.04245913696289062, 0.04230963134765625, 0.041987648010253904, 0.042275264739990236, 0.04189184188842773, 0.04182534408569336, 0.04192147064208984, 0.04195910263061523, 0.041836254119873045, 0.04166511917114258, 0.04228915023803711, 0.04253807830810547, 0.042099391937255856, 0.042208606719970704, 0.04182720184326172, 0.04250419235229492, 0.04197785568237305, 0.04239081573486328, 0.04199702453613281, 0.042071361541748044, 0.041724609375, 0.0418078727722168, 0.041809921264648435, 0.041923809051513675, 0.04192131042480469, 0.042336254119873046, 0.04188774490356445, 0.042039295196533204, 0.0420843505859375, 0.04183980941772461, 0.04185580825805664, 0.041793537139892575, 0.04184592056274414, 0.04197257614135742, 0.041984001159667966, 0.04187340927124023, 0.0416255989074707, 0.041815486907958985, 0.041635425567626956, 0.04259324645996094, 0.04259635162353516, 0.041979393005371096, 0.04258662414550781, 0.04212118530273438, 0.041602977752685545, 0.04167808151245117, 0.04185916900634765, 0.04164483261108398, 0.04197903823852539, 0.0420401611328125, 0.041678848266601565, 0.04190617752075195, 0.04169318389892578, 0.04179100799560547, 0.04229167938232422, 0.04195897674560547, 0.04209503936767578, 0.04207206344604492, 0.04160620880126953, 0.041966529846191404, 0.042022911071777344, 0.04202883148193359, 0.043675102233886716, 0.04189238357543945, 0.041538944244384764, 0.041982593536376955, 0.042240001678466796, 0.04191027069091797, 0.04171366500854492, 0.04158796691894531, 0.04167142486572266, 0.04153548812866211, 0.04189798355102539, 0.04478547286987305, 0.04215961456298828, 0.0418430061340332, 0.04168742370605469, 0.04206108856201172, 0.042172321319580076, 0.04184326553344726, 0.04254044723510742, 0.04198896026611328, 0.04181196975708008, 
0.04232806396484375, 0.04201004791259766, 0.041812545776367185, 0.042110782623291015, 0.04188905715942383, 0.04226959991455078, 0.041734142303466795, 0.041816062927246093, 0.04221747207641602, 0.04185497665405274, 0.04166400146484375, 0.04172768020629883, 0.041935680389404296, 0.04208998489379883, 0.042095104217529294, 0.042254337310791014, 0.04184384155273437, 0.04191936111450195, 0.041940990447998046, 0.04222355270385742, 0.04201171112060547, 0.041968639373779294, 0.04194303894042969, 0.042164222717285156, 0.04204748916625976, 0.04186521530151367, 0.04235059356689453, 0.042362529754638674, 0.042184288024902344, 0.042199230194091795, 0.04211920166015625, 0.04203369522094726, 0.04195072174072265, 0.04171734237670898, 0.04183110427856445, 0.042074241638183595, 0.04238086318969726, 0.04197430419921875, 0.04203913497924805, 0.04217871856689453, 0.04235059356689453, 0.04193280029296875, 0.04285299301147461, 0.042137760162353516, 0.04210483169555664, 0.04198604965209961, 0.04196326446533203, 0.04186956787109375, 0.04179763031005859, 0.04306079864501953, 0.042747455596923827, 0.042036094665527345, 0.041973758697509765, 0.04260236740112305, 0.04179935836791992, 0.0420458869934082, 0.04192844772338867, 0.04169919967651367, 0.041662017822265626, 0.04187424087524414, 0.041586688995361325, 0.04226838302612305, 0.04533891296386719, 0.04220662307739258, 0.042170974731445314, 0.041870849609375, 0.0419780158996582, 0.04196387100219726, 0.041805408477783204, 0.04191888046264648, 0.04186316680908203, 0.04196099090576172, 0.0420970573425293, 0.04266976165771484, 0.04182463836669922, 0.042173694610595704, 0.041863937377929684, 0.04186316680908203, 0.04197990417480469, 0.04198140716552735, 0.042414623260498045, 0.04185497665405274, 0.041644031524658204, 0.04186111831665039, 0.04176188659667969, 0.04210575866699219, 0.042162174224853514, 0.04263919830322266, 0.0438633918762207, 0.042443008422851564, 0.04183097457885742, 0.041783294677734374, 0.041842689514160154, 0.04177920150756836, 0.041760768890380856, 0.04174233627319336, 0.041860862731933596, 0.041849086761474606, 0.04283939361572266, 0.04195600128173828, 0.042000385284423826, 0.04178662490844726, 0.04177753448486328, 0.04173440170288086, 0.04181363296508789, 0.042789985656738284, 0.041952159881591795, 0.041991233825683594, 0.041746494293212894, 0.04187161636352539, 0.04216486358642578, 0.04200188827514648, 0.04290409469604492, 0.041654048919677736, 0.041752799987792966, 0.04166451263427735, 0.04211011123657227, 0.04187631988525391, 0.041723617553710936, 0.04182815933227539, 0.042207584381103516, 0.04175817489624024, 0.04182697677612305, 0.04181961441040039, 0.04166915130615234, 0.041551456451416016, 0.041871776580810545, 0.042014720916748044, 0.04178255844116211, 0.04175331115722656, 0.04167679977416992, 0.041717758178710936, 0.04176588821411133, 0.04186214447021484, 0.04193689727783203, 0.04165836715698242, 0.042387454986572266, 0.04188518524169922, 0.041914878845214845, 0.041569694519042966, 0.04168764877319336, 0.04187750244140625, 0.04174230575561524, 0.041775135040283205, 0.04236207962036133, 0.043022815704345706, 0.041807998657226564, 0.0422279052734375, 0.041875457763671874, 0.04179558563232422, 0.041627105712890626, 0.04152579116821289, 0.041635841369628904, 0.041442783355712894, 0.04166915130615234, 0.04171366500854492, 0.04167475128173828, 0.04166451263427735, 0.04160067367553711, 0.04166195297241211, 0.04168790435791016, 0.04166041564941406, 0.041611263275146484, 0.04150579071044922, 0.041501121520996095, 0.041622081756591794, 
0.04188300704956055, 0.041536128997802735, 0.042813438415527344, 0.04263270568847656, 0.041799232482910155, 0.04181702423095703, 0.04187936019897461, 0.04197600173950195, 0.04167174530029297, 0.04179840087890625, 0.041732288360595705, 0.041662464141845705, 0.04181103897094727, 0.04197468948364258, 0.04565155029296875, 0.04211059188842774, 0.04168544006347656, 0.04180822372436523, 0.041774974822998044, 0.04171721649169922, 0.04172982406616211, 0.0416420783996582, 0.041699230194091795, 0.04222236633300781, 0.0419238395690918, 0.04194390487670899, 0.04168703842163086, 0.04158464050292969, 0.04143308639526367, 0.041431041717529295, 0.04146176147460937, 0.04138393783569336, 0.041342273712158206, 0.0423818244934082, 0.041740478515625, 0.04157235336303711, 0.04146176147460937, 0.04144063949584961, 0.04143324661254883, 0.04141411209106445, 0.04142182540893555, 0.042412033081054686, 0.041602752685546876, 0.04176313781738281, 0.041822177886962894, 0.04185500717163086, 0.041745697021484375, 0.042436832427978514, 0.04161996841430664, 0.042264575958251956, 0.04237263870239258, 0.04194351959228516, 0.04218454360961914, 0.0417465934753418, 0.04194003295898437, 0.042378177642822264, 0.041834495544433595, 0.041805759429931644, 0.04179302215576172, 0.04198374557495117, 0.04184352111816406, 0.04177891159057617, 0.0417770881652832, 0.04180822372436523, 0.04186022567749023, 0.042979328155517575, 0.042074111938476565, 0.041893024444580075, 0.04202582550048828, 0.04209196853637695, 0.041753150939941405, 0.041676448822021483, 0.04324332809448242, 0.0418678092956543, 0.041852928161621096, 0.04163174438476563, 0.041807361602783207, 0.041640350341796875, 0.04158678436279297, 0.04145139312744141, 0.04163510513305664, 0.04150675201416015, 0.041535552978515626, 0.041767776489257814, 0.041858142852783206, 0.041788318634033206, 0.041823326110839845, 0.04174713516235352, 0.04192892837524414, 0.04172544097900391, 0.04180838394165039, 0.041695232391357424, 0.04165836715698242, 0.04152115249633789, 0.042014270782470706, 0.04175632095336914, 0.04192540740966797, 0.04146585464477539, 0.04166656112670898, 0.04184064102172851, 0.04208025741577148, 0.04170547103881836, 0.041539070129394534, 0.041923072814941405, 0.04182412719726562, 0.04164006423950195, 0.041625408172607424, 0.04155206298828125, 0.04172316741943359, 0.04173283386230469, 0.04341139221191406, 0.043413406372070314, 0.04225244903564453, 0.041895423889160156, 0.04194559860229492, 0.041809566497802736, 0.04185440063476562, 0.041710174560546875, 0.04229561614990234, 0.04199628829956055, 0.04204880142211914, 0.043796833038330076, 0.042207584381103516, 0.041955360412597655, 0.04204732894897461, 0.0418776626586914, 0.04193484878540039, 0.04207344055175781, 0.04276979064941406, 0.04200239944458008, 0.04165903854370117, 0.04191196823120117, 0.041654624938964845, 0.04165222549438476, 0.04173004913330078, 0.041611263275146484, 0.04177648162841797, 0.04162627029418945, 0.04230508804321289, 0.041656768798828125, 0.04192256164550781, 0.04204105758666992, 0.041930080413818356, 0.04191328048706055, 0.04227276611328125, 0.04220844650268555, 0.041915199279785154, 0.04182220840454102, 0.041890846252441404, 0.041866207122802736, 0.04200217437744141, 0.04206208038330078, 0.04192665481567383, 0.041990142822265625, 0.041852928161621096, 0.04223385620117188, 0.04209360122680664, 0.04207715225219726, 0.041828353881835936, 0.04169318389892578, 0.041711616516113284, 0.04169728088378906, 0.041710655212402345, 0.04199929428100586, 0.041997631072998046, 0.041928417205810545, 0.04178947067260742, 
0.04560755157470703, 0.04212972640991211, 0.04215398406982422, 0.046782047271728515, 0.041910015106201175, 0.04208297729492187, 0.042020542144775394, 0.0418546257019043, 0.041904800415039065, 0.041942047119140624, 0.041866207122802736, 0.041901599884033205, 0.04183087921142578, 0.04169728088378906, 0.041788894653320315, 0.04221084976196289, 0.04203811264038086, 0.04171587371826172, 0.04177510452270508, 0.041793537139892575, 0.04178124618530273, 0.04188127899169922, 0.042316001892089845, 0.042215518951416016, 0.04299529647827149, 0.04221392059326172, 0.041994430541992187, 0.04200243377685547, 0.04189184188842773, 0.0420208625793457, 0.041902080535888675, 0.041918464660644535, 0.04190105438232422, 0.04184371185302734, 0.04201881790161133, 0.041971710205078124, 0.041842689514160154, 0.04540415954589844, 0.04211289596557617, 0.041836673736572266, 0.04190412902832031, 0.04192256164550781, 0.04170735931396485, 0.04188995361328125, 0.04214374542236328, 0.04187340927124023, 0.04185488128662109, 0.041803871154785156, 0.04173209762573242, 0.04164556884765625, 0.04206028747558594, 0.042004287719726564, 0.04185107040405273, 0.04192051315307617, 0.041867263793945314, 0.041744384765625, 0.04172390365600586, 0.041790687561035156, 0.041898494720458986, 0.0418675537109375, 0.04195654296875, 0.04181894302368164, 0.041984001159667966, 0.04178099060058594, 0.041862880706787106, 0.04202550506591797, 0.04189184188842773, 0.04184195327758789, 0.042089183807373046, 0.04185270309448242, 0.0420456657409668, 0.04190003204345703, 0.04170070266723633, 0.0419703369140625, 0.04182819366455078, 0.04176911926269531, 0.041778335571289064, 0.04185174560546875, 0.04174204635620117, 0.04186531066894531, 0.04178659057617187, 0.04178019332885742, 0.041883647918701174, 0.042186302185058595, 0.04226707077026367, 0.0425164794921875, 0.04176486587524414]",tokens/s,23.808281128591002,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,875.159552,690.946048,0.0,295.698432,277.263872,s,1,8.11909912109375,8.11909912109375,0.0,8.11909912109375,8.11909912109375,8.11909912109375,8.11909912109375,[8.11909912109375],,kWh,2.4179700970785235e-05,2.6600073390606967e-06,7.5708393899864834e-06,3.441054769983242e-05,,MB,1181.384704,743.374848,0.0,335.54432,312.39168,s,17,0.5514090576171875,0.03243582691865808,0.0016088973820605303,0.03220207977294922,0.03238558731079102,0.033655135345458984,0.03771621688842774,"[0.038731487274169925, 0.03220816040039062, 0.032158432006835935, 0.03160505676269531, 0.032301982879638674, 0.03220207977294922, 0.031267936706542966, 0.03217087936401367, 0.03163433647155762, 0.032350048065185544, 0.032299488067626954, 0.03149331283569336, 0.03238528060913086, 0.031835424423217774, 0.03221033477783203, 0.03216876983642578, 
0.03238604736328125]",tokens/s,7892.507277276812,kWh,1.174110313102384e-06,1.2948132147932021e-07,7.818178945301146e-07,2.0854095291118186e-06,tokens/kWh,122757662.9080769,MB,1192.22272,770.637824,0.0,362.807296,312.39424,s,17,9.954290649414064,0.5855465087890624,0.00707897054558384,0.5835198364257812,0.5953194702148438,0.5984454467773438,0.6034490014648438,"[0.5942778930664062, 0.5881565551757812, 0.583928955078125, 0.587521484375, 0.5829520263671875, 0.5853764038085938, 0.5968818359375, 0.6046998901367188, 0.581895263671875, 0.579759765625, 0.5776902465820313, 0.5771841430664062, 0.5876484985351562, 0.5835198364257812, 0.5810463256835937, 0.58303857421875, 0.5787129516601562]",tokens/s,107.59179510827747,kWh,1.667274623395756e-05,1.8387166656205125e-06,6.767028013822432e-06,2.527849091340051e-05,tokens/kWh,2492237.3814096134,,s,1071,9.946050537109388,0.009286695179373833,0.00020343916655361172,0.009230015754699707,0.009521151542663574,0.009662544250488281,0.009936396598815917,"[0.009731743812561035, 0.009677151679992675, 0.00960921573638916, 0.009449472427368164, 0.009373536109924316, 0.009352992057800293, 0.009308095932006835, 0.00930844783782959, 0.009299231529235839, 0.009304224014282226, 0.009310367584228516, 0.009236960411071778, 0.00923862361907959, 0.00928876781463623, 0.009349663734436036, 0.009344544410705567, 0.009353119850158692, 0.00927023983001709, 0.00931158447265625, 0.009288191795349121, 0.009255071640014649, 0.009368991851806641, 0.009234335899353028, 0.009263808250427247, 0.009250368118286133, 0.009241024017333984, 0.00923680019378662, 0.009315936088562012, 0.009386303901672364, 0.009350943565368652, 0.009359040260314942, 0.009299360275268554, 0.009298015594482421, 0.009421504020690918, 0.009414400100708008, 0.009349504470825195, 0.009349120140075684, 0.009332159996032714, 0.009402272224426269, 0.009333024024963379, 0.009668416023254394, 0.009922975540161133, 0.009457023620605468, 0.009464608192443848, 0.009521151542663574, 0.009461888313293457, 0.00944320011138916, 0.009545727729797364, 0.009711615562438965, 0.009750144004821777, 0.0097609281539917, 0.009914848327636719, 0.009576191902160644, 0.009527296066284179, 0.009424896240234374, 0.00943120002746582, 0.009561951637268067, 0.00988486385345459, 0.009448224067687989, 0.009414688110351562, 0.009363455772399902, 0.009321791648864746, 0.009351584434509277, 0.009043487548828125, 0.009291808128356933, 0.009309120178222656, 0.00925267219543457, 0.009238719940185547, 0.00929910373687744, 0.009266016006469726, 0.009254688262939454, 0.009351391792297364, 0.009289728164672852, 0.009258655548095704, 0.009277888298034668, 0.009217951774597168, 0.009242624282836913, 0.009240032196044922, 0.00917967987060547, 0.00922812843322754, 0.009195584297180176, 0.009298015594482421, 0.00925596809387207, 0.009187999725341798, 0.009230655670166016, 0.009269248008728028, 0.009232352256774903, 0.009261311531066895, 0.009467552185058594, 0.009498751640319825, 0.009830400466918946, 0.009260640144348145, 0.009249024391174317, 0.00928758430480957, 0.00917734432220459, 0.009188480377197265, 0.00924454402923584, 0.009192447662353515, 0.01093222427368164, 0.011141119956970215, 0.009318400382995605, 0.009404224395751953, 0.009252927780151366, 0.009327903747558593, 0.009253888130187989, 0.009301888465881347, 0.009322688102722168, 0.009342752456665038, 0.00931811237335205, 0.009314656257629395, 0.009279711723327637, 0.009246432304382324, 0.009236191749572754, 0.009251296043395996, 0.009238335609436035, 0.009226240158081055, 0.009217696189880371, 
0.00920406436920166, 0.009259072303771973, 0.009275327682495118, 0.009217568397521973, 0.009233887672424317, 0.009225215911865235, 0.009186592102050781, 0.009259743690490723, 0.009318400382995605, 0.008987839698791505, 0.009204480171203613, 0.009240223884582519, 0.009209535598754882, 0.009182175636291505, 0.009200639724731445, 0.009210111618041993, 0.009293984413146973, 0.009240511894226075, 0.009281951904296875, 0.009265151977539063, 0.009302016258239745, 0.009338879585266113, 0.009207807540893554, 0.009234399795532226, 0.009193152427673339, 0.009262911796569824, 0.00974287986755371, 0.009180319786071778, 0.009220959663391114, 0.00918876838684082, 0.00921455955505371, 0.009193471908569336, 0.00923363208770752, 0.009214079856872559, 0.009349791526794434, 0.009367551803588867, 0.009406463623046875, 0.009319519996643067, 0.009290816307067871, 0.009283552169799805, 0.009320351600646972, 0.009185279846191406, 0.00922976016998291, 0.009245216369628906, 0.009203071594238282, 0.009343615531921387, 0.009234399795532226, 0.009316384315490723, 0.009251008033752442, 0.00925654411315918, 0.00925062370300293, 0.009260576248168945, 0.009298815727233886, 0.009380064010620116, 0.009391519546508789, 0.00943171215057373, 0.009407456398010255, 0.009315072059631347, 0.009385984420776367, 0.009227840423583985, 0.009222047805786133, 0.00921180820465088, 0.009186976432800293, 0.00916374397277832, 0.009177087783813476, 0.009186528205871581, 0.009175775527954101, 0.00920787239074707, 0.009190815925598145, 0.00926371192932129, 0.009220095634460449, 0.00923401641845703, 0.009014559745788573, 0.009423871994018555, 0.009730048179626465, 0.009215616226196289, 0.009243007659912109, 0.009515263557434083, 0.009205056190490722, 0.009290240287780761, 0.009248384475708008, 0.009203264236450195, 0.00969600009918213, 0.009272800445556641, 0.009251359939575196, 0.009294015884399414, 0.009320256233215331, 0.009420000076293945, 0.009270048141479493, 0.009297247886657714, 0.009218943595886231, 0.009256735801696777, 0.009366815567016601, 0.009398943901062011, 0.00931334400177002, 0.009319583892822265, 0.009451583862304687, 0.00931817626953125, 0.00935321617126465, 0.00928707218170166, 0.009248640060424805, 0.00926313591003418, 0.009218400001525878, 0.00932694435119629, 0.00929321575164795, 0.009171551704406738, 0.009182751655578613, 0.009209568023681641, 0.009313055992126465, 0.009229887962341308, 0.00917955207824707, 0.009152640342712402, 0.009158528327941895, 0.00923635196685791, 0.009449600219726562, 0.009359519958496093, 0.009313247680664062, 0.009325440406799317, 0.009361408233642577, 0.009541791915893555, 0.00949232006072998, 0.00930611228942871, 0.009329792022705077, 0.009292767524719239, 0.009316255569458008, 0.009629088401794434, 0.00935587215423584, 0.00941977596282959, 0.00973840045928955, 0.009304832458496094, 0.009254112243652343, 0.009253824234008789, 0.009207743644714356, 0.009179136276245118, 0.00924073600769043, 0.009317983627319336, 0.009296287536621093, 0.00930611228942871, 0.009254655838012694, 0.009195679664611817, 0.009205120086669922, 0.009173888206481934, 0.009205344200134278, 0.009193663597106933, 0.009214015960693359, 0.009269248008728028, 0.009239744186401368, 0.009255104064941407, 0.009259519577026367, 0.009183327674865722, 0.009160736083984374, 0.009170944213867188, 0.009119775772094727, 0.009173215866088867, 0.009223039627075196, 0.009515680313110352, 0.009552127838134765, 0.009416000366210937, 0.009347904205322265, 0.009312095642089844, 0.009303263664245606, 0.009265952110290528, 0.00923852825164795, 
0.00921395206451416, 0.009230015754699707, 0.009277759552001953, 0.009172863960266113, 0.009201791763305664, 0.009166848182678223, 0.009239551544189453, 0.009183679580688477, 0.009142848014831543, 0.009146368026733399, 0.009172736167907715, 0.009178720474243163, 0.00920847988128662, 0.009270336151123047, 0.009294783592224121, 0.009768959999084472, 0.009197312355041504, 0.009219903945922852, 0.009169343948364259, 0.009219264030456543, 0.0091942720413208, 0.009182720184326172, 0.00929856014251709, 0.009728032112121582, 0.009174880027770995, 0.009185312271118164, 0.009299679756164551, 0.009158944129943848, 0.009230015754699707, 0.009134336471557617, 0.009203776359558106, 0.009141856193542481, 0.009140159606933594, 0.009148896217346192, 0.00918563175201416, 0.009077119827270507, 0.009209088325500488, 0.009234623908996582, 0.009224127769470215, 0.009187968254089355, 0.009209856033325196, 0.009260448455810547, 0.009208415985107422, 0.009190815925598145, 0.009314687728881835, 0.009300191879272461, 0.009327775955200195, 0.00931715202331543, 0.009405823707580566, 0.00928179168701172, 0.009291872024536133, 0.009333087921142577, 0.009225855827331543, 0.009308544158935547, 0.009441280364990234, 0.009404416084289552, 0.00936143970489502, 0.009383904457092284, 0.009496576309204101, 0.00932863998413086, 0.009326623916625977, 0.009356575965881348, 0.009364192008972167, 0.009404383659362792, 0.00935321617126465, 0.009359040260314942, 0.009449248313903808, 0.009282079696655274, 0.009332736015319825, 0.009279808044433594, 0.009279168128967286, 0.009332736015319825, 0.009297120094299316, 0.009255711555480956, 0.009210911750793457, 0.009200927734375, 0.00928435230255127, 0.009196479797363282, 0.009171072006225587, 0.009148287773132324, 0.009137439727783202, 0.009206496238708495, 0.009230367660522461, 0.009295392036437988, 0.00948419189453125, 0.009329055786132813, 0.009330816268920898, 0.00929974365234375, 0.009289952278137207, 0.009328960418701172, 0.009340352058410644, 0.009244768142700196, 0.009261216163635254, 0.009223296165466308, 0.009198464393615723, 0.009148415565490722, 0.009164159774780273, 0.009168928146362304, 0.008978176116943359, 0.009177248001098632, 0.009235679626464844, 0.009191583633422852, 0.00923852825164795, 0.009224543571472168, 0.00926144027709961, 0.009307423591613769, 0.00932470417022705, 0.009371968269348144, 0.00944054412841797, 0.009470975875854493, 0.00949836826324463, 0.009414624214172364, 0.009404416084289552, 0.009512031555175781, 0.009319328308105468, 0.00943734359741211, 0.009784992218017579, 0.00940006446838379, 0.009429759979248047, 0.009385663986206055, 0.009420831680297852, 0.00944649600982666, 0.009388863563537598, 0.009461600303649903, 0.00950432014465332, 0.009396896362304688, 0.009358783721923829, 0.009509632110595703, 0.009485183715820312, 0.009565119743347167, 0.009440896034240722, 0.009501055717468261, 0.009449472427368164, 0.009455615997314454, 0.009541728019714356, 0.00956118392944336, 0.00956704044342041, 0.009621503829956055, 0.009667648315429687, 0.009649087905883789, 0.009653440475463867, 0.009630528450012207, 0.00954684829711914, 0.009528287887573242, 0.009607104301452637, 0.00964633560180664, 0.00960867214202881, 0.00953987216949463, 0.00951296043395996, 0.00957750415802002, 0.009636832237243651, 0.009539711952209472, 0.009707391738891602, 0.009590527534484864, 0.009701631546020507, 0.009517056465148926, 0.00933843231201172, 0.009386367797851562, 0.009386048316955567, 0.00942899227142334, 0.009451519966125489, 0.009762687683105469, 0.009825856208801269, 
0.009831199645996094, 0.010010623931884765, 0.009989631652832032, 0.009808032035827637, 0.009736543655395507, 0.009832287788391113, 0.00966089630126953, 0.00966419219970703, 0.009672160148620606, 0.00967734432220459, 0.009697279930114745, 0.009645088195800781, 0.009729120254516601, 0.00966438388824463, 0.009651488304138183, 0.009741120338439942, 0.009580448150634765, 0.00962713623046875, 0.009621664047241211, 0.009630047798156738, 0.009644351959228515, 0.009603008270263672, 0.00969600009918213, 0.009614336013793945, 0.00963379192352295, 0.009574399948120118, 0.009659520149230957, 0.009605664253234864, 0.009557696342468261, 0.009563039779663086, 0.009530176162719726, 0.009477184295654298, 0.009541567802429199, 0.009611328125, 0.009811840057373046, 0.009597151756286622, 0.00961235237121582, 0.009691871643066407, 0.00961740779876709, 0.009537376403808594, 0.01026863956451416, 0.009468095779418945, 0.009485919952392579, 0.009451744079589843, 0.009508064270019531, 0.009446368217468262, 0.009453568458557129, 0.009437184333801269, 0.009490464210510254, 0.009437151908874512, 0.009441280364990234, 0.009365119934082032, 0.00928985595703125, 0.009369440078735351, 0.00936956787109375, 0.00932271957397461, 0.009318623542785644, 0.009276736259460449, 0.00928179168701172, 0.00924623966217041, 0.009255840301513671, 0.008937664031982422, 0.009189920425415039, 0.009183199882507324, 0.009181216239929199, 0.009158207893371581, 0.009146816253662109, 0.009114784240722656, 0.009098079681396485, 0.009133952140808105, 0.009097344398498535, 0.009136128425598144, 0.009514143943786621, 0.010959775924682617, 0.010619935989379882, 0.009345952033996583, 0.00922214412689209, 0.009215104103088378, 0.009132160186767577, 0.009325311660766602, 0.009209856033325196, 0.00925836753845215, 0.009120384216308593, 0.009431039810180664, 0.00919257640838623, 0.009148544311523437, 0.00935807991027832, 0.009140224456787109, 0.009138175964355469, 0.009151552200317383, 0.009164735794067383, 0.00926643180847168, 0.009193535804748534, 0.009223423957824706, 0.009178815841674804, 0.009118271827697753, 0.009164992332458497, 0.009144319534301757, 0.009164799690246582, 0.009109503746032714, 0.009140031814575195, 0.009151743888854981, 0.009104000091552735, 0.009156479835510253, 0.009187775611877442, 0.009119199752807618, 0.009103903770446778, 0.009136351585388183, 0.009238304138183594, 0.009223487854003906, 0.009155263900756836, 0.009121055603027345, 0.009149087905883788, 0.009164192199707032, 0.009195775985717773, 0.009300479888916016, 0.009119647979736328, 0.009152544021606446, 0.009131999969482421, 0.009174688339233399, 0.0091527681350708, 0.009105567932128905, 0.00916380786895752, 0.009155488014221192, 0.008931455612182617, 0.009167072296142578, 0.009155200004577637, 0.009161919593811034, 0.009159487724304199, 0.009138143539428711, 0.009121824264526367, 0.009138175964355469, 0.009111680030822755, 0.009137568473815917, 0.00918934440612793, 0.009284095764160156, 0.009119744300842286, 0.009228287696838379, 0.009516608238220215, 0.009161151885986329, 0.009191424369812011, 0.00922812843322754, 0.009181568145751953, 0.0092260160446167, 0.009199359893798827, 0.010575903892517089, 0.00928159999847412, 0.009180671691894531, 0.009163423538208008, 0.009210911750793457, 0.009171839714050292, 0.009148672103881835, 0.009175040245056153, 0.00957795238494873, 0.009149056434631347, 0.009210911750793457, 0.009133888244628905, 0.009147232055664062, 0.009126976013183593, 0.009194111824035645, 0.009130335807800294, 0.00909727954864502, 0.00916476821899414, 
0.009108960151672363, 0.00924880027770996, 0.00909727954864502, 0.009240127563476563, 0.009094207763671876, 0.009115455627441406, 0.009126175880432129, 0.009091967582702637, 0.009237279891967773, 0.009153984069824218, 0.009165472030639648, 0.009119711875915527, 0.009147583961486816, 0.009141119956970215, 0.00912377643585205, 0.009103391647338867, 0.009366656303405762, 0.00915135955810547, 0.009183072090148925, 0.009146783828735352, 0.009182432174682616, 0.009101856231689453, 0.009140416145324707, 0.009089088439941407, 0.008951807975769043, 0.009136128425598144, 0.009203264236450195, 0.009165311813354492, 0.009153504371643067, 0.009099967956542969, 0.00914038372039795, 0.009157024383544921, 0.009111231803894042, 0.009099295616149902, 0.009142271995544434, 0.009237824440002441, 0.00911574363708496, 0.009192031860351562, 0.009180447578430177, 0.009135871887207032, 0.00918627166748047, 0.009244671821594238, 0.009224160194396973, 0.009387264251708985, 0.009204544067382813, 0.009205727577209473, 0.009121696472167968, 0.009244192123413085, 0.009113247871398927, 0.009105695724487304, 0.009184032440185548, 0.009099072456359863, 0.009142175674438476, 0.009134207725524902, 0.009368960380554198, 0.009149056434631347, 0.009109503746032714, 0.009179007530212403, 0.009164832115173339, 0.009148672103881835, 0.00912889575958252, 0.009165056228637695, 0.00920028781890869, 0.009172991752624511, 0.009224160194396973, 0.009209888458251953, 0.009102720260620117, 0.009120575904846192, 0.00915231990814209, 0.009148415565490722, 0.009135104179382325, 0.009145343780517578, 0.009101183891296386, 0.00911580753326416, 0.009088640213012695, 0.009158143997192383, 0.009223008155822754, 0.009103360176086426, 0.009326784133911133, 0.009200799942016601, 0.00914243221282959, 0.009173503875732422, 0.009150464057922364, 0.009154560089111329, 0.009117728233337402, 0.009184831619262696, 0.009133824348449707, 0.009065343856811523, 0.009183232307434081, 0.00910752010345459, 0.009154208183288574, 0.00922652816772461, 0.009280703544616698, 0.009169440269470214, 0.009213600158691406, 0.009253408432006836, 0.009184960365295411, 0.009170975685119629, 0.00918278408050537, 0.009163583755493164, 0.009129983901977539, 0.009142271995544434, 0.009116703987121583, 0.009164863586425782, 0.009319328308105468, 0.009152511596679687, 0.0091495361328125, 0.009184160232543946, 0.00912611198425293, 0.00917024040222168, 0.009154144287109376, 0.009175935745239257, 0.009188672065734864, 0.009221952438354492, 0.009149056434631347, 0.009183775901794434, 0.009166560173034667, 0.009153599739074708, 0.009116640090942383, 0.009129952430725098, 0.009123231887817383, 0.009124095916748048, 0.009144800186157227, 0.009178432464599609, 0.009120320320129395, 0.009143808364868163, 0.009180800437927246, 0.009158880233764648, 0.009105759620666504, 0.00909727954864502, 0.009113856315612793, 0.009125887870788574, 0.009111328125, 0.00912816047668457, 0.009135680198669434, 0.009130368232727051, 0.009127776145935058, 0.009156831741333008, 0.009219840049743652, 0.009105664253234863, 0.009129280090332032, 0.009108160018920898, 0.009127840042114258, 0.009095264434814452, 0.009136128425598144, 0.00913542366027832, 0.009140928268432618, 0.009181183815002441, 0.009131232261657714, 0.009141023635864259, 0.00924505615234375, 0.009340800285339356, 0.009297183990478516, 0.009321632385253907, 0.009225919723510742, 0.009322112083435058, 0.009471391677856445, 0.009503840446472168, 0.009500543594360351, 0.00949392032623291, 0.009496352195739747, 0.009342880249023437, 
0.009273471832275391, 0.009305120468139649, 0.009289440155029296, 0.009334815979003906, 0.009473247528076172, 0.009223135948181152, 0.009225728034973145, 0.009240896224975585, 0.009232383728027344, 0.009271167755126953, 0.00928371238708496, 0.009256319999694824, 0.009211615562438965, 0.0091943998336792, 0.009150464057922364, 0.00919961643218994, 0.009150464057922364, 0.0092609281539917, 0.009905471801757812, 0.010527039527893066, 0.009738752365112305, 0.009246272087097168, 0.010176959991455079, 0.009252863883972168, 0.009269472122192382, 0.009390975952148438, 0.009339360237121582, 0.009269696235656738, 0.009256768226623534, 0.009357503890991211, 0.009222368240356445, 0.009342752456665038, 0.00922985553741455, 0.009181280136108399, 0.00925324821472168, 0.00920364761352539, 0.009138239860534667, 0.009240575790405273, 0.009166655540466308, 0.00917728042602539, 0.009146368026733399, 0.00911359977722168, 0.009225503921508789, 0.009280223846435547, 0.009166848182678223, 0.009168895721435547, 0.009342975616455078, 0.009181023597717285, 0.009262944221496582, 0.009121952056884765, 0.009146335601806641, 0.008925951957702637, 0.00912604808807373, 0.009217439651489258, 0.009218688011169434, 0.009207615852355957, 0.009249792098999024, 0.00919654369354248, 0.009201663970947266, 0.0091626558303833, 0.00913315200805664, 0.009123071670532227, 0.009481568336486817, 0.009398591995239259, 0.00996771240234375, 0.009885696411132813, 0.009217344284057617, 0.009226688385009766, 0.009304160118103028, 0.009156160354614258, 0.009183839797973633, 0.00922214412689209, 0.009154560089111329, 0.009265151977539063, 0.009157631874084473, 0.009341376304626465, 0.009555808067321777, 0.00927030372619629, 0.009237919807434082, 0.009228575706481933, 0.009273344039916993, 0.009148415565490722, 0.009122879981994628, 0.00915552043914795, 0.009129983901977539, 0.009211647987365722, 0.009545984268188477, 0.009383935928344727, 0.009209407806396484, 0.009247167587280274, 0.009224191665649414, 0.009240575790405273, 0.009265151977539063, 0.009203712463378906, 0.009187328338623046, 0.009125663757324218, 0.009158592224121093, 0.00923801612854004, 0.009235487937927247, 0.009148447990417481, 0.009144191741943359, 0.009176095962524415, 0.009222208023071289, 0.009261823654174805, 0.00922544002532959, 0.009201791763305664, 0.00923305606842041, 0.009265151977539063, 0.009445376396179199, 0.009269503593444825, 0.009272800445556641, 0.009285856246948242, 0.009344896316528321, 0.009212096214294434, 0.00928809642791748, 0.00932636833190918, 0.00935097599029541, 0.00931715202331543, 0.009386240005493164, 0.00927455997467041, 0.00927609634399414, 0.009412544250488281, 0.009304448127746582, 0.009311967849731446, 0.009277471542358398, 0.009259008407592773, 0.00920911979675293, 0.009237215995788575, 0.009269248008728028, 0.009244671821594238, 0.009191360473632813, 0.00914851188659668, 0.009119744300842286, 0.009146207809448243, 0.00909939193725586, 0.009143648147583007, 0.009107456207275391, 0.009171615600585938, 0.009170944213867188, 0.009129664421081544, 0.00916652774810791, 0.009194111824035645, 0.009184479713439941, 0.009255711555480956, 0.009234335899353028, 0.009171327590942384, 0.009793248176574707, 0.009351424217224121, 0.009511967658996583, 0.009210880279541016, 0.009209600448608399, 0.009166239738464355, 0.009149344444274902, 0.009198464393615723, 0.009142848014831543, 0.009097439765930176, 0.009168895721435547, 0.009140416145324707, 0.009142208099365234, 0.009113471984863281, 0.009154560089111329, 0.009138143539428711, 0.009145855903625488, 
0.009169407844543457, 0.009099424362182617, 0.009154111862182616, 0.009095487594604492, 0.009158656120300293, 0.009150464057922364, 0.009158143997192383, 0.009187840461730956, 0.009178655624389648, 0.009157088279724121, 0.00912179183959961, 0.009179136276245118, 0.009312255859375, 0.009215999603271484, 0.009751328468322754, 0.009713760375976562, 0.009693087577819825, 0.009551520347595216, 0.009419103622436524, 0.009381855964660644, 0.009269280433654785, 0.009273152351379394, 0.009279232025146484, 0.009269087791442872, 0.009224639892578126, 0.009304224014282226, 0.009250016212463378, 0.009191583633422852, 0.009243264198303222, 0.009254303932189942, 0.009267680168151856, 0.009222271919250489, 0.00920588779449463, 0.009324416160583496, 0.00924403190612793, 0.00926540756225586, 0.009294207572937013, 0.00923801612854004, 0.009228832244873047, 0.00925487995147705, 0.009242719650268554, 0.009234335899353028, 0.00925209617614746, 0.009210623741149903, 0.00919046401977539, 0.009161664009094237, 0.00916431999206543, 0.009185759544372559, 0.009242464065551757, 0.009189536094665527, 0.009176959991455078, 0.009193599700927734, 0.009207807540893554, 0.009163840293884278, 0.009191360473632813, 0.009205984115600587, 0.009159456253051758, 0.009223615646362305, 0.009206080436706543, 0.009257216453552247, 0.009215680122375489, 0.00917478370666504, 0.009219776153564454, 0.009177984237670899, 0.009205023765563964, 0.009197407722473145, 0.009150527954101562, 0.009134528160095214, 0.009118047714233399, 0.00912816047668457, 0.009236384391784667, 0.009197471618652343, 0.009156864166259765, 0.009123295783996581, 0.009170656204223632, 0.009113183975219727, 0.009179743766784668, 0.00901696014404297, 0.009184800148010253, 0.009143136024475097, 0.009207039833068848, 0.00918115234375, 0.009212767601013184, 0.00915555191040039, 0.00948147201538086, 0.009291328430175782, 0.00942300796508789, 0.009319647789001465, 0.009251615524291993, 0.009183232307434081, 0.009215999603271484, 0.009227840423583985, 0.00917734432220459, 0.00919161605834961, 0.009148415565490722, 0.009116703987121583, 0.009092063903808593, 0.009166848182678223, 0.009156864166259765, 0.009148192405700684, 0.009189408302307129, 0.00909500789642334, 0.009139328002929688, 0.009126367568969727, 0.009171263694763183, 0.009146559715270995, 0.009132384300231933, 0.00914406394958496, 0.009091168403625489, 0.009164608001708985, 0.009154560089111329, 0.009168895721435547, 0.009141568183898927, 0.009135968208312988, 0.009179807662963868, 0.0091342716217041, 0.009101311683654785, 0.009111552238464356, 0.009136128425598144, 0.009098655700683593, 0.009124383926391602, 0.009152128219604492, 0.009093791961669921, 0.009149248123168946, 0.009151455879211425, 0.009194751739501954, 0.009193535804748534, 0.009228992462158202, 0.009351231575012208, 0.009172927856445313, 0.009326560020446777, 0.009420063972473145, 0.00916966438293457, 0.009134079933166504, 0.009134079933166504, 0.009166848182678223, 0.009180992126464844, 0.009150112152099609, 0.009140768051147462, 0.009155712127685546]",tokens/s,107.68093284907691,,, 
4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4259.401728,5784.928256,0.0,5389.68064,5000.446464,s,1,11.729078125,11.729078125,0.0,11.729078125,11.729078125,11.729078125,11.729078125,[11.729078125],,kWh,0.00013139701470000394,1.4465109525743916e-05,5.4969766197981995e-05,0.00020083189042372987,,MB,1389.924352,5801.705472,0.0,5393.874944,4700.829696,s,10,29.1660693359375,2.9166069335937506,0.003379935020874598,2.9171240234375,2.919841650390625,2.9200343505859374,2.9201885107421877,"[2.908759033203125, 2.913849365234375, 2.914615966796875, 2.915674560546875, 2.916779296875, 2.91746875, 2.919466552734375, 2.919429931640625, 2.92022705078125, 2.919798828125]",tokens/s,87.7732261592634,kWh,8.504647543583359e-05,9.38051017194422e-06,5.6483989631600505e-05,0.00015091097523937833,tokens/kWh,1696364.3604709806,MB,1389.924352,5801.705472,0.0,5393.874944,4877.453824,s,10,16.09183557128906,1.6091835571289064,0.0016500610799483914,1.6091121826171875,1.6106735961914063,1.6117986511230469,1.6126986950683593,"[1.6086622314453125, 1.6089632568359375, 1.6129237060546875, 1.607747314453125, 1.60747509765625, 1.609567626953125, 1.6092611083984374, 1.606819091796875, 1.610423583984375, 1.6099925537109374]",tokens/s,39.150288182414776,kWh,4.717517559125327e-05,5.203773586761981e-06,3.1488108523800014e-05,8.386705770181517e-05,tokens/kWh,751188.8663602952,,s,630,16.088644878387438,0.025537531552995957,0.00032920167288641,0.025559120178222657,0.025860710906982422,0.025961028480529787,0.026728650798797608,"[0.026750688552856446, 0.026063552856445314, 0.025384960174560548, 0.02521897506713867, 0.025051231384277343, 0.025069120407104493, 0.025020864486694334, 0.02503696060180664, 0.025009695053100585, 0.025018688201904296, 0.02498150444030762, 0.025200639724731445, 0.025196544647216795, 0.025241600036621094, 0.025208127975463866, 0.025133760452270507, 0.02517196846008301, 0.025136768341064455, 0.025350048065185548, 0.025332191467285155, 0.025233407974243165, 0.025237472534179687, 0.025191680908203125, 0.0252607364654541, 0.025501792907714843, 0.025448448181152345, 0.02552422332763672, 0.025520128250122072, 0.025542240142822265, 0.025431999206542967, 0.025539039611816406, 0.025579519271850586, 0.025560192108154297, 0.02553945541381836, 0.025569280624389647, 0.025589344024658203, 0.025615999221801758, 0.025739904403686523, 0.02596236801147461, 0.02599679946899414, 0.025923583984375, 0.02587900733947754, 0.025742816925048827, 0.025584480285644532, 0.025551935195922852, 0.025559999465942382, 0.025622528076171876, 0.025818592071533204, 0.025608736038208006, 0.025594976425170897, 0.025612960815429686, 0.027001087188720702, 0.025448223114013672, 0.025607999801635743, 0.025720319747924804, 0.025756479263305664, 0.02578646469116211, 0.02568191909790039, 0.025589759826660157, 0.025675775527954102, 0.025831424713134765, 0.025833471298217774, 0.025769695281982422, 
0.026617664337158203, 0.02584988784790039, 0.025318143844604492, 0.025222751617431642, 0.025077823638916016, 0.025186656951904297, 0.025114688873291015, 0.025151424407958985, 0.025049087524414062, 0.025008127212524413, 0.025470624923706053, 0.02510470390319824, 0.025241632461547852, 0.02521625518798828, 0.025117439270019533, 0.025083904266357423, 0.025275968551635743, 0.025254335403442383, 0.025081472396850588, 0.025160064697265627, 0.02521628761291504, 0.02538159942626953, 0.025438207626342774, 0.02543596839904785, 0.02537696075439453, 0.025425151824951173, 0.02531113624572754, 0.025471616744995117, 0.025583839416503905, 0.025645055770874024, 0.025558847427368164, 0.025518112182617188, 0.02555504035949707, 0.025444192886352537, 0.025569503784179687, 0.025605567932128905, 0.025657920837402343, 0.02575155258178711, 0.025961503982543946, 0.02603107261657715, 0.02575529670715332, 0.025649023056030274, 0.025737695693969727, 0.02571468734741211, 0.025640960693359374, 0.02569011116027832, 0.02571059226989746, 0.025665536880493164, 0.025606143951416017, 0.02549964714050293, 0.02555084800720215, 0.025612031936645508, 0.02569036865234375, 0.025825216293334962, 0.025744703292846678, 0.025735488891601564, 0.025817344665527344, 0.025830911636352538, 0.02575017547607422, 0.025959808349609374, 0.026086048126220705, 0.025961824417114258, 0.02584796714782715, 0.026719039916992187, 0.025845760345458983, 0.025427936553955078, 0.025190143585205077, 0.02499203109741211, 0.025038719177246094, 0.025045120239257812, 0.025034751892089844, 0.024982784271240236, 0.02506604766845703, 0.02518239974975586, 0.025184064865112304, 0.025589792251586915, 0.02745903968811035, 0.02511235237121582, 0.02508073616027832, 0.025116479873657227, 0.02537081527709961, 0.02533785629272461, 0.02532761573791504, 0.025270240783691406, 0.02549260711669922, 0.02550876808166504, 0.025413631439208984, 0.025479167938232423, 0.02553446388244629, 0.02552934455871582, 0.025481727600097655, 0.025459199905395507, 0.025444032669067383, 0.025749824523925782, 0.025389055252075195, 0.025472223281860353, 0.025522464752197264, 0.02561235237121582, 0.02563443183898926, 0.025757919311523436, 0.025756256103515625, 0.02585385513305664, 0.025996608734130858, 0.025958976745605468, 0.026241119384765626, 0.025780351638793945, 0.02577401542663574, 0.025626688003540038, 0.025585664749145507, 0.02572697639465332, 0.02572287940979004, 0.02572492790222168, 0.02573833656311035, 0.025840192794799804, 0.025769567489624022, 0.02571955108642578, 0.025734848022460937, 0.025710527420043944, 0.02563929557800293, 0.02568806457519531, 0.025886335372924806, 0.025804767608642577, 0.025786783218383787, 0.025823232650756835, 0.025911296844482422, 0.025960447311401368, 0.02671174430847168, 0.025864511489868163, 0.025480703353881837, 0.025338367462158205, 0.02520841598510742, 0.025155168533325195, 0.025155839920043947, 0.02505945587158203, 0.02508559989929199, 0.02506831932067871, 0.025124864578247072, 0.025091680526733398, 0.025083904266357423, 0.025239967346191407, 0.025245439529418944, 0.025166080474853515, 0.025210655212402344, 0.02534217643737793, 0.025370624542236327, 0.025341951370239257, 0.025403263092041016, 0.025315296173095702, 0.025323680877685547, 0.025343904495239256, 0.02537059211730957, 0.02556105613708496, 0.025718463897705077, 0.025610719680786133, 0.02550783920288086, 0.025444351196289062, 0.025464351654052735, 0.025424352645874025, 0.025513055801391602, 0.025617311477661133, 0.02557481575012207, 0.02543881607055664, 0.025641984939575195, 0.025684991836547853, 
0.025676959991455077, 0.025666400909423827, 0.025806272506713867, 0.025827903747558594, 0.025831071853637696, 0.025866592407226562, 0.025841663360595703, 0.02575155258178711, 0.025655296325683592, 0.025647104263305662, 0.025618431091308593, 0.025487360000610353, 0.025449695587158202, 0.025555391311645508, 0.02555939292907715, 0.02561414337158203, 0.025692352294921873, 0.02569808006286621, 0.02568623924255371, 0.02555084800720215, 0.025630016326904297, 0.025625280380249024, 0.02572492790222168, 0.025773887634277345, 0.025905344009399416, 0.026732576370239257, 0.025757183074951173, 0.025468704223632812, 0.025313983917236327, 0.02514739227294922, 0.02501968002319336, 0.02504960060119629, 0.02512816047668457, 0.02519945526123047, 0.025151647567749024, 0.025133056640625, 0.02525814437866211, 0.025229120254516603, 0.02523753547668457, 0.025243648529052733, 0.02512892723083496, 0.025277631759643555, 0.02519331169128418, 0.025362432479858397, 0.025372671127319335, 0.025394752502441408, 0.025407360076904296, 0.025366432189941408, 0.025385631561279296, 0.02543404769897461, 0.025354303359985352, 0.025443679809570314, 0.025495904922485352, 0.025512256622314454, 0.025425920486450194, 0.025407487869262696, 0.02541904067993164, 0.025461471557617188, 0.025386592864990235, 0.02536899185180664, 0.02554265594482422, 0.02561177635192871, 0.025614048004150392, 0.025623327255249025, 0.02570595169067383, 0.02577187156677246, 0.025785024642944337, 0.025802751541137696, 0.025827327728271485, 0.025599807739257813, 0.025501184463500977, 0.025568960189819336, 0.02569113540649414, 0.025761472702026368, 0.02614918327331543, 0.02575542449951172, 0.02568828773498535, 0.025671680450439452, 0.025659391403198242, 0.025612287521362305, 0.025595903396606445, 0.025624576568603515, 0.02572083282470703, 0.02573334312438965, 0.02574006462097168, 0.02574569511413574, 0.02569196891784668, 0.02571356773376465, 0.02693596839904785, 0.02584726333618164, 0.02541638374328613, 0.025191648483276367, 0.02514998435974121, 0.025098623275756834, 0.025034208297729493, 0.025016864776611327, 0.025077632904052734, 0.025194623947143554, 0.025116672515869142, 0.02511788749694824, 0.025137983322143554, 0.025294527053833008, 0.025243967056274415, 0.02517196846008301, 0.02532761573791504, 0.02533955192565918, 0.025343360900878905, 0.025406400680541993, 0.025483295440673827, 0.02546832084655762, 0.025375200271606446, 0.025327392578125, 0.02534163284301758, 0.025402015686035156, 0.025397184371948243, 0.025710655212402345, 0.025689439773559572, 0.02552284812927246, 0.025393056869506835, 0.025451871871948244, 0.02547727966308594, 0.02546339225769043, 0.02546073532104492, 0.025750688552856445, 0.02573967933654785, 0.02572127914428711, 0.025972736358642577, 0.025851903915405275, 0.02579257583618164, 0.025564832687377928, 0.025575391769409178, 0.025589439392089845, 0.025655935287475586, 0.025728416442871094, 0.025723007202148436, 0.025719263076782226, 0.025637983322143554, 0.025651872634887694, 0.025690048217773438, 0.025641088485717774, 0.02562393569946289, 0.025916000366210938, 0.025939584732055664, 0.026108512878417967, 0.02590924835205078, 0.025763391494750976, 0.025778528213500976, 0.025677919387817383, 0.0256407356262207, 0.025682144165039063, 0.025773855209350587, 0.02698716735839844, 0.025975008010864258, 0.025462560653686524, 0.025212928771972655, 0.025130815505981445, 0.025069536209106444, 0.02502668762207031, 0.02511676788330078, 0.02516927909851074, 0.025154048919677735, 0.025067327499389648, 0.025049087524414062, 0.02508012771606445, 
0.02509516716003418, 0.02500819206237793, 0.025099199295043947, 0.025243648529052733, 0.025432064056396485, 0.02547439956665039, 0.025442975997924805, 0.025347583770751952, 0.025333824157714843, 0.025229759216308593, 0.025274368286132814, 0.02537676811218262, 0.02536857604980469, 0.0253439998626709, 0.025491104125976563, 0.025590112686157226, 0.025573375701904297, 0.025475072860717773, 0.0255467529296875, 0.025587711334228515, 0.02555084800720215, 0.025571136474609374, 0.02558790397644043, 0.025646207809448242, 0.025705343246459962, 0.025860288619995116, 0.025843488693237306, 0.025780256271362306, 0.02571059226989746, 0.02557535934448242, 0.02556710433959961, 0.02558332824707031, 0.025786848068237306, 0.02591744041442871, 0.025832895278930665, 0.025735744476318358, 0.02573107147216797, 0.025726112365722656, 0.025731935501098632, 0.025651199340820312, 0.0256975040435791, 0.025768415451049805, 0.02571295928955078, 0.025735008239746095, 0.02581315231323242, 0.025810304641723632, 0.02582796859741211, 0.026036224365234374, 0.025896575927734374, 0.02573686408996582, 0.026806079864501953, 0.025878271102905272, 0.02539155197143555, 0.025081920623779296, 0.025094079971313476, 0.025012224197387696, 0.02501356887817383, 0.025004512786865236, 0.025091999053955077, 0.02506755256652832, 0.025137439727783203, 0.024979455947875977, 0.025241119384765625, 0.02605923271179199, 0.02504425621032715, 0.025035327911376953, 0.025249759674072267, 0.025237312316894533, 0.025179744720458985, 0.025215776443481445, 0.025247488021850586, 0.02529100799560547, 0.025212928771972655, 0.025273504257202147, 0.025449312210083008, 0.02545254325866699, 0.025392543792724608, 0.025370527267456054, 0.025365184783935547, 0.025372671127319335, 0.02550783920288086, 0.025643007278442383, 0.025595903396606445, 0.02548476791381836, 0.025351776123046874, 0.02561324882507324, 0.025718751907348632, 0.02563484764099121, 0.02561756706237793, 0.025735904693603515, 0.02572096061706543, 0.025572704315185546, 0.02553910446166992, 0.02555507278442383, 0.02557868766784668, 0.025532480239868163, 0.02555366325378418, 0.025703487396240236, 0.02564806365966797, 0.025655040740966795, 0.025644607543945312, 0.0256375675201416, 0.025552896499633788, 0.025685279846191407, 0.025729759216308594, 0.025788415908813478, 0.025894336700439453, 0.025927488327026366, 0.025920255661010742, 0.02588057518005371, 0.025876415252685546, 0.02581100845336914, 0.02590460777282715, 0.026578208923339845, 0.025716800689697266, 0.025268735885620116, 0.025078144073486328, 0.025097888946533205, 0.025051519393920897, 0.02499305534362793, 0.025010623931884766, 0.02500761604309082, 0.025008895874023437, 0.02494879913330078, 0.0251473274230957, 0.025264127731323242, 0.025266176223754884, 0.025134368896484374, 0.025184864044189452, 0.025229440689086915, 0.025264127731323242, 0.025263200759887694, 0.025332639694213867, 0.025365983963012696, 0.025754079818725586, 0.025368640899658203, 0.025409536361694338, 0.025439264297485352, 0.025485696792602538, 0.025391199111938476, 0.025435871124267578, 0.025455232620239257, 0.02535785675048828, 0.025243295669555663, 0.025445247650146486, 0.025606239318847656, 0.02560985565185547, 0.025705984115600586, 0.0257030086517334, 0.02585219192504883, 0.02592767906188965, 0.025944063186645508, 0.02590105628967285, 0.025915391921997072, 0.02583318328857422, 0.025874271392822265, 0.025798559188842773, 0.025690271377563478, 0.025678207397460937, 0.02566044807434082, 0.025647775650024414, 0.02560767936706543, 0.025929759979248047, 0.02594691276550293, 
0.025795583724975587, 0.025758527755737306, 0.02583683204650879, 0.025856927871704103, 0.025843711853027345, 0.02583510398864746, 0.02576630401611328, 0.025774080276489256, 0.025773567199707033, 0.02603670310974121, 0.02605673599243164, 0.02587843132019043, 0.02668339157104492, 0.025769535064697265, 0.025364927291870117, 0.02520659255981445, 0.025089887619018553, 0.025038656234741212, 0.02510406494140625, 0.02523017692565918, 0.025217023849487305, 0.025135103225708007, 0.025216960906982423, 0.02521238327026367, 0.025148000717163086, 0.025162912368774413, 0.025148256301879883, 0.02530508804321289, 0.025237407684326172, 0.025161823272705077, 0.025212608337402343, 0.025214431762695312, 0.025270303726196288, 0.02534662437438965, 0.025311487197875977, 0.025464832305908205, 0.02615910339355469, 0.02532761573791504, 0.025392223358154296, 0.026376672744750976, 0.025403839111328125, 0.02534121513366699, 0.025285343170166015, 0.025444351196289062, 0.025579200744628907, 0.02557779121398926, 0.025577472686767577, 0.025535871505737304, 0.025557056427001953, 0.025817663192749023, 0.02598297691345215, 0.025948160171508788, 0.02581830406188965, 0.025801536560058593, 0.02584899139404297, 0.025705312728881834, 0.02576755142211914, 0.025767616271972656, 0.025683807373046874, 0.025717599868774414, 0.025838943481445314, 0.025702207565307618, 0.02566435241699219, 0.025761791229248047, 0.025768991470336913, 0.025717599868774414, 0.0256943359375, 0.02575526428222656, 0.025768320083618165, 0.02575926399230957, 0.025651679992675782, 0.025634815216064453, 0.02571059226989746, 0.025792032241821288, 0.025803232192993165]",tokens/s,39.15805245016659,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = 
_cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' "
4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,2188.980224,2907.635712,0.0,2512.388096,2240.694784,s,1,9.7695615234375,9.7695615234375,0.0,9.7695615234375,9.7695615234375,9.7695615234375,9.7695615234375,[9.7695615234375],,kWh,7.267370287916795e-05,8.008815164698533e-06,2.8941412041988412e-05,0.0001096239300858549,,MB,2220.744704,2922.315776,0.0,2514.485248,2226.413568,s,10,11.320579589843748,1.1320579589843747,0.00220818888242787,1.131412109375,1.135014697265625,1.1358196655273438,1.1364636401367187,"[1.13111181640625, 1.1293829345703126, 1.13000927734375, 1.129998291015625, 1.13131396484375, 1.132047607421875, 1.1337449951171874, 1.13151025390625, 1.1366246337890624, 1.1348358154296876]",tokens/s,226.1368315714774,kWh,3.3219415524163195e-05,3.66364249167958e-06,2.218648997140138e-05,5.906954798724415e-05,tokens/kWh,4333874.3688250715,MB,2223.202304,2922.315776,0.0,2514.485248,2337.090048,s,10,14.727046752929688,1.472704675292969,0.01827515411526005,1.4760736694335939,1.4904201782226563,1.496946661376953,1.5021678479003906,"[1.50347314453125, 1.4839168701171874, 1.4779920654296874, 1.488410888671875, 1.4889698486328125, 1.4606937255859376, 1.4510174560546876, 1.4741552734375, 1.4511300048828124, 1.4472874755859375]",tokens/s,42.77843416737117,kWh,4.2461431748752146e-05,4.683793481167955e-06,2.5790492854596556e-05,7.293571808451665e-05,tokens/kWh,863774.3159942114,,s,630,14.723933340072621,0.023371322762020048,0.0005964451738736854,0.023385295867919924,0.023886754608154297,0.024053939056396485,0.02478540544509888,"[0.024895488739013674, 0.02428108787536621, 0.024162208557128906, 0.02412566375732422, 0.02428505516052246, 0.02400796890258789, 0.02394316864013672, 0.02396342468261719, 0.02384748840332031, 0.023877727508544923, 0.023895711898803712, 0.024006847381591798, 0.024020736694335937, 0.02381279945373535, 0.02369740867614746, 0.023749664306640626, 0.02341142463684082, 0.02344576072692871, 0.023819679260253905, 0.023547584533691407, 0.023505823135375976, 0.02391244888305664, 0.02404351997375488, 0.023994367599487306, 0.02394281578063965, 0.02386569595336914, 0.023822336196899413, 0.023814144134521483, 0.023744512557983398, 0.023799808502197265, 0.023793664932250977, 0.02387353515625, 0.02390220832824707, 0.023851007461547852, 0.023953407287597657, 0.02406982421875, 0.02416774368286133, 0.023966720581054687, 0.023871360778808595, 0.023928960800170897, 0.023993535995483397, 0.0239005126953125, 0.02383830451965332, 
0.023816991806030273, 0.023870784759521483, 0.023820703506469726, 0.023885759353637695, 0.02446156883239746, 0.02357676887512207, 0.023729183197021483, 0.023614431381225588, 0.0235696964263916, 0.023460575103759766, 0.023627775192260742, 0.023582719802856447, 0.023625728607177734, 0.023752639770507813, 0.023735712051391602, 0.02366326332092285, 0.023674400329589843, 0.023770784378051756, 0.023790399551391603, 0.02373222351074219, 0.02451420783996582, 0.024062463760375977, 0.02376246452331543, 0.023933248519897463, 0.023619583129882812, 0.02367830467224121, 0.023575199127197265, 0.023662271499633788, 0.023514944076538084, 0.02358527946472168, 0.023488512039184572, 0.023840192794799805, 0.02371027183532715, 0.023816192626953125, 0.0237172794342041, 0.023749216079711914, 0.023424543380737305, 0.02331286430358887, 0.023435264587402343, 0.023625728607177734, 0.023727167129516603, 0.023395263671875, 0.023448671340942383, 0.023449920654296876, 0.02324540710449219, 0.023433216094970705, 0.02329315185546875, 0.02328451156616211, 0.02309939193725586, 0.022988800048828126, 0.02294131278991699, 0.022978879928588866, 0.02282604789733887, 0.022969343185424804, 0.02285276794433594, 0.022995807647705077, 0.022923263549804687, 0.02289459228515625, 0.022841344833374022, 0.02304614448547363, 0.023334144592285156, 0.023698144912719727, 0.023719871520996094, 0.02369955253601074, 0.023662591934204103, 0.02369126319885254, 0.023618783950805664, 0.023712543487548827, 0.023649696350097657, 0.02380041694641113, 0.02372515106201172, 0.02380064010620117, 0.023760128021240234, 0.02386147117614746, 0.02380044746398926, 0.024550975799560545, 0.024170944213867188, 0.02375657653808594, 0.023644384384155274, 0.02377471923828125, 0.023900672912597655, 0.023805824279785157, 0.02377452850341797, 0.02434809684753418, 0.023167552947998046, 0.02290265655517578, 0.02283123207092285, 0.022769632339477538, 0.022892000198364258, 0.022755231857299805, 0.025170591354370116, 0.022978080749511718, 0.02284351921081543, 0.022686048507690428, 0.0227061767578125, 0.02264473533630371, 0.022683647155761717, 0.022648448944091796, 0.02259744071960449, 0.022615840911865234, 0.0226309757232666, 0.022636768341064453, 0.02272051239013672, 0.022700031280517577, 0.022923263549804687, 0.02319273567199707, 0.02330300712585449, 0.023971424102783204, 0.023386112213134767, 0.023476640701293947, 0.023481632232666017, 0.023667072296142577, 0.023775583267211915, 0.0240392951965332, 0.023793792724609374, 0.02370355224609375, 0.023810047149658203, 0.02372403144836426, 0.02384486389160156, 0.02382972717285156, 0.023990528106689453, 0.02390275192260742, 0.02407356834411621, 0.023795743942260743, 0.024021631240844728, 0.023954559326171875, 0.023962495803833007, 0.023861183166503906, 0.023791168212890627, 0.02357913589477539, 0.023570432662963867, 0.023564287185668945, 0.023549951553344727, 0.023489824295043947, 0.02358255958557129, 0.02342710494995117, 0.023516000747680663, 0.023488191604614257, 0.023400768280029297, 0.02328972816467285, 0.023367807388305663, 0.023262367248535157, 0.023530336380004884, 0.026771039962768556, 0.0236200008392334, 0.023451648712158202, 0.02421299171447754, 0.02359129524230957, 0.023473695755004884, 0.023906911849975586, 0.023584768295288085, 0.023644159317016602, 0.02342911911010742, 0.02364825630187988, 0.02343231964111328, 0.02361612892150879, 0.023560447692871092, 0.0239554557800293, 0.02353936004638672, 0.023603551864624022, 0.02343756866455078, 0.023461631774902344, 0.02328371238708496, 0.023529472351074218, 
0.023412736892700195, 0.02348793601989746, 0.023394527435302733, 0.023666784286499022, 0.02362563133239746, 0.023658559799194335, 0.023503135681152344, 0.023766815185546877, 0.02355574417114258, 0.023476415634155274, 0.023511423110961913, 0.023359487533569336, 0.02342483139038086, 0.023455455780029298, 0.023490655899047853, 0.023548288345336912, 0.023463935852050782, 0.023566335678100587, 0.02345779228210449, 0.023756799697875978, 0.023435264587402343, 0.023436447143554688, 0.023384479522705077, 0.02353740882873535, 0.02340675163269043, 0.02375734329223633, 0.023526527404785155, 0.02362566375732422, 0.02355414390563965, 0.023628639221191405, 0.023545856475830077, 0.023576576232910155, 0.023479520797729494, 0.023604000091552734, 0.023635456085205078, 0.02378598403930664, 0.023541759490966797, 0.023567647933959962, 0.023485151290893555, 0.02345369529724121, 0.023330816268920897, 0.023353343963623048, 0.024657920837402345, 0.02670182418823242, 0.023580671310424805, 0.024350048065185547, 0.02363216018676758, 0.02364044761657715, 0.023745952606201173, 0.023666624069213868, 0.023709407806396486, 0.02359187126159668, 0.02369740867614746, 0.023576576232910155, 0.023635967254638672, 0.023533567428588868, 0.023644128799438478, 0.02353139114379883, 0.023736480712890626, 0.02367487907409668, 0.023726367950439455, 0.023561952590942382, 0.023644159317016602, 0.023564287185668945, 0.023644159317016602, 0.02361667251586914, 0.02353424072265625, 0.023563552856445312, 0.02366556739807129, 0.023447551727294923, 0.023539327621459962, 0.023521663665771485, 0.02364825630187988, 0.02353705596923828, 0.023725824356079103, 0.02364627265930176, 0.02361356735229492, 0.0234967041015625, 0.023409311294555663, 0.02346182441711426, 0.023506208419799803, 0.023423391342163084, 0.0234520320892334, 0.023394304275512694, 0.023433216094970705, 0.023430688858032227, 0.0236343994140625, 0.02349260711669922, 0.023708927154541017, 0.02379648017883301, 0.023731487274169922, 0.02354243278503418, 0.023822399139404298, 0.023553983688354492, 0.023598880767822267, 0.02355023956298828, 0.02364543914794922, 0.02355027198791504, 0.023818687438964845, 0.02355718421936035, 0.023702463150024413, 0.023637983322143556, 0.02369334411621094, 0.02365235137939453, 0.023788799285888673, 0.02370227241516113, 0.023919967651367186, 0.02394588851928711, 0.024335968017578126, 0.0239069766998291, 0.023363359451293947, 0.023546079635620117, 0.023568384170532225, 0.02364825630187988, 0.02335651206970215, 0.023186431884765626, 0.023230112075805665, 0.023402463912963866, 0.023341344833374023, 0.0236168327331543, 0.023314271926879883, 0.02355081558227539, 0.023432384490966796, 0.023470144271850586, 0.02290902328491211, 0.022977184295654297, 0.02289459228515625, 0.023311519622802736, 0.02307302474975586, 0.023215904235839843, 0.02303878402709961, 0.02293328094482422, 0.0226777286529541, 0.023011327743530274, 0.02276959991455078, 0.02279020881652832, 0.02276300811767578, 0.022876672744750977, 0.02272051239013672, 0.02315660858154297, 0.02312716865539551, 0.02324787139892578, 0.02331443214416504, 0.02334716796875, 0.02372719955444336, 0.023352256774902345, 0.022865055084228515, 0.022922079086303712, 0.022793792724609376, 0.022815168380737303, 0.022820863723754883, 0.023043264389038087, 0.023571264266967772, 0.023027231216430664, 0.023144351959228517, 0.023199327468872072, 0.02287289619445801, 0.023074304580688477, 0.022886528015136718, 0.02299139213562012, 0.02303385543823242, 0.023366880416870118, 0.02309404754638672, 0.023197696685791015, 0.023230464935302734, 
0.023203840255737306, 0.02313113594055176, 0.02311833572387695, 0.023120384216308593, 0.02327552032470703, 0.023116928100585937, 0.02428108787536621, 0.02344550323486328, 0.023260543823242188, 0.023128704071044923, 0.02284339141845703, 0.022740320205688478, 0.022678176879882814, 0.022855199813842774, 0.022821344375610352, 0.023007232666015624, 0.023166976928710937, 0.02314035224914551, 0.02273219108581543, 0.022700000762939453, 0.022641279220581054, 0.022730752944946288, 0.022675455093383787, 0.0228351993560791, 0.022740991592407226, 0.022992576599121094, 0.022700063705444334, 0.023000511169433593, 0.023165792465209962, 0.02333807945251465, 0.023315359115600585, 0.023392255783081056, 0.02327552032470703, 0.023339008331298827, 0.023154624938964842, 0.02334316825866699, 0.023181312561035155, 0.02344960021972656, 0.023224319458007812, 0.023271360397338868, 0.023212095260620118, 0.023109632492065428, 0.022874111175537108, 0.02283235168457031, 0.02276793670654297, 0.02290121650695801, 0.022801599502563476, 0.023685184478759766, 0.02448851203918457, 0.023138496398925783, 0.02287775993347168, 0.023214527130126953, 0.022855039596557616, 0.02278054428100586, 0.02348646354675293, 0.02285136032104492, 0.022597696304321287, 0.022798175811767577, 0.022740415573120117, 0.023046943664550783, 0.02296022415161133, 0.023158784866333007, 0.022885408401489258, 0.02273369598388672, 0.02257315254211426, 0.02265212821960449, 0.022739168167114257, 0.02266579246520996, 0.022765567779541016, 0.02425651168823242, 0.02450432014465332, 0.02449564743041992, 0.02472742462158203, 0.031978080749511716, 0.02352742385864258, 0.023504896163940428, 0.0235231990814209, 0.02303603172302246, 0.0233123836517334, 0.02315385627746582, 0.023290687561035157, 0.023224319458007812, 0.022990848541259764, 0.022688831329345703, 0.02283932876586914, 0.022901151657104494, 0.02309328079223633, 0.023037824630737305, 0.022950496673583985, 0.02281881523132324, 0.022844640731811524, 0.022993696212768554, 0.023386112213134767, 0.023117055892944337, 0.023061248779296876, 0.02289664077758789, 0.02284339141845703, 0.02273040008544922, 0.022827360153198244, 0.02270207977294922, 0.02306662368774414, 0.023138208389282225, 0.023126079559326173, 0.023152576446533204, 0.023177312850952148, 0.023721536636352538, 0.023816640853881837, 0.023312128067016602, 0.02344166374206543, 0.023388160705566406, 0.023500799179077148, 0.023814079284667968, 0.024817407608032225, 0.024035263061523437, 0.023375999450683593, 0.023172704696655274, 0.023007360458374024, 0.022837791442871094, 0.023006752014160158, 0.022994815826416017, 0.02303027153015137, 0.02304595184326172, 0.022980831146240235, 0.022666559219360352, 0.022749536514282225, 0.022800384521484376, 0.022736480712890625, 0.022644672393798828, 0.02271321678161621, 0.02267955207824707, 0.023828479766845705, 0.0248090877532959, 0.02422166442871094, 0.02366464042663574, 0.02346188735961914, 0.023438976287841796, 0.023300479888916016, 0.023336416244506837, 0.023220415115356444, 0.023320928573608398, 0.02331443214416504, 0.02323967933654785, 0.02297078323364258, 0.022929407119750975, 0.022931488037109374, 0.023011552810668946, 0.02300713539123535, 0.023153087615966798, 0.022967647552490235, 0.022985023498535158, 0.02282326316833496, 0.022738943099975584, 0.022808000564575194, 0.022851551055908203, 0.022796255111694336, 0.02305878448486328, 0.02298819160461426, 0.02285436820983887, 0.022700191497802735, 0.022769311904907226, 0.02287183952331543, 0.023244543075561522, 0.02318623924255371, 0.023117536544799804, 
0.023033472061157228, 0.02312259292602539, 0.02303900718688965, 0.023202144622802734, 0.023108224868774414, 0.023150592803955077, 0.023056192398071287, 0.02309343910217285, 0.022990848541259764, 0.023095071792602537, 0.022884576797485352, 0.022775808334350587, 0.022664543151855468, 0.02287273597717285, 0.023154495239257812, 0.022857311248779297, 0.022849311828613283, 0.02281350326538086, 0.022618112564086915, 0.022773759841918945, 0.023173120498657225, 0.022958080291748048, 0.022960128784179686, 0.022781951904296875, 0.02270207977294922, 0.022761472702026365, 0.02272870445251465, 0.02279529571533203, 0.022872447967529297, 0.023422943115234376, 0.023231103897094728, 0.024118783950805665, 0.023091327667236327, 0.02265331268310547, 0.02267046356201172, 0.02254435157775879, 0.022706432342529295, 0.022643360137939453, 0.02305622482299805, 0.023043455123901366, 0.02325984001159668, 0.023365663528442382, 0.023320640563964844, 0.023164928436279295, 0.023637472152709962, 0.0233907527923584, 0.023330816268920897, 0.023195648193359376, 0.023283327102661133, 0.023136383056640626, 0.023138175964355467, 0.023567808151245116, 0.023251903533935546, 0.022848960876464843, 0.0228603515625, 0.0227891845703125, 0.02301228713989258, 0.023004959106445313, 0.022823007583618164, 0.02274675178527832, 0.022931968688964844, 0.022734848022460938, 0.022849536895751952, 0.02268569564819336, 0.022781951904296875, 0.022841344833374022, 0.022826496124267577, 0.022919679641723634, 0.02288435173034668, 0.02292451286315918, 0.02311382484436035, 0.02309190368652344, 0.023121919631958008, 0.02284339141845703, 0.02285977554321289, 0.022804479598999023, 0.022844863891601563, 0.022685407638549804, 0.02272489547729492, 0.02265350341796875, 0.022752864837646485, 0.02270863914489746, 0.02273823928833008, 0.022633152008056642, 0.022824256896972657, 0.022671968460083007, 0.02291516876220703, 0.02300912094116211, 0.023048351287841797, 0.022845312118530272, 0.023006752014160158, 0.022968927383422853, 0.023063840866088866, 0.02303971290588379]",tokens/s,42.78747977521694,,,
4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 "
4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1302.376448,1127.153664,0.0,731.906048,703.86944,s,1,8.321544921875,8.321544921875,0.0,8.321544921875,8.321544921875,8.321544921875,8.321544921875,[8.321544921875],,kWh,3.9562046445826126e-05,4.354408836396406e-06,1.2616121204039876e-05,5.653257648626241e-05,,MB,1333.53472,1406.07488,0.0,998.244352,942.608384,s,10,1.3574564971923828,0.13574564971923828,0.0006022325849746154,0.1357723388671875,0.13615484466552735,0.13659751815795898,0.1369516569519043,"[0.13704019165039064, 0.1360564727783203, 0.13602336120605468, 0.13559088134765626, 0.13500009155273437, 0.1360320281982422, 0.13476524353027344, 0.13590611267089844, 0.13540354919433595, 0.13563856506347657]",tokens/s,1885.8799565914849,kWh,4.1684940085098125e-06,4.5957960022677493e-07,2.7506633898877353e-06,7.3787369986243225e-06,tokens/kWh,34694284.407714784,MB,1351.610368,1414.463488,0.0,1006.63296,942.610944,s,10,12.693910522460937,1.2693910522460938,0.00947761483403399,1.2684224853515624,1.2760179931640625,1.283985498046875,1.290359501953125,"[1.2726170654296876, 1.2716322021484374, 1.2919530029296875, 1.2742159423828125, 1.2652127685546875, 1.263024658203125, 1.2567308349609374, 1.2608265380859376, 1.2634500732421876, 1.2742474365234375]",tokens/s,49.6300961697549,kWh,3.7052500298990594e-05,4.086647627488998e-06,1.592885155371036e-05,5.706799948018996e-05,tokens/kWh,1103946.1795374344,,s,630,12.691761045455934,0.020145652453104658,0.00039547894563328847,0.020049711227416993,0.02049824275970459,0.020685639858245848,0.021514591522216804,"[0.021014944076538086, 0.02051215934753418, 0.02039033508300781, 0.020323808670043946, 0.020648063659667967, 0.020427360534667968, 0.02026851272583008, 0.020166208267211914, 0.020063199996948243, 0.020060159683227538, 0.019875648498535157, 0.01978816032409668, 0.019892032623291016, 0.019791040420532226, 0.01980499267578125, 0.0199169921875, 0.019912511825561523, 0.019830272674560546, 0.02010982322692871, 0.019912704467773438, 0.020035232543945312, 0.019884384155273438, 0.019961311340332032, 0.019886016845703125, 0.020058719635009766, 0.020555776596069338, 0.020473823547363282, 0.020477983474731447, 0.020447231292724608, 0.02038969612121582, 0.020399328231811523, 0.02032089614868164, 0.020771167755126954, 0.02040003204345703, 0.02041152000427246, 0.020441823959350586, 0.02041472053527832, 0.020454431533813478, 0.020433887481689453, 0.02023139190673828, 0.020179744720458984, 0.020033536911010744, 0.020154367446899413, 0.02020102310180664, 0.020150592803955078, 0.020244319915771483, 0.020246816635131837, 0.020213760375976563, 0.020036960601806642, 0.02012550354003906, 0.020101280212402345, 0.019978336334228516, 0.020092927932739257, 0.020095584869384765, 0.020354175567626955, 0.020146720886230467, 0.020072799682617187, 0.020064159393310545, 0.019963552474975586, 0.020052288055419924, 0.02015363121032715, 
0.020281183242797853, 0.020307167053222656, 0.021242591857910158, 0.020348928451538087, 0.020164384841918945, 0.020166879653930665, 0.020033023834228517, 0.02022585678100586, 0.020150976181030275, 0.02026038360595703, 0.02031158447265625, 0.019882944107055663, 0.01992684745788574, 0.019947711944580077, 0.0198922233581543, 0.019946592330932617, 0.019897247314453127, 0.019863168716430665, 0.01991516876220703, 0.019944799423217772, 0.02006825637817383, 0.01995030403137207, 0.020011199951171874, 0.019848352432250978, 0.019966623306274415, 0.019793920516967774, 0.01997327995300293, 0.019870559692382814, 0.020010719299316405, 0.020119840621948243, 0.020022623062133788, 0.02010998344421387, 0.020020544052124025, 0.020069055557250977, 0.020125215530395507, 0.02005971145629883, 0.0206529598236084, 0.02009449577331543, 0.02041644859313965, 0.02023232078552246, 0.019976608276367186, 0.01991641616821289, 0.01999679946899414, 0.019942848205566407, 0.020005056381225586, 0.020006816864013673, 0.02012656021118164, 0.02005561637878418, 0.02014761543273926, 0.020411136627197266, 0.020518688201904296, 0.020289535522460937, 0.02019571113586426, 0.020487808227539064, 0.020319807052612306, 0.020322687149047853, 0.020785600662231445, 0.02049827194213867, 0.020321727752685547, 0.020389631271362306, 0.020605503082275392, 0.020679071426391603, 0.020353023529052734, 0.02083635139465332, 0.02066966438293457, 0.02101068878173828, 0.020642879486083985, 0.020489152908325196, 0.020442848205566407, 0.020418079376220703, 0.020417280197143554, 0.02042812728881836, 0.020456096649169923, 0.02060054397583008, 0.020635520935058594, 0.020539552688598632, 0.020436511993408204, 0.020533279418945314, 0.02081407928466797, 0.02062585639953613, 0.024229280471801756, 0.020761184692382813, 0.020666048049926757, 0.020508703231811524, 0.020731647491455077, 0.020415008544921873, 0.02042470359802246, 0.020653120040893556, 0.020720352172851564, 0.02045155143737793, 0.02015350341796875, 0.020220767974853514, 0.020340736389160157, 0.020358335494995116, 0.020455583572387696, 0.020626176834106447, 0.02057206344604492, 0.02039193534851074, 0.02047385597229004, 0.02043903923034668, 0.020427999496459962, 0.020426816940307617, 0.02035785675048828, 0.020129791259765627, 0.020760223388671874, 0.020304351806640624, 0.020166048049926756, 0.020134368896484376, 0.020170751571655272, 0.02013100814819336, 0.020007743835449218, 0.020063776016235352, 0.01998076820373535, 0.02003763198852539, 0.020067840576171874, 0.020617055892944335, 0.02003830337524414, 0.019979583740234376, 0.02005471992492676, 0.020125696182250977, 0.020140031814575195, 0.020178943634033202, 0.021573055267333986, 0.021371456146240236, 0.020505887985229492, 0.020249120712280272, 0.02083420753479004, 0.02080963134765625, 0.02111497688293457, 0.020498239517211914, 0.020428991317749022, 0.020416383743286134, 0.020392223358154295, 0.020440927505493166, 0.02031718444824219, 0.020271968841552735, 0.020690176010131837, 0.02251254463195801, 0.0206167049407959, 0.020451135635375976, 0.02045529556274414, 0.02024892807006836, 0.02038217544555664, 0.020469087600708008, 0.020343008041381835, 0.020382144927978515, 0.02023423957824707, 0.021823232650756835, 0.02080998420715332, 0.020315616607666016, 0.02026905632019043, 0.020255327224731445, 0.02026412773132324, 0.020177663803100584, 0.020266624450683595, 0.020178815841674805, 0.020220415115356445, 0.020123392105102538, 0.020086496353149415, 0.019902111053466797, 0.020061023712158205, 0.020018655776977538, 0.02008323287963867, 0.019963136672973635, 
0.019950368881225585, 0.020059200286865236, 0.019687871932983398, 0.019892736434936522, 0.019844192504882813, 0.019958208084106446, 0.020074975967407226, 0.019951168060302733, 0.020090911865234377, 0.019987903594970702, 0.019864543914794922, 0.020000127792358397, 0.019845184326171876, 0.019939903259277345, 0.019893503189086913, 0.01988275146484375, 0.019935232162475586, 0.019842239379882814, 0.019954496383666993, 0.01985273551940918, 0.019718719482421876, 0.019900415420532228, 0.020129119873046875, 0.02003420829772949, 0.02006220817565918, 0.02003558349609375, 0.02011942481994629, 0.02127257537841797, 0.02061123275756836, 0.02033833694458008, 0.020100704193115236, 0.02012015914916992, 0.020002815246582033, 0.019899423599243165, 0.01990297508239746, 0.020083168029785155, 0.020129791259765627, 0.020106592178344727, 0.01988060760498047, 0.019961151123046875, 0.019860160827636718, 0.020074527740478517, 0.019941183090209962, 0.01992207908630371, 0.019876928329467775, 0.019892448425292968, 0.01979363250732422, 0.019850624084472655, 0.0197761287689209, 0.02004991912841797, 0.020048927307128907, 0.020403167724609376, 0.020418560028076172, 0.0203656005859375, 0.02012950325012207, 0.02012371253967285, 0.020059551239013672, 0.020032032012939453, 0.0200949764251709, 0.020059871673583984, 0.019910367965698242, 0.019864128112792968, 0.020107263565063475, 0.01989017677307129, 0.019903968811035157, 0.019936960220336915, 0.01990083122253418, 0.019906400680541992, 0.019847776412963865, 0.019861503601074217, 0.019959808349609375, 0.020002687454223633, 0.02030780792236328, 0.020228384017944336, 0.02018636894226074, 0.02017910385131836, 0.020447839736938478, 0.02070528030395508, 0.020379552841186522, 0.020195423126220705, 0.020129472732543945, 0.020166976928710938, 0.02005526351928711, 0.019952064514160157, 0.020201568603515626, 0.019834463119506835, 0.019863487243652344, 0.01993187141418457, 0.019951456069946288, 0.02000726318359375, 0.02103910446166992, 0.02066022491455078, 0.020802879333496095, 0.020128448486328124, 0.020199424743652345, 0.02007244873046875, 0.02051683235168457, 0.02019945526123047, 0.020395360946655273, 0.020388511657714842, 0.020428064346313477, 0.02040115165710449, 0.020199071884155272, 0.01990662384033203, 0.019912704467773438, 0.01977894401550293, 0.019823232650756837, 0.019842048645019532, 0.01975359916687012, 0.01975859260559082, 0.01985215950012207, 0.019763200759887696, 0.019834880828857423, 0.01993907165527344, 0.01984342384338379, 0.020145856857299804, 0.019850847244262695, 0.0199051513671875, 0.019906143188476562, 0.01992336082458496, 0.019955711364746095, 0.019800064086914062, 0.01998847961425781, 0.01995088005065918, 0.019825376510620118, 0.02006220817565918, 0.019908607482910155, 0.01990870475769043, 0.019762880325317384, 0.01986172866821289, 0.019877887725830077, 0.019942848205566407, 0.019839551925659178, 0.019938432693481445, 0.020003711700439453, 0.019984224319458007, 0.01999068832397461, 0.02007859230041504, 0.019954719543457032, 0.019956703186035155, 0.02002118492126465, 0.020000831604003906, 0.020446367263793945, 0.020009824752807617, 0.020150144577026366, 0.02010451126098633, 0.020036287307739258, 0.020115583419799805, 0.01996985626220703, 0.019994367599487306, 0.02018092727661133, 0.020010784149169923, 0.020023040771484375, 0.020304832458496094, 0.02017001533508301, 0.020011104583740235, 0.0199420166015625, 0.020060159683227538, 0.0199968318939209, 0.019956928253173828, 0.019904928207397463, 0.019978496551513673, 0.020092863082885742, 0.02043280029296875, 
0.019975711822509765, 0.01999078369140625, 0.01976563262939453, 0.01986355209350586, 0.019881759643554688, 0.019908767700195312, 0.01981760025024414, 0.01986886405944824, 0.019859136581420897, 0.019873184204101564, 0.019788255691528322, 0.019800256729125977, 0.01976937675476074, 0.019691488265991212, 0.019797344207763672, 0.019794208526611328, 0.019758623123168947, 0.01980624008178711, 0.019929920196533203, 0.019979616165161133, 0.020128416061401366, 0.019957759857177734, 0.020028703689575194, 0.019944160461425782, 0.019998720169067383, 0.019978239059448243, 0.019902463912963866, 0.019882144927978514, 0.019969984054565428, 0.019926015853881835, 0.01982352066040039, 0.01983897590637207, 0.019853311538696287, 0.019941375732421874, 0.019874975204467772, 0.019917631149291994, 0.020088863372802735, 0.019947519302368166, 0.020028928756713867, 0.02004150390625, 0.019964639663696288, 0.019981920242309572, 0.019962272644042968, 0.019797792434692384, 0.0200930233001709, 0.01991641616821289, 0.019908735275268555, 0.019888288497924806, 0.019929311752319337, 0.020080608367919923, 0.020156448364257812, 0.02000486373901367, 0.020390207290649415, 0.02018070411682129, 0.020049503326416016, 0.02004038429260254, 0.019947519302368166, 0.020090879440307616, 0.020098623275756837, 0.01983296012878418, 0.019965696334838866, 0.019865888595581055, 0.019966239929199218, 0.020395519256591797, 0.019920799255371095, 0.020116064071655275, 0.019838144302368164, 0.019913536071777344, 0.01993427276611328, 0.01989913558959961, 0.019926816940307616, 0.01988240051269531, 0.019970048904418947, 0.019797056198120118, 0.019921855926513674, 0.019961856842041017, 0.02007676887512207, 0.019861183166503905, 0.019953760147094726, 0.01993654441833496, 0.01993187141418457, 0.01989017677307129, 0.019968000411987305, 0.019766624450683595, 0.019983007431030275, 0.019936479568481446, 0.01988185691833496, 0.02004470443725586, 0.02004377555847168, 0.020133535385131837, 0.019804288864135742, 0.019953887939453126, 0.02068009567260742, 0.020265567779541017, 0.02010259246826172, 0.020019168853759765, 0.020053951263427735, 0.0199354248046875, 0.020263328552246093, 0.02011974334716797, 0.02008460807800293, 0.020125696182250977, 0.01993471908569336, 0.020007423400878906, 0.020016639709472657, 0.02034239959716797, 0.020102016448974608, 0.019910655975341796, 0.019868896484375, 0.019968799591064453, 0.019937280654907227, 0.020024511337280275, 0.019938112258911133, 0.01980620765686035, 0.020019039154052735, 0.020642911911010742, 0.020136255264282227, 0.020114015579223633, 0.019978239059448243, 0.019929088592529298, 0.02084774398803711, 0.023999359130859373, 0.020039680480957032, 0.020015104293823242, 0.019963903427124022, 0.01984105682373047, 0.01986147117614746, 0.019957056045532228, 0.019749568939208983, 0.01989836883544922, 0.019904512405395508, 0.019826688766479493, 0.019964128494262694, 0.019961055755615235, 0.019825151443481445, 0.019869760513305666, 0.02001919937133789, 0.019916767120361327, 0.0198492488861084, 0.019947519302368166, 0.019861503601074217, 0.019862592697143553, 0.019815359115600586, 0.01985868835449219, 0.019946239471435548, 0.019740575790405272, 0.019882080078125, 0.019843072891235353, 0.0198922233581543, 0.01998847961425781, 0.020019359588623047, 0.01990991973876953, 0.020671039581298827, 0.019895776748657227, 0.019843519210815428, 0.019949535369873046, 0.01987187194824219, 0.01982784080505371, 0.020158527374267578, 0.019861503601074217, 0.020082592010498047, 0.02010767936706543, 0.02006675148010254, 0.01992198371887207, 
0.020072736740112306, 0.01993187141418457, 0.01997209548950195, 0.02001817512512207, 0.020224767684936522, 0.019996927261352538, 0.020077951431274416, 0.019937919616699218, 0.01997545623779297, 0.020107999801635742, 0.019998720169067383, 0.01985536003112793, 0.019965919494628906, 0.020161727905273437, 0.02130534362792969, 0.020923551559448243, 0.02159702491760254, 0.020706687927246094, 0.02038438415527344, 0.020373504638671876, 0.020336416244506834, 0.02030204772949219, 0.02012892723083496, 0.020056095123291016, 0.01995859146118164, 0.020099071502685546, 0.02013987159729004, 0.02011494445800781, 0.020169023513793946, 0.020094655990600587, 0.02009766387939453, 0.020081888198852538, 0.020083520889282228, 0.02009280014038086, 0.020236192703247072, 0.020003040313720702, 0.020606271743774413, 0.02029142379760742, 0.020251487731933592, 0.02297654342651367, 0.020439008712768554, 0.02048409652709961, 0.020469343185424805, 0.020453792572021484, 0.020590591430664062, 0.02013987159729004, 0.020205728530883788, 0.020092927932739257, 0.02014364814758301, 0.020128223419189455, 0.02006220817565918, 0.0200086727142334, 0.019810592651367188, 0.019974143981933593, 0.019896480560302736, 0.019863391876220705, 0.020022560119628906, 0.020064159393310545, 0.019976415634155274, 0.020083295822143556, 0.019994623184204103, 0.020023296356201172, 0.020023296356201172, 0.019983903884887695, 0.020220319747924806, 0.020105279922485352, 0.020139968872070313, 0.02004083251953125, 0.01995871925354004, 0.0198922233581543, 0.01992252731323242, 0.01993974494934082, 0.019848640441894532, 0.01986761665344238, 0.020012767791748046, 0.01981939125061035, 0.019933183670043944]",tokens/s,49.63850152422785,,,
4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1844.895744,2861.498368,0.0,2466.250752,2401.696256,s,1,9.251365234375,9.251365234375,0.0,9.251365234375,9.251365234375,9.251365234375,9.251365234375,[9.251365234375],,kWh,6.886548576247454e-05,7.588989187789802e-06,2.5445020356035242e-05,0.00010189949530629958,,MB,1858.654208,3087.990784,0.0,2680.160256,2582.173696,s,10,5.429983032226562,0.5429983032226563,0.002382280051480081,0.5424951171875,0.5447377258300781,0.546928646850586,0.5486813836669922,"[0.5491195678710937, 0.5414999389648437, 0.53960498046875, 0.5442508544921875, 0.5422152099609375, 0.5430211791992188, 0.5424531860351562, 0.5425370483398437, 0.5415059814453125, 0.5437750854492187]",tokens/s,471.45635351097457,kWh,1.623639444745482e-05,1.7905768134000214e-06,1.0804900619222778e-05,2.883187188007762e-05,tokens/kWh,8879062.763069924,MB,1858.654208,3087.990784,0.0,2680.160256,2582.176256,s,10,13.049833618164062,1.3049833618164064,0.007199874367472601,1.3030529174804688,1.314584814453125,1.3170796997070313,1.3190756079101562,"[1.3195745849609375, 1.3140303955078125, 1.2986925048828124, 1.3074959716796875, 1.2964578857421876, 1.302795654296875, 1.3090628662109376, 1.3018612060546875, 
1.2965523681640625, 1.3033101806640626]",tokens/s,48.27647757310124,kWh,3.8289459017962514e-05,4.223309476993651e-06,2.0908124751180103e-05,6.342089324613628e-05,tokens/kWh,993363.4923036674,,s,630,13.047791374206549,0.020710779959058005,0.0004612778528481184,0.020622047424316406,0.02097887725830078,0.021442092704772948,0.022525767879486087,"[0.02205936050415039, 0.02130191993713379, 0.0210729923248291, 0.021205503463745116, 0.02084262466430664, 0.020707391738891603, 0.020690143585205076, 0.02068355178833008, 0.02060492706298828, 0.02081996726989746, 0.020704383850097655, 0.020716415405273438, 0.02071347236633301, 0.020641311645507813, 0.02115836715698242, 0.02083635139465332, 0.020979711532592774, 0.021147872924804686, 0.02109347152709961, 0.021140159606933592, 0.020791296005249024, 0.02077270317077637, 0.020770303726196288, 0.020738624572753907, 0.0208787841796875, 0.020781728744506837, 0.020749631881713866, 0.020687551498413087, 0.020733951568603515, 0.020782527923583986, 0.020783327102661134, 0.020899551391601563, 0.020688735961914062, 0.02077788734436035, 0.02072153663635254, 0.020682207107543947, 0.021793312072753906, 0.022495231628417968, 0.02146713638305664, 0.02076176071166992, 0.020931423187255858, 0.020862079620361327, 0.020771583557128905, 0.02112227249145508, 0.020870048522949217, 0.020733951568603515, 0.020717567443847656, 0.020809471130371095, 0.02080384063720703, 0.02066201591491699, 0.020832063674926758, 0.02071187210083008, 0.020708383560180663, 0.020844512939453125, 0.020714496612548827, 0.020742143630981445, 0.02068070411682129, 0.02253824043273926, 0.022119775772094726, 0.020828128814697266, 0.0207891845703125, 0.020785823822021484, 0.02090617561340332, 0.02188924789428711, 0.020978784561157225, 0.02098883247375488, 0.020808895111083983, 0.02162361526489258, 0.022987936019897463, 0.02112188720703125, 0.021212671279907228, 0.02082252883911133, 0.020618976593017577, 0.02062099266052246, 0.02070742416381836, 0.020662784576416016, 0.020614656448364257, 0.020677120208740234, 0.02073948860168457, 0.02077142333984375, 0.02085478401184082, 0.020776960372924806, 0.020789247512817383, 0.020839487075805664, 0.020699520111083985, 0.02077110481262207, 0.020859167098999022, 0.020840320587158203, 0.020883583068847657, 0.02127667236328125, 0.02088140869140625, 0.020738048553466795, 0.020716863632202147, 0.020605056762695313, 0.020523584365844727, 0.020535295486450195, 0.02047567939758301, 0.021408000946044923, 0.022445535659790038, 0.020861440658569336, 0.021606399536132814, 0.020637855529785157, 0.020483104705810547, 0.02046441650390625, 0.020743743896484375, 0.02053779220581055, 0.020682271957397462, 0.0206115837097168, 0.020577375411987304, 0.020626335144042968, 0.02070230484008789, 0.02052182388305664, 0.020737375259399414, 0.021017311096191406, 0.020914176940917968, 0.02062131118774414, 0.02064588737487793, 0.0206777286529541, 0.020788127899169923, 0.020744192123413087, 0.0209039363861084, 0.020939008712768555, 0.020622783660888672, 0.020433216094970702, 0.020446271896362306, 0.020526016235351562, 0.02182143974304199, 0.02087222480773926, 0.020816864013671874, 0.020650272369384767, 0.020520671844482422, 0.020496383666992187, 0.020336639404296874, 0.02041004753112793, 0.0204039363861084, 0.020666976928710938, 0.020607040405273436, 0.020488128662109376, 0.020667455673217774, 0.020908992767333986, 0.020723712921142577, 0.020678016662597658, 0.020973695755004882, 0.020863168716430663, 0.020729663848876954, 0.02070992088317871, 0.02063680076599121, 0.020724576950073244, 
0.020692991256713866, 0.020723520278930666, 0.02060857582092285, 0.02079529571533203, 0.020669151306152343, 0.02065203285217285, 0.020588544845581053, 0.020977664947509765, 0.02045747184753418, 0.02049843215942383, 0.02046073532104492, 0.020501184463500976, 0.02098361587524414, 0.020594783782958984, 0.02054956817626953, 0.02057040023803711, 0.020471807479858398, 0.020506528854370116, 0.020545215606689454, 0.020469568252563478, 0.02040687942504883, 0.02047590446472168, 0.02047920036315918, 0.020470560073852537, 0.02044623947143555, 0.0204932804107666, 0.02054083251953125, 0.020707935333251954, 0.020821855545043944, 0.02100239944458008, 0.02059878349304199, 0.020367328643798827, 0.020395168304443358, 0.020439935684204102, 0.02038374328613281, 0.0204716796875, 0.020453119277954103, 0.020382080078125, 0.020348928451538087, 0.020383487701416014, 0.02041200065612793, 0.02153654479980469, 0.020964351654052735, 0.020740095138549804, 0.02082815933227539, 0.02088153648376465, 0.02069286346435547, 0.02072310447692871, 0.020707168579101563, 0.020943616867065428, 0.020776960372924806, 0.020879167556762696, 0.02073619270324707, 0.020754432678222655, 0.02059996795654297, 0.02053558349609375, 0.020539487838745117, 0.020511199951171875, 0.0206429443359375, 0.020590560913085938, 0.020583328247070314, 0.020588544845581053, 0.02059836769104004, 0.02058255958557129, 0.02052534484863281, 0.020594655990600588, 0.020455423355102538, 0.020510719299316405, 0.020588544845581053, 0.020538496017456054, 0.020554527282714844, 0.020566112518310548, 0.020495967864990236, 0.020527551651000977, 0.020520320892333986, 0.021537376403808595, 0.02082611274719238, 0.020749952316284178, 0.020796031951904298, 0.020673343658447266, 0.02073846435546875, 0.020716127395629884, 0.020893632888793947, 0.020791296005249024, 0.020717567443847656, 0.0206561279296875, 0.020679872512817384, 0.020903743743896485, 0.020738367080688477, 0.02059129524230957, 0.02062131118774414, 0.02058563232421875, 0.020605791091918946, 0.020579967498779297, 0.020566400527954103, 0.020576000213623047, 0.020581727981567384, 0.020478752136230467, 0.020533376693725586, 0.020496383666992187, 0.02036083221435547, 0.02043497657775879, 0.02038979148864746, 0.025665151596069337, 0.022013952255249023, 0.021399551391601563, 0.021126815795898438, 0.020969472885131835, 0.020926464080810548, 0.02084899139404297, 0.020651071548461915, 0.02055059242248535, 0.020462848663330077, 0.020687200546264647, 0.02063385581970215, 0.02059596824645996, 0.02060176086425781, 0.020608991622924806, 0.020529184341430664, 0.02042470359802246, 0.020372831344604492, 0.02042947196960449, 0.020368736267089845, 0.020406944274902344, 0.02042464065551758, 0.020361215591430663, 0.02039200019836426, 0.020396032333374024, 0.02042265510559082, 0.020459007263183594, 0.020507135391235352, 0.020563968658447264, 0.020688608169555665, 0.02056787109375, 0.020424768447875975, 0.02051728057861328, 0.020451391220092773, 0.02036479949951172, 0.020477760314941407, 0.02258188819885254, 0.02088256072998047, 0.020593536376953124, 0.020393983840942383, 0.020307296752929686, 0.02029542350769043, 0.02048847961425781, 0.020316799163818358, 0.02049542427062988, 0.02035807991027832, 0.02046156883239746, 0.020295839309692382, 0.020372320175170898, 0.02031718444824219, 0.020588544845581053, 0.02025062370300293, 0.020309343338012695, 0.020373247146606446, 0.02027369689941406, 0.02038617515563965, 0.02041164779663086, 0.020351743698120116, 0.020387712478637694, 0.02047942352294922, 0.02056867218017578, 0.02051081657409668, 
0.020750335693359375, 0.020776832580566406, 0.02162892723083496, 0.021072927474975585, 0.021029855728149412, 0.020744192123413087, 0.020564992904663085, 0.020435136795043947, 0.02033132743835449, 0.02039743995666504, 0.02082809638977051, 0.020497087478637696, 0.020475967407226563, 0.020491424560546874, 0.020607776641845703, 0.020387104034423828, 0.020316064834594725, 0.02202908706665039, 0.020552928924560548, 0.020548416137695313, 0.0206376953125, 0.020625408172607423, 0.02065407943725586, 0.020774911880493165, 0.020785120010375975, 0.020705312728881837, 0.0208035831451416, 0.020750335693359375, 0.02069868850708008, 0.020764671325683593, 0.020810176849365234, 0.020714784622192384, 0.020520671844482422, 0.020513792037963868, 0.020533248901367186, 0.02060655975341797, 0.020658655166625975, 0.020651584625244142, 0.02086515235900879, 0.02065177536010742, 0.02061568069458008, 0.02065203285217285, 0.020645471572875978, 0.02050079917907715, 0.02045961570739746, 0.020616384506225587, 0.020578496932983397, 0.020523231506347658, 0.020607391357421876, 0.020690208435058595, 0.020619680404663086, 0.020506784439086913, 0.0207238712310791, 0.02129644775390625, 0.020973663330078125, 0.020656736373901367, 0.02048975944519043, 0.020559520721435548, 0.020486976623535155, 0.020783103942871094, 0.020602880477905275, 0.02073097610473633, 0.02056048011779785, 0.02050899124145508, 0.020547584533691408, 0.02179088020324707, 0.02084864044189453, 0.020719615936279297, 0.020679935455322266, 0.020574975967407226, 0.022984703063964843, 0.021057024002075195, 0.020564640045166015, 0.020538400650024415, 0.020937536239624025, 0.02062745666503906, 0.020725727081298828, 0.02073193550109863, 0.020703231811523438, 0.020665695190429687, 0.0207663688659668, 0.021684959411621095, 0.02074448013305664, 0.02064793586730957, 0.02082611274719238, 0.023066272735595705, 0.02162723159790039, 0.020570112228393556, 0.020393407821655274, 0.020533023834228517, 0.020554527282714844, 0.020555231094360353, 0.020584800720214843, 0.020512960433959962, 0.020444543838500976, 0.02049078369140625, 0.020389984130859375, 0.02042780876159668, 0.020466272354125976, 0.02045580863952637, 0.020411840438842775, 0.02044780731201172, 0.02049225616455078, 0.020460704803466796, 0.020469919204711914, 0.020566368103027345, 0.020619199752807616, 0.02081964874267578, 0.020767648696899413, 0.020801376342773438, 0.020922367095947265, 0.020720672607421876, 0.020690944671630858, 0.020804576873779297, 0.02067865562438965, 0.02072166442871094, 0.020731903076171874, 0.020731903076171874, 0.020888799667358397, 0.020808479309082032, 0.02074131202697754, 0.02067683219909668, 0.02060102462768555, 0.020653600692749022, 0.020577119827270507, 0.020770847320556642, 0.020709280014038087, 0.020671712875366212, 0.02193484878540039, 0.02124736022949219, 0.021462656021118163, 0.02186332893371582, 0.02097350311279297, 0.020758687973022463, 0.020542816162109376, 0.02044380760192871, 0.02047385597229004, 0.020797216415405273, 0.02066044807434082, 0.02068070411682129, 0.020508159637451173, 0.020434688568115235, 0.020429567337036134, 0.020409887313842773, 0.020506591796875, 0.02037727928161621, 0.020411199569702148, 0.020477951049804686, 0.02050668716430664, 0.02054547119140625, 0.020530752182006836, 0.02215318489074707, 0.02078767967224121, 0.020840448379516603, 0.020781055450439453, 0.020612863540649413, 0.020627712249755858, 0.02044483184814453, 0.020744064331054687, 0.020627935409545897, 0.020520191192626953, 0.020638463973999023, 0.020631168365478517, 0.020659936904907226, 
0.020652608871459963, 0.02086921691894531, 0.020746240615844725, 0.020665599822998048, 0.020728288650512697, 0.020703104019165038, 0.02072822380065918, 0.020854623794555664, 0.020590784072875977, 0.02049363136291504, 0.02045404815673828, 0.020459232330322267, 0.020475488662719726, 0.020550336837768555, 0.02052297592163086, 0.02047996711730957, 0.02039574432373047, 0.02042300796508789, 0.02037555122375488, 0.02046335983276367, 0.02041468811035156, 0.02041859245300293, 0.02046156883239746, 0.020389888763427736, 0.020402175903320312, 0.02045747184753418, 0.02044313621520996, 0.021608192443847655, 0.020867231369018555, 0.020637407302856445, 0.02058323287963867, 0.02046156883239746, 0.020501535415649415, 0.02044822311401367, 0.02044108772277832, 0.02044108772277832, 0.02051420783996582, 0.020429407119750977, 0.021032960891723632, 0.020477216720581056, 0.020419071197509766, 0.020318431854248045, 0.02047385597229004, 0.020522176742553713, 0.02035139274597168, 0.020558048248291015, 0.020474048614501954, 0.02039129638671875, 0.0205216007232666, 0.020442367553710938, 0.02039596748352051, 0.020507455825805664, 0.02042470359802246, 0.020461055755615236, 0.020395647048950194, 0.02050486373901367, 0.020554336547851562, 0.021661600112915038, 0.020693088531494142, 0.020325536727905272, 0.020403039932250976, 0.020445184707641603, 0.020363103866577147, 0.02036751937866211, 0.020391424179077147, 0.020361984252929687, 0.020495679855346678, 0.02052467155456543, 0.02059753608703613, 0.020733983993530273, 0.020651391983032227, 0.020653823852539062, 0.02067955207824707, 0.02068889617919922, 0.020659744262695314, 0.020693471908569336, 0.020684799194335936, 0.020715167999267578, 0.02061961555480957, 0.020526847839355468, 0.02056163215637207, 0.02050102424621582, 0.020586048126220703, 0.02050092887878418, 0.020594623565673827, 0.0207524471282959, 0.020733951568603515, 0.02067046356201172, 0.02067068862915039, 0.020669216156005858, 0.022004735946655272, 0.021416959762573243, 0.021354496002197267, 0.021351776123046874, 0.02108671951293945, 0.020891807556152345, 0.02092051124572754, 0.02079315185546875, 0.020695039749145508, 0.020822015762329102, 0.02569215965270996, 0.02064384078979492, 0.020544864654541015, 0.020630048751831054, 0.020367071151733397, 0.020423072814941406, 0.02042464065551758, 0.02045529556274414, 0.02042902374267578, 0.020412384033203126, 0.020289535522460937, 0.020338464736938476, 0.02032252883911133, 0.020309823989868164, 0.020387840270996094, 0.02034694480895996, 0.02041254425048828, 0.020510719299316405, 0.02040227127075195, 0.020299007415771484, 0.020472480773925782, 0.02038140869140625, 0.020414751052856447, 0.020518655776977538, 0.020541696548461913, 0.020532800674438478, 0.02067296028137207, 0.020694528579711914, 0.021086336135864258, 0.02120537567138672, 0.020854496002197267, 0.020738208770751953, 0.020715295791625978, 0.020664224624633788, 0.02053494453430176, 0.020542240142822264, 0.020502304077148436, 0.020477855682373047, 0.02045574378967285, 0.02065772819519043, 0.020531328201293945, 0.020451648712158203, 0.020353023529052734, 0.020377344131469726, 0.02043110466003418, 0.020483936309814453, 0.020336448669433595, 0.02039638328552246, 0.020406272888183592, 0.020330495834350586, 0.020387840270996094, 0.020352352142333986, 0.0206342716217041]",tokens/s,48.28403382088191,,, 
4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,837.832704,558.825472,0.0,163.577856,152.009216,s,1,7.81829833984375,7.81829833984375,0.0,7.81829833984375,7.81829833984375,7.81829833984375,7.81829833984375,[7.81829833984375],,kWh,2.061342946251443e-05,2.2665893650147675e-06,6.590283049973111e-06,2.947030187750231e-05,,MB,1144.639488,607.059968,0.0,199.22944,184.525824,s,30,0.27252745628356934,0.009084248542785644,0.00012152637885798035,0.009049232006072997,0.009166883373260498,0.009344044923782347,0.009509320259094237,"[0.009488736152648926, 0.00904524803161621, 0.009084927558898925, 0.009018976211547852, 0.009019359588623047, 0.008982655525207519, 0.009010944366455078, 0.008999648094177247, 0.0090098876953125, 0.009012224197387696, 0.00906704044342041, 0.00901411247253418, 0.00906492805480957, 0.00910752010345459, 0.00910636806488037, 0.009166848182678223, 0.00910752010345459, 0.009037407875061035, 0.009167200088500976, 0.00909222412109375, 0.009102399826049806, 0.009023072242736817, 0.008996543884277343, 0.009065407752990723, 0.009096256256103516, 0.009053215980529786, 0.009517727851867675, 0.009028767585754395, 0.009033151626586914, 0.009007136344909667]",tokens/s,28180.646840987767,kWh,3.128453211593004e-07,3.448620549427338e-08,2.0713932255303204e-07,5.544708492066058e-07,tokens/kWh,461701458.9068321,MB,1156.018176,611.254272,0.0,203.423744,184.528384,s,30,9.887180541992185,0.3295726847330729,0.0058644436390758664,0.3279118041992187,0.33756495361328126,0.3415366729736328,0.34511653106689455,"[0.3344143981933594, 0.3261952209472656, 0.32928411865234375, 0.32592599487304685, 0.32275234985351564, 0.3224992980957031, 0.32375885009765626, 0.32371170043945313, 0.3241580505371094, 0.32543087768554685, 0.32709075927734377, 0.32853652954101564, 0.33719573974609374, 0.34088787841796875, 0.34636190795898436, 0.33470394897460937, 0.327072021484375, 0.3322218627929687, 0.3420675048828125, 0.3320158386230469, 0.3324908752441406, 0.32653515625, 0.328955810546875, 0.33036843872070315, 0.3292696838378906, 0.32728707885742186, 0.3310740051269531, 0.3248435974121094, 0.3233717041015625, 0.3266993408203125]",tokens/s,191.1566186106257,kWh,9.326762520640574e-06,1.0285978483634633e-06,3.632299232866651e-06,1.3987659601870692e-05,tokens/kWh,4503970.0559752295,,s,1890,9.87304197835922,0.005223831734581599,0.0001788884848357813,0.005182575941085815,0.005374908828735352,0.005455828928947449,0.005879215574264526,"[0.004988800048828125, 0.005204288005828858, 0.005177023887634277, 0.005194784164428711, 0.005305312156677246, 0.005215328216552734, 0.005198751926422119, 0.005212063789367676, 0.005212480068206787, 0.005111680030822754, 0.005128096103668213, 0.005083136081695557, 0.005173344135284424, 0.005100927829742432, 0.005124639987945556, 0.00509116792678833, 0.005113408088684082, 0.0051513280868530275, 0.005275648117065429, 0.006165664196014404, 
0.0067430720329284665, 0.006778687953948975, 0.006813856124877929, 0.0058837437629699705, 0.0052367358207702636, 0.005208064079284668, 0.005130239963531494, 0.005141600131988526, 0.005143040180206298, 0.005113440036773682, 0.005102047920227051, 0.005159232139587402, 0.005126175880432129, 0.005117951869964599, 0.005142528057098389, 0.005119552135467529, 0.005118400096893311, 0.005100607872009277, 0.005180287837982178, 0.005127391815185547, 0.005116159915924072, 0.005226655960083008, 0.0052044157981872554, 0.005121119976043701, 0.005147552013397217, 0.005166624069213867, 0.005138912200927734, 0.005115903854370117, 0.0051241922378540035, 0.005178976058959961, 0.00513267183303833, 0.005117856025695801, 0.005166207790374756, 0.005570687770843506, 0.005112192153930664, 0.0057675838470458984, 0.005724160194396972, 0.005162335872650146, 0.005238944053649902, 0.00515558385848999, 0.0060659837722778324, 0.005370048046112061, 0.005129983901977539, 0.004917247772216797, 0.00511187219619751, 0.005158239841461181, 0.0051320638656616215, 0.005188127994537353, 0.005182752132415772, 0.0051682558059692385, 0.005157919883728027, 0.005120863914489746, 0.005170656204223633, 0.005108255863189698, 0.005107391834259034, 0.005155136108398437, 0.0051773438453674315, 0.005170752048492431, 0.005228991985321045, 0.005181439876556396, 0.005172639846801758, 0.0052351999282836915, 0.005164959907531738, 0.005370048046112061, 0.005203904151916504, 0.0051809921264648435, 0.005196224212646485, 0.0051933760643005375, 0.00516703987121582, 0.005232831954956055, 0.005175903797149658, 0.005163008213043213, 0.005151872158050537, 0.005268032073974609, 0.005207712173461914, 0.0051329278945922855, 0.0051538882255554195, 0.005269983768463135, 0.0051487040519714354, 0.0051736001968383785, 0.005154592037200928, 0.005166175842285156, 0.005163519859313965, 0.005194431781768799, 0.005158656120300293, 0.005154784202575684, 0.005186751842498779, 0.005138463973999023, 0.005149663925170899, 0.00515008020401001, 0.005171648025512695, 0.005150784015655518, 0.00513427209854126, 0.005156032085418701, 0.005155648231506348, 0.005132287979125977, 0.00513427209854126, 0.00521398401260376, 0.005157087802886963, 0.005155104160308838, 0.005166656017303467, 0.005158815860748291, 0.005156703948974609, 0.0051964159011840825, 0.005203487873077392, 0.005261727809906006, 0.004899871826171875, 0.005165919780731201, 0.005156991958618164, 0.0051835842132568356, 0.0051905598640441895, 0.0051968002319335935, 0.0051968002319335935, 0.005169280052185058, 0.00528601598739624, 0.0059461121559143066, 0.005298175811767578, 0.005432896137237549, 0.0055710082054138185, 0.00602294397354126, 0.005326208114624024, 0.005280608177185058, 0.005258336067199707, 0.005222432136535645, 0.005272575855255127, 0.0052358717918395995, 0.00532147216796875, 0.005255136013031006, 0.005222400188446045, 0.0053350720405578615, 0.005337215900421142, 0.005215104103088379, 0.005194623947143554, 0.005219744205474853, 0.005178368091583252, 0.005152448177337647, 0.005195775985717774, 0.0051363840103149415, 0.005158912181854248, 0.005238783836364746, 0.00516096019744873, 0.0051571521759033205, 0.005213151931762696, 0.005165887832641602, 0.005135839939117432, 0.00512662410736084, 0.005107295989990234, 0.005199999809265137, 0.005215871810913086, 0.005138944149017334, 0.00513040018081665, 0.005117248058319092, 0.005141183853149414, 0.005126143932342529, 0.005140768051147461, 0.005103007793426513, 0.005087423801422119, 0.005398655891418457, 0.005119999885559082, 0.005288000106811523, 
0.005142303943634033, 0.005095583915710449, 0.005111807823181152, 0.005099487781524658, 0.005142560005187988, 0.005128191947937012, 0.005116159915924072, 0.005100319862365723, 0.005151711940765381, 0.00482473611831665, 0.005212736129760742, 0.005142784118652344, 0.00507689619064331, 0.005098944187164307, 0.005112224102020264, 0.005089439868927002, 0.005074783802032471, 0.005115839958190918, 0.005315775871276855, 0.005878655910491943, 0.006088160037994385, 0.00587014389038086, 0.005425119876861572, 0.0051584959030151365, 0.005160799980163574, 0.005155263900756836, 0.005125760078430176, 0.00514844799041748, 0.005155551910400391, 0.005162015914916992, 0.005112095832824707, 0.005112512111663818, 0.005119999885559082, 0.005099743843078613, 0.005123040199279785, 0.005089248180389404, 0.0051307201385498045, 0.0051055998802185054, 0.00514518404006958, 0.005105311870574951, 0.005095615863800049, 0.005105728149414062, 0.005092576026916504, 0.0051207361221313474, 0.0051363840103149415, 0.0051066880226135255, 0.0050800638198852536, 0.005115007877349854, 0.005105792045593261, 0.005190400123596192, 0.00512224006652832, 0.005123904228210449, 0.005120160102844239, 0.005111040115356446, 0.00510422420501709, 0.00510975980758667, 0.005081408023834229, 0.005125823974609375, 0.00510262393951416, 0.00511075210571289, 0.005089280128479004, 0.005115359783172607, 0.0051324481964111325, 0.005190144062042237, 0.005121920108795166, 0.005136447906494141, 0.005136320114135742, 0.00511353588104248, 0.005183519840240479, 0.005131807804107666, 0.005158783912658692, 0.0051495680809020995, 0.004891776084899903, 0.005127871990203857, 0.005142528057098389, 0.005173247814178467, 0.005109119892120362, 0.005155456066131592, 0.0051036162376403805, 0.005148672103881836, 0.005080128192901611, 0.005145535945892334, 0.005094912052154541, 0.005118207931518555, 0.005079264163970947, 0.005130271911621093, 0.005127840042114258, 0.00509552001953125, 0.00514243221282959, 0.005112160205841065, 0.005221407890319824, 0.005116384029388427, 0.005126431941986084, 0.005132512092590332, 0.005103040218353272, 0.005172095775604248, 0.0051049599647521975, 0.005228127956390381, 0.005118559837341308, 0.0051519680023193355, 0.005108704090118408, 0.005072959899902344, 0.0051506562232971195, 0.0051036162376403805, 0.005130239963531494, 0.005093376159667969, 0.005136256217956543, 0.005126527786254883, 0.005101119995117188, 0.005115263938903809, 0.005111743927001953, 0.00513318395614624, 0.005087456226348877, 0.0051847038269042965, 0.005124703884124756, 0.005105440139770508, 0.005089503765106201, 0.0051344318389892575, 0.005105567932128906, 0.0050973758697509764, 0.005107840061187744, 0.005092959880828857, 0.005104000091552734, 0.0050787200927734375, 0.005120319843292236, 0.005097472190856934, 0.005087232112884522, 0.005126016139984131, 0.005097184181213379, 0.005096960067749024, 0.005095712184906006, 0.005106304168701172, 0.005070847988128662, 0.00510262393951416, 0.005136832237243653, 0.004888031959533691, 0.005095967769622803, 0.005125408172607422, 0.005098400115966797, 0.005095071792602539, 0.005081247806549072, 0.005132287979125977, 0.005119999885559082, 0.005150720119476319, 0.005112160205841065, 0.005139488220214844, 0.005104288101196289, 0.0050869441032409665, 0.005101151943206787, 0.005091487884521484, 0.005091040134429931, 0.005085311889648438, 0.005128479957580566, 0.005106048107147217, 0.0050728960037231445, 0.005113664150238037, 0.005085375785827637, 0.005103519916534424, 0.00524399995803833, 0.005132512092590332, 0.005139071941375733, 
0.005095583915710449, 0.005142528057098389, 0.005107711791992187, 0.005150720119476319, 0.005115903854370117, 0.005152768135070801, 0.0051036162376403805, 0.005152768135070801, 0.005137504100799561, 0.005155744075775147, 0.00513263988494873, 0.005092864036560059, 0.005132480144500733, 0.005102719783782959, 0.005139296054840088, 0.005107103824615478, 0.005124671936035156, 0.005110079765319824, 0.005098879814147949, 0.00509772777557373, 0.0050748162269592285, 0.005130559921264649, 0.005086143970489502, 0.005145503997802735, 0.0051456642150878905, 0.005103871822357177, 0.00509600019454956, 0.00511190414428711, 0.005086463928222656, 0.005084000110626221, 0.005126143932342529, 0.005127647876739502, 0.005155360221862793, 0.005097472190856934, 0.005115039825439453, 0.005104159832000733, 0.00509164810180664, 0.004860095977783203, 0.005099616050720215, 0.005144959926605225, 0.005108160018920898, 0.005126016139984131, 0.005107615947723389, 0.0051138558387756345, 0.005095712184906006, 0.005110879898071289, 0.005081888198852539, 0.005095583915710449, 0.005093056201934815, 0.005134335994720459, 0.00509065580368042, 0.0050730562210083005, 0.005105535984039306, 0.005074975967407226, 0.005102208137512207, 0.00511520004272461, 0.005130911827087402, 0.005107327938079834, 0.0051279678344726565, 0.005150400161743164, 0.0051166400909423825, 0.005294271945953369, 0.005134335994720459, 0.005144576072692871, 0.005119999885559082, 0.00521830415725708, 0.005105663776397705, 0.005154880046844482, 0.005128223896026612, 0.005113759994506836, 0.005117951869964599, 0.005093376159667969, 0.005152768135070801, 0.005208000183105469, 0.005125664234161377, 0.005153312206268311, 0.005121312141418457, 0.005142367839813232, 0.005321599960327148, 0.005163008213043213, 0.005160543918609619, 0.005093920230865479, 0.0051504640579223635, 0.005133535861968994, 0.005110688209533691, 0.0051129918098449705, 0.005128608226776123, 0.005132863998413086, 0.005115263938903809, 0.005145055770874023, 0.0051337919235229495, 0.00516377592086792, 0.005147456169128418, 0.005154943943023681, 0.005155712127685547, 0.005275455951690674, 0.00514083194732666, 0.005145792007446289, 0.005157343864440918, 0.005165247917175293, 0.004903520107269287, 0.005132287979125977, 0.005154111862182617, 0.005147264003753662, 0.005092864036560059, 0.0051205759048461915, 0.005124095916748047, 0.005119391918182373, 0.005128448009490967, 0.0051244478225708, 0.00511411190032959, 0.0051212477684020995, 0.005125887870788574, 0.005157663822174073, 0.005156576156616211, 0.0051298561096191405, 0.005132959842681885, 0.005128191947937012, 0.005132287979125977, 0.0051567678451538085, 0.005149951934814453, 0.005127007961273193, 0.005115903854370117, 0.0051363840103149415, 0.005185535907745361, 0.005105663776397705, 0.0051333122253417966, 0.005134880065917969, 0.005117599964141846, 0.005124927997589112, 0.005130239963531494, 0.005139743804931641, 0.00513097620010376, 0.00510969591140747, 0.005138495922088623, 0.005137663841247558, 0.0051138558387756345, 0.005329631805419922, 0.005125152111053467, 0.005100063800811768, 0.005120480060577392, 0.005154399871826172, 0.005130784034729004, 0.005134304046630859, 0.0051567678451538085, 0.0051223039627075195, 0.00516707181930542, 0.005150496006011963, 0.00517091178894043, 0.005151008129119873, 0.005121568202972412, 0.005167359828948974, 0.005158944129943848, 0.005154399871826172, 0.005128640174865723, 0.0051645121574401855, 0.005130815982818604, 0.00509555196762085, 0.005107711791992187, 0.0051138558387756345, 0.005105663776397705, 
0.005079040050506592, 0.0050936322212219234, 0.004838687896728516, 0.005122432231903076, 0.005089568138122559, 0.005109824180603027, 0.005066751956939697, 0.005122047901153564, 0.005102687835693359, 0.005096288204193116, 0.005090367794036865, 0.005139455795288086, 0.005082399845123291, 0.005092127799987793, 0.00511081600189209, 0.00507587194442749, 0.005085343837738037, 0.005078879833221436, 0.005105855941772461, 0.005110591888427734, 0.005116256237030029, 0.005134816169738769, 0.005097311973571777, 0.005116511821746826, 0.005090047836303711, 0.005135359764099121, 0.005154208183288574, 0.00514739179611206, 0.005132480144500733, 0.00512992000579834, 0.005139904022216797, 0.005147168159484863, 0.00513801622390747, 0.005111999988555908, 0.005105887889862061, 0.005129759788513184, 0.005144927978515625, 0.005128511905670166, 0.005173056125640869, 0.005133887767791748, 0.005147071838378906, 0.005142528057098389, 0.005142335891723633, 0.00516428804397583, 0.005151167869567871, 0.005110335826873779, 0.005121984004974365, 0.0051212158203125, 0.005149824142456055, 0.005262400150299072, 0.005190271854400635, 0.005414912223815918, 0.005136320114135742, 0.005173312187194824, 0.005260992050170899, 0.005153151988983154, 0.005355807781219482, 0.005158656120300293, 0.005144000053405762, 0.005155072212219238, 0.005196000099182129, 0.005189663887023925, 0.005182655811309814, 0.005185664176940918, 0.005164000034332275, 0.004919328212738037, 0.00517571210861206, 0.005123807907104492, 0.005140128135681152, 0.005148672103881836, 0.005124127864837647, 0.005152736186981201, 0.005163743972778321, 0.005162911891937256, 0.0051138558387756345, 0.00516096019744873, 0.005142848014831543, 0.005125408172607422, 0.005140895843505859, 0.005167103767395019, 0.0051586880683898925, 0.00511846399307251, 0.0051463360786437986, 0.005124095916748047, 0.005484384059906006, 0.005137695789337158, 0.005150591850280762, 0.005153791904449463, 0.00514412784576416, 0.0051719040870666505, 0.005115647792816162, 0.005123136043548584, 0.005124544143676758, 0.005128704071044922, 0.005132287979125977, 0.005105311870574951, 0.005142111778259278, 0.005130176067352295, 0.005109568119049072, 0.005123072147369385, 0.005127327919006348, 0.0051178879737854, 0.005118879795074463, 0.0051157760620117185, 0.00514031982421875, 0.005120287895202637, 0.0051223039627075195, 0.005154560089111328, 0.00516096019744873, 0.0051764798164367675, 0.005145440101623535, 0.005177023887634277, 0.005160831928253174, 0.005359903812408447, 0.005220511913299561, 0.0051951041221618656, 0.005233344078063965, 0.005246751785278321, 0.0052154879570007326, 0.005179647922515869, 0.0052353920936584475, 0.005175295829772949, 0.005178751945495605, 0.005143424034118652, 0.005176352024078369, 0.005152671813964844, 0.005185632228851318, 0.005236832141876221, 0.004978432178497315, 0.005244416236877441, 0.005194752216339111, 0.005207871913909912, 0.005178912162780762, 0.0052269759178161625, 0.005187871932983398, 0.005150496006011963, 0.005185472011566162, 0.005193215847015381, 0.0051717119216918945, 0.005240447998046875, 0.005175680160522461, 0.005222400188446045, 0.005173247814178467, 0.00522873592376709, 0.005160160064697265, 0.005160831928253174, 0.005185440063476563, 0.005159743785858154, 0.0051402878761291505, 0.005134016036987305, 0.005144639968872071, 0.005124544143676758, 0.00510368013381958, 0.0051506562232971195, 0.005140160083770752, 0.0051346559524536135, 0.00512553596496582, 0.0051512961387634275, 0.005151840209960937, 0.005153120040893555, 0.005167712211608886, 
0.005140607833862305, 0.005128064155578613, 0.005119999885559082, 0.005171199798583984, 0.005167103767395019, 0.00511353588104248, 0.005173567771911621, 0.005152768135070801, 0.005138432025909424, 0.005165056228637695, 0.005224448204040527, 0.005230048179626465, 0.0051617598533630375, 0.005195519924163818, 0.005172895908355713, 0.005202271938323975, 0.005259263992309571, 0.0052633600234985355, 0.005211264133453369, 0.005337952136993408, 0.005318016052246094, 0.005360191822052002, 0.005364128112792969, 0.005260992050170899, 0.005227551937103271, 0.0051844801902771, 0.0051652159690856935, 0.005242720127105713, 0.005152031898498535, 0.0051914558410644535, 0.004919072151184082, 0.005292543888092041, 0.005234399795532227, 0.005251071929931641, 0.005201920032501221, 0.005154816150665284, 0.005175583839416504, 0.005165952205657959, 0.0052145919799804685, 0.005243360042572021, 0.005204256057739258, 0.005226111888885498, 0.005228640079498291, 0.005248511791229248, 0.005259744167327881, 0.0051940159797668455, 0.005228288173675537, 0.005185535907745361, 0.005244224071502685, 0.005196479797363282, 0.005166592121124268, 0.005199423789978027, 0.005196735858917236, 0.005353407859802246, 0.005232704162597656, 0.005250688076019287, 0.005212543964385986, 0.005210112094879151, 0.00517523193359375, 0.005169216156005859, 0.0051807999610900875, 0.005214144229888916, 0.005216159820556641, 0.005184512138366699, 0.00519491195678711, 0.00519868803024292, 0.005148352146148682, 0.005150815963745117, 0.0052080960273742675, 0.005175392150878907, 0.005152703762054444, 0.005197760105133057, 0.005148575782775879, 0.005174367904663086, 0.005223264217376709, 0.0052287039756774905, 0.005261312007904053, 0.005217631816864013, 0.005285791873931885, 0.005235455989837647, 0.005203968048095703, 0.00522649621963501, 0.005238783836364746, 0.005222400188446045, 0.0052425918579101565, 0.0051981120109558104, 0.005203968048095703, 0.005186944007873535, 0.005162816047668457, 0.005221248149871826, 0.005232575893402099, 0.0052787518501281735, 0.00521724796295166, 0.004969056129455566, 0.005335135936737061, 0.005207968235015869, 0.005227935791015625, 0.005193696022033691, 0.00524780797958374, 0.00529798412322998, 0.005328192234039307, 0.005284543991088867, 0.005341184139251709, 0.005252831935882568, 0.00528764820098877, 0.005288512229919434, 0.005305984020233154, 0.005302656173706055, 0.0052984957695007325, 0.005382016181945801, 0.005364927768707275, 0.005353248119354248, 0.005372767925262451, 0.005351424217224121, 0.005380415916442871, 0.005293983936309815, 0.005346591949462891, 0.005306015968322754, 0.005303135871887207, 0.005310143947601318, 0.005376319885253906, 0.005334047794342041, 0.005381120204925537, 0.005367775917053222, 0.005318848133087159, 0.005364799976348877, 0.005361760139465332, 0.0053275198936462405, 0.0053591041564941405, 0.005376512050628662, 0.005380095958709717, 0.00537772798538208, 0.005426623821258545, 0.005471168041229248, 0.005396255970001221, 0.005443136215209961, 0.005412864208221436, 0.005391039848327637, 0.005420959949493408, 0.005389440059661865, 0.005413856029510498, 0.00545577621459961, 0.005390336036682129, 0.005404672145843506, 0.005391551971435547, 0.005425504207611084, 0.005466591835021972, 0.005414912223815918, 0.00539024019241333, 0.005393727779388428, 0.005374752044677735, 0.0053203201293945315, 0.005312511920928955, 0.005322656154632568, 0.005271743774414062, 0.005337376117706299, 0.005039391994476318, 0.005370175838470459, 0.005334688186645508, 0.005325439929962158, 0.005273600101470947, 
0.005267583847045899, 0.005281792163848877, 0.00531660795211792, 0.0053344001770019535, 0.005315199851989746, 0.005337056159973145, 0.005318687915802002, 0.005327136039733887, 0.005347199916839599, 0.005422944068908691, 0.005347328186035156, 0.005361120223999024, 0.0053294401168823245, 0.005302239894866943, 0.005339168071746826, 0.005484543800354004, 0.005539072036743164, 0.00532147216796875, 0.005312511920928955, 0.005303487777709961, 0.005303199768066406, 0.0053820481300354, 0.0053608322143554685, 0.005419839859008789, 0.0054579200744628905, 0.00542310380935669, 0.005486176013946533, 0.005448319911956787, 0.0054754562377929685, 0.005499551773071289, 0.005543039798736572, 0.005516128063201904, 0.005474336147308349, 0.005441855907440185, 0.005460864067077637, 0.005430079936981201, 0.0054488320350646975, 0.005526624202728272, 0.005465888023376465, 0.005550079822540284, 0.005503039836883545, 0.005526527881622314, 0.005581759929656983, 0.005484543800354004, 0.005480447769165039, 0.005464096069335938, 0.005479648113250733, 0.005426176071166992, 0.005428991794586182, 0.0053853759765625, 0.005391200065612793, 0.005373407840728759, 0.005414912223815918, 0.005418879985809326, 0.005452383995056152, 0.005433407783508301, 0.0054206719398498535, 0.005355807781219482, 0.005096640110015869, 0.005421216011047363, 0.00542521619796753, 0.005372767925262451, 0.005386079788208008, 0.005431168079376221, 0.005424191951751709, 0.005400991916656494, 0.00543123197555542, 0.005456511974334717, 0.005371551990509033, 0.0054860482215881344, 0.005505375862121582, 0.005476352214813233, 0.0055541439056396484, 0.005634431838989258, 0.006486207962036133, 0.0063569917678833006, 0.006600287914276123, 0.005706143856048584, 0.005570047855377197, 0.005565248012542725, 0.005732031822204589, 0.005527552127838135, 0.005423200130462646, 0.005476255893707275, 0.005471456050872803, 0.005499680042266846, 0.005464032173156739, 0.005480768203735352, 0.005488096237182617, 0.005418399810791016, 0.005413407802581787, 0.005404352188110351, 0.005390399932861328, 0.0053534722328186036, 0.005384768009185791, 0.005342527866363526, 0.005296192169189453, 0.005327775955200196, 0.005361408233642578, 0.005670080184936524, 0.005365920066833496, 0.005916768074035644, 0.005345823764801025, 0.00532700777053833, 0.005330783843994141, 0.005804351806640625, 0.005291007995605469, 0.005302080154418945, 0.005351871967315674, 0.005326687812805176, 0.005341792106628418, 0.005378079891204834, 0.005373631954193115, 0.005371327877044678, 0.0053556480407714845, 0.005388480186462402, 0.0054646081924438475, 0.005390336036682129, 0.005413055896759033, 0.005486400127410889, 0.005433343887329102, 0.005168416023254394, 0.005481376171112061, 0.005430272102355957, 0.005416895866394043, 0.005276544094085693, 0.0052674560546875, 0.005401599884033203, 0.005324863910675049, 0.005297056198120117, 0.005334752082824707, 0.005312992095947266, 0.005302271842956543, 0.005381120204925537, 0.005356383800506592, 0.005316319942474365, 0.005328800201416016, 0.005257599830627442, 0.0052772159576416015, 0.005337567806243897, 0.0053678078651428224, 0.005308191776275634, 0.005342527866363526, 0.005363999843597412, 0.005319295883178711, 0.005358880043029785, 0.005389023780822754, 0.005363071918487549, 0.0053582401275634765, 0.005343200206756592, 0.005349311828613281, 0.0053589119911193845, 0.005310624122619629, 0.0053108158111572264, 0.005294335842132568, 0.005275648117065429, 0.005248159885406494, 0.005310688018798828, 0.005290880203247071, 0.005242623805999756, 0.005289504051208496, 
0.005281472206115722, 0.005274335861206055, 0.0053125758171081545, 0.005306367874145508, 0.00528553581237793, 0.005323103904724121, 0.00532700777053833, 0.0052713918685913085, 0.005287871837615967, 0.005273663997650146, 0.0052674560546875, 0.005267104148864746, 0.0052247681617736816, 0.00522979211807251, 0.005177631855010987, 0.005220895767211914, 0.0052074241638183595, 0.005315455913543701, 0.0052624959945678715, 0.00524348783493042, 0.005234687805175781, 0.005287936210632324, 0.005277152061462402, 0.005014592170715332, 0.005278528213500977, 0.005172351837158203, 0.005221343994140625, 0.005145055770874023, 0.005261856079101563, 0.0052032961845397945, 0.005210336208343506, 0.005199776172637939, 0.005190048217773437, 0.005169151782989502, 0.005209919929504395, 0.00522873592376709, 0.005197824001312256, 0.005152768135070801, 0.005162112236022949, 0.00514140796661377, 0.005170720100402832, 0.005171648025512695, 0.005168191909790039, 0.005176256179809571, 0.005185215950012207, 0.005140096187591553, 0.005309120178222656, 0.005139616012573242, 0.005177408218383789, 0.005194528102874756, 0.005174943923950195, 0.005168767929077148, 0.00516809606552124, 0.005183231830596924, 0.005146624088287354, 0.005193503856658935, 0.00518393611907959, 0.0052856640815734865, 0.005202047824859619, 0.005177216053009033, 0.00521830415725708, 0.005228096008300781, 0.005173696041107178, 0.0052219839096069335, 0.005148159980773926, 0.0051905598640441895, 0.005189631938934326, 0.005195295810699463, 0.005155519962310791, 0.005154240131378174, 0.005163104057312012, 0.005330495834350586, 0.005157567977905273, 0.005224480152130127, 0.005137792110443115, 0.00518179178237915, 0.005148416042327881, 0.0051914558410644535, 0.005159647941589356, 0.005146304130554199, 0.005212480068206787, 0.0051580162048339845, 0.005155712127685547, 0.0051651840209960935, 0.005174176216125488, 0.005160064220428467, 0.004895391941070556, 0.005168960094451904, 0.005132224082946777, 0.005187744140625, 0.005169439792633057, 0.005131423950195313, 0.005122752189636231, 0.005177087783813476, 0.005183743953704834, 0.00513427209854126, 0.00517033576965332, 0.0053190398216247554, 0.0051943359375, 0.0051875200271606445, 0.005187583923339844, 0.0051998720169067385, 0.005170207977294922, 0.005202911853790283, 0.005142848014831543, 0.005238463878631592, 0.005181632041931152, 0.005178912162780762, 0.005195487976074218, 0.005126272201538086, 0.005633823871612549, 0.005163360118865967, 0.005185855865478516, 0.005265408039093018, 0.005244927883148193, 0.0053095040321350095, 0.005280704021453858, 0.0052509760856628415, 0.005228640079498291, 0.005294079780578613, 0.005307648181915283, 0.0052432317733764645, 0.005257631778717041, 0.0053125758171081545, 0.005238080024719238, 0.005307007789611817, 0.005296000003814697, 0.005320543766021728, 0.005304800033569336, 0.005269248008728028, 0.00527513599395752, 0.005280320167541504, 0.005351424217224121, 0.005355519771575928, 0.005392384052276611, 0.00535920000076294, 0.005359360218048096, 0.0054289278984069825, 0.005382783889770508, 0.005396448135375977, 0.005433279991149903, 0.005396927833557129, 0.005392191886901855, 0.005360896110534668, 0.005366720199584961, 0.005337215900421142, 0.005365344047546386, 0.005398848056793213, 0.005361631870269775, 0.005116799831390381, 0.005351232051849365, 0.005370048046112061, 0.005410143852233887, 0.006420127868652344, 0.005540863990783691, 0.005390336036682129, 0.005463263988494873, 0.005409440040588379, 0.005365888118743897, 0.005390336036682129, 0.00536575984954834, 
0.0052941122055053715, 0.0053731842041015625, 0.005416959762573242, 0.005518208026885987, 0.005485439777374267, 0.005409056186676025, 0.005438144207000732, 0.005431295871734619, 0.005407999992370605, 0.005448448181152344, 0.005451776027679443, 0.005439551830291748, 0.005475999832153321, 0.005433631896972656, 0.005455872058868408, 0.0054139518737792965, 0.005384352207183838, 0.005427487850189209, 0.005374495983123779, 0.005404416084289551, 0.0053311681747436526, 0.0053002238273620605, 0.005268928050994873, 0.005287487983703613, 0.005241856098175048, 0.005207168102264404, 0.005225664138793946, 0.0052397122383117676, 0.005190783977508545, 0.005174943923950195, 0.005224736213684082, 0.005229695796966553, 0.00519868803024292, 0.0053309440612792965, 0.005246784210205078, 0.005221920013427734, 0.005237152099609375, 0.005201695919036865, 0.005232863903045655, 0.005328320026397705, 0.0063136000633239745, 0.008728863716125489, 0.005476255893707275, 0.005293983936309815, 0.005258080005645752, 0.005302624225616455, 0.005238431930541992, 0.005231999874114991, 0.005274240016937256, 0.0052288961410522465, 0.0052486081123352055, 0.004963456153869629, 0.00524563217163086, 0.005227488040924072, 0.005312032222747802, 0.005252863883972168, 0.00522108793258667, 0.005315872192382812, 0.005257184028625488, 0.005221183776855469, 0.005252895832061767, 0.005253536224365234, 0.0052193598747253414, 0.005229279994964599, 0.005236320018768311, 0.005204383850097656, 0.005203711986541748, 0.005259520053863526, 0.005213632106781006, 0.005220928192138672, 0.005251071929931641, 0.0052280001640319824, 0.00521884822845459, 0.005238624095916748, 0.005236320018768311, 0.005241631984710693, 0.005265183925628662, 0.005234272003173828, 0.005267072200775147, 0.00527235221862793, 0.005283040046691894, 0.005268447875976563, 0.005254687786102295, 0.005300511837005615, 0.0053014721870422365, 0.005264448165893555, 0.005303775787353516, 0.00528166389465332, 0.0052780799865722655, 0.005304448127746582, 0.005270431995391846, 0.0052717761993408205, 0.0052846078872680665, 0.005267263889312744, 0.00521235179901123, 0.005289919853210449, 0.005236000061035156, 0.005235487937927246, 0.005316351890563965, 0.005241087913513184, 0.0052674560546875, 0.005246975898742676, 0.005230591773986816, 0.0052791681289672855, 0.0053043198585510255, 0.005344096183776855, 0.005316319942474365, 0.0053613119125366215, 0.005327199935913086, 0.005321760177612305, 0.005304351806640625, 0.0053441920280456546, 0.005324416160583496, 0.005285247802734375, 0.00495420789718628, 0.005272607803344726, 0.005307295799255371, 0.0053108158111572264, 0.0053075838088989255, 0.005519328117370606, 0.005347487926483155, 0.005324128150939941, 0.005309599876403809, 0.00529807996749878, 0.005273536205291748, 0.005324128150939941, 0.005253791809082031, 0.005265408039093018, 0.00526694393157959, 0.005274112224578857, 0.005265408039093018, 0.005334527969360352, 0.005274112224578857, 0.005237823963165284, 0.0052581758499145505, 0.005278719902038574, 0.005248032093048096, 0.005351583957672119, 0.005286943912506103, 0.005263423919677735, 0.005298367977142334, 0.0052936959266662595, 0.005285024166107178, 0.0053010878562927246, 0.005514143943786621, 0.005330016136169433, 0.005792128086090088, 0.0053144960403442385, 0.005286272048950195, 0.005287807941436767, 0.005648736000061035, 0.005291232109069824, 0.005230751991271973, 0.0052302079200744625, 0.005230944156646728, 0.005253280162811279, 0.00526796817779541, 0.005207935810089112, 0.0052633600234985355, 0.005238463878631592, 
0.005226848125457764, 0.005210207939147949, 0.005216256141662597, 0.005165056228637695, 0.005160448074340821, 0.005184000015258789, 0.005171520233154297, 0.005157792091369629, 0.005184351921081543, 0.005128223896026612, 0.0051437759399414066, 0.005130943775177002, 0.005210112094879151, 0.0051456642150878905, 0.005157760143280029, 0.005205183982849121, 0.005243775844573975, 0.004900383949279785, 0.0052368960380554195, 0.00521017599105835, 0.005234687805175781, 0.005236320018768311, 0.00522819185256958, 0.005212672233581543, 0.005193920135498047, 0.005196864128112793, 0.005172416210174561, 0.005160319805145264, 0.005191264152526856, 0.005174047946929931, 0.005205376148223877, 0.005239200115203857, 0.005189280033111573, 0.005195615768432617, 0.005235424041748047, 0.005196991920471191, 0.005198624134063721, 0.005175327777862549, 0.005212224006652832, 0.005216191768646241, 0.005181727886199951, 0.0052211518287658696, 0.0051943678855896, 0.0051346559524536135, 0.005193727970123291, 0.0051404800415039064, 0.00516870403289795, 0.0051792640686035155, 0.0051875200271606445, 0.005207712173461914, 0.00516809606552124, 0.0052135357856750485, 0.00518006420135498, 0.005158847808837891, 0.005167263984680176, 0.00515231990814209, 0.005162335872650146, 0.00517632007598877, 0.005177023887634277, 0.005153088092803955, 0.005162816047668457, 0.005225728034973145, 0.00514899206161499, 0.005155456066131592, 0.005122208118438721, 0.005175136089324951, 0.005136320114135742, 0.005099743843078613, 0.00514031982421875, 0.005107615947723389, 0.005154911994934082, 0.005161983966827393, 0.0052070398330688475, 0.005166399955749512, 0.0051430721282958985, 0.0051874880790710445, 0.005175551891326904, 0.005158559799194336, 0.0051838397979736325, 0.00514243221282959, 0.004873983860015869, 0.005161215782165527, 0.005197824001312256, 0.005212160110473632, 0.00517299222946167, 0.005158175945281982, 0.005153600215911865, 0.005152959823608399, 0.00516707181930542, 0.005154943943023681, 0.005132160186767578, 0.005166111946105957, 0.005128672122955322, 0.005169760227203369, 0.00513424015045166, 0.00513801622390747, 0.0051677761077880855, 0.005136127948760986, 0.005132512092590332, 0.005140031814575195, 0.00517139196395874, 0.005124127864837647, 0.005131423950195313, 0.005135136127471924, 0.005146368026733399, 0.005226816177368164, 0.005177279949188232, 0.005183263778686524, 0.005177631855010987, 0.005208352088928223, 0.005192416191101074, 0.005184768199920654, 0.005235583782196045, 0.0052089600563049315, 0.005182496070861816, 0.005227488040924072, 0.005273952007293701, 0.005592351913452149, 0.005508768081665039, 0.005796639919281006, 0.005468224048614502, 0.0057998719215393065, 0.005203904151916504, 0.005233888149261475, 0.005161087989807129, 0.005220191955566406, 0.00518230390548706, 0.005213568210601807, 0.005191936016082764, 0.005191391944885254, 0.005296768188476563, 0.005203392028808594, 0.005177631855010987, 0.005222815990447998, 0.005155712127685547, 0.005190656185150146, 0.005219840049743653, 0.005226208209991455, 0.005208032131195068, 0.005182144165039063, 0.0052286720275878905, 0.005177663803100586, 0.005203648090362549, 0.004898528099060059, 0.005188127994537353, 0.005572800159454346, 0.0051998720169067385, 0.005212160110473632, 0.005167103767395019, 0.005209695816040039, 0.005212639808654785, 0.005181375980377197, 0.005246975898742676, 0.005206143856048584, 0.0052202239036560055, 0.005277344226837158, 0.005203519821166992, 0.005185887813568115, 0.005201791763305664, 0.00522211217880249, 0.005242976188659668, 
0.005190400123596192, 0.005201920032501221, 0.005232639789581299, 0.005234848022460938, 0.00528380823135376, 0.005252927780151367, 0.005238495826721192, 0.005273952007293701, 0.005238783836364746, 0.005305920124053955, 0.005238495826721192, 0.005227231979370117, 0.005224448204040527, 0.0051998720169067385, 0.005230591773986816, 0.005226816177368164, 0.00521292781829834, 0.005256383895874023, 0.005240575790405273, 0.005242015838623047, 0.005247039794921875, 0.0052106881141662596, 0.005265183925628662, 0.005259712219238281, 0.005244927883148193, 0.005276832103729248, 0.005251167774200439, 0.005287744045257568, 0.005266528129577636, 0.005253024101257324, 0.005274975776672363, 0.005363296031951905, 0.005231711864471435, 0.005263519763946533, 0.005305632114410401, 0.005261792182922363, 0.0053012800216674805, 0.005190432071685791, 0.00517958402633667, 0.005212063789367676, 0.0051981120109558104, 0.005220128059387207, 0.0052080960273742675, 0.005276991844177246, 0.0052353920936584475, 0.0049963841438293456, 0.0052623038291931155, 0.005205728054046631, 0.0052304000854492185, 0.00522876787185669, 0.005253344058990478, 0.005216191768646241, 0.005229663848876953, 0.005216351985931397, 0.005259967803955078, 0.005247039794921875, 0.0053144640922546385, 0.005248703956604004, 0.005250688076019287, 0.005218080043792725, 0.00521833610534668, 0.005253600120544434, 0.005232160091400147, 0.005187647819519043, 0.00519865608215332, 0.005194752216339111, 0.005198847770690918, 0.005220608234405517, 0.005170623779296875, 0.005183807849884033, 0.005163328170776367, 0.005189311981201172, 0.005154176235198975, 0.005185152053833008, 0.0059955201148986816, 0.005222655773162842, 0.005168896198272705, 0.005203711986541748, 0.005187679767608642, 0.005183296203613281, 0.005207551956176758, 0.005180255889892578, 0.005167103767395019, 0.0051645441055297855, 0.0052202239036560055, 0.005201536178588867, 0.005173727989196777, 0.005216991901397705, 0.005259071826934815, 0.005265408039093018, 0.005339104175567627, 0.005233952045440674, 0.005179200172424316, 0.005224575996398926, 0.005165952205657959, 0.005178463935852051, 0.005151584148406983, 0.0051998720169067385, 0.005180960178375244, 0.005155295848846436, 0.00517523193359375, 0.005174943923950195, 0.005177919864654541, 0.005201759815216064, 0.005224703788757324, 0.0053452482223510745, 0.005148575782775879, 0.005205728054046631, 0.005071680068969727, 0.006368544101715088, 0.005179967880249024, 0.005208415985107422, 0.005132287979125977, 0.0051354880332946775, 0.0051454720497131345, 0.005187839984893799, 0.005137856006622314, 0.005128511905670166, 0.0051571521759033205, 0.005157887935638428, 0.005153120040893555, 0.005134719848632813, 0.005198048114776611, 0.005168352127075195, 0.005138336181640625, 0.005173759937286377, 0.0051528639793396, 0.0051179838180541995, 0.0050833277702331545, 0.005119840145111084, 0.005111807823181152, 0.005128384113311768, 0.00514140796661377, 0.005135263919830322, 0.005139808177947998, 0.005124767780303955, 0.005142528057098389, 0.005133664131164551, 0.0051271038055419925, 0.00516870403289795, 0.005127999782562256, 0.005126336097717285, 0.005114367961883545, 0.005180480003356934, 0.00512662410736084, 0.005175519943237305, 0.005246880054473877, 0.005171199798583984, 0.005140672206878662, 0.005127647876739502, 0.005163360118865967, 0.005121119976043701, 0.005134528160095215, 0.0051535038948059084, 0.005142528057098389, 0.005209695816040039, 0.005201344013214111, 0.005254111766815186, 0.00524399995803833, 0.005190656185150146, 0.0052174081802368166, 
0.0052334399223327634, 0.005187583923339844, 0.005240543842315674, 0.005220895767211914, 0.005186751842498779, 0.005196352005004883, 0.00519379186630249, 0.005208000183105469, 0.005185823917388916, 0.005561439990997314, 0.00509503984451294, 0.00523967981338501, 0.005183487892150879, 0.005215871810913086, 0.00515231990814209, 0.005192160129547119, 0.005150176048278808, 0.005218880176544189, 0.005203519821166992, 0.005202688217163086, 0.005197824001312256, 0.0051363840103149415, 0.005156256198883056, 0.005197408199310302, 0.0052009282112121585, 0.005189760208129883, 0.005152512073516846, 0.005187679767608642, 0.005183775901794433, 0.005176383972167969, 0.005170944213867187, 0.00516380786895752, 0.005167295932769775, 0.005162911891937256, 0.005572800159454346, 0.005828447818756103, 0.0052631359100341795, 0.005767615795135498, 0.005229695796966553, 0.005311135768890381, 0.005228352069854736, 0.005204031944274903, 0.005660927772521972, 0.0052202239036560055, 0.005207647800445556, 0.005239200115203857, 0.005197824001312256, 0.005183775901794433, 0.00522160005569458, 0.005226687908172607, 0.00524457597732544, 0.005216479778289795, 0.005257120132446289, 0.005224991798400879, 0.005208064079284668, 0.005242879867553711, 0.005295584201812744, 0.0054494719505310055, 0.005304096221923828, 0.005291007995605469, 0.005244160175323486, 0.005354239940643311, 0.005268832206726074, 0.005288479804992675, 0.0053060798645019535, 0.0052494401931762694, 0.005238783836364746, 0.005197663784027099, 0.005199039936065674, 0.00515990400314331, 0.005173247814178467, 0.005172927856445312, 0.005171520233154297, 0.004961984157562256, 0.005313920021057129, 0.0051923198699951175, 0.005285759925842285, 0.005164639949798584, 0.005171199798583984, 0.005171455860137939, 0.00522211217880249, 0.005232480049133301, 0.005157663822174073, 0.005185472011566162, 0.005195551872253418, 0.00517142391204834, 0.005184607982635498, 0.005155807971954346, 0.0051608958244323735, 0.005123551845550537, 0.005139135837554932, 0.005154272079467773, 0.005098176002502441, 0.005213568210601807, 0.005105343818664551, 0.005139071941375733, 0.005124095916748047, 0.005121280193328857, 0.005118720054626465, 0.005097472190856934, 0.005100671768188477, 0.005126880168914795, 0.005111968040466309, 0.005105663776397705, 0.005135903835296631, 0.005110400199890137, 0.005111392021179199, 0.005195551872253418, 0.00516048002243042, 0.005224671840667725, 0.005181951999664307, 0.00513804817199707, 0.005106272220611573, 0.0052633600234985355, 0.005107615947723389, 0.005112959861755371, 0.005104640007019043, 0.005101759910583496, 0.005170783996582031, 0.005123487949371338, 0.005098112106323242, 0.0051212158203125, 0.0051660480499267575, 0.005097472190856934, 0.005109407901763916, 0.005153120040893555, 0.005156576156616211, 0.0051857600212097165, 0.005127583980560303, 0.005173920154571533, 0.005203968048095703, 0.005122047901153564, 0.005130239963531494, 0.005137504100799561, 0.005112287998199463, 0.005112256050109863, 0.004868480205535888, 0.005117536067962647, 0.005118271827697754, 0.005170944213867187, 0.005097951889038086, 0.005107232093811035, 0.005114175796508789, 0.005120223999023438, 0.005110079765319824, 0.005215936183929444, 0.005101823806762695, 0.005100319862365723, 0.005108704090118408, 0.0051138558387756345, 0.0051036162376403805, 0.005123744010925293, 0.005122047901153564, 0.005107168197631836, 0.005095680236816406, 0.005091231822967529, 0.0050993280410766605, 0.005122047901153564, 0.005104544162750244, 0.005079040050506592, 0.005130239963531494, 
0.005119808197021484, 0.0051099519729614256, 0.005135359764099121, 0.005132768154144287, 0.0051307520866394046, 0.005124127864837647, 0.005162879943847656, 0.005163136005401611, 0.0051468157768249515, 0.005207871913909912, 0.005192927837371826, 0.0051691198348999025, 0.005110239982604981, 0.005185887813568115, 0.0051708478927612305, 0.005163392066955566, 0.005151008129119873, 0.005161920070648193, 0.005119935989379883, 0.00509830379486084, 0.005122047901153564, 0.0051335678100585935, 0.005112768173217774, 0.005197184085845947, 0.005130688190460205, 0.005126016139984131, 0.0051159682273864745, 0.005138783931732178, 0.0051290240287780765, 0.0051608958244323735, 0.0051765117645263675, 0.005138207912445068, 0.005111264228820801, 0.005089824199676514, 0.005111807823181152, 0.00510975980758667, 0.005092991828918457, 0.005128575801849365, 0.004975872039794922, 0.005141248226165771, 0.005150015830993652, 0.0051528959274292, 0.005130623817443848, 0.005139711856842041, 0.005159872055053711, 0.005148672103881836, 0.0051734399795532226, 0.005199135780334473, 0.005153632164001465, 0.005117631912231445, 0.005660672187805176, 0.005142528057098389, 0.005149759769439697, 0.005143008232116699, 0.005105184078216553, 0.005110720157623291, 0.005115231990814209, 0.005135007858276367, 0.005109920024871826, 0.005101408004760742, 0.005166528224945069, 0.005195839881896973, 0.005122560024261475, 0.005137919902801514, 0.005135007858276367, 0.005125984191894532, 0.005212160110473632, 0.005123456001281738, 0.005134975910186768, 0.0051211838722229, 0.005112095832824707, 0.0051504321098327635, 0.005126336097717285, 0.005149343967437744, 0.005139776229858398, 0.005117824077606201, 0.005112639904022217, 0.0051660799980163576, 0.005131264209747314, 0.005243135929107666, 0.005103360176086426, 0.006318079948425293, 0.005168223857879638, 0.005143455982208252, 0.005154816150665284, 0.005201888084411621, 0.005109792232513428, 0.005128191947937012, 0.005119999885559082, 0.0051212477684020995, 0.005102591991424561, 0.005137184143066406, 0.005139455795288086, 0.005093376159667969, 0.005162015914916992, 0.005659264087677002, 0.005111648082733154, 0.005169760227203369, 0.005127168178558349, 0.005194655895233154, 0.00539631986618042]",tokens/s,191.43036200420318,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1846.423552,3107.848192,0.0,2705.32608,2414.155264,s,1,9.9567001953125,9.9567001953125,0.0,9.9567001953125,9.9567001953125,9.9567001953125,9.9567001953125,[9.9567001953125],,kWh,5.659266561666906e-05,6.223961711480642e-06,2.357362997001955e-05,8.639025729816925e-05,,MB,1907.003392,3135.111168,0.0,2717.908992,2120.958976,s,10,9.467796447753907,0.9467796447753904,0.002088601927578589,0.9472850952148437,0.9484224426269532,0.9490611053466796,0.9495720355224609,"[0.941804931640625, 0.9457415161132813, 0.9464447631835937, 0.9451437377929688, 0.9477608032226562, 0.9479855346679688, 
0.9468093872070312, 0.94812548828125, 0.948280517578125, 0.9496997680664062]",tokens/s,270.3902660061225,kWh,2.771767295870714e-05,3.0566193359355513e-06,1.8406378361454394e-05,4.9180670656097084e-05,tokens/kWh,5205297.052374842,MB,1907.003392,3137.20832,0.0,2720.006144,2120.961536,s,10,15.029914916992187,1.5029914916992186,0.005131962077803709,1.5023720092773438,1.509916796875,1.5101361938476563,1.5103117114257814,"[1.50816455078125, 1.5103555908203126, 1.49437890625, 1.497725341796875, 1.5052423095703125, 1.5021422119140626, 1.4980130615234375, 1.502601806640625, 1.5098680419921875, 1.501423095703125]",tokens/s,41.91640494835727,kWh,4.408565543295397e-05,4.863086597574076e-06,2.549604564934552e-05,7.444478767987356e-05,tokens/kWh,846264.7549068407,,s,630,15.027614328384416,0.023853356076800634,0.0004605370270510887,0.023791616439819335,0.024141801643371583,0.024531841087341306,0.025579704284667982,"[0.024910655975341797, 0.02736947250366211, 0.023828479766845705, 0.02371187210083008, 0.02373244857788086, 0.02397577667236328, 0.024862207412719727, 0.023832895278930663, 0.02371379280090332, 0.02389606475830078, 0.023812095642089845, 0.02383647918701172, 0.023808191299438477, 0.023924736022949217, 0.023803136825561524, 0.023779647827148438, 0.023693695068359374, 0.023774496078491213, 0.023743263244628908, 0.024363008499145508, 0.02389366340637207, 0.023791967391967775, 0.023857152938842774, 0.02392019271850586, 0.023822784423828125, 0.024018463134765626, 0.023949695587158204, 0.023992416381835937, 0.023820287704467775, 0.02388582420349121, 0.023838720321655273, 0.023879680633544922, 0.023905887603759765, 0.024416671752929688, 0.024057504653930663, 0.0244204158782959, 0.0238654727935791, 0.023948896408081056, 0.023712320327758787, 0.023871488571166992, 0.024049663543701173, 0.023984128952026368, 0.02394316864013672, 0.023788576126098634, 0.023806367874145508, 0.023757152557373047, 0.023920448303222656, 0.02396780776977539, 0.02393942451477051, 0.023795711517333985, 0.023969791412353517, 0.023707136154174805, 0.023654720306396485, 0.023770784378051756, 0.023690879821777342, 0.023650848388671875, 0.02404560089111328, 0.023703136444091798, 0.02358278465270996, 0.023569087982177734, 0.023383359909057617, 0.02363052749633789, 0.02341289520263672, 0.024756895065307618, 0.02406387138366699, 0.023873056411743164, 0.024027551651000977, 0.0241461124420166, 0.023863296508789062, 0.02379132843017578, 0.02401718330383301, 0.02372812843322754, 0.023737440109252928, 0.023759775161743164, 0.023853023529052733, 0.02366262435913086, 0.023574527740478517, 0.025143295288085937, 0.02377961540222168, 0.023742176055908202, 0.023510368347167968, 0.02354243278503418, 0.023524415969848632, 0.023359519958496094, 0.023331743240356445, 0.02347145652770996, 0.023233184814453123, 0.023392095565795898, 0.02341606330871582, 0.023780160903930665, 0.023644256591796874, 0.02372403144836426, 0.023934175491333008, 0.02372003173828125, 0.023724735260009764, 0.023840639114379884, 0.02410915184020996, 0.02383260726928711, 0.023957504272460937, 0.02373222351074219, 0.02483510398864746, 0.0252589111328125, 0.023893632888793946, 0.024523199081420897, 0.025686016082763673, 0.024211135864257813, 0.023877952575683595, 0.024129823684692384, 0.02378688049316406, 0.02375484848022461, 0.023795967102050782, 0.023748607635498048, 0.02390166473388672, 0.023685663223266602, 0.023787519454956055, 0.02365987205505371, 0.02389811134338379, 0.02673142433166504, 0.023813247680664062, 0.023781375885009767, 0.023841407775878905, 0.02454960060119629, 
0.024657472610473633, 0.023891807556152344, 0.02405824089050293, 0.024059072494506836, 0.02450924873352051, 0.02379475212097168, 0.023828800201416016, 0.023953855514526366, 0.023791807174682617, 0.02384252738952637, 0.023783231735229494, 0.02388559913635254, 0.02439036750793457, 0.024700639724731445, 0.02425881576538086, 0.02392678451538086, 0.023750431060791017, 0.023912319183349608, 0.024043136596679688, 0.023986303329467773, 0.02401103973388672, 0.024104576110839843, 0.0238353271484375, 0.023953407287597657, 0.02378950309753418, 0.02380601692199707, 0.023779327392578126, 0.023644159317016602, 0.02368511962890625, 0.023635967254638672, 0.023758848190307616, 0.023566335678100587, 0.02350454330444336, 0.02349500846862793, 0.023571615219116212, 0.023442272186279298, 0.02352742385864258, 0.023414783477783203, 0.023455743789672853, 0.02356800079345703, 0.023441152572631838, 0.02355011177062988, 0.023586879730224608, 0.0242193603515625, 0.02343343925476074, 0.02343369674682617, 0.023427072525024413, 0.02351046371459961, 0.02344950485229492, 0.023784095764160157, 0.023586816787719726, 0.02369264030456543, 0.023421600341796876, 0.023305984497070314, 0.0232225284576416, 0.023347200393676756, 0.02329804801940918, 0.023496095657348632, 0.02343177604675293, 0.023439359664916993, 0.023594688415527344, 0.023562559127807615, 0.023708831787109374, 0.023618400573730467, 0.02394726371765137, 0.023705184936523436, 0.024015104293823242, 0.02472563171386719, 0.023895776748657227, 0.02378563117980957, 0.023599231719970703, 0.024068000793457032, 0.023660640716552734, 0.023513023376464843, 0.023555551528930664, 0.023403104782104493, 0.023498464584350585, 0.023300384521484373, 0.023375871658325196, 0.023746559143066406, 0.023607295989990236, 0.023627775192260742, 0.023610528945922853, 0.023705663681030272, 0.023647008895874025, 0.023738367080688477, 0.02370969581604004, 0.023758848190307616, 0.02390630340576172, 0.023736032485961914, 0.023937023162841797, 0.023654687881469728, 0.023937023162841797, 0.02386534309387207, 0.023953407287597657, 0.023815263748168947, 0.023927711486816407, 0.024151615142822266, 0.02373206329345703, 0.023788127899169922, 0.023816192626953125, 0.023871488571166992, 0.02387763214111328, 0.023795711517333985, 0.023808000564575195, 0.02378108787536621, 0.02379395294189453, 0.023791616439819335, 0.023940895080566408, 0.023843040466308595, 0.023815296173095704, 0.023683679580688476, 0.023657760620117187, 0.023621984481811523, 0.024002399444580078, 0.023712320327758787, 0.02379961585998535, 0.023697376251220703, 0.0237238712310791, 0.02375129508972168, 0.024020992279052734, 0.023727840423583984, 0.023817663192749024, 0.023726943969726563, 0.023659744262695313, 0.02378188705444336, 0.02369715118408203, 0.023638559341430665, 0.024004608154296874, 0.02369945526123047, 0.024669376373291016, 0.025041727066040038, 0.025298944473266603, 0.023836671829223634, 0.023816192626953125, 0.023807071685791017, 0.023957439422607422, 0.023840736389160157, 0.02392588806152344, 0.02408956718444824, 0.023826911926269533, 0.027824127197265625, 0.023982112884521484, 0.023777536392211914, 0.023768543243408203, 0.023902719497680663, 0.02388547134399414, 0.023847455978393554, 0.023946464538574217, 0.024312128067016603, 0.023920608520507813, 0.023839328765869142, 0.023967647552490236, 0.02406809616088867, 0.023795808792114258, 0.023905887603759765, 0.02385145568847656, 0.02409663963317871, 0.023820287704467775, 0.02368511962890625, 0.023623680114746092, 0.023590911865234376, 0.023570432662963867, 0.023533567428588868, 
0.023562240600585937, 0.023601152420043944, 0.023810047149658203, 0.02367616081237793, 0.023490495681762695, 0.023398656845092774, 0.023382591247558593, 0.023371776580810546, 0.02370355224609375, 0.023556255340576173, 0.02418876838684082, 0.023424352645874023, 0.023427743911743164, 0.023442880630493164, 0.023646303176879883, 0.023816640853881837, 0.023836576461791992, 0.02366476821899414, 0.023750656127929686, 0.023680511474609374, 0.023618047714233398, 0.02371583938598633, 0.02368716812133789, 0.023793664932250977, 0.023941120147705077, 0.023625728607177734, 0.023866655349731446, 0.023904991149902344, 0.023791616439819335, 0.02463324737548828, 0.023591007232666016, 0.023390207290649414, 0.023572479248046875, 0.023645280838012695, 0.023664928436279296, 0.023612031936645506, 0.02350464057922363, 0.023676288604736327, 0.023879808425903322, 0.023622400283813478, 0.023670656204223633, 0.02368115234375, 0.023791135787963866, 0.023732959747314455, 0.023853952407836915, 0.02385804748535156, 0.023822559356689452, 0.023893247604370116, 0.023775232315063476, 0.023908191680908204, 0.02381177520751953, 0.023856128692626953, 0.024830207824707032, 0.02383395195007324, 0.023955936431884765, 0.023881631851196287, 0.02385103988647461, 0.02388991928100586, 0.024004608154296874, 0.02405990409851074, 0.024393728256225586, 0.023885568618774413, 0.02403318405151367, 0.023982431411743162, 0.023893791198730467, 0.023836864471435546, 0.023836704254150392, 0.023838623046875, 0.023969152450561523, 0.02397257614135742, 0.023870719909667968, 0.023736480712890626, 0.02376563262939453, 0.023771039962768553, 0.02380396842956543, 0.023840768814086914, 0.02381804847717285, 0.024610464096069335, 0.024586463928222658, 0.024219392776489258, 0.024230432510375977, 0.023584768295288085, 0.02349372863769531, 0.023499711990356446, 0.023907808303833007, 0.023497184753417968, 0.023584800720214842, 0.02361667251586914, 0.02360361671447754, 0.023527904510498045, 0.023597055435180665, 0.023387807846069336, 0.02475436782836914, 0.024287519454956056, 0.02412351989746094, 0.023961151123046875, 0.024305120468139648, 0.024134624481201173, 0.023847999572753905, 0.023791616439819335, 0.023894975662231446, 0.023764991760253908, 0.0237076473236084, 0.023713024139404296, 0.0236810245513916, 0.023655168533325194, 0.02369913673400879, 0.023638336181640626, 0.023568384170532225, 0.023522464752197266, 0.02372060775756836, 0.02356447982788086, 0.023541952133178713, 0.023492000579833985, 0.02347792053222656, 0.023403263092041014, 0.023654399871826173, 0.023784927368164063, 0.023455904006958007, 0.02348687934875488, 0.023321855545043946, 0.02338803291320801, 0.02340336036682129, 0.02353971290588379, 0.023619424819946288, 0.02355830383300781, 0.023488512039184572, 0.023583871841430664, 0.02350886344909668, 0.02365951919555664, 0.023764991760253908, 0.02371788787841797, 0.02381119918823242, 0.02374540710449219, 0.023781375885009767, 0.02384486389160156, 0.023654399871826173, 0.023755840301513672, 0.023665376663208008, 0.023828895568847656, 0.023770336151123048, 0.02371235275268555, 0.023810047149658203, 0.02429542350769043, 0.02431590461730957, 0.02424959945678711, 0.02415078353881836, 0.024122783660888672, 0.02376150321960449, 0.023825536727905272, 0.02402761650085449, 0.023809791564941406, 0.02392336082458496, 0.02384671974182129, 0.023859359741210937, 0.02453891181945801, 0.023765119552612304, 0.023768831253051757, 0.023772768020629883, 0.02362393569946289, 0.023674688339233398, 0.023689632415771485, 0.023782976150512697, 0.02734783935546875, 
0.02394086456298828, 0.02370569610595703, 0.023821439743041992, 0.023628032684326172, 0.02383251190185547, 0.024067968368530274, 0.02379011154174805, 0.02378112030029297, 0.02374505615234375, 0.023905344009399414, 0.02384588813781738, 0.02390790367126465, 0.023819839477539063, 0.023763776779174805, 0.023942560195922852, 0.02390470314025879, 0.024004768371582032, 0.023801471710205076, 0.023970399856567383, 0.023825824737548826, 0.023813535690307617, 0.023857791900634764, 0.023920991897583007, 0.023758848190307616, 0.023905344009399414, 0.02380486488342285, 0.02413481521606445, 0.023939935684204102, 0.02404159927368164, 0.023859071731567382, 0.02389129638671875, 0.02388649559020996, 0.023812095642089845, 0.023744512557983398, 0.023670783996582033, 0.02370560073852539, 0.02389321517944336, 0.02367977523803711, 0.023625728607177734, 0.023633504867553713, 0.023433631896972656, 0.02344483184814453, 0.023485088348388673, 0.02337958335876465, 0.023608831405639647, 0.023378816604614258, 0.02382988739013672, 0.02357721519470215, 0.02367692756652832, 0.02365235137939453, 0.02361334419250488, 0.023785184860229493, 0.02377868843078613, 0.024170623779296876, 0.02461129570007324, 0.02368307113647461, 0.023502752304077147, 0.02351251220703125, 0.023481279373168944, 0.023662303924560545, 0.023434431076049804, 0.023355871200561523, 0.02329430389404297, 0.02391859245300293, 0.024657920837402345, 0.02408243179321289, 0.023614719390869142, 0.024052448272705078, 0.02356227111816406, 0.023547296524047853, 0.02424687957763672, 0.024147968292236328, 0.023719615936279297, 0.023642431259155272, 0.02367487907409668, 0.028078079223632812, 0.02390435218811035, 0.023738336563110352, 0.023700767517089844, 0.024097440719604492, 0.023848831176757814, 0.02386137580871582, 0.02389606475830078, 0.02395136070251465, 0.02409984016418457, 0.024022016525268555, 0.023993791580200194, 0.023875520706176757, 0.024007200241088867, 0.02383014488220215, 0.02422422409057617, 0.02386262321472168, 0.023870111465454102, 0.02616339111328125, 0.02380985641479492, 0.023774784088134767, 0.02437984085083008, 0.02381964874267578, 0.023668512344360352, 0.023628416061401366, 0.023637983322143556, 0.0238922233581543, 0.023767040252685546, 0.02389401626586914, 0.024110208511352538, 0.024101728439331054, 0.02402921676635742, 0.023899839401245116, 0.023753023147583006, 0.023914176940917967, 0.023902143478393555, 0.023882112503051757, 0.024145919799804686, 0.023932928085327147, 0.023871456146240234, 0.023711776733398436, 0.023684640884399415, 0.024690784454345704, 0.023945152282714845, 0.024034751892089843, 0.02410905647277832, 0.023919456481933592, 0.02407423973083496, 0.02405171203613281, 0.023979232788085936, 0.02404969596862793, 0.023988351821899415, 0.023872127532958986, 0.024071392059326173, 0.02406684875488281, 0.02414134407043457, 0.023945695877075197, 0.02369945526123047, 0.023736320495605468, 0.0238919677734375, 0.023757024765014647, 0.023752479553222655, 0.023734272003173826, 0.023729663848876953, 0.023558656692504884, 0.02352102470397949, 0.023619712829589842, 0.0235296630859375, 0.023457727432250976, 0.023500799179077148, 0.023488512039184572, 0.02348588752746582, 0.02346246337890625, 0.02531942367553711, 0.023640064239501952, 0.024184831619262694, 0.02355331230163574, 0.023629695892333984, 0.023635936737060548, 0.023440256118774414, 0.02351865577697754, 0.023527360916137694, 0.023908992767333985, 0.02352659225463867, 0.02327619171142578, 0.023267488479614257, 0.02326937675476074, 0.02343731117248535, 0.023357215881347655, 
0.023544031143188475, 0.023469760894775392, 0.02360259246826172, 0.02346188735961914, 0.023607263565063475, 0.02361030387878418, 0.02370256042480469, 0.024136415481567385, 0.023938560485839845, 0.02394393539428711, 0.02489139175415039, 0.02450022315979004, 0.02420844841003418, 0.023991231918334962, 0.023981216430664063, 0.025227487564086912]",tokens/s,41.92282196183634,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4758.638592,7476.215808,0.0,7073.693696,6385.996288,s,1,12.47103125,12.47103125,0.0,12.47103125,12.47103125,12.47103125,12.47103125,[12.47103125],,kWh,0.0001461604601333723,1.61149996467028e-05,6.451088494202617e-05,0.00022678634472210127,,MB,1767.6288,7495.090176,0.0,7077.888,5801.885696,s,10,36.178533203125006,3.6178533203125,0.007323446482560682,3.61758447265625,3.626488305664062,3.6271366088867185,3.6276552514648435,"[3.60854638671875, 3.609850341796875, 3.611226806640625, 3.610940673828125, 3.613195556640625, 3.6238212890625, 3.621973388671875, 3.624849609375, 3.62634423828125, 3.627784912109375]",tokens/s,70.76019322361235,kWh,0.00010550110913124929,1.1636829695801765e-05,7.00999449687989e-05,0.00018723788379584996,tokens/kWh,1367244.677253045,MB,1768.943616,7495.090176,0.0,7077.888,5803.067904,s,10,25.486088623046875,2.5486088623046874,0.2860066902354416,2.7653590087890625,2.7913159912109373,2.8007942260742187,2.808376813964844,"[2.22319287109375, 2.203414794921875, 2.17664404296875, 2.192458984375, 2.776743408203125, 2.789209716796875, 2.779825927734375, 2.780351806640625, 2.8102724609375, 2.753974609375]",tokens/s,24.719367860562794,kWh,6.46985766291709e-05,7.1367294968556965e-06,4.302603442080111e-05,0.00011486134054682773,tokens/kWh,548487.4170897873,,s,630,25.483525489807125,0.04045004046001131,0.004564647528137248,0.04376895904541016,0.044457348632812504,0.044721921730041504,0.046142571907043475,"[0.03597939300537109, 0.03495977783203125, 0.03685798263549805, 0.034914398193359376, 0.034867008209228514, 0.034754558563232424, 0.03483596801757813, 0.03483084869384766, 0.0347852783203125, 0.035006656646728515, 0.03479532623291016, 0.03469308853149414, 0.03498601531982422, 0.034923744201660153, 0.03502972793579102, 0.03533420944213867, 0.03494044876098633, 0.034802112579345706, 0.035046974182128904, 0.036362720489501954, 0.035211265563964846, 0.034936832427978515, 0.03490982437133789, 0.035180927276611325, 0.03493603134155274, 0.03500931167602539, 0.03502640151977539, 0.03481407928466797, 0.03483075332641602, 0.03499212646484375, 0.0359725112915039, 0.035993568420410155, 0.036147712707519535, 0.03622467041015625, 0.03539606475830078, 0.03482553482055664, 0.03650336074829102, 0.03513638305664062, 0.03505881500244141, 0.03515071868896484, 0.03497382354736328, 0.03498348617553711, 0.0351399040222168, 0.034961406707763674, 0.03544400024414063, 0.0375096321105957, 0.035333889007568356, 0.03550246429443359, 0.035297088623046875, 0.03533830261230469, 0.03772643280029297, 0.03522969436645508, 0.03547340774536133, 0.035477054595947265, 0.03532796859741211, 0.035109344482421874, 0.03496755218505859, 0.03482419204711914, 0.034953216552734374, 0.03497100830078125, 0.03492262268066406, 0.034959392547607424, 0.03558448028564453, 0.03596691131591797, 0.035095550537109374, 0.03517030334472656, 0.03512080001831055, 0.034971424102783207, 0.03488550567626953, 0.034903968811035156, 0.03476684951782227, 0.034808609008789064, 0.03488972854614258, 0.03508838272094727, 0.035, 0.03481372833251953, 0.03474691009521484, 0.03508428955078125, 0.034852542877197266, 0.035057857513427736, 0.03486291122436523, 0.034740543365478514, 0.03499827194213867, 0.0354051513671875, 0.034996543884277344, 0.035043678283691405, 0.03491350555419922, 0.03481625747680664, 0.03503772735595703, 0.03478700637817383, 0.03472611236572266, 0.035061855316162106, 0.03485257720947266, 0.03481955337524414, 0.03489580917358399, 0.034917247772216796, 0.034869152069091795, 
0.03480380630493164, 0.03478937530517578, 0.03482828903198242, 0.03471939086914062, 0.034635936737060544, 0.034767040252685545, 0.034858753204345706, 0.03502924728393555, 0.03472943878173828, 0.03475510406494141, 0.03482799911499023, 0.035024833679199216, 0.03482969665527344, 0.03487641525268555, 0.03500431823730469, 0.03611385726928711, 0.035988094329833985, 0.034902015686035154, 0.03482815933227539, 0.034891841888427734, 0.03494095993041992, 0.034977825164794925, 0.03485696029663086, 0.034995582580566405, 0.03524262237548828, 0.03495913696289062, 0.03489382553100586, 0.03505107116699219, 0.035082176208496095, 0.036040702819824216, 0.03493471908569336, 0.034872478485107425, 0.034948001861572264, 0.03587686538696289, 0.034727489471435544, 0.03506835174560547, 0.03460505676269531, 0.034531326293945314, 0.03459481430053711, 0.03489382553100586, 0.0345703353881836, 0.03452249526977539, 0.03447145462036133, 0.03450982284545898, 0.034576385498046876, 0.03447808074951172, 0.03441263961791992, 0.03457833480834961, 0.034592769622802735, 0.03456752014160156, 0.03435996627807617, 0.03452108764648437, 0.03469311904907227, 0.034439071655273434, 0.03450790405273438, 0.034443614959716796, 0.0344455680847168, 0.03439449691772461, 0.03419036865234375, 0.03466864013671875, 0.03457676696777344, 0.03462556838989258, 0.03460086441040039, 0.03449913787841797, 0.0347770881652832, 0.03446169662475586, 0.034400161743164064, 0.03434822463989258, 0.03453839874267578, 0.034610912322998046, 0.034586910247802735, 0.0344002571105957, 0.034283519744873044, 0.03444326400756836, 0.034353153228759765, 0.03422115325927735, 0.03423430252075195, 0.034455936431884764, 0.03462819290161133, 0.03440176010131836, 0.03433321762084961, 0.034396095275878905, 0.034340225219726565, 0.03423507308959961, 0.03423756790161133, 0.03428851318359375, 0.03428870391845703, 0.03430646514892578, 0.03432281494140625, 0.03446761703491211, 0.03435356903076172, 0.0343326416015625, 0.03583776092529297, 0.03507583999633789, 0.03479801559448242, 0.036023681640625, 0.03565631866455078, 0.03464396667480469, 0.03458662414550781, 0.03461523056030273, 0.03473433685302734, 0.03467654418945312, 0.034500831604003905, 0.0346662712097168, 0.03478755187988281, 0.03500352096557617, 0.03484044647216797, 0.03465500640869141, 0.03457632064819336, 0.03456819152832031, 0.03492460632324219, 0.03487539291381836, 0.03463996887207031, 0.03479935836791992, 0.03494723129272461, 0.03462348937988281, 0.03493273544311523, 0.034756702423095705, 0.03474422454833984, 0.03480070495605469, 0.034474945068359374, 0.03456425476074219, 0.03469500732421875, 0.03498553466796875, 0.03480416107177734, 0.03501875305175781, 0.03518668746948242, 0.034813953399658204, 0.03480752182006836, 0.0349002571105957, 0.03464601516723633, 0.03473408126831055, 0.03466819381713867, 0.036872543334960935, 0.0348416633605957, 0.03467139053344727, 0.034588832855224606, 0.03461715316772461, 0.03444249725341797, 0.034665630340576174, 0.03475843048095703, 0.03462752151489258, 0.034430816650390626, 0.03459708786010742, 0.034516735076904295, 0.034697471618652345, 0.03467385482788086, 0.03474310302734375, 0.03454268646240234, 0.034402271270751954, 0.03453216171264648, 0.034449535369873045, 0.03459875106811523, 0.03468508911132812, 0.03466582489013672, 0.04520774459838867, 0.04417500686645508, 0.04414886474609375, 0.044147071838378904, 0.04365926361083984, 0.043804672241210936, 0.04368988800048828, 0.0437474250793457, 0.04398080062866211, 0.04382515335083008, 0.04396646499633789, 0.04463616180419922, 
0.04376496124267578, 0.04389558410644531, 0.04400537490844727, 0.04397702407836914, 0.04390208053588867, 0.0440387191772461, 0.04415283203125, 0.04399731063842773, 0.043953792572021484, 0.04412339019775391, 0.04405964660644531, 0.04392451095581055, 0.044227550506591794, 0.04433327865600586, 0.04417718505859375, 0.043710750579833986, 0.04360188674926758, 0.044136512756347654, 0.04368147277832031, 0.04367721557617187, 0.04376774215698242, 0.043840030670166015, 0.04404787063598633, 0.04418307113647461, 0.04394707107543945, 0.04411958312988281, 0.043943359375, 0.043977664947509765, 0.04410121536254883, 0.04423311996459961, 0.04446796798706055, 0.0441080322265625, 0.04423884963989258, 0.0442347526550293, 0.0439788818359375, 0.04401750564575195, 0.04387750244140625, 0.0447558708190918, 0.04427951812744141, 0.044319007873535154, 0.04414822387695312, 0.04431244659423828, 0.04415961456298828, 0.044077056884765625, 0.04448665618896484, 0.04420016098022461, 0.044240673065185546, 0.043974655151367184, 0.044265727996826175, 0.04388009643554688, 0.04394198226928711, 0.045262847900390625, 0.044230209350585935, 0.044114303588867185, 0.04407712173461914, 0.043786239624023435, 0.044031455993652345, 0.04443753433227539, 0.04406937789916992, 0.04395951843261719, 0.044861984252929685, 0.04551295852661133, 0.04404838562011719, 0.04398448181152344, 0.04436624145507812, 0.04383065414428711, 0.04380704116821289, 0.04377017593383789, 0.043921409606933595, 0.04360396957397461, 0.043533470153808596, 0.044020030975341795, 0.04386172866821289, 0.04370121765136719, 0.04491766357421875, 0.043779006958007814, 0.04390092849731445, 0.04419379043579102, 0.044044288635253906, 0.044060447692871096, 0.04415027236938476, 0.04400815963745117, 0.04404975891113281, 0.044423839569091794, 0.044117534637451175, 0.046279136657714846, 0.044719615936279294, 0.04414835357666016, 0.0441864013671875, 0.04414992141723633, 0.04424595260620117, 0.04387596893310547, 0.043968894958496096, 0.04408892822265625, 0.044067424774169923, 0.04670579147338867, 0.044493343353271486, 0.04399635314941406, 0.04424393463134765, 0.04415283203125, 0.044265087127685544, 0.04719449615478516, 0.04415311813354492, 0.04390063858032227, 0.04414668655395508, 0.04407910537719727, 0.04434969711303711, 0.04418124771118164, 0.04428310394287109, 0.0442125129699707, 0.04438880157470703, 0.04432822418212891, 0.04395305633544922, 0.04378611373901367, 0.04529510498046875, 0.04466118240356445, 0.04393369674682617, 0.044283905029296876, 0.043812862396240236, 0.044119552612304686, 0.043950592041015625, 0.04372684860229492, 0.04348928070068359, 0.043493377685546876, 0.0436121597290039, 0.04476518249511719, 0.04402780914306641, 0.04396656036376953, 0.04391731262207031, 0.04404633712768555, 0.04423440170288086, 0.04423715209960938, 0.04408652877807617, 0.0441635513305664, 0.04386844635009766, 0.04379593658447266, 0.04380060958862304, 0.04418815994262695, 0.043919361114501954, 0.04390707015991211, 0.043853214263916016, 0.043972896575927733, 0.04430265426635742, 0.04418560028076172, 0.04408636856079102, 0.04391823959350586, 0.04391731262207031, 0.04380393600463867, 0.046526206970214846, 0.04463715362548828, 0.044112895965576174, 0.044206592559814455, 0.044001792907714846, 0.044240894317626955, 0.04428758239746094, 0.04423721694946289, 0.04431052780151367, 0.04393574523925781, 0.04412960052490234, 0.044263103485107425, 0.04416409683227539, 0.04405558395385742, 0.04532118225097656, 0.044104736328125, 0.04408623886108398, 0.0441212158203125, 0.04391001510620117, 0.04400067138671875, 
0.04384320068359375, 0.04394492721557617, 0.044470272064208984, 0.044092864990234376, 0.04382572937011719, 0.04371865463256836, 0.04370022583007813, 0.04388454437255859, 0.044063838958740234, 0.04526489639282227, 0.045029151916503904, 0.04448233413696289, 0.04390956878662109, 0.04403200149536133, 0.0442081298828125, 0.04389056015014649, 0.0443823356628418, 0.04436787033081055, 0.04385766220092773, 0.04417494583129883, 0.0440327033996582, 0.04456224060058594, 0.04382531356811523, 0.043960319519042966, 0.0436607666015625, 0.04355126571655273, 0.04372592163085937, 0.04393612670898438, 0.04410367965698242, 0.04416921615600586, 0.04406531143188477, 0.04404121780395508, 0.04430131149291992, 0.04404358291625977, 0.04399353790283203, 0.04388227081298828, 0.044028385162353516, 0.04394803237915039, 0.043888126373291016, 0.04397628784179688, 0.044030879974365236, 0.0439659538269043, 0.04418783950805664, 0.043948352813720705, 0.04424297714233399, 0.04407049560546875, 0.04403580856323242, 0.04393231964111328, 0.04451123046875, 0.04390899276733398, 0.043996990203857424, 0.043985214233398434, 0.04422579193115234, 0.044132831573486325, 0.04401795196533203, 0.043843006134033205, 0.04413907241821289, 0.04401971054077149, 0.04396588897705078, 0.0439315185546875, 0.04380332946777344, 0.043883872985839845, 0.04384214401245117, 0.044005439758300784, 0.044224544525146486, 0.04424230575561523, 0.04394630432128906, 0.04403782272338867, 0.0445118408203125, 0.04660815811157227, 0.0443598403930664, 0.0442305908203125, 0.04566838455200195, 0.0449917106628418, 0.04465945434570313, 0.044797760009765625, 0.044375648498535154, 0.04423740768432617, 0.044267711639404295, 0.044730175018310545, 0.0442081298828125, 0.04409139251708984, 0.04404220962524414, 0.044240638732910155, 0.044684608459472655, 0.04431923294067383, 0.04404195022583008, 0.044474910736083985, 0.04468915176391602, 0.044110305786132814, 0.04415078353881836, 0.044068862915039066, 0.04410534286499023, 0.04407743835449219, 0.044281856536865234, 0.04429209518432617, 0.04448624038696289, 0.044466590881347655, 0.04436787033081055, 0.04527824020385742, 0.044622814178466794, 0.04436905670166016, 0.044429790496826174, 0.044456321716308596, 0.044158977508544923, 0.04456243133544922, 0.044546176910400394, 0.044703006744384766, 0.04444220733642578, 0.044783615112304685, 0.04439654541015625, 0.04444979095458984, 0.044453601837158206, 0.04470608139038086, 0.04440835189819336, 0.04465507125854492, 0.04464604949951172, 0.044990814208984375, 0.04466841506958008, 0.04472380828857422, 0.04412646484375, 0.04492748641967773, 0.044634273529052734, 0.04753132629394531, 0.04521980667114258, 0.04469424057006836, 0.04440063858032227, 0.04404019165039062, 0.044165119171142575, 0.04430847930908203, 0.04505190277099609, 0.04466614532470703, 0.04452422332763672, 0.04782854461669922, 0.04450761413574219, 0.04510265731811523, 0.044071361541748046, 0.04423811340332031, 0.044158977508544923, 0.04414527893066406, 0.044318817138671876, 0.04550179290771485, 0.04440092849731445, 0.04395660781860351, 0.04395008087158203, 0.04375897598266602, 0.04391113662719726, 0.04423132705688477, 0.04393292617797852, 0.043905216217041014, 0.04340700912475586, 0.0433765754699707, 0.043721664428710935, 0.04332521438598633, 0.04402406311035156, 0.043919361114501954, 0.04344803237915039, 0.043501087188720707, 0.043608512878417965, 0.043511390686035156, 0.04363516616821289, 0.04359939193725586, 0.04376380920410156, 0.043926143646240236, 0.043843582153320314, 0.04327542495727539, 0.043135231018066406, 
0.043706207275390624, 0.043514625549316406, 0.043601665496826175, 0.04329497528076172, 0.04335001754760742, 0.043488353729248044, 0.043272735595703125, 0.043474369049072266, 0.043772865295410156, 0.043595775604248044, 0.04352732849121094, 0.04358038330078125, 0.0436486701965332, 0.04330281448364258, 0.043165985107421874, 0.04320668792724609, 0.04343190383911133, 0.04342931365966797, 0.043466751098632815, 0.04580822372436524, 0.04442275238037109, 0.043403678894042966, 0.04336633682250977, 0.04307923126220703, 0.04329523086547851, 0.04351926422119141, 0.043227840423583984, 0.043147232055664064, 0.04322844696044922, 0.043205406188964846, 0.04357324981689453]",tokens/s,24.721854134820816,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = 
cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 3 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 805, in _apply param_applied = fn(param) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.55 GiB is free. Process 224123 has 13.19 GiB memory in use. Of the allocated memory 13.09 GiB is allocated by PyTorch, and 880.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,7394.459648,9457.041408,0.0,9061.793792,8463.626752,s,1,14.789779296875,14.789779296875,0.0,14.789779296875,14.789779296875,14.789779296875,14.789779296875,[14.789779296875],,kWh,0.00021770074485833292,2.400667013197282e-05,9.690757752599255e-05,0.0003386149925162983,,MB,1728.344064,9469.62432,0.0,9061.793792,7981.246464,s,10,57.42651806640625,5.742651806640625,0.003014339069672028,5.7430356445312505,5.7456560546875,5.74627919921875,5.74677771484375,"[5.736865234375, 5.7391357421875, 5.740529296875, 5.741982421875, 5.7416044921875, 5.7440888671875, 5.745517578125, 5.74690234375, 5.745162109375, 5.74472998046875]",tokens/s,44.578708342367115,kWh,0.00016747126064083507,1.8472623745901746e-05,0.00011132408905920182,0.00029726797344593864,tokens/kWh,861175.8509752695,MB,1733.210112,9469.62432,0.0,9061.793792,8267.75296,s,10,27.192193359375,2.7192193359375,0.0020808683062563346,2.718815185546875,2.7216886962890627,2.7225609008789062,2.7232586645507815,"[2.71697265625, 2.7162060546875, 2.71926171875, 2.718357421875, 2.71836865234375, 2.719740478515625, 2.721494873046875, 2.720678466796875, 2.72343310546875, 2.717679931640625]",tokens/s,23.168414245730425,kWh,7.98462075979137e-05,8.806621371887446e-06,5.331282042799756e-05,0.0001419656493977987,tokens/kWh,443769.3221369991,,s,630,27.18830438232421,0.04315603870210193,0.0004030313345170255,0.04318473625183106,0.0436389347076416,0.043749713706970214,0.04395503551483154,"[0.04369161605834961, 0.0429202880859375, 0.04282505416870117, 0.042560001373291016, 0.04252892684936523, 0.0425082893371582, 0.042571327209472654, 0.042624446868896486, 0.042531841278076174, 0.04251206588745117, 0.04270521545410156, 0.04283571243286133, 0.042911998748779295, 0.042708992004394535, 0.04255718231201172, 0.042590465545654294, 0.04277043151855469, 0.04260262298583985, 0.04261193466186523, 0.043023006439208984, 0.043069438934326174, 0.04301004791259765, 0.04294246292114258, 0.04290764617919922, 0.04274995040893555, 0.04309404754638672, 0.043157089233398435, 0.0430780143737793, 0.042982528686523434, 0.04280575942993164, 0.042822017669677734, 0.0429035530090332, 0.04323638534545898, 0.04375651168823242, 0.04356300735473633, 0.04335823822021485, 0.04316342544555664, 0.04296438217163086, 0.04308252716064453, 0.04298342514038086, 0.043130847930908205, 0.04309987258911133, 0.04354694366455078, 0.04351724624633789, 0.04376851272583008, 0.043385921478271486, 0.04356806564331055, 0.043587390899658206, 0.043561153411865235, 0.04382905578613281, 0.04372304153442383, 0.04327414321899414, 0.04323328018188476, 0.04336649703979492, 0.04346665573120117, 0.04350678253173828, 
0.04373331069946289, 0.04368239974975586, 0.04339712142944336, 0.04337583923339844, 0.04360675048828125, 0.04361619186401367, 0.04347865676879883, 0.043917343139648436, 0.042971168518066406, 0.04242230224609375, 0.04248982238769531, 0.0422459831237793, 0.04228316879272461, 0.04256361770629883, 0.0425975341796875, 0.04255007934570312, 0.04246323013305664, 0.042912864685058595, 0.042779743194580076, 0.042687873840332034, 0.04252454376220703, 0.042496063232421874, 0.04240166473388672, 0.04293286514282227, 0.0431512336730957, 0.043516033172607424, 0.04324147033691406, 0.042797054290771484, 0.04277644729614258, 0.04286886215209961, 0.04292403030395508, 0.04289737701416016, 0.04279040145874023, 0.04316009521484375, 0.043156959533691405, 0.04288547134399414, 0.043036865234375, 0.042987518310546875, 0.04306687927246094, 0.043080257415771483, 0.04318611145019531, 0.043322368621826174, 0.0432193603515625, 0.04300588989257813, 0.042799774169921874, 0.0429854736328125, 0.04352159881591797, 0.043544864654541014, 0.04344351959228516, 0.04332406234741211, 0.04335161590576172, 0.04355955123901367, 0.043769889831542966, 0.04353839874267578, 0.04321887969970703, 0.04319830322265625, 0.043469215393066404, 0.04349529647827149, 0.04345996856689453, 0.04325235366821289, 0.04325539016723633, 0.04318032073974609, 0.04323452758789063, 0.043614688873291015, 0.04365760040283203, 0.04363468933105469, 0.043611583709716795, 0.04358816146850586, 0.043753471374511715, 0.043919361114501954, 0.044273662567138675, 0.04301824188232422, 0.0425615348815918, 0.04251836776733398, 0.042737823486328125, 0.04262057495117187, 0.04251475143432617, 0.0426844482421875, 0.04270284652709961, 0.04259955215454102, 0.04263155364990234, 0.042743297576904295, 0.04269120025634766, 0.04265407943725586, 0.043087871551513675, 0.042831871032714845, 0.04300732803344726, 0.0428243522644043, 0.04283801651000976, 0.04261478424072265, 0.04260659027099609, 0.04311040115356445, 0.04317593765258789, 0.04298543930053711, 0.04286377716064453, 0.04280819320678711, 0.042904735565185544, 0.04286515045166016, 0.04320495986938477, 0.04335001754760742, 0.043218944549560545, 0.043194366455078126, 0.043022174835205075, 0.042976863861083986, 0.04295699310302734, 0.043286048889160156, 0.043281246185302734, 0.04334499359130859, 0.04343603134155274, 0.04328291320800781, 0.04306784057617188, 0.04326604843139648, 0.04362854385375976, 0.0436357421875, 0.04356195068359375, 0.043632640838623046, 0.04387343978881836, 0.04359987258911133, 0.043426624298095705, 0.043232864379882815, 0.04355321502685547, 0.04350774383544922, 0.04345033645629883, 0.043718593597412106, 0.043726913452148436, 0.04355180740356445, 0.04342870330810547, 0.04323337554931641, 0.043629665374755856, 0.04351273727416992, 0.04347289657592773, 0.04361769485473633, 0.04359228897094727, 0.04399923324584961, 0.04312473678588867, 0.042788864135742184, 0.04256972885131836, 0.04243820953369141, 0.042445247650146484, 0.042657791137695314, 0.04242764663696289, 0.042211231231689454, 0.042654464721679684, 0.042686561584472656, 0.042780670166015625, 0.04268547058105469, 0.04256252670288086, 0.04270489501953125, 0.04274995040893555, 0.04269670486450195, 0.04251574325561523, 0.043086559295654296, 0.04319612884521484, 0.04305292892456054, 0.04307513427734375, 0.043096160888671874, 0.043425537109375, 0.04299059295654297, 0.04272127914428711, 0.04282572937011719, 0.04318207931518555, 0.04315878295898438, 0.04308044815063477, 0.04278409576416015, 0.04266870498657226, 0.04273152160644531, 0.04335327911376953, 
0.043389217376708984, 0.04327494430541992, 0.04321059036254883, 0.04334710311889649, 0.043431968688964845, 0.04322537612915039, 0.04332598495483399, 0.04346060943603516, 0.04328409576416015, 0.04324390411376953, 0.04350054550170898, 0.04344934463500977, 0.043649024963378906, 0.043668704986572264, 0.04346540832519531, 0.04351359939575195, 0.04341372680664062, 0.04336838531494141, 0.043278430938720705, 0.04330876922607422, 0.043513278961181644, 0.0437011833190918, 0.043622398376464845, 0.04362035369873047, 0.04372873687744141, 0.04369171142578125, 0.04356143951416016, 0.04359372711181641, 0.04395212936401367, 0.04394675064086914, 0.04282550430297852, 0.042463294982910155, 0.04242432022094727, 0.042649600982666014, 0.04260659027099609, 0.042610431671142576, 0.042481918334960934, 0.042491584777832034, 0.04248128128051758, 0.042533214569091794, 0.04248806381225586, 0.04263945770263672, 0.04289344024658203, 0.04285427093505859, 0.04270694351196289, 0.042727294921875, 0.04289308929443359, 0.0430263671875, 0.043024799346923825, 0.04297318267822266, 0.04312473678588867, 0.04354662322998047, 0.043450366973876955, 0.043184127807617184, 0.04283596801757812, 0.04284774398803711, 0.04299008178710938, 0.04301824188232422, 0.042821792602539065, 0.04299321746826172, 0.04296102523803711, 0.04294416046142578, 0.04318054580688477, 0.043093441009521484, 0.043264190673828126, 0.043458911895751955, 0.043248863220214845, 0.043400001525878903, 0.043334815979003904, 0.043162464141845706, 0.043476608276367186, 0.043620864868164064, 0.04356083297729492, 0.04349731063842773, 0.04396883010864258, 0.043847518920898436, 0.043592864990234376, 0.04351881790161133, 0.04348700714111328, 0.0433355827331543, 0.04317011260986328, 0.043276287078857424, 0.04325785446166992, 0.04372889709472656, 0.04375868988037109, 0.04344022369384765, 0.04343017578125, 0.04335833740234375, 0.04328694534301758, 0.043617313385009765, 0.04361904144287109, 0.0435810546875, 0.04391788864135742, 0.043101505279541014, 0.042659934997558595, 0.04260310363769531, 0.04245625686645508, 0.04251846313476562, 0.042840576171875, 0.042879360198974606, 0.04279292678833008, 0.04272745513916015, 0.04274176025390625, 0.04294451141357422, 0.043005344390869144, 0.042687007904052734, 0.04254521560668945, 0.04264720153808594, 0.04267164611816406, 0.042834335327148435, 0.04280771255493164, 0.042668033599853515, 0.042733345031738285, 0.043071712493896484, 0.04312473678588867, 0.043053054809570314, 0.04302428817749023, 0.04297942352294922, 0.04295270538330078, 0.04294406509399414, 0.04286243057250977, 0.043172382354736326, 0.043280448913574215, 0.04312473678588867, 0.043071487426757815, 0.04316774368286133, 0.04330627059936523, 0.04339990234375, 0.04319968032836914, 0.043195201873779294, 0.043138240814208986, 0.043168350219726564, 0.0431577262878418, 0.04328230285644531, 0.04349942398071289, 0.04327651214599609, 0.0434666862487793, 0.04369974517822266, 0.043638336181640626, 0.043432319641113284, 0.04348579025268555, 0.04352534484863281, 0.04343068695068359, 0.04329676818847656, 0.043716480255126956, 0.04367577743530274, 0.04356915283203125, 0.04364432144165039, 0.043862625122070314, 0.043843582153320314, 0.043720703125, 0.043433982849121096, 0.043390975952148435, 0.0433704948425293, 0.04383961486816406, 0.044400257110595705, 0.04305286407470703, 0.04248223876953125, 0.042251392364501955, 0.04251123046875, 0.04254719924926758, 0.042641407012939454, 0.042372608184814455, 0.042336769104003906, 0.042964607238769534, 0.042903934478759766, 0.04314726257324219, 
0.04318207931518555, 0.04309126281738281, 0.04274422454833984, 0.04263504028320313, 0.04284425735473633, 0.04321116638183594, 0.043020286560058595, 0.0431756477355957, 0.04299750518798828, 0.04285446548461914, 0.043006431579589846, 0.043079681396484375, 0.043185344696044924, 0.04321516799926758, 0.04300032043457031, 0.0429567985534668, 0.04302643203735351, 0.042901599884033206, 0.0429403190612793, 0.0433520622253418, 0.04333363342285156, 0.043243518829345705, 0.043235328674316405, 0.043632640838623046, 0.04347084808349609, 0.04317184066772461, 0.04329657745361328, 0.04327443313598633, 0.04322918319702149, 0.04319027328491211, 0.043409534454345707, 0.043443103790283204, 0.04358652877807617, 0.043597824096679685, 0.043492767333984376, 0.04355910491943359, 0.04363510513305664, 0.04339507293701172, 0.04325580978393555, 0.043386207580566404, 0.04356163024902344, 0.043493377685546876, 0.0434436149597168, 0.04352259063720703, 0.04329068756103516, 0.04347644805908203, 0.04386051177978516, 0.043730430603027344, 0.043614688873291015, 0.04366543960571289, 0.04351795196533203, 0.04399484634399414, 0.04309251022338867, 0.04267567825317383, 0.042358592987060545, 0.042380126953125, 0.04255958557128906, 0.042567584991455076, 0.04247347259521484, 0.042446849822998046, 0.04244275283813476, 0.042774528503417966, 0.0433438720703125, 0.04320870590209961, 0.042802783966064455, 0.0426824951171875, 0.0426662712097168, 0.04263971328735352, 0.04294403076171875, 0.04310847854614258, 0.04301532745361328, 0.042952606201171875, 0.04291696166992188, 0.04291056060791015, 0.04301475143432617, 0.043183937072753906, 0.04319692611694336, 0.0432143669128418, 0.04296761703491211, 0.04282540893554688, 0.04287500762939453, 0.043083969116210936, 0.0429854736328125, 0.043194366455078126, 0.04352614212036133, 0.0433889274597168, 0.04336640167236328, 0.04317340850830078, 0.042969566345214844, 0.043253761291503906, 0.04327423858642578, 0.043255966186523435, 0.043890529632568356, 0.04373503875732422, 0.043585601806640624, 0.043317024230957034, 0.043348129272460935, 0.04341459274291992, 0.04354553604125976, 0.04360396957397461, 0.043577407836914064, 0.043370079040527344, 0.04307759857177734, 0.04326438522338867, 0.04366505432128906, 0.043558750152587894, 0.043833408355712894, 0.04373139190673828, 0.04361747360229492, 0.043579681396484375, 0.04350006484985352, 0.043788288116455076, 0.04398284912109375, 0.043649024963378906, 0.043940513610839844, 0.04284931182861328, 0.0425931510925293, 0.04257187271118164, 0.04259404754638672, 0.04281769561767578, 0.04277993774414063, 0.042633377075195315, 0.04259104156494141, 0.04251193618774414, 0.04263068771362305, 0.04245375823974609, 0.04274176025390625, 0.04276633453369141, 0.043044864654541014, 0.04304076766967774, 0.04310220718383789, 0.04293632125854492, 0.0428851203918457, 0.042883071899414066, 0.04328755187988281, 0.043337791442871094, 0.04331206512451172, 0.04318003082275391, 0.04311782455444336, 0.04272742462158203, 0.042868927001953126, 0.043399742126464844, 0.04320665740966797, 0.04311145782470703, 0.04314006423950195, 0.04292755126953125, 0.042856704711914065, 0.04292025756835938, 0.043423744201660154, 0.0433070068359375, 0.04337631988525391, 0.04336633682250977, 0.043524318695068356, 0.04325987243652344, 0.04343622589111328, 0.04341145706176758, 0.04335411071777344, 0.043390975952148435, 0.0436545295715332, 0.04338137435913086, 0.04340879821777344, 0.043555423736572264, 0.04367792129516602, 0.04374489593505859, 0.04369359970092773, 0.043631103515625, 0.043544158935546876, 
0.04374512100219727, 0.04375417709350586, 0.04354374313354492, 0.04363052749633789, 0.04362444686889649, 0.04361305618286133, 0.04395622253417969, 0.04384143829345703, 0.04375971221923828, 0.04361759948730469, 0.043859649658203125, 0.0427770881652832, 0.04242208099365234, 0.042065921783447265, 0.042633216857910154, 0.042796607971191405, 0.04267446517944336, 0.042582176208496095, 0.04299769592285156, 0.042452640533447265, 0.042326431274414066, 0.04268236923217773, 0.04276236724853515, 0.04262041473388672, 0.04250467300415039, 0.04256249618530274, 0.042807392120361325, 0.04272422409057617, 0.04289251327514648, 0.04283488082885742, 0.04274883270263672, 0.04311734390258789, 0.04341980743408203, 0.04336844635009766, 0.04322073745727539, 0.0429304313659668, 0.04306739044189453, 0.04295017623901367, 0.0430536003112793, 0.04320832061767578, 0.043321792602539065, 0.04329619216918945, 0.043137470245361326, 0.0429752311706543, 0.04299980926513672, 0.043353248596191406, 0.04333039855957031, 0.0433287353515625, 0.04326819229125976, 0.04333843231201172, 0.043587646484375, 0.043267135620117185, 0.043431968688964845, 0.04344102478027344, 0.04359369659423828, 0.04346883010864258, 0.04353225708007812, 0.043425792694091796, 0.04338079833984375, 0.04334995269775391, 0.04354252624511719, 0.04344627380371094, 0.0436346549987793, 0.04347292709350586, 0.04337209701538086, 0.04330873489379883, 0.043526241302490234, 0.043664096832275394, 0.0434804801940918, 0.04342217636108398, 0.0433092155456543, 0.0436275520324707, 0.043660160064697265]",tokens/s,23.1717282233157,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in 
__enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. 
Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v1-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1028.931584,965.67296,0.0,570.425344,525.840896,s,1,8.756587890625,8.756587890625,0.0,8.756587890625,8.756587890625,8.756587890625,8.756587890625,[8.756587890625],,kWh,3.2744425933303015e-05,3.60483427928465e-06,1.2091120784013532e-05,4.8440380996601195e-05,,MB,1234.219008,1005.518848,0.0,597.68832,584.940544,s,10,1.2835491333007811,0.12835491333007812,0.0008062953252037128,0.1281603240966797,0.12885750885009767,0.1297009635925293,0.13037572738647463,"[0.13054441833496094, 0.12826966857910158, 0.12867007446289064, 0.1277258529663086, 0.12850579833984374, 0.12770556640625, 0.12805097961425782, 0.12850090026855468, 0.12774406433105467, 0.12783180999755858]",tokens/s,1994.4698131007199,kWh,3.962032842454707e-06,4.369392790356769e-07,2.6291199711619972e-06,7.028092092652381e-06,tokens/kWh,36425248.36401032,MB,1245.32736,1020.198912,0.0,612.368384,597.290496,s,10,10.94657263183594,1.0946572631835938,0.017085611733700476,1.0881235961914062,1.1167506591796874,1.118696008300781,1.1202522875976562,"[1.0778565673828124, 1.084336181640625, 1.0810809326171875, 1.069919189453125, 1.0909920654296874, 1.120641357421875, 1.107659912109375, 1.085255126953125, 1.112512939453125, 1.116318359375]",tokens/s,57.55226052835659,kWh,3.2114559489632315e-05,3.541848400897781e-06,1.332450390283957e-05,4.8980911793369663e-05,tokens/kWh,1286215.3376354263,,s,630,10.940668748855604,0.01736614087119935,0.0004322146221876801,0.017292176246643066,0.017899991035461426,0.018038656044006347,0.018869532661437993,"[0.018772064208984376, 0.017807743072509766, 0.017474048614501952, 0.017560800552368163, 0.01753718376159668, 0.01734662437438965, 0.01703318405151367, 0.01691913604736328, 0.016983264923095702, 0.016830432891845704, 0.01706505584716797, 0.01690595245361328, 0.0170732479095459, 0.016969728469848632, 0.01701718330383301, 0.016942720413208007, 0.01697273635864258, 0.017477184295654296, 0.016902591705322264, 0.016916479110717773, 0.016887519836425783, 0.016920671463012696, 0.016910528182983397, 0.016906240463256835, 0.017026399612426756, 0.016926816940307617, 0.017056447982788086, 0.01707379150390625, 0.01711948776245117, 0.017551616668701173, 0.01711692810058594, 0.017020896911621095, 0.01710492706298828, 0.016979455947875977, 0.01698975944519043, 0.017187776565551757, 0.017101055145263673, 0.016858112335205077, 0.016898815155029296, 0.017084384918212892, 0.016897727966308593, 0.017330528259277344, 0.017354751586914064, 0.016965408325195313, 
0.016926464080810548, 0.017324512481689452, 0.017031295776367188, 0.01694643211364746, 0.016988800048828124, 0.017163360595703125, 0.01715702438354492, 0.017075872421264647, 0.017508703231811522, 0.017302719116210938, 0.016974655151367188, 0.017153568267822265, 0.016898368835449217, 0.016957504272460937, 0.016775264739990234, 0.016887359619140625, 0.016833984375, 0.016809024810791016, 0.016869312286376954, 0.017879072189331054, 0.01745792007446289, 0.01732211112976074, 0.01715622329711914, 0.017129344940185545, 0.017690624237060547, 0.017164287567138673, 0.01714566421508789, 0.01712761688232422, 0.017008352279663085, 0.017328384399414063, 0.01723776054382324, 0.017232160568237304, 0.017143808364868163, 0.017442592620849608, 0.017550655364990234, 0.01750912094116211, 0.017464736938476562, 0.017349376678466796, 0.01721507263183594, 0.01713203239440918, 0.01712019157409668, 0.017122079849243164, 0.017231136322021483, 0.017343391418457033, 0.017139711380004884, 0.017286527633666993, 0.01752239990234375, 0.017259424209594726, 0.017510400772094727, 0.017201152801513672, 0.0172258243560791, 0.018036895751953125, 0.019379711151123045, 0.017354848861694337, 0.017074335098266603, 0.01702911949157715, 0.016959487915039064, 0.01699430465698242, 0.01702911949157715, 0.017099775314331055, 0.01692527961730957, 0.0169946231842041, 0.016945247650146485, 0.017070207595825195, 0.01708457565307617, 0.017002208709716797, 0.01676288032531738, 0.016788639068603516, 0.016802047729492186, 0.016798240661621094, 0.01682975959777832, 0.01676691246032715, 0.01677395248413086, 0.017487871170043946, 0.017001663208007813, 0.016910688400268555, 0.017002975463867188, 0.016985183715820314, 0.01712748718261719, 0.016986976623535155, 0.01702092742919922, 0.017102848052978514, 0.01782595252990723, 0.017573728561401367, 0.017944576263427735, 0.017744064331054688, 0.017585248947143556, 0.01753766441345215, 0.017342016220092772, 0.017195199966430662, 0.017181024551391602, 0.017506303787231444, 0.017722944259643554, 0.0174268798828125, 0.01730352020263672, 0.017301536560058593, 0.01757209587097168, 0.017348352432250976, 0.017290592193603516, 0.017869024276733397, 0.01711532783508301, 0.017000703811645507, 0.016893951416015626, 0.01678335952758789, 0.0168407039642334, 0.016704864501953125, 0.016765600204467775, 0.01682636833190918, 0.016855039596557618, 0.01680384063720703, 0.016887231826782225, 0.016863807678222657, 0.0168853759765625, 0.01698649597167969, 0.016959455490112303, 0.01697372817993164, 0.017092735290527343, 0.017160192489624023, 0.016990272521972657, 0.016919872283935548, 0.01695414352416992, 0.017079967498779297, 0.016949440002441408, 0.0169881591796875, 0.017074176788330078, 0.017092607498168946, 0.01692982482910156, 0.016859935760498046, 0.01684217643737793, 0.01679020881652832, 0.016826271057128906, 0.016758975982666017, 0.016852960586547852, 0.01723910331726074, 0.016927679061889647, 0.016893951416015626, 0.016863231658935548, 0.016969728469848632, 0.016865280151367186, 0.01696767997741699, 0.01768387222290039, 0.01769059181213379, 0.01784281539916992, 0.017375423431396485, 0.01763270378112793, 0.01795359992980957, 0.01754710388183594, 0.017178815841674806, 0.016924671173095703, 0.01700364875793457, 0.01722867202758789, 0.016946687698364257, 0.01665184020996094, 0.01674950408935547, 0.016928319931030274, 0.0168288631439209, 0.016948448181152344, 0.016691999435424806, 0.016689151763916017, 0.016754016876220704, 0.016611391067504883, 0.016715967178344726, 0.016697759628295897, 0.016760671615600586, 
0.01675507164001465, 0.016764320373535157, 0.016648576736450194, 0.016778783798217775, 0.01677359962463379, 0.016740352630615234, 0.016693119049072266, 0.016709760665893556, 0.016742399215698242, 0.016804031372070313, 0.01677622413635254, 0.016820415496826172, 0.01676348876953125, 0.016850719451904295, 0.016762592315673827, 0.017019392013549805, 0.016865440368652344, 0.016957504272460937, 0.0168056640625, 0.01700454330444336, 0.016943103790283204, 0.016990207672119142, 0.01683456039428711, 0.01784832000732422, 0.01676288032531738, 0.016773120880126953, 0.01683430480957031, 0.017082624435424805, 0.0170150089263916, 0.01698588752746582, 0.01712758445739746, 0.01734009552001953, 0.017123487472534178, 0.017184768676757813, 0.017149696350097655, 0.017108415603637694, 0.01700044822692871, 0.017079103469848634, 0.01720854377746582, 0.017179487228393554, 0.017067968368530275, 0.017104896545410156, 0.017121248245239258, 0.018423103332519532, 0.01776710319519043, 0.017552608489990233, 0.01732246398925781, 0.01736355209350586, 0.017381504058837892, 0.01720307159423828, 0.017149791717529297, 0.017040672302246093, 0.017154272079467774, 0.01702895927429199, 0.017271200180053712, 0.017066015243530273, 0.017106847763061525, 0.017041727066040038, 0.016997568130493163, 0.0169418888092041, 0.017010528564453124, 0.017116607666015624, 0.017333120346069337, 0.01710905647277832, 0.01691231918334961, 0.016975616455078123, 0.017249984741210936, 0.01732211112976074, 0.017506399154663087, 0.017563711166381835, 0.01760678482055664, 0.017380767822265625, 0.01730415916442871, 0.017225791931152343, 0.01717219161987305, 0.017070304870605468, 0.016977855682373047, 0.016990272521972657, 0.017041568756103517, 0.016999776840209962, 0.016970239639282226, 0.01699542427062988, 0.017167488098144532, 0.017272607803344726, 0.01744895935058594, 0.01751862335205078, 0.017579999923706055, 0.01752272033691406, 0.01766953659057617, 0.017660480499267578, 0.01768435287475586, 0.017669504165649413, 0.01760233688354492, 0.01765475273132324, 0.01756483268737793, 0.017555456161499023, 0.01745337677001953, 0.017458911895751952, 0.017498943328857423, 0.017551328659057615, 0.017624223709106445, 0.01763603210449219, 0.01730384063720703, 0.017260448455810547, 0.017254400253295898, 0.017268735885620116, 0.017290815353393555, 0.01960140800476074, 0.018509599685668947, 0.01923708724975586, 0.017690624237060547, 0.017824960708618165, 0.017793983459472657, 0.017545087814331055, 0.017520511627197265, 0.017740991592407225, 0.01761401557922363, 0.017678144454956055, 0.017704896926879883, 0.017663999557495116, 0.01890934371948242, 0.01765155220031738, 0.01733568000793457, 0.017320512771606445, 0.0174715518951416, 0.017545087814331055, 0.017518047332763673, 0.01763372802734375, 0.017612127304077147, 0.017748863220214843, 0.017711103439331053, 0.017650976181030273, 0.018195167541503906, 0.019247295379638672, 0.017751615524291994, 0.017660160064697266, 0.01780531120300293, 0.017565248489379882, 0.01741804885864258, 0.01753971290588379, 0.0173253116607666, 0.017305696487426758, 0.017427104949951172, 0.017334175109863282, 0.017255840301513673, 0.01759916877746582, 0.01724415969848633, 0.017137664794921875, 0.018577056884765623, 0.01753327941894531, 0.017342527389526366, 0.017602495193481445, 0.017776416778564452, 0.017725568771362304, 0.017680479049682618, 0.01779088020324707, 0.01783203125, 0.017776607513427734, 0.01793846321105957, 0.01777663993835449, 0.01799577522277832, 0.017954944610595703, 0.017840000152587892, 0.01790755271911621, 0.017875328063964843, 
0.017835840225219727, 0.017800447463989257, 0.017814239501953124, 0.01777199935913086, 0.01778332710266113, 0.018114559173583983, 0.018169055938720702, 0.0179965763092041, 0.01805721664428711, 0.017905664443969727, 0.01791958427429199, 0.017937984466552735, 0.01792255973815918, 0.01901398468017578, 0.017779935836791994, 0.017920896530151366, 0.01776639938354492, 0.017704864501953126, 0.0177457275390625, 0.017764575958251955, 0.017880960464477538, 0.01777027130126953, 0.01782329559326172, 0.017690975189208983, 0.017807775497436524, 0.01780531120300293, 0.01794047927856445, 0.017889280319213868, 0.017924095153808595, 0.01807151985168457, 0.01792207908630371, 0.0176312313079834, 0.01764249610900879, 0.017693056106567382, 0.01759846305847168, 0.017477439880371093, 0.01753481674194336, 0.017538015365600585, 0.01751785659790039, 0.017446943283081055, 0.01720185661315918, 0.017182336807250977, 0.017227743148803713, 0.0173121280670166, 0.017338399887084962, 0.01722153663635254, 0.017295455932617186, 0.017275903701782228, 0.01736729621887207, 0.017308416366577147, 0.01722163200378418, 0.01720319938659668, 0.017129791259765624, 0.017179647445678712, 0.017127712249755858, 0.017135040283203125, 0.017099391937255858, 0.0172109432220459, 0.01721014404296875, 0.0171824951171875, 0.017160415649414062, 0.017708032608032227, 0.017711135864257814, 0.017167327880859375, 0.01718396759033203, 0.01714796829223633, 0.017084415435791016, 0.017194911956787108, 0.017934303283691406, 0.017719072341918947, 0.017641056060791017, 0.017436288833618165, 0.01728371238708496, 0.017110687255859375, 0.017174560546875, 0.017179136276245118, 0.017282976150512695, 0.017090272903442384, 0.01749788856506348, 0.017620832443237304, 0.01710995292663574, 0.01706825637817383, 0.01699523162841797, 0.016999263763427735, 0.017039392471313478, 0.01715814399719238, 0.0171909122467041, 0.017029024124145507, 0.017931552886962892, 0.01716307258605957, 0.017102848052978514, 0.016963584899902344, 0.017102848052978514, 0.017104671478271483, 0.016972000122070313, 0.01710201644897461, 0.01694803237915039, 0.016917823791503906, 0.017154943466186522, 0.016873472213745116, 0.017004352569580078, 0.016854976654052733, 0.01700592041015625, 0.01696339225769043, 0.017245088577270508, 0.017124383926391602, 0.017218528747558595, 0.017360895156860352, 0.017328128814697266, 0.01717987251281738, 0.017406911849975587, 0.01714364814758301, 0.017268863677978516, 0.017542015075683592, 0.01713808059692383, 0.01712393569946289, 0.017196319580078126, 0.017203424453735353, 0.017281536102294923, 0.01728102493286133, 0.017294399261474608, 0.017445152282714843, 0.01739228820800781, 0.017294784545898438, 0.017304128646850585, 0.017297088623046877, 0.017326400756835936, 0.017289567947387695, 0.017267744064331056, 0.01708095932006836, 0.016977567672729493, 0.018184192657470705, 0.017681888580322266, 0.01768294334411621, 0.01757801628112793, 0.01729100799560547, 0.017215744018554687, 0.0174583683013916, 0.017213823318481446, 0.017293344497680663, 0.01733468818664551, 0.01723187255859375, 0.01778179168701172, 0.01748908805847168, 0.01763100814819336, 0.01762940788269043, 0.01784566307067871, 0.017927648544311524, 0.017695680618286132, 0.01788105583190918, 0.01780531120300293, 0.017735679626464843, 0.017711103439331053, 0.017809600830078126, 0.017871904373168945, 0.017750944137573242, 0.01789529609680176, 0.018027807235717775, 0.017879776000976563, 0.017961984634399415, 0.017937408447265626, 0.01782579231262207, 0.01784560012817383, 0.01783875274658203, 0.017571584701538086, 
0.017464607238769532, 0.01746019172668457, 0.017477632522583008, 0.017707008361816406, 0.017743871688842772, 0.01760256004333496, 0.017739776611328126, 0.017623136520385742, 0.01747052764892578, 0.017537471771240234, 0.017604192733764647, 0.017474176406860352, 0.01811631965637207, 0.017688959121704102, 0.01759651184082031, 0.017383424758911133, 0.017262592315673828, 0.01737673568725586, 0.017537343978881837, 0.017514720916748047, 0.01731724739074707, 0.017656448364257813, 0.01764761543273926, 0.017872831344604493, 0.01801628875732422, 0.017947872161865233, 0.01758195114135742, 0.0175216007232666, 0.01746112060546875, 0.01786591911315918, 0.01752556800842285, 0.017302848815917968, 0.017377983093261717, 0.017204416275024413, 0.017103328704833984, 0.017148256301879883, 0.01730544090270996, 0.01722368049621582, 0.017295520782470705, 0.01747884750366211, 0.017360992431640625, 0.017152063369750975, 0.017457311630249023, 0.01727743911743164, 0.017260032653808592, 0.01744060707092285, 0.01750886344909668, 0.017462656021118163, 0.01746614456176758, 0.01734623908996582, 0.01717030334472656, 0.017383264541625976, 0.017097312927246092, 0.01720921516418457, 0.017190528869628907, 0.017250240325927733, 0.017579872131347655, 0.017611743927001954, 0.017573888778686524, 0.017759775161743162, 0.017616159439086915, 0.017879999160766602, 0.01814463996887207, 0.017924928665161134, 0.01804284858703613, 0.01831065559387207, 0.017912160873413085, 0.017930240631103517, 0.019090431213378906, 0.018377216339111328, 0.018013952255249023, 0.018182912826538087, 0.01789936065673828, 0.01803455924987793, 0.018040096282958985, 0.017884159088134767, 0.017834175109863282, 0.017833791732788085, 0.017831680297851562, 0.018064895629882814, 0.018292543411254882, 0.018266431808471678, 0.018264703750610352, 0.018222240447998046, 0.018083999633789063, 0.01801900863647461, 0.018149375915527344, 0.017991680145263672, 0.018231296539306642, 0.017768447875976562, 0.01770086479187012, 0.017483903884887696]",tokens/s,57.58331729638546,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in 
enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through 
torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent 
call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File 
""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 205, in run_text_generation_memory_tracking _ = backend.generate(self.inputs, self.config.generate_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 454, in generate return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 718, in forward hidden_states = residual + hidden_states RuntimeError: CUDA error: an illegal memory access was encountered CUDA kernel errors might be asynchronously reported at some other API call, so the stacktrace below might be incorrect. For debugging consider passing CUDA_LAUNCH_BLOCKING=1 Compile with `TORCH_USE_CUDA_DSA` to enable device-side assertions. " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = 
self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = 
launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' 
object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7160.696832,10248.650752,0.0,9862.905856,9797.835264,s,1,12.6677099609375,12.6677099609375,0.0,12.6677099609375,12.6677099609375,12.6677099609375,12.6677099609375,[12.6677099609375],,kWh,0.0001564087551833192,1.724564190963619e-05,5.322004257599794e-05,0.00022687443966895333,,MB,3037.663232,10626.138112,0.0,10211.033088,10097.347072,s,10,6.949856872558593,0.6949856872558593,0.0016327644485380204,0.6956404418945312,0.6960719543457031,0.6961941497802734,0.6962919061279297,"[0.6908757934570312, 0.6930463256835937, 0.6948036499023438, 0.6956387939453125, 0.6963163452148438, 0.6959058837890625, 0.6955938720703125, 0.6960447998046875, 0.6959893188476562, 0.69564208984375]",tokens/s,368.35290955532076,kWh,2.0330627241944765e-05,2.242116567629049e-06,1.3515288589999637e-05,3.608803239957345e-05,tokens/kWh,7093764.413795689,MB,3039.010816,10628.235264,0.0,10213.13024,10097.349632,s,10,33.050490234375,3.3050490234375003,0.009476094742290402,3.3068792724609377,3.312250415039063,3.314654992675781,3.316578654785156,"[3.2810458984375, 3.29672509765625, 3.303554443359375, 3.306720458984375, 3.3070380859375, 3.311716064453125, 3.3170595703125, 3.31084716796875, 3.30630126953125, 3.309482177734375]",tokens/s,19.06174448646309,kWh,9.664655558055466e-05,1.0660639885171222e-05,6.440607930260033e-05,0.0001717132747683262,tokens/kWh,366890.6791568617,,s,630,33.047305759429925,0.05245604088798402,0.0007997177349419684,0.05240371131896973,0.05318418121337891,0.05353638820648193,0.056124251632690435,"[0.05483919906616211, 0.051114078521728515, 0.05127782440185547, 0.052346336364746095, 0.0521418228149414, 0.051079071044921875, 0.05183071899414062, 0.0521328010559082, 0.051533824920654295, 0.051727935791015624, 0.05122092819213867, 0.051585025787353515, 0.05151334381103516, 0.051490814208984374, 0.05116108703613281, 0.051574337005615235, 0.05171244812011719, 0.05254332733154297, 0.05219449615478516, 0.05194803237915039, 0.05212815856933594, 0.05158256149291992, 0.05215660858154297, 0.05214031982421875, 0.05161574554443359, 0.052192928314208985, 0.05166320037841797, 0.052193279266357424, 0.05102592086791992, 0.05175091171264649, 0.05203542327880859, 0.05190671920776367, 0.05233635330200195, 0.05156687927246094, 0.052170753479003906, 0.05167308807373047, 0.052345951080322264, 0.05262204742431641, 0.0523388786315918, 0.05250371170043945, 0.05233135986328125, 0.052400127410888675, 0.052208641052246096, 0.05247427368164063, 0.05260044860839844, 0.052257759094238285, 0.051955711364746096, 0.05207654571533203, 0.05243699264526367, 0.0521638069152832, 0.05200678253173828, 0.05220240020751953, 0.051748863220214845, 0.052226047515869144, 0.05262745666503906, 0.052373504638671874, 0.05267657470703125, 0.05233164978027344, 0.0525599365234375, 0.05258444976806641, 0.05221196746826172, 0.05290659332275391, 
0.05243494415283203, 0.05495798492431641, 0.0518021125793457, 0.051435649871826174, 0.051162593841552736, 0.05178217697143555, 0.05086207962036133, 0.05193091201782227, 0.05171958541870117, 0.052049888610839846, 0.052832382202148434, 0.05216310501098633, 0.0518903694152832, 0.05128806304931641, 0.05196169662475586, 0.05163433456420898, 0.05203558349609375, 0.052170753479003906, 0.053037025451660155, 0.0531190071105957, 0.052080127716064455, 0.05245347213745117, 0.05220547103881836, 0.051512832641601565, 0.05199155044555664, 0.05181548690795899, 0.05253011322021484, 0.052113025665283204, 0.052455806732177736, 0.052590591430664066, 0.051763137817382815, 0.05259215927124023, 0.05147097778320313, 0.052651935577392575, 0.052225566864013674, 0.05189379119873047, 0.05277510452270508, 0.05239270401000977, 0.05280931091308594, 0.05326473617553711, 0.05240838241577148, 0.05245542526245117, 0.05320223999023437, 0.05199737548828125, 0.05285059356689453, 0.05254070281982422, 0.05245779037475586, 0.05178598403930664, 0.05292057418823242, 0.05287046432495117, 0.05197391891479492, 0.05231708908081055, 0.0514785270690918, 0.05267443084716797, 0.0529409294128418, 0.05275596618652344, 0.05304166412353516, 0.05216825485229492, 0.05301011276245117, 0.05301862335205078, 0.05264051055908203, 0.052577342987060544, 0.05185804748535156, 0.053037376403808595, 0.05605055999755859, 0.051873023986816404, 0.050805057525634766, 0.05142777633666992, 0.05194956970214844, 0.05204582214355469, 0.05185494232177734, 0.051909023284912106, 0.05220115280151367, 0.051183902740478515, 0.05187356948852539, 0.051969345092773435, 0.05227411270141601, 0.05214585494995117, 0.05171574401855469, 0.05245609664916992, 0.051638080596923826, 0.053639358520507815, 0.0535464973449707, 0.053172737121582034, 0.05210831832885742, 0.051243423461914066, 0.052021343231201174, 0.05204220962524414, 0.051515071868896485, 0.05261958312988281, 0.05183276748657226, 0.052627521514892577, 0.05213561630249024, 0.05206835174560547, 0.0523015022277832, 0.05224921417236328, 0.05223014450073242, 0.05631734466552735, 0.051303009033203124, 0.05312921524047851, 0.052291584014892575, 0.053172222137451174, 0.05284611129760742, 0.05262326431274414, 0.05267103958129883, 0.05134131240844726, 0.053413375854492184, 0.05239798355102539, 0.052099231719970704, 0.05278108978271484, 0.05174099349975586, 0.05192918395996094, 0.05171331024169922, 0.05269696044921875, 0.053465312957763675, 0.05295372772216797, 0.052512767791748044, 0.05269615936279297, 0.052724510192871096, 0.05299008178710937, 0.05246156692504883, 0.0529705924987793, 0.053504928588867184, 0.05263529586791992, 0.05253276824951172, 0.05209171295166016, 0.05250576019287109, 0.0561803207397461, 0.05215887832641602, 0.05087641525268555, 0.051328510284423826, 0.05116310501098633, 0.051866142272949216, 0.05189363098144531, 0.05189081573486328, 0.051953662872314454, 0.051593215942382815, 0.05235452651977539, 0.052789791107177735, 0.052279232025146484, 0.052800575256347654, 0.05211443328857422, 0.052547584533691405, 0.052553150177001955, 0.05265011215209961, 0.0539284782409668, 0.052776256561279294, 0.05240278244018555, 0.052142078399658204, 0.05200595092773438, 0.05195052719116211, 0.051395744323730466, 0.05210198211669922, 0.05163363265991211, 0.05207043075561523, 0.052076576232910156, 0.05242486572265625, 0.05265235137939453, 0.05216255950927735, 0.05276211166381836, 0.053080577850341794, 0.052541278839111326, 0.05313308715820313, 0.05279980850219727, 0.05318217468261719, 0.05322742462158203, 0.052300159454345706, 
0.05277497482299805, 0.05228691101074219, 0.05202569580078125, 0.052866752624511716, 0.05182479858398437, 0.052389312744140625, 0.05195980834960937, 0.052738624572753905, 0.05286336135864258, 0.05201100921630859, 0.05292031860351563, 0.052393985748291017, 0.052951038360595705, 0.052910079956054686, 0.05291212844848633, 0.053286911010742184, 0.05267030334472656, 0.05297782516479492, 0.053433952331542967, 0.052437408447265625, 0.05264156723022461, 0.052969215393066406, 0.05243337631225586, 0.055416736602783206, 0.051969825744628904, 0.05170223999023438, 0.051816192626953125, 0.05147603225708008, 0.05145462417602539, 0.05114038467407227, 0.05075369644165039, 0.052278526306152345, 0.05205635070800781, 0.05228201675415039, 0.05243084716796875, 0.05194956970214844, 0.05233868789672851, 0.051471488952636715, 0.05251772689819336, 0.05268073654174805, 0.05328486251831055, 0.05352403259277344, 0.05226540756225586, 0.05303500747680664, 0.05255987167358398, 0.05203292846679688, 0.05218159866333008, 0.052094974517822266, 0.051681278228759765, 0.051991775512695314, 0.052049983978271486, 0.05212643051147461, 0.05216179275512695, 0.05234355163574219, 0.05286697769165039, 0.052144222259521485, 0.052680225372314454, 0.05264841461181641, 0.05270732879638672, 0.05312716674804688, 0.05278515243530273, 0.05367574310302734, 0.05245980834960937, 0.052641342163085934, 0.05286342239379883, 0.05258028793334961, 0.05225244903564453, 0.05136124801635742, 0.05216134262084961, 0.052483745574951175, 0.052711776733398434, 0.05248409652709961, 0.052391937255859375, 0.0525250244140625, 0.052881439208984374, 0.05364902496337891, 0.05294883346557617, 0.05209552001953125, 0.053419265747070316, 0.05348838424682617, 0.05478780746459961, 0.05250665664672852, 0.052934913635253905, 0.05269913482666016, 0.05288140869140625, 0.05180112075805664, 0.05650457763671875, 0.05192879867553711, 0.05173276901245117, 0.05506047821044922, 0.05095129776000976, 0.05224127960205078, 0.05142272186279297, 0.051476993560791016, 0.05118099212646485, 0.05191737747192383, 0.05146419143676758, 0.05235279846191406, 0.05243721771240235, 0.0522911376953125, 0.05254502487182617, 0.05228009414672852, 0.052537246704101564, 0.05424335861206055, 0.05327872085571289, 0.052954719543457034, 0.05260284805297852, 0.05276467132568359, 0.05254620742797852, 0.05163756942749023, 0.05248684692382812, 0.05142259216308594, 0.051722145080566405, 0.05223292922973633, 0.052089855194091796, 0.05225164794921875, 0.05219942474365234, 0.0522608642578125, 0.05256758499145508, 0.05219990539550781, 0.0530513916015625, 0.052994049072265625, 0.05346255874633789, 0.0536478385925293, 0.052596736907958984, 0.05276224136352539, 0.051904895782470706, 0.052837921142578126, 0.05285715103149414, 0.05234627151489258, 0.052163135528564455, 0.051323070526123046, 0.0522608642578125, 0.052182334899902344, 0.0523570556640625, 0.05287308883666992, 0.05238662338256836, 0.05254560089111328, 0.05257401657104492, 0.05287865447998047, 0.05383257675170899, 0.05269465637207031, 0.05309430313110351, 0.0534031982421875, 0.05268368148803711, 0.053121311187744144, 0.05265926361083984, 0.05306639862060547, 0.05305132675170898, 0.05672505569458008, 0.052404640197753906, 0.05100751876831055, 0.051963905334472656, 0.05120000076293945, 0.051335166931152344, 0.05085551834106445, 0.05198601531982422, 0.051998817443847656, 0.05228003311157227, 0.05240115356445312, 0.05187398529052734, 0.05249478530883789, 0.052017536163330075, 0.05228307342529297, 0.05288991928100586, 0.052553409576416014, 0.053326145172119144, 
0.053055072784423826, 0.05312963104248047, 0.052579742431640625, 0.052090526580810544, 0.052656223297119144, 0.05260927963256836, 0.052009567260742184, 0.05193523025512695, 0.051641761779785154, 0.05267923355102539, 0.05212934494018555, 0.05229379272460938, 0.052590911865234374, 0.051568031311035156, 0.05273603057861328, 0.05225529479980469, 0.05304729461669922, 0.0537740478515625, 0.053131553649902345, 0.053217281341552736, 0.05361663818359375, 0.05250646209716797, 0.05276803207397461, 0.052368030548095704, 0.05268521499633789, 0.05605561447143555, 0.05127916717529297, 0.05283910369873047, 0.05283020782470703, 0.05264384078979492, 0.052768768310546874, 0.05191846466064453, 0.052722049713134767, 0.05286092758178711, 0.05296332931518555, 0.05301248168945313, 0.05331353759765625, 0.05330659103393555, 0.05379900741577148, 0.05301443099975586, 0.05338528060913086, 0.05262384033203125, 0.052951297760009765, 0.05365760040283203, 0.05212575912475586, 0.05615228652954102, 0.05149472045898437, 0.051776863098144534, 0.05159203338623047, 0.051310142517089846, 0.051222976684570314, 0.05224649429321289, 0.05203532791137695, 0.05207183837890625, 0.05211584091186523, 0.052066814422607424, 0.05246156692504883, 0.05213916778564453, 0.05195792007446289, 0.052458175659179686, 0.052547584533691405, 0.053008384704589843, 0.05261270523071289, 0.05331539154052734, 0.05550700759887695, 0.05140646362304688, 0.05270415878295898, 0.052598785400390625, 0.0522158088684082, 0.052045440673828124, 0.05196428680419922, 0.05209702301025391, 0.05168332672119141, 0.05246105575561524, 0.05268326568603516, 0.05224857711791992, 0.052346687316894534, 0.05183916854858398, 0.05336883163452148, 0.052749568939208985, 0.05275660705566406, 0.05311862564086914, 0.053217823028564454, 0.05253984069824219, 0.0528317756652832, 0.05214665603637696, 0.052563392639160156, 0.05254345703125, 0.052505184173583984, 0.052653377532958984, 0.05217145538330078, 0.05253529739379883, 0.05217267227172852, 0.05235929489135742, 0.05286707305908203, 0.052291584014892575, 0.05283430480957031, 0.052531200408935545, 0.05265510559082031, 0.05381119918823242, 0.0529496955871582, 0.052642017364501956, 0.05337247848510742, 0.05278752136230469, 0.05301424026489258, 0.052494144439697264, 0.05304348754882812, 0.052630943298339845, 0.0569090576171875, 0.052230911254882814, 0.05127577590942383, 0.05145971298217773, 0.05146809768676758, 0.051378753662109374, 0.05111808013916016, 0.051979679107666016, 0.05242736053466797, 0.05212160110473633, 0.0521146240234375, 0.05180294418334961, 0.052512767791748044, 0.05167887878417969, 0.052604705810546874, 0.05215030288696289, 0.05217948913574219, 0.05351628875732422, 0.053133312225341796, 0.05296083068847656, 0.052740543365478516, 0.05224857711791992, 0.05190860748291016, 0.05163417434692383, 0.052514942169189456, 0.05186751937866211, 0.05204547119140625, 0.05232060623168945, 0.052006622314453126, 0.052305343627929685, 0.051993438720703125, 0.05252505493164063, 0.05280912017822265, 0.05212015914916992, 0.05253228759765625, 0.05277382278442383, 0.05314889526367188, 0.05330793762207031, 0.05293695831298828, 0.05278915023803711, 0.05307308959960937, 0.052294208526611326, 0.05248060989379883, 0.05215411376953125, 0.05231206512451172, 0.0524183349609375, 0.052453601837158206, 0.05286297607421875, 0.05227724838256836, 0.052574207305908206, 0.05222195053100586, 0.05257164764404297, 0.053058048248291016, 0.05262745666503906, 0.053378238677978515, 0.053449024200439454, 0.052711936950683595, 0.052979423522949216, 0.05261116790771484, 
0.052628929138183594, 0.052595199584960936, 0.05206991958618164, 0.0526360969543457, 0.05696633529663086, 0.05194937515258789, 0.05119692611694336, 0.0520533447265625, 0.051444320678710936, 0.0510728645324707, 0.052484321594238284, 0.051676990509033204, 0.05169375991821289, 0.052086784362792966, 0.05207440185546875, 0.05158224105834961, 0.05212051010131836, 0.05268572616577148, 0.05257315063476563, 0.05249990463256836, 0.052136512756347654, 0.05360844802856445, 0.05359791946411133, 0.05270489501953125, 0.05222803115844726, 0.0522632942199707, 0.0521506233215332, 0.052200511932373045, 0.052061119079589845, 0.05215014266967773, 0.051636192321777345, 0.05226694488525391, 0.05226835250854492, 0.052128063201904294, 0.05265673446655274, 0.052245887756347656, 0.052421249389648435, 0.05205811309814453, 0.05247734451293945, 0.05356115341186524, 0.05306985473632812, 0.053179134368896486, 0.05347084808349609, 0.05255923080444336, 0.05253811264038086, 0.05234092712402344, 0.05250873565673828, 0.05248988723754883, 0.052085086822509764, 0.0528504638671875, 0.05194364929199219, 0.052641342163085934, 0.05218291091918945, 0.05260960006713867, 0.05290393447875977, 0.052299774169921875, 0.05291417694091797, 0.05339750289916992, 0.05293183898925781, 0.05333465576171875, 0.05302489471435547, 0.05255097579956055, 0.05364806365966797, 0.052414016723632814, 0.05260537719726562, 0.0530247688293457, 0.05262646484375]",tokens/s,19.063581297250888,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, 
in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 4.12 MiB is free. Process 112457 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 242.96 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch 
raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) 
ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3894, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 85, in _process_model_before_weight_loading model, has_been_replaced = replace_with_awq_linear( File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear _, has_been_replaced = replace_with_awq_linear( File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear _, has_been_replaced = replace_with_awq_linear( File 
""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear _, has_been_replaced = replace_with_awq_linear( [Previous line repeated 1 more time] File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 166, in replace_with_awq_linear model._modules[name] = target_cls( File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 42, in __init__ assert out_features % (32 // self.w_bit) == 0 AssertionError " 4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1488.13824,1546.584064,0.0,1168.113664,1154.613248,s,1,8.2988203125,8.2988203125,0.0,8.2988203125,8.2988203125,8.2988203125,8.2988203125,[8.2988203125],,kWh,3.984401601249677e-05,4.387897293824016e-06,1.3327788440001265e-05,5.755970174632205e-05,,MB,1472.036864,1777.270784,0.0,1369.440256,1323.44832,s,10,0.7596723175048828,0.07596723175048828,0.00045580130739098163,0.07586155319213868,0.07628573913574219,0.07674357376098632,0.07710984146118163,"[0.07720140838623046, 0.07585977935791016, 0.07539791870117188, 0.0760009307861328, 0.0758256607055664, 0.07575039672851562, 0.07618399810791016, 0.0758633270263672, 0.07592291259765625, 0.07566598510742187]",tokens/s,3369.874011479358,kWh,2.338612180100002e-06,2.5782695515844666e-07,1.5509145740638663e-06,4.1473537093223155e-06,tokens/kWh,61726107.282475024,MB,1478.69696,1798.242304,0.0,1390.411776,1377.233408,s,10,11.5315078125,1.15315078125,0.009420732157273685,1.1547311401367186,1.1652214721679686,1.1661131774902342,1.1668265417480468,"[1.1529599609375, 1.1565023193359374, 1.15901416015625, 1.14597998046875, 1.160075439453125, 1.1650233154296874, 1.1398558349609376, 1.1445345458984375, 1.140557373046875, 1.1670048828125]",tokens/s,54.63292487363088,kWh,3.392155081198458e-05,3.7411917676915357e-06,1.8114184491337117e-05,5.5776927071013235e-05,tokens/kWh,1129499.2985144306,,s,630,11.52871917533875,0.01829955424656944,0.00045683104943365426,0.018227952003479004,0.018644780540466307,0.01881768684387207,0.01958206460952759,"[0.01893868827819824, 0.01852969551086426, 0.01835683250427246, 0.01823539161682129, 0.018175136566162108, 0.018379615783691405, 0.018335744857788085, 0.018201791763305664, 0.01855286407470703, 0.01823004722595215, 0.0183540153503418, 0.01809174346923828, 0.018112159729003905, 0.01813987159729004, 0.018210432052612305, 0.01826144027709961, 0.018387935638427735, 0.018468416213989258, 0.018586111068725587, 0.01854876708984375, 0.018660703659057618, 0.018496095657348634, 0.018388959884643556, 0.018207904815673828, 0.018201471328735352, 0.01817094421386719, 0.018080127716064452, 0.018104896545410157, 0.018210784912109375, 0.018325504302978517, 0.018268192291259765, 0.018257919311523436, 0.0183767032623291, 0.018130720138549803, 0.018241151809692383, 0.01851247978210449, 0.018593599319458008, 0.01833184051513672, 0.018118656158447266, 
0.018137088775634767, 0.018144287109375, 0.01825606346130371, 0.018209823608398436, 0.018428831100463866, 0.018368480682373046, 0.018453535079956056, 0.0184006404876709, 0.01828713607788086, 0.018186176300048828, 0.018067455291748045, 0.018284000396728516, 0.018170400619506834, 0.018324480056762696, 0.018227359771728516, 0.018041343688964845, 0.01822329521179199, 0.018104736328125, 0.01863065528869629, 0.01828611183166504, 0.0182192325592041, 0.01818364715576172, 0.01808195114135742, 0.018226879119873047, 0.019005727767944337, 0.018481632232666016, 0.01836851119995117, 0.018163808822631834, 0.018136959075927734, 0.01804729652404785, 0.018044384002685546, 0.01805958366394043, 0.018010208129882813, 0.017913631439208984, 0.018114816665649413, 0.018016063690185546, 0.018046783447265624, 0.01803023910522461, 0.018024864196777343, 0.017958080291748047, 0.018052032470703125, 0.01816985511779785, 0.01806540870666504, 0.018166048049926758, 0.018392799377441406, 0.018157535552978516, 0.018128927230834962, 0.01826963233947754, 0.018468608856201173, 0.018565952301025392, 0.018550079345703126, 0.018444992065429686, 0.018362272262573243, 0.018472320556640626, 0.0184051513671875, 0.018367424011230468, 0.01843561553955078, 0.018448863983154297, 0.018548864364624024, 0.018544031143188477, 0.018531808853149413, 0.018412544250488282, 0.018390495300292967, 0.018288639068603514, 0.01833782386779785, 0.018622079849243165, 0.020173696517944335, 0.019198207855224608, 0.018476800918579103, 0.018378751754760742, 0.01834102439880371, 0.01818614387512207, 0.018111423492431642, 0.018073888778686525, 0.018521600723266602, 0.01811091232299805, 0.01834364891052246, 0.018353696823120116, 0.018184511184692383, 0.018311391830444335, 0.018529600143432617, 0.018982751846313477, 0.018254688262939453, 0.018228992462158203, 0.018249984741210937, 0.01846886444091797, 0.018679807662963867, 0.019154943466186524, 0.018548959732055663, 0.018499488830566405, 0.018371583938598633, 0.018312063217163086, 0.018302463531494142, 0.018051584243774413, 0.01840460777282715, 0.01813372802734375, 0.018313119888305664, 0.018493663787841796, 0.018943904876708984, 0.018504735946655273, 0.01836031913757324, 0.01839411163330078, 0.018332735061645507, 0.018389312744140626, 0.01848975944519043, 0.018333120346069334, 0.01854038429260254, 0.01877903938293457, 0.018659328460693358, 0.018716447830200194, 0.018723039627075194, 0.01857535934448242, 0.018485248565673826, 0.018443872451782226, 0.01832547187805176, 0.018269920349121095, 0.01915763282775879, 0.01824777603149414, 0.018369855880737303, 0.018362783432006837, 0.01834217643737793, 0.01819647979736328, 0.018095327377319337, 0.018115007400512695, 0.01837295913696289, 0.0182541446685791, 0.018261568069458008, 0.018092159271240235, 0.01807155227661133, 0.018106367111206053, 0.018288511276245117, 0.01811782455444336, 0.018234304428100586, 0.018374048233032226, 0.018306688308715822, 0.018318304061889647, 0.0185380802154541, 0.01846067237854004, 0.018338560104370117, 0.018158975601196288, 0.018124128341674806, 0.01818899154663086, 0.018298912048339843, 0.01833750343322754, 0.01852592086791992, 0.018377119064331055, 0.01849555206298828, 0.018307584762573242, 0.018675039291381836, 0.01838947105407715, 0.019040288925170897, 0.018362720489501952, 0.018059423446655273, 0.017892416000366212, 0.018026784896850587, 0.017877695083618163, 0.01802851104736328, 0.018190336227416993, 0.018253503799438478, 0.01834623908996582, 0.018190336227416993, 0.018413631439208985, 0.018094079971313477, 0.01805923271179199, 
0.018167360305786133, 0.01812224006652832, 0.018082176208496094, 0.01909987258911133, 0.01821120071411133, 0.018724863052368163, 0.01818556785583496, 0.017995424270629883, 0.018092960357666017, 0.01805936050415039, 0.018335968017578124, 0.01843791961669922, 0.018440288543701173, 0.018239231109619142, 0.018012319564819336, 0.0179682559967041, 0.017965951919555665, 0.01800396728515625, 0.017971200942993162, 0.017960960388183594, 0.01804310417175293, 0.0179648323059082, 0.017919424057006837, 0.01816428756713867, 0.01819036865234375, 0.019322080612182616, 0.018115327835083007, 0.01809939193725586, 0.01809222412109375, 0.017968767166137694, 0.017947647094726564, 0.017953887939453125, 0.018060192108154297, 0.018118656158447266, 0.018051071166992186, 0.01839468765258789, 0.01802079963684082, 0.01800396728515625, 0.018415903091430662, 0.01857315254211426, 0.018267072677612305, 0.017988224029541016, 0.01804128074645996, 0.018056800842285156, 0.018015775680541992, 0.018123552322387694, 0.01812272071838379, 0.01828659248352051, 0.018501632690429686, 0.018251808166503906, 0.01806528091430664, 0.018141183853149414, 0.018040000915527345, 0.01809596824645996, 0.01894931221008301, 0.01799760055541992, 0.017888864517211913, 0.01794483184814453, 0.01795907211303711, 0.018077695846557617, 0.018177600860595704, 0.018041343688964845, 0.017995552062988283, 0.018223264694213866, 0.022200319290161134, 0.018237791061401366, 0.018072736740112304, 0.017926912307739257, 0.017958335876464844, 0.017944992065429686, 0.017932191848754882, 0.0180316162109375, 0.018151872634887694, 0.01791231918334961, 0.017846336364746095, 0.017964607238769532, 0.017932832717895506, 0.01800387191772461, 0.017983327865600585, 0.018106496810913086, 0.018120672225952147, 0.018210880279541014, 0.01826201629638672, 0.018265951156616212, 0.01876915168762207, 0.01852841567993164, 0.018698591232299805, 0.01859350395202637, 0.018702495574951173, 0.018819616317749022, 0.018661376953125, 0.019079168319702147, 0.018597055435180664, 0.018420543670654297, 0.01847500801086426, 0.01845039939880371, 0.01841155242919922, 0.018432096481323244, 0.01848054313659668, 0.018541055679321287, 0.01859993553161621, 0.018644895553588867, 0.018575456619262694, 0.018761375427246093, 0.018737503051757812, 0.018810911178588866, 0.01887228775024414, 0.01864089584350586, 0.01852796745300293, 0.01949929618835449, 0.018737087249755858, 0.018699424743652344, 0.01930259132385254, 0.018788223266601563, 0.018854719161987304, 0.018520063400268554, 0.018321407318115233, 0.018287872314453123, 0.018371328353881836, 0.01827020835876465, 0.01842585563659668, 0.018483072280883788, 0.01860544013977051, 0.018405567169189452, 0.018520639419555663, 0.018582559585571288, 0.018461824417114258, 0.018478431701660157, 0.018500095367431642, 0.01843587112426758, 0.019046144485473634, 0.01859452819824219, 0.018619327545166015, 0.018455360412597658, 0.018774015426635742, 0.018397184371948243, 0.018693536758422852, 0.01837526321411133, 0.018369951248168946, 0.018779903411865233, 0.018371423721313476, 0.018247615814208983, 0.0183255672454834, 0.0183287353515625, 0.018207584381103516, 0.01816166305541992, 0.018206720352172853, 0.018094079971313477, 0.018183456420898438, 0.018278367996215822, 0.01839142417907715, 0.018384544372558594, 0.018416128158569335, 0.01862403106689453, 0.01871241569519043, 0.018618431091308594, 0.01864169692993164, 0.01885593605041504, 0.01868185615539551, 0.01961587142944336, 0.01871164894104004, 0.018772768020629882, 0.018709728240966797, 0.018587520599365234, 
0.018541471481323242, 0.018325504302978517, 0.018328832626342773, 0.018102815628051758, 0.018120576858520508, 0.01840982437133789, 0.018268032073974608, 0.01819251251220703, 0.018317087173461914, 0.01822854423522949, 0.018090911865234375, 0.018670272827148438, 0.018108415603637695, 0.017971200942993162, 0.018051071166992186, 0.018129087448120116, 0.018178911209106446, 0.018227264404296874, 0.01818707275390625, 0.01821295928955078, 0.018577407836914063, 0.01846886444091797, 0.01847871971130371, 0.01813542366027832, 0.018026399612426757, 0.017997919082641603, 0.018189632415771484, 0.01799616050720215, 0.017895744323730468, 0.017881088256835938, 0.017969152450561524, 0.01798761558532715, 0.01786851119995117, 0.017940511703491212, 0.0178874568939209, 0.017914112091064454, 0.018001951217651368, 0.018093439102172853, 0.017957216262817384, 0.017960031509399413, 0.018005983352661133, 0.01792095947265625, 0.01820057678222656, 0.01819647979736328, 0.017953855514526367, 0.018426111221313476, 0.01823577690124512, 0.017950687408447265, 0.017927967071533202, 0.017963680267333984, 0.01799567985534668, 0.017934335708618163, 0.017864736557006836, 0.018147296905517578, 0.017970815658569336, 0.01801411247253418, 0.017938911437988283, 0.018150943756103516, 0.01819593620300293, 0.018182687759399414, 0.018364896774291994, 0.018225151062011717, 0.018268159866333008, 0.018333696365356447, 0.018221120834350586, 0.01805427169799805, 0.018107200622558595, 0.01799577522277832, 0.018159551620483397, 0.017997888565063475, 0.017909759521484374, 0.01796505546569824, 0.017936384201049805, 0.017948320388793945, 0.01828518486022949, 0.018290815353393556, 0.018208864212036133, 0.018253599166870117, 0.017995744705200194, 0.017936416625976562, 0.018081792831420897, 0.018054847717285157, 0.018116928100585936, 0.018118207931518554, 0.018148895263671874, 0.01808259201049805, 0.018068960189819336, 0.018030399322509764, 0.0180163516998291, 0.01810470390319824, 0.018121055603027344, 0.019494943618774414, 0.01805459213256836, 0.01804083251953125, 0.017886816024780275, 0.017984447479248048, 0.017907039642333984, 0.018989759445190428, 0.01799750328063965, 0.01786092758178711, 0.01800934410095215, 0.017922815322875978, 0.017886783599853514, 0.017930688858032225, 0.018050912857055665, 0.018001279830932616, 0.018067583084106446, 0.017971872329711914, 0.017936384201049805, 0.017884767532348633, 0.01790332794189453, 0.017936256408691405, 0.01809676742553711, 0.01794272041320801, 0.017998912811279296, 0.018514879226684572, 0.02188083267211914, 0.01854080009460449, 0.018165599822998046, 0.018072864532470704, 0.01806604766845703, 0.017985536575317384, 0.0179704647064209, 0.017991487503051757, 0.018006656646728517, 0.01789571189880371, 0.017953792572021485, 0.01797427177429199, 0.01796816062927246, 0.01825276756286621, 0.01794047927856445, 0.01810963249206543, 0.018824064254760742, 0.018111616134643554, 0.018407712936401366, 0.017971744537353517, 0.01801625633239746, 0.018270751953125, 0.018038368225097655, 0.017971616744995117, 0.018164863586425783, 0.018176671981811523, 0.018462944030761718, 0.01807792091369629, 0.017938207626342774, 0.01790937614440918, 0.018008447647094725, 0.017926143646240233, 0.018950111389160158, 0.018065439224243165, 0.017858015060424805, 0.017951263427734374, 0.018188512802124024, 0.017895200729370116, 0.017979263305664062, 0.017967231750488283, 0.017977344512939454, 0.017896799087524413, 0.017998432159423827, 0.017825855255126952, 0.017911455154418946, 0.017947200775146485, 0.01786400032043457, 0.018033119201660158, 
0.017930240631103517, 0.018210336685180663, 0.01796748733520508, 0.018079839706420898, 0.0179587516784668, 0.01791606330871582, 0.01804697608947754, 0.017967103958129883, 0.017920000076293945, 0.017974592208862303, 0.01801491165161133, 0.018031776428222655, 0.018084096908569335, 0.018031200408935546, 0.01804841613769531, 0.018045343399047852, 0.018411712646484377, 0.01804697608947754, 0.01815884780883789, 0.018417631149291992, 0.01819523239135742, 0.01806723213195801, 0.0181409912109375, 0.017942335128784178, 0.01797100830078125, 0.018002719879150392, 0.01799318313598633, 0.01809388732910156, 0.01815353584289551, 0.018179967880249025, 0.0184737606048584, 0.01834307289123535, 0.018435136795043945, 0.018644767761230467, 0.018521440505981444, 0.018516639709472656, 0.01913862419128418, 0.018613855361938478, 0.01867407989501953, 0.01864499282836914, 0.018528255462646484, 0.01851408004760742, 0.01835811233520508, 0.018355520248413085, 0.018325279235839844, 0.018205215454101562, 0.01829311943054199, 0.018305023193359374, 0.018321407318115233, 0.01827840042114258, 0.01825200080871582, 0.018302431106567384, 0.018333343505859374, 0.018328096389770506, 0.018335872650146485, 0.018225151062011717, 0.018159872055053712, 0.018091775894165038, 0.018118112564086915, 0.018188831329345703, 0.018247840881347656, 0.01825161552429199, 0.018261184692382814, 0.01832601547241211, 0.018323776245117187, 0.01838863945007324, 0.01881532859802246, 0.018427104949951173, 0.018387744903564453, 0.018472032546997072, 0.01837910461425781, 0.018436767578125, 0.018511968612670897, 0.018451295852661132, 0.01842438316345215, 0.018325632095336913, 0.01832374382019043, 0.01834003257751465, 0.0184718074798584, 0.01834284782409668, 0.018380800247192384, 0.018301183700561524, 0.018263935089111328, 0.018323200225830078, 0.018110015869140624, 0.018074432373046876, 0.018046720504760742, 0.019159040451049804, 0.02500351905822754, 0.019855871200561523, 0.01854640007019043, 0.01830735969543457, 0.0183438720703125, 0.018473024368286132, 0.018317312240600587, 0.018255903244018556, 0.01825702476501465, 0.01821504020690918, 0.01971887969970703]",tokens/s,54.646139819906644,,, 4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1550.25408,1546.584064,0.0,1168.113664,1154.613248,s,1,8.2976953125,8.2976953125,0.0,8.2976953125,8.2976953125,8.2976953125,8.2976953125,[8.2976953125],,kWh,3.979363800413769e-05,4.382259203332224e-06,1.239528769397813e-05,5.6571184901448046e-05,,MB,1622.872064,1777.270784,0.0,1369.440256,1323.44832,s,10,0.7577682571411132,0.07577682571411133,0.00044258388422384527,0.07567547225952148,0.07648710784912109,0.07660779266357422,0.07670434051513672,"[0.07672847747802734, 0.07563311767578125, 0.07556864166259766, 0.0753691864013672, 0.07646028900146484, 0.075830078125, 0.07573881530761718, 0.075328125, 0.07571782684326171, 
0.0753936996459961]",tokens/s,3378.3415653465026,kWh,2.332164941666785e-06,2.5719568844260757e-07,1.5518420703968655e-06,4.141202700506258e-06,tokens/kWh,61817790.26868311,MB,1630.355456,1798.242304,0.0,1390.411776,1377.233408,s,10,11.643792358398438,1.1643792358398437,0.007841945048422793,1.1645668945312502,1.173194677734375,1.1750732299804687,1.1765760717773437,"[1.171073974609375, 1.1650872802734376, 1.166047607421875, 1.154470703125, 1.1769517822265625, 1.1624239501953124, 1.1640465087890626, 1.1727772216796875, 1.161658447265625, 1.1492548828125]",tokens/s,54.106083362573315,kWh,3.382460167167134e-05,3.7303772633406755e-06,1.7983645780203018e-05,5.553862471521503e-05,tokens/kWh,1134345.6976661666,,s,630,11.64137124061584,0.01847836704859658,0.00045703590184294283,0.018392287254333495,0.018807874298095703,0.018984730339050293,0.019883276405334476,"[0.01957548713684082, 0.018872224807739257, 0.018896896362304686, 0.018661376953125, 0.018702272415161134, 0.018657344818115235, 0.018571264266967775, 0.018425344467163086, 0.01833590316772461, 0.01831337547302246, 0.018380992889404296, 0.01836851119995117, 0.018375904083251952, 0.018444480895996093, 0.018460256576538086, 0.018538751602172852, 0.018739967346191405, 0.018561023712158203, 0.018550783157348632, 0.018461952209472655, 0.01827712059020996, 0.018309055328369142, 0.018442304611206054, 0.018290687561035156, 0.018288383483886717, 0.01825190353393555, 0.018683008193969727, 0.018422336578369142, 0.018305471420288086, 0.01843974494934082, 0.019503231048583983, 0.018897216796875, 0.018648672103881835, 0.019211999893188475, 0.018535104751586914, 0.01857535934448242, 0.018656448364257814, 0.018612543106079103, 0.0185533447265625, 0.01846886444091797, 0.018767551422119142, 0.018821151733398437, 0.01874358367919922, 0.018750783920288085, 0.019919551849365235, 0.01897635269165039, 0.018952608108520508, 0.018535903930664063, 0.018475168228149413, 0.018280672073364257, 0.01821238327026367, 0.019173311233520507, 0.018625120162963867, 0.01846886444091797, 0.01843142318725586, 0.018332319259643556, 0.018443647384643554, 0.0184117431640625, 0.018368223190307616, 0.018268863677978517, 0.018184192657470705, 0.01827020835876465, 0.018164800643920898, 0.01866422462463379, 0.01874684715270996, 0.018831296920776366, 0.018774143218994142, 0.01899158477783203, 0.01865894317626953, 0.018737056732177734, 0.01857174491882324, 0.018743167877197264, 0.01853766441345215, 0.01862499237060547, 0.018597375869750975, 0.018636959075927734, 0.018628896713256834, 0.018663423538208008, 0.018480863571166992, 0.01855135917663574, 0.018596096038818358, 0.018657024383544923, 0.018516223907470705, 0.01857695960998535, 0.018364864349365233, 0.018220800399780274, 0.018366176605224608, 0.018388864517211913, 0.018467487335205077, 0.01840127944946289, 0.018355775833129882, 0.01838470458984375, 0.01832940864562988, 0.01818841552734375, 0.01822175979614258, 0.018132095336914063, 0.01826425552368164, 0.01837468719482422, 0.01838310432434082, 0.01821072006225586, 0.018196863174438478, 0.019295808792114257, 0.018892351150512694, 0.01872719955444336, 0.018615135192871092, 0.0186243839263916, 0.018579456329345705, 0.018577215194702148, 0.018284416198730467, 0.018382144927978517, 0.01820569610595703, 0.018523263931274413, 0.018250335693359376, 0.01826617622375488, 0.018364639282226564, 0.018362207412719725, 0.018781856536865236, 0.01825836753845215, 0.018304191589355468, 0.01825676727294922, 0.018392799377441406, 0.01832579231262207, 0.018339359283447265, 0.018242015838623046, 
0.01844121551513672, 0.018529279708862305, 0.018452608108520507, 0.01854863929748535, 0.01848099136352539, 0.01818649673461914, 0.018108415603637695, 0.018231039047241212, 0.018303199768066405, 0.018210464477539063, 0.01828652763366699, 0.01885638427734375, 0.018431232452392577, 0.018567775726318358, 0.018438047409057617, 0.01833171272277832, 0.01841097640991211, 0.018391775131225585, 0.01850729560852051, 0.01858793640136719, 0.018525407791137694, 0.018432992935180664, 0.018476480484008788, 0.018471488952636717, 0.01833344078063965, 0.018600128173828126, 0.018406784057617188, 0.01821766471862793, 0.01946214485168457, 0.018311168670654295, 0.018464767456054687, 0.018404544830322264, 0.018334527969360352, 0.01827020835876465, 0.018343040466308594, 0.018366783142089844, 0.018287168502807618, 0.01815283203125, 0.018267871856689454, 0.018275232315063478, 0.018325504302978517, 0.01827020835876465, 0.018169471740722656, 0.018214879989624025, 0.018229408264160155, 0.018485279083251954, 0.018350303649902342, 0.01859993553161621, 0.019056320190429688, 0.018877952575683594, 0.0189935359954834, 0.018839967727661132, 0.018914688110351564, 0.01880745506286621, 0.018817024230957033, 0.018726816177368166, 0.018781856536865236, 0.018849407196044922, 0.018614336013793944, 0.01872768020629883, 0.01845020866394043, 0.01866569519042969, 0.018610176086425782, 0.019066879272460938, 0.018609952926635743, 0.018722591400146486, 0.018574687957763673, 0.01846361541748047, 0.01827984046936035, 0.01847724723815918, 0.018407840728759766, 0.018336864471435548, 0.018369440078735352, 0.018415231704711914, 0.018368032455444334, 0.018314048767089842, 0.018212703704833983, 0.018249919891357422, 0.019539104461669923, 0.018850175857543946, 0.019057119369506836, 0.01835811233520508, 0.018206880569458007, 0.01823539161682129, 0.01824563217163086, 0.018413568496704103, 0.018453760147094725, 0.01850649642944336, 0.018532352447509767, 0.018413568496704103, 0.0183767032623291, 0.0182476806640625, 0.01821286392211914, 0.018165760040283203, 0.018151039123535155, 0.018338176727294923, 0.018288480758666993, 0.018311328887939453, 0.01863270378112793, 0.0182108154296875, 0.018268032073974608, 0.018352256774902344, 0.018251775741577148, 0.01818009567260742, 0.01813862419128418, 0.01811712074279785, 0.01820262336730957, 0.018324928283691408, 0.018143264770507813, 0.01814582443237305, 0.018060831069946288, 0.01816009521484375, 0.018245439529418945, 0.018171424865722655, 0.018170528411865235, 0.018113887786865235, 0.01810908889770508, 0.01824358367919922, 0.018164928436279298, 0.018182687759399414, 0.01814147186279297, 0.01820022392272949, 0.01825827217102051, 0.01828659248352051, 0.01826348876953125, 0.01807417678833008, 0.018143199920654298, 0.018188192367553712, 0.01923276710510254, 0.018382848739624022, 0.019967296600341796, 0.02612220764160156, 0.02220719909667969, 0.01830297660827637, 0.01825484848022461, 0.01828339195251465, 0.01819558334350586, 0.01826028823852539, 0.018399328231811524, 0.018410079956054686, 0.018651136398315428, 0.018536127090454102, 0.018559295654296874, 0.018507776260375978, 0.01845587158203125, 0.018897567749023438, 0.018535711288452147, 0.018467584609985353, 0.018863744735717773, 0.018434431076049803, 0.01844633674621582, 0.018968544006347655, 0.018418752670288085, 0.018129888534545897, 0.01817804718017578, 0.01821286392211914, 0.01827840042114258, 0.0183308162689209, 0.018268991470336914, 0.01820579147338867, 0.018164640426635743, 0.018270015716552734, 0.01827449607849121, 0.018292448043823243, 
0.018189760208129884, 0.018166624069213867, 0.01823744010925293, 0.018200159072875977, 0.018330015182495118, 0.01820400047302246, 0.018209184646606445, 0.018135295867919923, 0.018258943557739257, 0.018278879165649416, 0.0185001277923584, 0.018683904647827147, 0.018817024230957033, 0.01841695976257324, 0.01842655944824219, 0.01857686424255371, 0.01848297691345215, 0.018512447357177733, 0.01997433662414551, 0.019256704330444335, 0.018835552215576173, 0.018782432556152345, 0.018566879272460937, 0.018581727981567382, 0.01850364875793457, 0.01867407989501953, 0.018524160385131837, 0.018459392547607423, 0.018520063400268554, 0.01841152000427246, 0.018898815155029298, 0.018411647796630858, 0.018354175567626953, 0.018290687561035156, 0.018339839935302735, 0.018448383331298827, 0.018253376007080078, 0.018387392044067384, 0.01847091293334961, 0.01826576042175293, 0.018759904861450197, 0.018310335159301756, 0.018348831176757813, 0.018386560440063475, 0.018221599578857422, 0.01817731285095215, 0.018352863311767578, 0.01840332794189453, 0.018570240020751954, 0.018490367889404297, 0.018323135375976563, 0.018386720657348633, 0.01835196876525879, 0.019326656341552735, 0.019794464111328125, 0.018814815521240234, 0.018608768463134764, 0.01869824028015137, 0.018610176086425782, 0.018652544021606446, 0.01849337577819824, 0.018568992614746094, 0.01844112014770508, 0.01847500801086426, 0.018321407318115233, 0.018372608184814454, 0.018199712753295898, 0.018227903366088868, 0.018311328887939453, 0.018292415618896486, 0.018358591079711915, 0.018333696365356447, 0.018278047561645507, 0.018286943435668945, 0.01817705535888672, 0.019112672805786133, 0.018256128311157225, 0.018176000595092775, 0.018184192657470705, 0.01818828773498535, 0.018241535186767577, 0.018307071685791015, 0.018297887802124022, 0.018205631256103517, 0.01831324768066406, 0.01824492835998535, 0.01838969612121582, 0.018585599899291993, 0.018857120513916015, 0.018569536209106445, 0.019322944641113282, 0.018934431076049803, 0.018890687942504883, 0.01895359992980957, 0.01861305618286133, 0.01862246322631836, 0.0186060791015625, 0.018531871795654298, 0.018629087448120116, 0.01872627258300781, 0.018496128082275392, 0.01827840042114258, 0.01819443130493164, 0.01820627212524414, 0.01823708724975586, 0.018292543411254882, 0.01826019287109375, 0.018227968215942383, 0.01824563217163086, 0.018364416122436524, 0.01849692726135254, 0.018221248626708986, 0.018227615356445313, 0.018179807662963867, 0.01819267272949219, 0.018374656677246092, 0.01816511917114258, 0.01817580795288086, 0.01925926399230957, 0.018795455932617187, 0.018366464614868162, 0.01824563217163086, 0.018222463607788085, 0.018395776748657226, 0.01863987159729004, 0.018626943588256836, 0.018446048736572265, 0.018248031616210938, 0.018293312072753906, 0.018266111373901366, 0.01821286392211914, 0.01823744010925293, 0.018206720352172853, 0.018245248794555663, 0.018243392944335936, 0.018255456924438477, 0.01824048042297363, 0.018238943099975587, 0.018432064056396483, 0.018684383392333984, 0.018626560211181642, 0.018699615478515626, 0.01858627128601074, 0.018740480422973632, 0.018811647415161132, 0.018423519134521484, 0.018470687866210936, 0.018878271102905273, 0.018670272827148438, 0.018571264266967775, 0.01866547203063965, 0.01860812759399414, 0.01856716728210449, 0.018688032150268555, 0.01849625587463379, 0.018296831130981444, 0.018396480560302735, 0.018326208114624022, 0.01844633674621582, 0.018440191268920898, 0.018563072204589845, 0.01859350395202637, 0.018796031951904296, 0.0187619514465332, 
0.018778528213500977, 0.018679967880249025, 0.018585599899291993, 0.018710527420043945, 0.018700288772583007, 0.018696191787719727, 0.0187064323425293, 0.018702335357666015, 0.01863270378112793, 0.018550783157348632, 0.018366464614868162, 0.018298879623413086, 0.018351808547973632, 0.01828895950317383, 0.018378751754760742, 0.018386335372924806, 0.018390880584716798, 0.01827097511291504, 0.018264064788818358, 0.01821900749206543, 0.018364416122436524, 0.018173280715942382, 0.01822127914428711, 0.019513792037963867, 0.01923686408996582, 0.02043289566040039, 0.018341888427734376, 0.01844985580444336, 0.018244159698486327, 0.018259967803955078, 0.01830259132385254, 0.018448768615722658, 0.01860812759399414, 0.018612064361572266, 0.018614431381225587, 0.01847500801086426, 0.018556928634643553, 0.018413568496704103, 0.018534400939941405, 0.01861222457885742, 0.018572799682617186, 0.018601760864257813, 0.01863542366027832, 0.018602048873901367, 0.01862838363647461, 0.018539775848388673, 0.019479520797729494, 0.019959583282470703, 0.01963849639892578, 0.018615711212158204, 0.018618560791015624, 0.01846067237854004, 0.019082368850708006, 0.018760320663452148, 0.0186711368560791, 0.018461408615112303, 0.018311168670654295, 0.01812396812438965, 0.018264896392822267, 0.018091167449951172, 0.018124736785888673, 0.0181144962310791, 0.018161983489990235, 0.018117279052734376, 0.018317312240600587, 0.018159616470336915, 0.018226240158081056, 0.018422719955444335, 0.018406816482543945, 0.018415327072143554, 0.018403648376464844, 0.018301504135131836, 0.01835379219055176, 0.018450592041015627, 0.018595872879028322, 0.018626752853393554, 0.018593791961669923, 0.018625791549682618, 0.018851839065551757, 0.0187194881439209, 0.018753536224365236, 0.018695167541503906, 0.01877017593383789, 0.018688127517700194, 0.01867782402038574, 0.01848556709289551, 0.018323711395263672, 0.018176000595092775, 0.01820262336730957, 0.018225151062011717, 0.018356224060058594, 0.018593568801879883, 0.0185118408203125, 0.01846249580383301, 0.01842019271850586, 0.01847500801086426, 0.018372159957885742, 0.018358720779418945, 0.018382848739624022, 0.01836345672607422, 0.018424768447875977, 0.018388992309570314, 0.018183935165405275, 0.018139392852783202, 0.01815545654296875, 0.018116512298583985, 0.018157440185546873, 0.01858905601501465, 0.018858911514282227, 0.019281919479370118, 0.01862144088745117, 0.018465503692626953, 0.018333152770996095, 0.018385663986206054, 0.01827027130126953, 0.018289600372314453, 0.018214656829833985, 0.018172096252441407, 0.01815488052368164, 0.018143552780151367, 0.01816204833984375, 0.018145280838012694, 0.018114559173583983, 0.018114559173583983, 0.01839289665222168, 0.01820044708251953, 0.01809436798095703, 0.018225183486938478, 0.018241535186767577, 0.018241535186767577, 0.018122560501098634, 0.018122943878173828, 0.018157567977905274, 0.01817804718017578, 0.018157472610473634, 0.018139232635498048, 0.018184192657470705, 0.01804083251953125, 0.018124704360961915, 0.01807574462890625, 0.018128480911254883, 0.01808393669128418, 0.018114879608154298, 0.018120704650878908, 0.018085887908935547, 0.018092031478881835, 0.018106367111206053, 0.018075647354125975, 0.018116607666015624, 0.018124799728393554, 0.018130495071411134, 0.018135360717773438, 0.018090112686157227, 0.018126848220825196, 0.018138656616210936, 0.018115039825439452, 0.018141183853149414, 0.018210304260253905, 0.0181847038269043, 0.018495487213134765, 0.018826847076416017, 0.01834009552001953, 0.018265472412109376, 
0.018320159912109377, 0.018357440948486327, 0.018331968307495117, 0.018343904495239257, 0.0184182071685791, 0.018777280807495116, 0.018401311874389648, 0.018483007431030273, 0.01852070426940918, 0.01862998390197754, 0.018477279663085936, 0.018423999786376953, 0.01845907211303711, 0.018311328887939453, 0.018307071685791015]",tokens/s,54.11733609198705,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 27896 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,6725.931008,7525.564416,0.0,7147.094016,7138.9184,s,1,11.535142578125,11.535142578125,0.0,11.535142578125,11.535142578125,11.535142578125,11.535142578125,[11.535142578125],,kWh,0.00013089681240415605,1.4431528435576988e-05,4.277336755201255e-05,0.00018810170839174558,,MB,1598.083072,8236.498944,0.0,7828.668416,7715.649536,s,10,6.331598999023438,0.6331598999023437,0.000872366389456193,0.6334921264648437,0.6339573791503906,0.6340320037841797,0.6340917034912109,"[0.631711181640625, 0.63167041015625, 0.6332913208007812, 0.632321533203125, 0.6339407958984375, 0.6335877075195312, 0.6339322509765625, 0.633640625, 0.6333965454101562, 0.6341066284179687]",tokens/s,404.32124655949394,kWh,1.8516487303124526e-05,2.0407386927067532e-06,1.2314159156876295e-05,3.287138515270757e-05,tokens/kWh,7787928.583195517,MB,1602.039808,8383.299584,0.0,7975.469056,7906.446848,s,10,28.94725219726563,2.8947252197265625,0.00898014543589322,2.8938502197265628,2.905096484375,2.9061799560546877,2.9070467333984373,"[2.88181298828125, 2.882487060546875, 2.885023681640625, 2.903577392578125, 2.892619384765625, 2.89383740234375, 2.893863037109375, 2.907263427734375, 2.904855712890625, 2.901912109375]",tokens/s,21.763723745064485,kWh,8.467853091104378e-05,9.341712162362736e-06,5.626167348152209e-05,0.0001502819165549286,tokens/kWh,419212.11443276523,,s,630,28.943798686981182,0.04594253759838286,0.0008122710189837785,0.045904142379760746,0.046646790695190433,0.046840979194641114,0.04782046039581299,"[0.04779507064819336, 0.04495065689086914, 0.04474095916748047, 0.044648990631103516, 0.04461363220214844, 0.04544483184814453, 0.04571932983398438, 0.045623809814453124, 0.044635326385498046, 0.04497836685180664, 0.04530611038208008, 0.04524070358276367, 0.04518502426147461, 0.045764606475830076, 0.04552246475219727, 
0.04531043243408203, 0.04498604965209961, 0.045447486877441406, 0.04543590545654297, 0.045308929443359375, 0.04561078262329102, 0.04629052734375, 0.04560345458984375, 0.04547100830078125, 0.045310558319091795, 0.0450847053527832, 0.04592601776123047, 0.04594313430786133, 0.0455865592956543, 0.04554265594482422, 0.04557635116577148, 0.04556246566772461, 0.04563731384277344, 0.045510433197021485, 0.04584694290161133, 0.045674625396728515, 0.045491294860839845, 0.04545820617675781, 0.04652864074707031, 0.046083648681640624, 0.04581577682495117, 0.04604095840454102, 0.04589014434814453, 0.04587478256225586, 0.04617871856689453, 0.04602265548706055, 0.04551804733276367, 0.045879329681396484, 0.0459881591796875, 0.046352832794189454, 0.04638105773925781, 0.046036991119384765, 0.04570111846923828, 0.04548591995239258, 0.04614364624023438, 0.04616396713256836, 0.04589126586914063, 0.04573215866088867, 0.04700899124145508, 0.04680374526977539, 0.04637900924682617, 0.04680499267578125, 0.04697660827636719, 0.04751283264160156, 0.04509552001953125, 0.044724384307861326, 0.04536099243164062, 0.04571123123168945, 0.04567068862915039, 0.04487782287597656, 0.044750846862792966, 0.04503551864624023, 0.045716545104980466, 0.04541718292236328, 0.045203678131103514, 0.04540620803833008, 0.045383678436279294, 0.04529520034790039, 0.04582345581054687, 0.04560550308227539, 0.045072193145751956, 0.04505241775512695, 0.04539801788330078, 0.045832191467285156, 0.04603257751464844, 0.04550073623657227, 0.045424320220947265, 0.04530176162719726, 0.04592572784423828, 0.04560688018798828, 0.04559564971923828, 0.04547174453735352, 0.04565375900268555, 0.045377792358398436, 0.04560281753540039, 0.04560591888427734, 0.04550118255615234, 0.045760353088378905, 0.04569331359863281, 0.04564761734008789, 0.04583449554443359, 0.04576665496826172, 0.04562739181518555, 0.046280288696289064, 0.04588380813598633, 0.04629708862304688, 0.04630636978149414, 0.04594905471801758, 0.04570745468139648, 0.04611468887329102, 0.04590822219848633, 0.045789569854736326, 0.04634636688232422, 0.04609638214111328, 0.045954719543457034, 0.04582640075683594, 0.0462044792175293, 0.04603731155395508, 0.04564086532592773, 0.04605436706542969, 0.04653875350952148, 0.046241790771484374, 0.046102527618408204, 0.047126529693603515, 0.04661990356445313, 0.04629142379760742, 0.0469365119934082, 0.04492710494995117, 0.045400257110595706, 0.044738494873046875, 0.045190654754638675, 0.045195423126220706, 0.044863903045654296, 0.04542867279052734, 0.045219905853271486, 0.045385726928710936, 0.04572774505615235, 0.04555980682373047, 0.045279232025146485, 0.04565305709838867, 0.04584864044189453, 0.045523841857910155, 0.04516864013671875, 0.04538777542114258, 0.04541571044921875, 0.04523491287231445, 0.045946880340576174, 0.045889537811279295, 0.04571891021728516, 0.04554608154296875, 0.04526470565795898, 0.04549363327026367, 0.045257568359375, 0.045502464294433595, 0.04530585479736328, 0.04553932952880859, 0.04561305618286133, 0.04606771087646484, 0.045674015045166015, 0.04572208023071289, 0.04570019149780274, 0.04615852737426758, 0.04617647933959961, 0.0457400016784668, 0.04542367935180664, 0.04586979293823242, 0.046695777893066406, 0.04634425735473633, 0.046005088806152346, 0.046288257598876954, 0.04603500747680664, 0.04562182235717773, 0.046022529602050784, 0.04635865783691406, 0.04627046585083008, 0.04559872055053711, 0.04593164825439453, 0.046010398864746095, 0.04637900924682617, 0.04613820648193359, 0.046053375244140625, 0.04658560180664063, 
0.04616729736328125, 0.04679782485961914, 0.04629043197631836, 0.04611695861816406, 0.045967105865478516, 0.04664582443237305, 0.0467147216796875, 0.0480371208190918, 0.045766494750976563, 0.04476515197753906, 0.04467193603515625, 0.0451945915222168, 0.04557244873046875, 0.04486076736450195, 0.044915679931640626, 0.045246463775634765, 0.045674495697021485, 0.0451932144165039, 0.045250144958496094, 0.04553548812866211, 0.045531295776367185, 0.04541644668579101, 0.0452935676574707, 0.045767807006835935, 0.04546780776977539, 0.04553945541381836, 0.045879295349121094, 0.046580318450927735, 0.04642566299438477, 0.04596371078491211, 0.04570217514038086, 0.045888481140136717, 0.04579641723632812, 0.045560768127441406, 0.04616518402099609, 0.045361408233642576, 0.04524499130249023, 0.04569702529907226, 0.045610847473144533, 0.04593679809570313, 0.04614080047607422, 0.0601032943725586, 0.043550559997558594, 0.046217376708984376, 0.04608204650878906, 0.045352958679199216, 0.04581990432739258, 0.04622732925415039, 0.04603635025024414, 0.0459496955871582, 0.04637081527709961, 0.04632524871826172, 0.04550092697143555, 0.0459791374206543, 0.04686489486694336, 0.04633139038085937, 0.045978111267089845, 0.045744129180908207, 0.045559551239013674, 0.045787391662597654, 0.04749027252197266, 0.04756969451904297, 0.046781566619873045, 0.0460890884399414, 0.04625727844238281, 0.04615647888183594, 0.045840576171875, 0.04602675247192383, 0.04712783813476563, 0.04647190475463867, 0.04791827011108398, 0.045677120208740235, 0.04518096160888672, 0.04485116958618164, 0.044507102966308595, 0.04514188766479492, 0.04514604949951172, 0.044902111053466795, 0.04547222518920899, 0.04569731140136719, 0.04545036697387695, 0.045276031494140626, 0.04511059188842773, 0.04592291259765625, 0.04616767883300781, 0.04549587249755859, 0.04497273635864258, 0.045338497161865235, 0.0458583984375, 0.045832286834716796, 0.04587382507324219, 0.0465797119140625, 0.04643132781982422, 0.04597443389892578, 0.045690879821777344, 0.04551065444946289, 0.045162494659423826, 0.04541753768920898, 0.04553414535522461, 0.045794559478759767, 0.045384449005126955, 0.04561056137084961, 0.04567599868774414, 0.04538876724243164, 0.04669356918334961, 0.04687545776367188, 0.04635846328735352, 0.045829566955566406, 0.04556864166259766, 0.04626432037353516, 0.046284801483154295, 0.04603903961181641, 0.04628416061401367, 0.0460865592956543, 0.04594710540771484, 0.046057441711425784, 0.04607574462890625, 0.04594243240356445, 0.04610307312011719, 0.04622467041015625, 0.04594761657714844, 0.04595238494873047, 0.045695358276367185, 0.04625433731079102, 0.046655487060546875, 0.04665958404541016, 0.0468927993774414, 0.04635263824462891, 0.04619216156005859, 0.046766368865966794, 0.04681894302368164, 0.04683740615844727, 0.04667596817016602, 0.047826816558837894, 0.045377662658691406, 0.04483071899414062, 0.04474060821533203, 0.04549155044555664, 0.04563011169433594, 0.04533225631713867, 0.0451519660949707, 0.045375999450683595, 0.04577062225341797, 0.04544729614257813, 0.045055999755859374, 0.045129726409912106, 0.045742080688476565, 0.04548198318481445, 0.04510515213012695, 0.04518064117431641, 0.045900062561035154, 0.046004222869873046, 0.045774078369140624, 0.04580537414550781, 0.046119873046875, 0.045723648071289064, 0.04525247955322265, 0.045936767578125, 0.046045185089111325, 0.045742080688476565, 0.04556083297729492, 0.04570412826538086, 0.04579904174804687, 0.04625174331665039, 0.045779678344726564, 0.046285888671875, 0.046177215576171875, 0.04584447860717773, 
0.046601951599121096, 0.0464161605834961, 0.04607302474975586, 0.04582252883911133, 0.04626252746582031, 0.046295040130615236, 0.045886558532714845, 0.045894561767578126, 0.046300224304199215, 0.04595404815673828, 0.04647724914550781, 0.046473217010498044, 0.04614105606079102, 0.04609471893310547, 0.04596460723876953, 0.04615766525268555, 0.046041950225830075, 0.04597760009765625, 0.04656947326660156, 0.04622883224487305, 0.04616016006469727, 0.04670460891723633, 0.04663132858276367, 0.04628044891357422, 0.046323966979980466, 0.046534366607666015, 0.04656883239746094, 0.046277534484863284, 0.04782675170898437, 0.045475967407226564, 0.04513859176635742, 0.044875072479248046, 0.045173439025878906, 0.04520131301879883, 0.044918495178222655, 0.04545574569702148, 0.045271041870117185, 0.04579894256591797, 0.045725406646728514, 0.04535372924804688, 0.04532633590698242, 0.045164192199707034, 0.04607132720947266, 0.045857601165771485, 0.045606910705566404, 0.045426559448242185, 0.045553279876708985, 0.04565827178955078, 0.04685654449462891, 0.04646892929077148, 0.045611198425292966, 0.04548812866210938, 0.045369342803955076, 0.04565523147583008, 0.04548649597167969, 0.045311870574951174, 0.045992481231689454, 0.04616787338256836, 0.045752513885498045, 0.046077953338623044, 0.04580966567993164, 0.04545881652832031, 0.04613798522949219, 0.046176254272460936, 0.04595507049560547, 0.045970752716064454, 0.04593529510498047, 0.045870174407958986, 0.04681820678710937, 0.04683283233642578, 0.046401409149169924, 0.046040000915527346, 0.04615167999267578, 0.04639334487915039, 0.04592230224609375, 0.04557942581176758, 0.04618735885620117, 0.046120960235595705, 0.04602470397949219, 0.04584243011474609, 0.046470462799072264, 0.04632403182983398, 0.04638105773925781, 0.04637120056152344, 0.046677440643310544, 0.046489246368408205, 0.04617718505859375, 0.046415870666503906, 0.04664115142822266, 0.04647731018066406, 0.04635238265991211, 0.04909372711181641, 0.04595939254760742, 0.04487443161010742, 0.04487760162353516, 0.04535932922363281, 0.04547174453735352, 0.04543283081054687, 0.04562739181518555, 0.045879295349121094, 0.045743839263916015, 0.0453798713684082, 0.04621305465698242, 0.045975231170654295, 0.04574860763549805, 0.045303104400634765, 0.045440799713134764, 0.04556832122802734, 0.04560713577270508, 0.045695358276367185, 0.04601036834716797, 0.04650188827514649, 0.046458881378173826, 0.04613119888305664, 0.04611270523071289, 0.0460513916015625, 0.04578508758544922, 0.04567244720458984, 0.045559070587158204, 0.046245761871337894, 0.046067745208740234, 0.045873985290527344, 0.04598169708251953, 0.046532608032226565, 0.04698726272583008, 0.046405216217041016, 0.04554959869384766, 0.04633990478515625, 0.046037567138671874, 0.04574003219604492, 0.04636262512207031, 0.04674063873291016, 0.04660105514526367, 0.04617555236816406, 0.04656758499145508, 0.046684703826904296, 0.046118911743164064, 0.04612860870361328, 0.04597119903564453, 0.04627523040771484, 0.04663292694091797, 0.04637507247924805, 0.046243839263916016, 0.04593868637084961, 0.046663585662841796, 0.046889057159423826, 0.04638876724243164, 0.046449119567871094, 0.046843902587890625, 0.04636671829223633, 0.04634624099731445, 0.04698316955566406, 0.046851776123046876, 0.046940479278564456, 0.04780505752563476, 0.04603500747680664, 0.045502689361572264, 0.04520755386352539, 0.045146110534667966, 0.04553292846679687, 0.04529324722290039, 0.04535932922363281, 0.045467201232910155, 0.04618729782104492, 0.045746112823486326, 0.04572576141357422, 
0.04526694488525391, 0.04597350311279297, 0.04572063827514648, 0.04534697723388672, 0.04557907104492188, 0.04553110504150391, 0.0456888313293457, 0.04611686325073242, 0.046427776336669925, 0.046524158477783205, 0.04600640106201172, 0.04602316665649414, 0.04599193572998047, 0.04566835021972656, 0.04549017715454102, 0.046071807861328126, 0.04581171035766601, 0.04573798370361328, 0.04595097732543945, 0.04645843124389649, 0.04629318237304687, 0.045937950134277344, 0.046240734100341796, 0.04601446533203125, 0.04586659240722656, 0.04574796676635742, 0.045951648712158205, 0.04639744186401367, 0.04642406463623047, 0.0466165771484375, 0.04716953659057617, 0.0469769287109375, 0.0461082878112793, 0.04616236877441406, 0.04612489700317383, 0.046740993499755856, 0.046451393127441405, 0.046266368865966793, 0.04608992004394531, 0.04666336059570313, 0.04665769577026367, 0.046251903533935546, 0.046400096893310545, 0.04676192092895508, 0.046370880126953125, 0.046005569458007815, 0.04671891021728516, 0.04682828903198242, 0.04637062454223633, 0.046766143798828125, 0.04664332962036133, 0.048355262756347654, 0.04548409652709961, 0.044856895446777345, 0.045179039001464846, 0.04527715301513672, 0.04579564666748047, 0.045202751159667966, 0.04543353652954102, 0.04569497680664063, 0.04551200103759766, 0.045808319091796876, 0.04577859115600586, 0.045529441833496095, 0.04597750473022461, 0.04562134552001953, 0.045848575592041016, 0.04558438491821289, 0.045271041870117185, 0.04589977645874024, 0.04594483184814453, 0.04648953628540039, 0.04611884689331055, 0.04582003021240234, 0.04572137451171875, 0.045725887298583984, 0.04607136154174805, 0.046136993408203125, 0.0458691520690918, 0.04565260696411133, 0.04606780624389648, 0.04612505722045898, 0.045623294830322264, 0.046102527618408204, 0.046682113647460936, 0.04630323028564453, 0.045444511413574216, 0.045945438385009765, 0.04623878479003906, 0.04595536041259766, 0.046137439727783204, 0.047042686462402346, 0.04753395080566406, 0.046740032196044924, 0.04611196899414063, 0.04574288177490234, 0.04599763107299805, 0.046419841766357425, 0.046072383880615235, 0.04630515289306641, 0.046408958435058593, 0.04662486267089844, 0.04611692810058594, 0.04612579345703125, 0.04639324951171875, 0.04668127822875977, 0.0459571533203125, 0.04610342407226563, 0.0468592643737793, 0.046682174682617185, 0.04637382507324219, 0.04614144134521484, 0.046244895935058594, 0.04663510513305664]",tokens/s,21.76632054462745,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) 
File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,8184.229888,11080.237056,0.0,10701.766656,10468.923392,s,1,13.624958984375,13.624958984375,0.0,13.624958984375,13.624958984375,13.624958984375,13.624958984375,[13.624958984375],,kWh,0.00018848511120831972,2.078398916371034e-05,6.125671567197566e-05,0.00027052581604400573,,MB,3761.123328,11596.136448,0.0,11188.30592,10924.281856,s,10,7.1939533081054705,0.7193953308105469,0.0032105422254021195,0.7190292968750001,0.7231811035156249,0.7239746459960937,0.7246094799804688,"[0.7137408447265625, 0.7159876098632812, 0.7186414794921875, 0.71795703125, 0.7194171142578125, 0.7179682006835938, 0.719479248046875, 0.7230047607421874, 0.7247681884765625, 0.7229888305664063]",tokens/s,355.85440860668825,kWh,2.1005317179166615e-05,2.3152727821531963e-06,1.3990467541568391e-05,3.73110575028882e-05,tokens/kWh,6861236.778941025,MB,3761.123328,11598.2336,0.0,11190.403072,10924.284416,s,10,33.78362280273438,3.378362280273438,0.005775475230882454,3.3771402587890624,3.38703984375,3.387946435546875,3.388671708984375,"[3.37131494140625, 3.374577392578125, 3.373578857421875, 3.3742099609375, 3.372783447265625, 3.379703125, 3.38683837890625, 3.3818642578125, 3.3798994140625, 3.38885302734375]",tokens/s,18.64808885887185,kWh,9.891497501708878e-05,1.0910171633788415e-05,6.56877350898374e-05,0.00017551288174071458,tokens/kWh,358948.0121069973,,s,630,33.78057100677492,0.05361995397900779,0.0008795463550417683,0.053510686874389646,0.054382683181762696,0.054890793991088864,0.058187379913330076,"[0.05852979278564453, 0.05375174331665039, 0.052815937042236326, 0.053262016296386716, 0.05295135879516601, 0.052695072174072266, 0.05292192077636719, 0.05316159820556641, 0.052429214477539066, 0.05278348922729492, 0.052631553649902345, 0.052779006958007815, 0.05315980911254883, 0.05269926452636719, 0.052836353302001954, 0.052799518585205076, 0.053028129577636716, 0.05453689575195313, 0.05502153778076172, 0.05337478256225586, 0.053399776458740236, 0.053217281341552736, 0.052968734741210936, 0.0535497932434082, 0.05337443161010742, 0.05319705581665039, 0.053073566436767576, 0.05286361694335937, 0.052983680725097654, 0.0534749755859375, 0.05310512161254883, 0.053489505767822264, 0.053442718505859375, 0.053101726531982425, 0.054336353302001955, 0.05441948699951172, 0.053721057891845704, 0.054401023864746094, 0.053953536987304686, 0.053082271575927736, 0.05381792068481445, 0.05364559936523437, 0.05308620834350586, 0.053657535552978516, 0.05322963333129883, 0.053093631744384764, 0.053319873809814455, 0.0535107192993164, 0.05328076934814453, 0.05341798400878906, 0.05313740921020508, 0.05333782577514649, 0.05428047943115234, 0.05423513412475586, 0.05366556930541992, 0.054241310119628905, 0.05418963241577148, 0.053674625396728515, 0.05365526580810547, 
0.053418270111083986, 0.05428224182128906, 0.053938175201416014, 0.05352243041992188, 0.05855366516113281, 0.05341891098022461, 0.05236684799194336, 0.05272627258300781, 0.0525814094543457, 0.05275875091552734, 0.05267718505859375, 0.05213782501220703, 0.05274454498291015, 0.05313315200805664, 0.05254159927368164, 0.053856254577636715, 0.05335647964477539, 0.05311494445800781, 0.053286369323730466, 0.053236255645751955, 0.05351830291748047, 0.055011009216308596, 0.05365932846069336, 0.05405148696899414, 0.0536220817565918, 0.052472511291503904, 0.05341798400878906, 0.053265567779541015, 0.05269347381591797, 0.052901535034179686, 0.05260355377197266, 0.053316864013671875, 0.05352841567993164, 0.05401699066162109, 0.05358796691894531, 0.053526527404785154, 0.053053279876708985, 0.054020256042480466, 0.05431897735595703, 0.05511999893188477, 0.05434774398803711, 0.05424335861206055, 0.053938175201416014, 0.05300617599487305, 0.053409950256347656, 0.05345280075073242, 0.053017921447753906, 0.05298988723754883, 0.05306867218017578, 0.053653377532958985, 0.053678081512451174, 0.053438720703125, 0.05385776138305664, 0.05369680023193359, 0.05338636779785156, 0.05387142562866211, 0.05435782241821289, 0.05431315231323242, 0.05453420639038086, 0.0539156494140625, 0.05380681610107422, 0.05349814224243164, 0.05382940673828125, 0.05356351852416992, 0.05434729766845703, 0.05338336181640625, 0.053404033660888674, 0.058878143310546874, 0.0534466552734375, 0.05230112075805664, 0.052619552612304686, 0.05253142547607422, 0.05233667373657227, 0.053123233795166015, 0.05260012817382813, 0.05261587142944336, 0.052703231811523435, 0.052794689178466796, 0.053279296875, 0.05377036666870117, 0.05356329727172852, 0.0532050895690918, 0.053433345794677733, 0.054096927642822264, 0.054876129150390626, 0.05483625411987305, 0.053421024322509766, 0.0534365119934082, 0.0529889907836914, 0.053033824920654296, 0.05308528137207031, 0.05338393783569336, 0.05300588989257812, 0.052951648712158204, 0.05299369430541992, 0.05325993728637695, 0.05333647918701172, 0.05329135894775391, 0.05345004653930664, 0.05384041595458984, 0.053346176147460934, 0.053916927337646484, 0.054897087097167965, 0.054067745208740234, 0.05425356674194336, 0.05359427261352539, 0.05310038375854492, 0.05337235260009766, 0.05325267028808594, 0.05290105438232422, 0.05312527847290039, 0.05351065444946289, 0.05305155181884766, 0.05349740982055664, 0.05336489486694336, 0.05323519897460938, 0.05449100875854492, 0.05405129623413086, 0.05355155181884766, 0.054281566619873045, 0.05514473724365234, 0.05439731216430664, 0.05422079849243164, 0.05363663864135742, 0.053141983032226565, 0.05359206390380859, 0.05346928024291992, 0.05306972885131836, 0.05359622573852539, 0.05358774566650391, 0.05815836715698242, 0.05343920135498047, 0.05255299377441406, 0.052900577545166014, 0.05281587219238281, 0.052244094848632815, 0.05264169692993164, 0.05262384033203125, 0.0521973762512207, 0.053053024291992185, 0.05296140670776367, 0.053287200927734375, 0.05372723388671875, 0.05352985763549805, 0.054080257415771486, 0.05379072189331055, 0.05301971054077149, 0.05484844970703125, 0.05471846389770508, 0.05357158279418945, 0.05395455932617187, 0.053376705169677734, 0.0528919677734375, 0.05309846496582031, 0.05238582229614258, 0.05324185562133789, 0.052915775299072265, 0.05280575942993164, 0.05324627304077149, 0.05337686538696289, 0.053338272094726566, 0.0535838737487793, 0.05390681457519531, 0.05380774307250977, 0.05502361679077149, 0.05442502212524414, 0.05351891326904297, 
0.053835777282714846, 0.05415935897827148, 0.053465087890625, 0.05338889694213867, 0.05296169662475586, 0.05301248168945313, 0.05294204711914063, 0.053142303466796874, 0.053526527404785154, 0.05405462265014648, 0.053526081085205075, 0.053359264373779296, 0.05363513565063477, 0.05407129669189453, 0.05362073516845703, 0.05422284698486328, 0.0542105598449707, 0.053739105224609375, 0.0549031982421875, 0.05432851028442383, 0.05377312088012695, 0.05324800109863281, 0.05362895965576172, 0.05325791931152344, 0.05323190307617188, 0.05363097763061524, 0.05791929626464844, 0.05348448181152344, 0.05300156784057617, 0.05262790298461914, 0.05257033538818359, 0.052039745330810544, 0.052592575073242186, 0.052959232330322265, 0.05227724838256836, 0.0526192626953125, 0.052678398132324215, 0.05326054382324219, 0.05372313690185547, 0.05350400161743164, 0.05343436813354492, 0.053956607818603515, 0.05410598373413086, 0.05449123382568359, 0.05499878311157227, 0.05396828842163086, 0.05338159942626953, 0.05321564865112305, 0.05256937789916992, 0.053176864624023434, 0.05300857543945312, 0.052784191131591794, 0.052911041259765625, 0.05330940628051758, 0.05285481643676758, 0.05334630584716797, 0.05332582473754883, 0.05326412963867187, 0.053932289123535156, 0.05341299057006836, 0.053942977905273436, 0.054114398956298826, 0.05410620880126953, 0.05396275329589844, 0.05375747299194336, 0.053735904693603516, 0.053354496002197264, 0.05325823974609375, 0.052994049072265625, 0.0530063362121582, 0.053440513610839846, 0.0531599349975586, 0.05341798400878906, 0.053405696868896485, 0.053106689453125, 0.053776382446289066, 0.05411542510986328, 0.05354595184326172, 0.05388076782226563, 0.054269248962402344, 0.05432572937011719, 0.05412681579589844, 0.0538309440612793, 0.05476220703125, 0.05358182525634766, 0.05354620742797851, 0.05313411331176758, 0.05427308654785156, 0.05387760162353516, 0.059109310150146484, 0.05351081466674805, 0.05310844802856445, 0.0527158088684082, 0.05307731246948242, 0.05214278411865234, 0.052631553649902345, 0.05255478286743164, 0.05241955184936523, 0.053343616485595706, 0.05266886520385742, 0.05370694351196289, 0.053337631225585935, 0.0531190071105957, 0.05376822280883789, 0.053518753051757816, 0.05351424026489258, 0.05533875274658203, 0.05542486572265625, 0.054604190826416016, 0.05353420639038086, 0.05328332901000977, 0.052876350402832034, 0.052853694915771486, 0.05273395156860351, 0.05250371170043945, 0.053281280517578126, 0.05320044708251953, 0.05322991943359375, 0.05392428970336914, 0.05354905700683594, 0.053037086486816404, 0.05392585754394531, 0.05394761657714844, 0.05394515228271484, 0.0545167350769043, 0.05504508972167969, 0.05408358383178711, 0.05352447891235351, 0.053370174407958985, 0.05345939254760742, 0.053399520874023436, 0.05303734588623047, 0.0534870719909668, 0.05308265686035156, 0.05291417694091797, 0.053166080474853515, 0.053628158569335935, 0.05340441513061524, 0.05358796691894531, 0.05382758331298828, 0.05382758331298828, 0.05436345672607422, 0.054460990905761716, 0.054628288269042966, 0.05535353469848633, 0.05384806442260742, 0.05399552154541016, 0.053851871490478515, 0.053494049072265626, 0.053679710388183595, 0.05323984146118164, 0.0536657600402832, 0.058199230194091796, 0.05353055953979492, 0.05304204940795899, 0.05260697555541992, 0.052763809204101564, 0.0526032943725586, 0.05305916976928711, 0.05294780731201172, 0.05227724838256836, 0.052776958465576174, 0.053104190826416015, 0.05322774505615235, 0.05361891174316406, 0.05391360092163086, 0.05398739242553711, 
0.05386848068237305, 0.05415446472167969, 0.054141727447509766, 0.05487206268310547, 0.05403622436523438, 0.052934913635253905, 0.05329919815063477, 0.05325619125366211, 0.05276633453369141, 0.053014911651611325, 0.05358339309692383, 0.05341136169433594, 0.05299065780639649, 0.05303526306152344, 0.05394598388671875, 0.05393008041381836, 0.05346732711791992, 0.05416707229614258, 0.05420707321166992, 0.05500707244873047, 0.054376575469970705, 0.05428224182128906, 0.05381324768066406, 0.053741600036621096, 0.0542589111328125, 0.05400758361816406, 0.053275646209716795, 0.05359408187866211, 0.05354905700683594, 0.05324710464477539, 0.05369945526123047, 0.05353059387207031, 0.05353254318237305, 0.05379087829589844, 0.05360595321655273, 0.05406355285644531, 0.054639713287353515, 0.05440105438232422, 0.054415519714355466, 0.05456560134887695, 0.05499699020385742, 0.053690433502197266, 0.054298561096191404, 0.05383708953857422, 0.0537033920288086, 0.054296577453613284, 0.05394188690185547, 0.05366412734985351, 0.058063232421875, 0.05331763076782227, 0.05286092758178711, 0.053049343109130856, 0.05269270324707031, 0.05258054351806641, 0.0530228157043457, 0.052948928833007815, 0.05252921676635742, 0.05249587249755859, 0.05313792037963867, 0.05369241714477539, 0.053749759674072264, 0.053749183654785156, 0.053201473236083985, 0.05401190567016602, 0.053835777282714846, 0.054846622467041015, 0.05497494506835938, 0.05412803268432617, 0.05325104141235352, 0.05380300903320313, 0.05343436813354492, 0.05279743957519531, 0.053443904876708984, 0.053091007232666014, 0.05269504165649414, 0.05326438522338867, 0.05288937759399414, 0.0533260498046875, 0.053988544464111325, 0.05351059341430664, 0.05354095840454102, 0.05394870376586914, 0.053718208312988285, 0.05450425720214844, 0.055070240020751955, 0.054303199768066406, 0.054040576934814455, 0.05341958236694336, 0.0534266242980957, 0.05344870376586914, 0.053510143280029294, 0.05348508834838867, 0.053488094329833986, 0.05337071990966797, 0.053045406341552734, 0.05400985717773438, 0.05395455932617187, 0.053508129119873044, 0.053517631530761715, 0.054506145477294925, 0.05460172653198242, 0.05431500625610351, 0.054379680633544925, 0.054903648376464845, 0.05364038467407226, 0.05357241439819336, 0.05325209426879883, 0.0541921272277832, 0.05374156951904297, 0.05361663818359375, 0.053190208435058596, 0.05868153762817383, 0.05326847839355469, 0.0526561279296875, 0.052706302642822264, 0.052854881286621094, 0.05266934585571289, 0.0526965446472168, 0.05307651138305664, 0.05269417572021484, 0.053065631866455076, 0.05317504119873047, 0.05369590377807617, 0.05362489700317383, 0.053410526275634765, 0.05406719970703125, 0.05412659072875976, 0.05361449432373047, 0.05486396789550781, 0.05413273620605469, 0.05371903991699219, 0.05334220886230469, 0.053204544067382814, 0.05276716613769531, 0.052654079437255856, 0.05338252639770508, 0.052904384613037106, 0.05319494247436524, 0.05320908737182617, 0.05298588943481446, 0.053915073394775394, 0.05389081573486328, 0.053956607818603515, 0.05488310241699219, 0.05431283187866211, 0.05366796875, 0.0542138557434082, 0.054032833099365234, 0.05354703903198242, 0.054272319793701174, 0.054013950347900394, 0.05325414276123047, 0.05343231964111328, 0.05340726470947266, 0.05287974548339844, 0.05347132873535156, 0.05343958282470703, 0.053185440063476565, 0.053731040954589845, 0.05356777572631836, 0.053611808776855466, 0.054137569427490234, 0.05404828643798828, 0.05376252746582031, 0.055103488922119144, 0.05453209686279297, 0.05363711929321289, 
0.05423276901245117, 0.053778751373291016, 0.0531517448425293, 0.05371680068969727, 0.05360614395141602, 0.05313753509521484, 0.05364358520507812, 0.05971004867553711, 0.05364640045166016, 0.053104705810546875, 0.05276950454711914, 0.05255593490600586, 0.053079425811767576, 0.053241790771484374, 0.05271993637084961, 0.05284688186645508, 0.0527176628112793, 0.05285823822021484, 0.05351897430419922, 0.05378051376342773, 0.05355926513671875, 0.05422694396972656, 0.05380815887451172, 0.05355324935913086, 0.055180160522460935, 0.05505023956298828, 0.05416960144042969, 0.05358796691894531, 0.05344655990600586, 0.05317846298217774, 0.05341593551635742, 0.05334364700317383, 0.05358796691894531, 0.05305615997314453, 0.05288748931884766, 0.05343231964111328, 0.053507102966308596, 0.053730270385742185, 0.05423513412475586, 0.054040576934814455, 0.053610496520996094, 0.05430681610107422, 0.05443916702270508, 0.054136737823486325, 0.054647296905517576, 0.05401430511474609, 0.053661697387695315, 0.05292819213867188, 0.05342444610595703, 0.053515296936035156, 0.05364835357666015, 0.05352447891235351, 0.05315379333496094, 0.05410172653198242, 0.05359439849853516, 0.05410819244384766, 0.053579742431640626, 0.05389267349243164, 0.05517475128173828, 0.053863262176513674, 0.05396275329589844, 0.055100513458251954, 0.05372406387329102, 0.054335487365722655, 0.05394563293457031, 0.053752513885498045, 0.05343356704711914, 0.05438105773925781, 0.05357395172119141, 0.05347635269165039]",tokens/s,18.649773559885933,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File 
""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch 
raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, 
worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = 
Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4925.566976,7227.768832,0.0,6849.298432,6445.09696,s,1,11.7717197265625,11.7717197265625,0.0,11.7717197265625,11.7717197265625,11.7717197265625,11.7717197265625,[11.7717197265625],,kWh,0.00012844015038757182,1.4160420944724027e-05,4.179503343604285e-05,0.0001843956047683387,,MB,2807.660544,7559.118848,0.0,7151.28832,6823.319552,s,10,3.5995702514648436,0.35995702514648437,0.002581422794684266,0.3598522186279297,0.3623224304199219,0.3633622039794922,0.36419402282714847,"[0.3542107238769531, 0.35824066162109375, 0.3620205078125, 0.3598674011230469, 0.35879513549804687, 0.3598370361328125, 0.35945697021484374, 0.3606484680175781, 0.3644019775390625, 0.36209136962890626]",tokens/s,711.1960098454,kWh,1.0511655587500475e-05,1.1586021996240848e-06,6.9803726477873126e-06,1.865063043491187e-05,tokens/kWh,13726077.565764047,MB,2807.660544,7561.216,0.0,7153.385472,6823.322112,s,10,20.433365112304685,2.0433365112304687,0.004517455761130949,2.0419090576171874,2.048548193359375,2.050531665039063,2.0521184423828127,"[2.05251513671875, 2.0403682861328125, 2.048107421875, 2.042965576171875, 2.0398515625, 2.0364364013671876, 2.0408525390625, 2.040412841796875, 2.0455999755859375, 2.04625537109375]",tokens/s,30.83192594746045,kWh,5.879838770042018e-05,6.4856176940007134e-06,3.89495252072087e-05,0.0001042335306016296,tokens/kWh,604412.0316789409,,s,630,20.430577440261853,0.032429488000415625,0.0006968669825549724,0.03233548927307128,0.03280437545776367,0.03304746971130371,0.03600647537231446,"[0.036607009887695316, 0.032174240112304686, 0.032224319458007813, 0.0319997444152832, 0.032126976013183595, 0.03224518585205078, 0.03227500915527344, 0.032299263000488285, 0.03261209487915039, 0.03238092803955078, 0.03284787368774414, 0.032435585021972656, 0.032479873657226564, 0.03234201431274414, 0.0335948486328125, 0.03505411148071289, 0.032606208801269534, 0.032538047790527345, 0.03242448043823242, 0.03245654296875, 0.032601310729980466, 0.03282796859741211, 0.03274383926391602, 0.03280665588378906, 0.03261670303344726, 0.03242969512939453, 0.03254105758666992, 0.03280486297607422, 0.03261027145385742, 0.03277622222900391, 0.03271456146240234, 0.032529983520507816, 0.032586368560791015, 0.03246694564819336, 0.03301990509033203, 0.03256892776489258, 0.03291795349121094, 0.03252774429321289, 0.032526878356933596, 0.03242348861694336, 0.03256304168701172, 0.03563996887207031, 0.03259801483154297, 0.032210945129394535, 0.032167808532714844, 0.0321393928527832, 0.03196108818054199, 0.032059391021728514, 0.03157606315612793, 0.031784032821655275, 0.031814559936523434, 0.03196425628662109, 0.03212179183959961, 0.03237638473510742, 0.0327632942199707, 0.03217737579345703, 0.032167713165283204, 0.03213926315307617, 0.03203430557250977, 0.03232172775268555, 0.03233209609985352, 0.032487422943115234, 0.03207376098632812, 0.036987392425537106, 0.0325937614440918, 0.03215734481811523, 0.032131454467773435, 0.03226134490966797, 0.03240419387817383, 0.03233222579956055, 0.03251747131347656, 0.0323260498046875, 0.03258556747436524, 0.03258179092407226, 0.032529983520507816, 0.03251859283447266, 0.03247264099121094, 0.03268652725219726, 0.03263651275634766, 0.03271424102783203, 0.032594528198242184, 0.032723167419433596, 0.03248751831054687, 0.03254447937011719, 0.03479782485961914, 0.034224159240722654, 0.032589824676513675, 0.03255855941772461, 0.032271968841552735, 0.032640254974365235, 0.03255612945556641, 0.03243215942382813, 0.03211452865600586, 
0.03240419387817383, 0.032118209838867186, 0.03212131118774414, 0.03199180793762207, 0.03265897750854492, 0.03543443298339844, 0.03205593490600586, 0.03170236778259277, 0.03184255981445312, 0.03181404876708984, 0.03184134483337402, 0.03242899322509766, 0.03212083053588867, 0.0321798095703125, 0.03206796646118164, 0.03204716873168945, 0.03194630432128906, 0.03207004928588867, 0.03155267143249512, 0.03188617515563965, 0.03196723175048828, 0.03227852630615234, 0.03223062515258789, 0.031984415054321286, 0.03210649490356445, 0.03160838317871094, 0.03137580871582031, 0.0317706241607666, 0.03157196807861328, 0.031770368576049805, 0.03165376091003418, 0.03132259178161621, 0.031250591278076174, 0.035724864959716794, 0.032318241119384764, 0.032099937438964846, 0.03226416015625, 0.03232998275756836, 0.03233577728271484, 0.03233520126342773, 0.032188705444335934, 0.0323897933959961, 0.03301580810546875, 0.03221920013427734, 0.032359584808349606, 0.03235715103149414, 0.032456703186035156, 0.03257468795776367, 0.03249641418457031, 0.033127422332763674, 0.0324659194946289, 0.03246876907348633, 0.03241510391235351, 0.03249603271484375, 0.03256374359130859, 0.032492576599121095, 0.03243507385253906, 0.03240249633789063, 0.03238544082641601, 0.032482078552246094, 0.03241923141479492, 0.032796161651611325, 0.033172321319580075, 0.03261030578613281, 0.03243212890625, 0.032366592407226565, 0.03241984176635742, 0.03286220932006836, 0.03251609420776367, 0.032181472778320314, 0.03227676773071289, 0.0322503662109375, 0.03223455810546875, 0.03232966232299805, 0.03227305603027344, 0.03215126419067383, 0.03242377471923828, 0.03247183990478516, 0.03238399887084961, 0.03272192001342773, 0.03234406280517578, 0.03226009750366211, 0.032200702667236326, 0.03210015869140625, 0.03228076934814453, 0.03245439910888672, 0.03240371322631836, 0.03231948852539063, 0.032292385101318356, 0.03236912155151367, 0.0326383056640625, 0.03271673583984375, 0.032730880737304686, 0.032807903289794924, 0.032712223052978516, 0.032667327880859375, 0.03676774215698242, 0.03281510543823242, 0.03222220611572266, 0.03275027084350586, 0.03253894424438476, 0.03227987289428711, 0.03223948669433594, 0.03208643341064453, 0.03221750259399414, 0.03220076751708984, 0.03225798416137695, 0.03188908767700195, 0.032030593872070315, 0.032170433044433594, 0.03210150527954102, 0.03220060729980469, 0.03247612762451172, 0.032753089904785156, 0.03220742416381836, 0.03232150268554688, 0.03213929748535156, 0.032116127014160153, 0.03256553649902344, 0.032241695404052736, 0.032067359924316405, 0.03201484680175781, 0.032104545593261716, 0.03213302230834961, 0.03213459014892578, 0.032231136322021486, 0.03222380828857422, 0.03213545608520508, 0.032538623809814454, 0.032124832153320314, 0.03261449432373047, 0.0319522876739502, 0.03213155364990234, 0.03222748947143555, 0.0321715202331543, 0.032268768310546876, 0.032317440032958986, 0.032203903198242186, 0.03213526535034179, 0.032320159912109375, 0.03254079818725586, 0.03284108734130859, 0.03241024017333984, 0.03259811019897461, 0.03282294464111328, 0.03247091293334961, 0.0335582389831543, 0.03300579071044922, 0.03239904022216797, 0.03237964630126953, 0.03243632125854492, 0.032375934600830075, 0.03238579177856445, 0.03231545639038086, 0.032581600189208984, 0.03242598342895508, 0.03223497772216797, 0.0328135986328125, 0.032421886444091795, 0.035751968383789065, 0.032403839111328124, 0.03231363296508789, 0.03243622589111328, 0.03394924926757813, 0.032475582122802736, 0.032623905181884766, 0.032612735748291016, 
0.032463455200195314, 0.03248463821411133, 0.03239984130859375, 0.03199948883056641, 0.03211663818359375, 0.03236105728149414, 0.032124736785888675, 0.03196944046020508, 0.03216521453857422, 0.03217641448974609, 0.03245235061645508, 0.03273513412475586, 0.03250252914428711, 0.03242393493652344, 0.032828800201416014, 0.032341793060302736, 0.032487808227539064, 0.032530654907226564, 0.03368489456176758, 0.032699230194091794, 0.0328043212890625, 0.032764190673828124, 0.032559326171875, 0.032542560577392576, 0.03262892913818359, 0.03257462310791016, 0.03243868637084961, 0.03269420623779297, 0.03260067367553711, 0.03276291275024414, 0.03251408004760742, 0.03236950302124023, 0.03230515289306641, 0.032194561004638675, 0.03214495849609375, 0.032457279205322265, 0.03216371154785156, 0.032000030517578125, 0.03230636978149414, 0.031531808853149414, 0.03158176040649414, 0.03158806419372558, 0.031916576385498045, 0.03203641510009766, 0.031561344146728516, 0.031579135894775394, 0.03486259078979492, 0.03154921531677246, 0.031482175827026365, 0.03224617767333984, 0.03136716842651367, 0.03158220863342285, 0.031393247604370116, 0.03144918441772461, 0.03151097679138184, 0.0361987190246582, 0.03243008041381836, 0.03190336036682129, 0.03179523277282715, 0.0333087043762207, 0.03174393653869629, 0.03176281547546387, 0.03237887954711914, 0.03234815979003906, 0.032323585510253904, 0.032464897155761716, 0.032306400299072266, 0.032492321014404295, 0.03237887954711914, 0.03223961639404297, 0.03232342529296875, 0.03221315383911133, 0.03227212905883789, 0.03229721450805664, 0.032382110595703124, 0.03240841674804688, 0.0320206413269043, 0.03204899215698242, 0.03207167816162109, 0.031954944610595705, 0.03198361587524414, 0.03209852981567383, 0.032077598571777347, 0.031858272552490234, 0.03168092727661133, 0.03163750457763672, 0.03173513603210449, 0.0323263053894043, 0.033039585113525394, 0.03301251220703125, 0.031990976333618165, 0.031679296493530275, 0.03147980880737305, 0.031682559967041016, 0.03158435249328613, 0.03158211135864258, 0.03200841522216797, 0.03176217651367187, 0.032038753509521484, 0.032147647857666016, 0.032194561004638675, 0.03227033615112305, 0.03413520050048828, 0.03287289428710938, 0.03247078323364258, 0.032422046661376956, 0.03259801483154297, 0.03235891342163086, 0.032595264434814454, 0.03325724792480469, 0.03258790588378906, 0.03260931015014648, 0.03252358245849609, 0.03233017730712891, 0.03239731216430664, 0.03222022247314453, 0.03239622497558594, 0.03243212890625, 0.037165599822998045, 0.032536033630371095, 0.03218486404418945, 0.03214054489135742, 0.03203968048095703, 0.03213516616821289, 0.03219574356079102, 0.03229782485961914, 0.03240959930419922, 0.03223551940917969, 0.03239023971557617, 0.03239004898071289, 0.03225145721435547, 0.032309696197509764, 0.03232767868041992, 0.03237887954711914, 0.032413471221923826, 0.03236476898193359, 0.03224576187133789, 0.032282623291015625, 0.03246899032592773, 0.03232582473754883, 0.032382942199707034, 0.03230908966064453, 0.032284671783447266, 0.03212492752075195, 0.0325591049194336, 0.03223952102661133, 0.03214755249023438, 0.03215315246582031, 0.032403839111328124, 0.03226828765869141, 0.03270867156982422, 0.03221206283569336, 0.03216476821899414, 0.032110591888427735, 0.032251998901367186, 0.03233331298828125, 0.032111167907714844, 0.03197727966308594, 0.03210243225097656, 0.03220275115966797, 0.032535552978515625, 0.03212502288818359, 0.03216025543212891, 0.0324202880859375, 0.032671710968017575, 0.032355712890625, 0.03236518478393555, 
0.03237068939208984, 0.03239116668701172, 0.03228876876831055, 0.03227590560913086, 0.032263904571533206, 0.03222774505615234, 0.03226668930053711, 0.03230310440063477, 0.03254684829711914, 0.033028064727783205, 0.03236454391479492, 0.03240867233276367, 0.03229718399047852, 0.032342113494873044, 0.036028255462646486, 0.03249168014526367, 0.03265945434570312, 0.03268198394775391, 0.032497665405273435, 0.03233587265014649, 0.03221724700927735, 0.03249324798583984, 0.032132545471191404, 0.032168991088867185, 0.03206111907958984, 0.03187638473510742, 0.03164233589172363, 0.03197542381286621, 0.03220028686523437, 0.031918336868286136, 0.03221696090698242, 0.03189545631408691, 0.03190329551696777, 0.032191295623779294, 0.0317391357421875, 0.03164588737487793, 0.031703615188598634, 0.03206505584716797, 0.032663646697998046, 0.032247966766357425, 0.03215766525268555, 0.03214771270751953, 0.03220684814453125, 0.032942081451416014, 0.032313056945800785, 0.03218259048461914, 0.032221153259277345, 0.03236454391479492, 0.03230515289306641, 0.03242598342895508, 0.0324771842956543, 0.032395263671875, 0.03235430526733398, 0.0323766098022461, 0.03236656188964844, 0.03255526351928711, 0.032301151275634765, 0.03238886260986328, 0.03245072174072266, 0.03276950454711914, 0.03224348831176758, 0.032701183319091796, 0.03259392166137695, 0.032889854431152346, 0.03258169555664062, 0.03275462341308594, 0.03244371032714844, 0.03232223892211914, 0.03230515289306641, 0.0325733757019043, 0.032272449493408205, 0.03272294235229492, 0.032587295532226564, 0.03262307357788086, 0.03263488006591797, 0.032280384063720705, 0.032252128601074216, 0.036184383392333985, 0.03270774459838867, 0.03219468688964844, 0.03203763198852539, 0.032365631103515625, 0.03285084915161133, 0.03209625625610352, 0.032169151306152347, 0.03220297622680664, 0.032191070556640625, 0.03215359878540039, 0.03229695892333984, 0.03236044692993164, 0.032317440032958986, 0.032382240295410154, 0.032300926208496096, 0.03249238586425781, 0.03292979049682617, 0.03252396774291992, 0.03242351913452148, 0.03233219146728516, 0.032534847259521486, 0.03251609420776367, 0.03238291168212891, 0.032126495361328125, 0.03233824157714844, 0.03228819274902344, 0.03220121765136719, 0.032218528747558595, 0.032140159606933595, 0.03230428695678711, 0.032205825805664064, 0.032632095336914066, 0.0322545280456543, 0.03227852630615234, 0.03236774444580078, 0.032161792755126956, 0.03212992095947265, 0.03252787017822266, 0.03595315170288086, 0.03251200103759765, 0.03240345764160156, 0.032322975158691404, 0.03246147155761719, 0.03246384048461914, 0.03251670455932617, 0.03222566223144531, 0.03224313735961914, 0.03245523071289062, 0.032384544372558596, 0.03233039855957031, 0.03238278579711914, 0.032387134552001956, 0.03246659088134766, 0.03253071975708008, 0.03228793716430664, 0.03216073608398438, 0.03244972610473633, 0.03220137786865234, 0.032094207763671875, 0.032317440032958986, 0.032441600799560544, 0.03220761489868164, 0.03566227340698242, 0.032579265594482425, 0.032473793029785154, 0.032643070220947264, 0.03356467056274414, 0.032901119232177735, 0.032595966339111326, 0.03257753753662109, 0.03267379379272461, 0.03260588836669922, 0.03395372772216797, 0.03285452651977539, 0.032927616119384766, 0.03260540771484375, 0.03271350479125976, 0.03251366424560547, 0.03356038284301758, 0.03272867202758789, 0.03262108612060547, 0.03299296188354492, 0.03260083389282226, 0.032281982421875, 0.032528545379638674, 0.03241366577148438, 0.03217049789428711, 0.032118785858154295, 0.03213312149047851, 
0.03305392074584961, 0.03297359848022461, 0.03223270416259766, 0.03159670448303223, 0.03190863990783691, 0.03282720184326172, 0.03209331130981445, 0.031533952713012695, 0.031624799728393556, 0.03149196815490723, 0.03147334480285645, 0.03201520156860352, 0.032032833099365235, 0.03196512031555176, 0.03154745674133301, 0.03175827217102051, 0.031948575973510744, 0.03187132835388184, 0.032853759765625, 0.03593423843383789, 0.03219055938720703, 0.03213926315307617, 0.03214131164550781, 0.03234761428833008, 0.0319879035949707, 0.0319780158996582, 0.03187078475952149, 0.03196723175048828, 0.03238447952270508, 0.03180521583557129, 0.03201433563232422, 0.032026782989501956, 0.03329289627075195, 0.032917407989501955, 0.032242015838623045, 0.031964223861694334]",tokens/s,30.83613284265184,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,11031.482368,12225.282048,0.0,11846.811648,11814.785024,s,1,14.1742060546875,14.1742060546875,0.0,14.1742060546875,14.1742060546875,14.1742060546875,14.1742060546875,[14.1742060546875],,kWh,0.0002060095365125221,2.2711510691751467e-05,6.820699901001481e-05,0.0002969280462142884,,MB,2041.421824,13158.514688,0.0,12750.68416,12632.68864,s,10,11.269819213867187,1.126981921386719,0.0009837382519498324,1.1272222290039062,1.1280132080078127,1.1280138671875,1.12801439453125,"[1.124601806640625, 1.1260865478515625, 
1.126607421875, 1.1271981201171875, 1.1273677978515626, 1.127246337890625, 1.126885498046875, 1.1280145263671875, 1.127798095703125, 1.1280130615234376]",tokens/s,227.1553741385659,kWh,3.29439294770835e-05,3.6311154198365797e-06,2.189140640200121e-05,5.846645129892129e-05,tokens/kWh,4378579.412852499,MB,2045.68576,13431.144448,0.0,13023.31392,12936.608256,s,10,48.449287109375,4.8449287109375,0.011159043985337812,4.8456611328125,4.8569484375,4.8600782226562504,4.86258205078125,"[4.82113916015625, 4.83734375, 4.8402119140625, 4.8375341796875, 4.85026513671875, 4.852009765625, 4.84683984375, 4.8562529296875, 4.844482421875, 4.8632080078125]",tokens/s,13.003287304883669,kWh,0.00014183703294833096,1.5647829244971095e-05,9.426238096539863e-05,0.00025174724315870067,tokens/kWh,250251.00259105916,,s,630,48.44516996765141,0.0768970951867482,0.0009903678534263585,0.07707817459106445,0.07788375549316406,0.07814800834655762,0.07881736396789552,"[0.07785676574707032, 0.07526579284667968, 0.0757882537841797, 0.07408258819580078, 0.0748359375, 0.07607504272460938, 0.07666191864013672, 0.07567622375488281, 0.0742996826171875, 0.07581849670410157, 0.07445555114746094, 0.075499267578125, 0.07802678680419922, 0.07563900756835938, 0.07463731384277343, 0.07814102172851563, 0.07496336364746094, 0.07424934387207031, 0.07650188446044921, 0.07589024353027343, 0.0773364486694336, 0.07680601501464844, 0.07670159912109376, 0.07657756805419921, 0.07660115051269531, 0.07485203552246093, 0.0754892807006836, 0.0772828140258789, 0.07673446655273437, 0.07661465454101563, 0.07576576232910157, 0.07504608154296875, 0.07724320220947266, 0.07693881225585937, 0.07721619415283203, 0.0769751968383789, 0.07723299407958985, 0.07545043182373047, 0.07555891418457031, 0.07767228698730469, 0.07710121917724609, 0.0766967010498047, 0.0776200942993164, 0.07671807861328125, 0.0770109405517578, 0.0765132827758789, 0.07711043548583985, 0.07820941162109375, 0.07761670684814453, 0.07686236572265626, 0.07683891296386719, 0.0775920639038086, 0.07747824096679687, 0.07700495910644531, 0.07720960235595703, 0.0770920639038086, 0.07698697662353515, 0.07749037170410156, 0.07751270294189454, 0.07655014038085937, 0.07779942321777343, 0.07731715393066406, 0.07791276550292969, 0.0786033935546875, 0.07609638214111328, 0.0756305923461914, 0.07455129241943359, 0.07676723480224609, 0.07450998687744141, 0.07465516662597656, 0.07625820922851563, 0.07547084808349609, 0.07696924591064454, 0.07560060882568359, 0.07573689270019532, 0.07680223846435547, 0.07652047729492187, 0.07694560241699219, 0.07668611145019531, 0.07704524993896485, 0.07631104278564453, 0.07454515075683593, 0.07735295867919922, 0.07654605102539062, 0.07673414611816406, 0.07509843444824219, 0.07729462432861328, 0.07688089752197266, 0.076676513671875, 0.07678009796142578, 0.07659254455566407, 0.07755427551269531, 0.07671356964111328, 0.0764072036743164, 0.0778885726928711, 0.07701395416259765, 0.07698614501953124, 0.07710720062255859, 0.07626569366455078, 0.07636544036865234, 0.07758006286621094, 0.07662448120117188, 0.07685939025878906, 0.0775308837890625, 0.07715046691894531, 0.07702937316894531, 0.07752671813964844, 0.07750028991699219, 0.07753363037109375, 0.077557373046875, 0.07737586975097656, 0.07689132690429687, 0.07641769409179687, 0.07771692657470704, 0.07712371063232422, 0.07690300750732422, 0.07663206481933593, 0.07772569274902344, 0.07736038208007813, 0.07743360137939453, 0.0775513916015625, 0.07758665466308594, 0.07787721252441407, 0.07761244964599609, 0.07663001251220704, 
0.07716307067871093, 0.07869641876220704, 0.074655517578125, 0.07691494750976563, 0.07670098876953126, 0.0772656021118164, 0.0762265625, 0.07540735626220703, 0.07659724426269532, 0.07531903839111329, 0.0760732192993164, 0.07762092590332031, 0.07583980560302735, 0.07471923065185547, 0.0766402587890625, 0.07631027221679687, 0.07604454040527343, 0.07751065826416016, 0.07634099578857421, 0.07458019256591797, 0.07683650970458984, 0.07583952331542969, 0.07519017791748046, 0.07445343780517578, 0.07854003143310546, 0.0767452163696289, 0.07760297393798828, 0.07468450927734376, 0.07680588531494141, 0.07654153442382812, 0.07720793914794923, 0.07788368225097657, 0.07785167694091796, 0.07628211212158204, 0.07803977966308594, 0.07440998077392579, 0.07607872009277344, 0.07754790496826172, 0.0771256332397461, 0.07664435577392578, 0.07686492919921875, 0.07659990692138671, 0.07732963562011719, 0.07672029113769531, 0.07771405029296875, 0.07757004547119141, 0.07758233642578125, 0.07725670623779297, 0.07758643341064453, 0.0777330551147461, 0.07841260528564453, 0.07736319732666015, 0.07719526672363282, 0.07733193969726562, 0.07717731475830078, 0.0771502685546875, 0.07722755432128907, 0.07721212768554687, 0.07785881805419922, 0.07827426910400391, 0.0773532485961914, 0.07762739562988281, 0.07744108581542969, 0.07751264190673827, 0.07925007629394532, 0.07684095764160156, 0.07577315521240234, 0.07403542327880859, 0.07693778991699218, 0.07521485137939453, 0.07411917114257813, 0.07441522979736329, 0.07878131103515625, 0.07660953521728515, 0.07570432281494141, 0.07775027465820313, 0.07691590118408204, 0.07613113403320312, 0.0768736343383789, 0.07605872344970703, 0.07715360260009765, 0.07604022216796875, 0.07630095672607422, 0.07614364624023437, 0.07704061126708985, 0.07682457733154296, 0.07708057403564453, 0.07727104187011719, 0.07668326568603516, 0.07685517120361328, 0.07695168304443359, 0.0762449951171875, 0.07683891296386719, 0.07775437164306641, 0.07650918579101562, 0.0745832290649414, 0.07519519805908204, 0.07621577453613282, 0.07803919982910157, 0.07445337677001954, 0.0750223388671875, 0.07552931213378906, 0.07757711791992188, 0.07706829071044922, 0.07715606689453125, 0.07758057403564453, 0.07744306945800782, 0.07767040252685548, 0.07735238647460937, 0.07695795440673828, 0.07777699279785157, 0.07663398742675781, 0.07759436798095704, 0.07701155090332032, 0.07735887908935547, 0.07798089599609374, 0.07720857238769531, 0.07684262084960937, 0.07729564666748047, 0.07750211334228516, 0.07781961822509766, 0.07717577362060547, 0.07777587127685547, 0.07749120330810547, 0.07719647979736328, 0.07751763153076172, 0.0780505599975586, 0.07878246307373046, 0.0764587173461914, 0.07495254516601563, 0.076017822265625, 0.07713177490234376, 0.07462297821044922, 0.07767040252685548, 0.07696383666992188, 0.07652146911621094, 0.0753070068359375, 0.07718911743164063, 0.07734591674804688, 0.07665718078613282, 0.07711548614501954, 0.07725820922851563, 0.07634611511230469, 0.07687305450439454, 0.07635014343261719, 0.07636790466308593, 0.07749423980712891, 0.07670579528808594, 0.07464959716796875, 0.07715948486328125, 0.07751747131347657, 0.07706038665771485, 0.07659056091308594, 0.07613497924804688, 0.07696112060546875, 0.07727372741699219, 0.07772905731201171, 0.07576416015625, 0.07787494659423828, 0.0769664306640625, 0.07671807861328125, 0.07602381134033204, 0.07737753295898438, 0.07680818939208985, 0.07723763275146485, 0.07706585693359375, 0.07669452667236328, 0.07692902374267578, 0.07649075317382813, 0.07679385375976562, 
0.07705776214599609, 0.07724674987792969, 0.07799398040771484, 0.07680732727050782, 0.07703638458251953, 0.0780546875, 0.07819951629638672, 0.07699251556396484, 0.07728476715087891, 0.07729417419433594, 0.07802674865722656, 0.07722354888916015, 0.07786943817138672, 0.07762739562988281, 0.07706829071044922, 0.07726416015625, 0.07718370819091797, 0.07761714935302734, 0.07793663787841797, 0.07815372467041015, 0.07822201538085938, 0.07638582611083984, 0.07641951751708985, 0.07677903747558594, 0.0764400634765625, 0.07453244781494141, 0.07653212738037109, 0.07511859130859375, 0.07612620544433593, 0.07707577514648438, 0.07667167663574219, 0.07467826843261718, 0.07851417541503906, 0.07683020782470704, 0.07688448333740235, 0.07638015747070312, 0.07708057403564453, 0.07680000305175781, 0.07677133178710938, 0.07669734191894531, 0.07671424102783203, 0.07683670043945312, 0.07716470336914062, 0.07534793853759765, 0.07585081481933593, 0.07744406127929687, 0.07660134124755859, 0.07698204803466797, 0.07632038116455078, 0.07743689727783203, 0.07684729766845703, 0.07713581085205078, 0.07724079895019531, 0.07901708984375, 0.07703839874267578, 0.07687177276611327, 0.07702275085449219, 0.07675504302978516, 0.07715264129638671, 0.07742463684082031, 0.07666185760498047, 0.07786589050292969, 0.07731404876708985, 0.07705779266357422, 0.07789183807373047, 0.07792176055908204, 0.07710979461669921, 0.07697612762451173, 0.07749964904785156, 0.07705062103271484, 0.07774185943603516, 0.07761328125, 0.07746351623535157, 0.07772278594970704, 0.07722473907470703, 0.07704790496826172, 0.07735056304931641, 0.07844694519042969, 0.0774878387451172, 0.07728729248046876, 0.0780005111694336, 0.07716361236572265, 0.07762992095947266, 0.07868777465820312, 0.07463884735107422, 0.07674976348876954, 0.07597875213623047, 0.07626751708984375, 0.07761920166015625, 0.07573296356201172, 0.07388572692871094, 0.07562649536132812, 0.07541350555419922, 0.07682048034667968, 0.0760627212524414, 0.07795478057861328, 0.07738396453857421, 0.07663929748535156, 0.07458265686035156, 0.07432556915283203, 0.0784268798828125, 0.07734476470947266, 0.07474121856689453, 0.07815593719482422, 0.07657305908203126, 0.07528982543945313, 0.07589558410644531, 0.07804108428955078, 0.0772116470336914, 0.0766976318359375, 0.0776396484375, 0.07667501068115234, 0.07691986846923828, 0.07769805145263672, 0.07686246490478515, 0.07673677062988281, 0.07715033721923828, 0.07756864166259765, 0.07607091522216797, 0.07791820526123047, 0.07557939147949219, 0.07883161926269532, 0.0771783676147461, 0.07769110107421875, 0.07741468811035156, 0.0776349105834961, 0.0775297622680664, 0.07637308502197265, 0.07794985961914062, 0.07712358093261719, 0.07705359649658203, 0.07745689392089844, 0.07706505584716797, 0.07772774505615235, 0.07788441467285157, 0.07722675323486328, 0.07766041564941406, 0.07817215728759766, 0.07750857543945312, 0.07726902770996094, 0.07788339233398438, 0.07720550537109375, 0.07744694519042969, 0.07702754974365235, 0.077559326171875, 0.07702960205078126, 0.07878041839599609, 0.07671807861328125, 0.07690035247802735, 0.07653939056396485, 0.07479106903076171, 0.07884371185302734, 0.07458870697021484, 0.0742850570678711, 0.07681436920166015, 0.07709487915039062, 0.07635763549804687, 0.07550351715087891, 0.0775332794189453, 0.07669760131835937, 0.07688396453857421, 0.07683830261230469, 0.07687596893310547, 0.07729132843017578, 0.07674285125732422, 0.07652754974365235, 0.07665827178955079, 0.07438365173339843, 0.07757884979248048, 0.0772833251953125, 0.07690444946289063, 
0.0771396484375, 0.07730963134765625, 0.07695958709716796, 0.07720220947265626, 0.07817388916015625, 0.07613881683349609, 0.07578214263916015, 0.07691622161865234, 0.07792447662353516, 0.07729804992675782, 0.07712767791748047, 0.07690220642089844, 0.07772496032714844, 0.07786383819580078, 0.07814076995849609, 0.07719503784179688, 0.07709990692138671, 0.07713916778564453, 0.07766236877441406, 0.07706451416015625, 0.07729170989990235, 0.078010498046875, 0.07687075042724609, 0.07658553314208984, 0.07706588745117188, 0.07716320037841796, 0.07909171295166016, 0.07752703857421875, 0.07747174072265625, 0.07759613037109375, 0.0773616943359375, 0.0779653091430664, 0.07723417663574218, 0.07785001373291016, 0.0781145248413086, 0.0771629409790039, 0.07760931396484375, 0.07773808288574219, 0.0798538589477539, 0.07681455993652343, 0.07651155090332032, 0.07493017578125, 0.07427276611328125, 0.07538893127441407, 0.0744120330810547, 0.07764991760253906, 0.0762429428100586, 0.07546265411376953, 0.07448371124267578, 0.07621017456054688, 0.0773939208984375, 0.07683891296386719, 0.07456563568115235, 0.07678361511230469, 0.07679180908203125, 0.07448751831054687, 0.07650947570800781, 0.07458937835693359, 0.0744865951538086, 0.07691011047363282, 0.07778556823730469, 0.07503814697265625, 0.0751212158203125, 0.077946044921875, 0.07851910400390626, 0.07610755157470703, 0.07744493103027343, 0.07744121551513672, 0.07714387512207031, 0.07747830200195313, 0.07756594848632813, 0.07625523376464843, 0.07684300994873047, 0.07740118408203125, 0.07712246704101562, 0.07687318420410157, 0.07808873748779296, 0.0777615966796875, 0.07747465515136719, 0.07694960021972656, 0.07742873382568359, 0.07797555541992188, 0.07748812866210937, 0.07691776275634765, 0.07771033477783203, 0.07739929962158203, 0.07777254486083984, 0.07729779052734374, 0.07773683166503906, 0.07766015625, 0.07825532531738282, 0.07744796752929688, 0.07757414245605469, 0.07752294158935547, 0.07762902069091797, 0.07822582244873047, 0.07762892913818359, 0.07705577850341797, 0.078115234375, 0.07751026916503906, 0.07758882904052734, 0.07943996429443359, 0.07385906982421875, 0.07786495971679687, 0.07657667541503907, 0.07699993896484375, 0.07492620849609374, 0.07848998260498047, 0.07709935760498046, 0.07669145965576171, 0.07688595581054687, 0.07626886749267578, 0.0770852508544922, 0.07454534149169922, 0.07774822235107422, 0.07693708801269532, 0.07696784210205078, 0.07469276428222656, 0.07707244873046876, 0.07777689361572265, 0.07666812896728516, 0.07705680084228515, 0.07629414367675781, 0.07714733123779297, 0.07667795562744141, 0.07740620422363281, 0.07673004913330078, 0.0770167999267578, 0.07725270080566406, 0.07702169799804688, 0.07718096160888673, 0.0775920639038086, 0.07710768127441406, 0.0771520004272461, 0.0776908187866211, 0.07769292449951172, 0.0774167709350586, 0.07798966217041016, 0.07731222534179688, 0.07706214141845703, 0.07735295867919922, 0.07738162994384766, 0.07771734619140624, 0.07840735626220703, 0.07716690826416016, 0.07732192230224609, 0.07730838775634766, 0.07738368225097657, 0.077246337890625, 0.07760086059570312, 0.07749977874755859, 0.07777961730957031, 0.07746543884277343, 0.07728144073486329, 0.07792144012451171, 0.07734937286376953, 0.07823168182373047, 0.07755558776855469, 0.07745980834960937, 0.07762124633789062, 0.07748780822753906, 0.07770758056640625, 0.07758233642578125, 0.07750399780273437]",tokens/s,13.004392397026873,,, 
4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.747328,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,3548.471296,4490.985472,0.0,4112.515072,3976.487424,s,1,9.674736328125,9.674736328125,0.0,9.674736328125,9.674736328125,9.674736328125,9.674736328125,[9.674736328125],,kWh,8.025016947500205e-05,8.844910268695005e-06,2.7366133003997595e-05,0.00011646121274769465,,MB,1480.708096,4690.214912,0.0,4282.384384,4102.199808,s,10,3.0004592285156244,0.3000459228515625,0.002382701810579694,0.30021560668945313,0.30208082580566403,0.3034429244995117,0.3045326034545899,"[0.2961402893066406, 0.2970372619628906, 0.3009500427246094, 0.3017781372070312, 
0.3001174621582031, 0.29800216674804686, 0.29995126342773437, 0.30031375122070314, 0.3048050231933594, 0.30136383056640625]",tokens/s,853.2027283258478,kWh,8.738438462867693e-06,9.636931963324836e-07,5.819645178588244e-06,1.5521776837788418e-05,tokens/kWh,16492957.13212145,MB,1489.108992,4698.60352,0.0,4290.772992,4102.202368,s,10,17.7380908203125,1.77380908203125,0.009381673081748462,1.7759326782226563,1.7821324951171875,1.7833516479492186,1.7843269702148437,"[1.762597412109375, 1.7813135986328126, 1.78457080078125, 1.781861572265625, 1.7780919189453126, 1.751927734375, 1.7751317138671876, 1.7761566162109375, 1.775708740234375, 1.770730712890625]",tokens/s,35.516787369166316,kWh,5.201708206213239e-05,5.737347936559142e-06,3.4305164699011866e-05,9.20595946977034e-05,tokens/kWh,684339.315275865,,s,630,17.73558416748047,0.028151720900762647,0.0005233327945577362,0.02804528045654297,0.02865833911895752,0.02905292730331421,0.030192383632659914,"[0.029214719772338867, 0.028591264724731447, 0.02875859260559082, 0.02827008056640625, 0.0282061767578125, 0.027969215393066408, 0.027839807510375975, 0.028016544342041014, 0.02785305595397949, 0.027818527221679688, 0.028062816619873046, 0.02813225555419922, 0.028037120819091797, 0.027942399978637695, 0.02782249641418457, 0.027848512649536132, 0.027775264739990233, 0.027744255065917968, 0.027762208938598633, 0.028221887588500978, 0.027789024353027342, 0.028057920455932618, 0.027993471145629882, 0.028236415863037108, 0.028836000442504884, 0.02829523277282715, 0.028057376861572267, 0.028858112335205077, 0.02809881591796875, 0.028055551528930665, 0.027926528930664062, 0.027853023529052733, 0.02823468780517578, 0.027906911849975586, 0.028028511047363282, 0.027899520874023438, 0.0278492488861084, 0.027924736022949218, 0.028332191467285155, 0.027929920196533203, 0.027844192504882813, 0.027827135086059572, 0.028008447647094727, 0.02775644874572754, 0.027807136535644532, 0.027783615112304687, 0.027832351684570312, 0.027713760375976563, 0.02770969581604004, 0.027840351104736327, 0.02791823959350586, 0.028047359466552735, 0.02779654312133789, 0.027519935607910155, 0.027535360336303712, 0.027743871688842774, 0.02761356735229492, 0.027615232467651366, 0.0276778564453125, 0.027767648696899416, 0.02763795280456543, 0.02759561538696289, 0.027728607177734375, 0.028792640686035157, 0.028007999420166015, 0.027926591873168944, 0.02797654342651367, 0.027932640075683593, 0.027881631851196288, 0.027856767654418944, 0.02782579231262207, 0.027744640350341798, 0.02784876823425293, 0.027950975418090822, 0.027766847610473634, 0.027715583801269532, 0.02816204833984375, 0.028196863174438477, 0.02804118347167969, 0.02815939140319824, 0.02861712074279785, 0.028243967056274414, 0.028172063827514648, 0.02802227210998535, 0.02807699203491211, 0.02874153518676758, 0.028016544342041014, 0.0279615364074707, 0.028800928115844726, 0.028137567520141602, 0.027987680435180663, 0.028027168273925783, 0.02809004783630371, 0.028483903884887696, 0.028245920181274413, 0.028418144226074218, 0.028051456451416015, 0.02852355194091797, 0.028158943176269532, 0.028483392715454102, 0.02825337600708008, 0.028355424880981445, 0.028582048416137696, 0.02797398376464844, 0.030203519821166994, 0.02831772804260254, 0.028476448059082032, 0.028310272216796876, 0.028288608551025392, 0.02802547264099121, 0.027908063888549803, 0.02809654426574707, 0.028258304595947265, 0.02834444808959961, 0.028395296096801758, 0.02819081687927246, 0.02813091278076172, 0.02800271987915039, 0.028902463912963867, 0.029223871231079102, 
0.028594175338745118, 0.0280798397064209, 0.028053247451782226, 0.030282272338867187, 0.028395423889160155, 0.02833404731750488, 0.02878870391845703, 0.02840985679626465, 0.028224607467651368, 0.02835139274597168, 0.028132991790771486, 0.02814300727844238, 0.027931615829467772, 0.028121088027954103, 0.027938816070556642, 0.028051456451416015, 0.028102655410766602, 0.028092639923095703, 0.028360479354858397, 0.02805936050415039, 0.028330272674560546, 0.02801840019226074, 0.027935007095336913, 0.0281180477142334, 0.028301376342773438, 0.028976032257080078, 0.028325887680053712, 0.0284279670715332, 0.02899193572998047, 0.028288063049316407, 0.02833024024963379, 0.028401695251464843, 0.028829856872558592, 0.028331647872924803, 0.028662687301635743, 0.02885001564025879, 0.02876374435424805, 0.028721887588500975, 0.02905465507507324, 0.02828463935852051, 0.0280599365234375, 0.028203008651733398, 0.02790969657897949, 0.028058048248291015, 0.02796080017089844, 0.028436992645263674, 0.02804672050476074, 0.028306079864501954, 0.02873958396911621, 0.02829212760925293, 0.02798863983154297, 0.02816160011291504, 0.028266656875610353, 0.028145503997802735, 0.0281976318359375, 0.028452224731445312, 0.028525184631347657, 0.028062976837158204, 0.028092447280883788, 0.02822844886779785, 0.02867340850830078, 0.028368864059448242, 0.028400352478027344, 0.029015871047973634, 0.028192031860351564, 0.027931360244750975, 0.028256256103515624, 0.028200960159301756, 0.02850201606750488, 0.029050815582275392, 0.028413856506347656, 0.028254304885864258, 0.028296192169189452, 0.02815692710876465, 0.028100608825683594, 0.028112895965576173, 0.02844268798828125, 0.028374975204467773, 0.028080127716064454, 0.028108800888061523, 0.02850931167602539, 0.028263296127319336, 0.028379264831542968, 0.028482879638671875, 0.02835308837890625, 0.02878463935852051, 0.02835251235961914, 0.0320552978515625, 0.028366975784301758, 0.02827244758605957, 0.02817184066772461, 0.028137279510498048, 0.02839360046386719, 0.028000831604003906, 0.029224960327148438, 0.029593599319458007, 0.02870230484008789, 0.028492191314697265, 0.0287825927734375, 0.02813542366027832, 0.028061695098876953, 0.028094463348388672, 0.028077856063842773, 0.028247520446777342, 0.028110944747924804, 0.027994783401489257, 0.02811712074279785, 0.027995424270629884, 0.02810736083984375, 0.028077856063842773, 0.027997472763061523, 0.027933216094970702, 0.02801091194152832, 0.02798204803466797, 0.027823904037475585, 0.028040576934814453, 0.027878015518188477, 0.027856895446777344, 0.027828224182128908, 0.028000255584716797, 0.028388639450073243, 0.02795939254760742, 0.027771520614624023, 0.02870044708251953, 0.027910367965698242, 0.02792448043823242, 0.027828224182128908, 0.02790809631347656, 0.02838528060913086, 0.02797750473022461, 0.027824159622192382, 0.027938976287841796, 0.028885536193847657, 0.02805718421936035, 0.027797632217407227, 0.027871519088745116, 0.028000255584716797, 0.028310880661010743, 0.028186975479125978, 0.02915900802612305, 0.028278495788574217, 0.028160255432128908, 0.027805471420288087, 0.02789798355102539, 0.027963775634765625, 0.028459487915039064, 0.0279552001953125, 0.027864992141723634, 0.028686431884765624, 0.02787942314147949, 0.02800783920288086, 0.02827120018005371, 0.028033088684082032, 0.02844460868835449, 0.028139328002929686, 0.028166015625, 0.0280883846282959, 0.02788915252685547, 0.02784332847595215, 0.028297216415405273, 0.02786012840270996, 0.028248512268066406, 0.028330400466918947, 0.028495872497558594, 0.028525600433349608, 
0.02841289520263672, 0.028353952407836915, 0.028256864547729493, 0.028106752395629882, 0.028597408294677735, 0.028134239196777343, 0.02794905662536621, 0.027983583450317383, 0.02843836784362793, 0.028486080169677734, 0.028277984619140627, 0.02841379165649414, 0.028306240081787108, 0.028092544555664064, 0.02789900779724121, 0.028484352111816408, 0.027940927505493166, 0.030082719802856445, 0.029275808334350586, 0.028498687744140626, 0.02832383918762207, 0.028121088027954103, 0.028077280044555664, 0.02827939224243164, 0.027940383911132814, 0.027849376678466795, 0.027633119583129882, 0.027838207244873046, 0.027794208526611328, 0.02813132858276367, 0.029343679428100587, 0.028348447799682618, 0.02805097579956055, 0.02799804878234863, 0.027734111785888672, 0.02773027229309082, 0.02767401695251465, 0.027540128707885744, 0.02756211280822754, 0.027502592086791993, 0.028020736694335937, 0.027751615524291992, 0.02767545509338379, 0.028507295608520507, 0.027679328918457032, 0.027495967864990235, 0.02745622444152832, 0.027582496643066407, 0.027944927215576173, 0.02773324775695801, 0.027798271179199217, 0.027709056854248047, 0.027736352920532226, 0.02755593681335449, 0.02798591995239258, 0.027502592086791993, 0.02760028839111328, 0.02787164878845215, 0.027815135955810547, 0.02777350425720215, 0.02764227294921875, 0.02768076705932617, 0.027692512512207033, 0.02776323127746582, 0.027929664611816406, 0.02788652801513672, 0.027722848892211913, 0.0277106876373291, 0.027702592849731447, 0.02776019287109375, 0.027640607833862303, 0.027545631408691405, 0.027828447341918944, 0.02759657669067383, 0.02762704086303711, 0.02750716781616211, 0.027856895446777344, 0.027613183975219727, 0.02758246421813965, 0.029181663513183593, 0.027738399505615234, 0.027609151840209963, 0.028114879608154297, 0.02749849510192871, 0.028112895965576173, 0.02768076705932617, 0.02792448043823242, 0.02785865592956543, 0.027799840927124023, 0.027878847122192383, 0.028043840408325197, 0.027528255462646485, 0.027751136779785156, 0.029416479110717774, 0.02823779106140137, 0.027775199890136718, 0.027812639236450196, 0.027985279083251952, 0.027676416397094727, 0.027468704223632814, 0.02761724853515625, 0.027966976165771484, 0.02823129653930664, 0.027691072463989257, 0.02888172721862793, 0.029683328628540038, 0.027871616363525392, 0.02786479949951172, 0.027715871810913086, 0.028116992950439453, 0.027715583801269532, 0.02868342399597168, 0.02802569580078125, 0.02786623954772949, 0.028934944152832032, 0.028350048065185547, 0.027709951400756837, 0.027795616149902343, 0.02771504020690918, 0.028098047256469725, 0.02781683158874512, 0.0277708797454834, 0.028061695098876953, 0.02795929527282715, 0.027705408096313475, 0.027905471801757814, 0.027892480850219725, 0.0280184326171875, 0.02783014488220215, 0.02773334312438965, 0.027951904296875, 0.02772505569458008, 0.028066335678100587, 0.028117216110229493, 0.027668384552001952, 0.031035263061523436, 0.029231264114379884, 0.028401952743530273, 0.02870863914489746, 0.02872915267944336, 0.029957727432250978, 0.02830191993713379, 0.028047359466552735, 0.02813132858276367, 0.027960927963256835, 0.0279835205078125, 0.028330751419067383, 0.028212959289550782, 0.028125471115112304, 0.02806287956237793, 0.028126047134399413, 0.028250431060791014, 0.027799232482910156, 0.027989824295043944, 0.028274879455566407, 0.028086271286010742, 0.029573535919189452, 0.028387327194213868, 0.02862073516845703, 0.02803446388244629, 0.027937631607055664, 0.02798195266723633, 0.02855824089050293, 0.027867935180664063, 
0.02853023910522461, 0.02806345558166504, 0.028030912399291993, 0.028058399200439454, 0.0279135684967041, 0.02863801574707031, 0.02808940887451172, 0.027871616363525392, 0.0278768310546875, 0.028234272003173827, 0.028436895370483398, 0.029132799148559572, 0.028430335998535155, 0.028274240493774413, 0.028456960678100586, 0.028267200469970704, 0.027911935806274414, 0.027850528717041016, 0.027584255218505858, 0.028383712768554687, 0.028039167404174805, 0.02783616065979004, 0.027914464950561522, 0.02796985626220703, 0.027935808181762695, 0.027919231414794923, 0.028141216278076173, 0.028104223251342774, 0.028102336883544923, 0.028363679885864256, 0.02797772789001465, 0.02840575981140137, 0.027850751876831056, 0.029393983840942384, 0.028009408950805663, 0.027846656799316406, 0.029296640396118165, 0.028173887252807617, 0.027938560485839845, 0.028368831634521485, 0.028224256515502928, 0.028639232635498047, 0.028023807525634766, 0.02804956817626953, 0.027988191604614257, 0.02785139274597168, 0.028108800888061523, 0.028560800552368162, 0.0282935676574707, 0.028094751358032227, 0.028011520385742186, 0.02812918472290039, 0.027906688690185547, 0.027709152221679686, 0.027736703872680665, 0.030134271621704102, 0.028178016662597657, 0.027710880279541016, 0.028185600280761718, 0.02772991943359375, 0.028263776779174805, 0.0319268798828125, 0.030612768173217772, 0.028657855987548827, 0.028160480499267577, 0.02813145637512207, 0.027729471206665038, 0.029638944625854494, 0.029280351638793944, 0.02852771186828613, 0.02785152053833008, 0.02829539108276367, 0.027936159133911134, 0.02789232063293457, 0.02817433547973633, 0.027797760009765624, 0.027991167068481447, 0.02766035270690918, 0.02772435188293457, 0.027688255310058595, 0.027643871307373048, 0.02767500877380371, 0.027786624908447265, 0.027922496795654297, 0.02789616012573242, 0.02793235206604004, 0.027744415283203126, 0.027976415634155274, 0.028063072204589843, 0.027822431564331056, 0.02777529525756836, 0.02755583953857422, 0.02770534324645996, 0.028194208145141602, 0.027848512649536132, 0.027699071884155272, 0.028509056091308594, 0.028118688583374022, 0.027817535400390624, 0.027559999465942384, 0.027988447189331054, 0.028172000885009766, 0.027695680618286134, 0.02807423973083496, 0.0277521915435791, 0.028430080413818358, 0.027940576553344726, 0.028269088745117188, 0.027922431945800782, 0.02776406478881836, 0.027830944061279297, 0.0314204158782959, 0.028023008346557618, 0.0278089599609375, 0.027728479385375978, 0.027830272674560546, 0.027944320678710936, 0.02774880027770996, 0.030165119171142576, 0.028558431625366212, 0.028283807754516603, 0.029252960205078126, 0.028064416885375976, 0.02800230407714844, 0.0276845760345459, 0.02768451118469238, 0.0280994873046875, 0.027713407516479494, 0.02782575988769531, 0.02770150375366211, 0.028141151428222655, 0.02781395149230957, 0.02820649528503418, 0.027691968917846678, 0.027979776382446288, 0.027584575653076173, 0.02762678337097168, 0.027896480560302736, 0.027697376251220703, 0.027703071594238283, 0.02767033576965332, 0.027949247360229492, 0.027733375549316406, 0.027902591705322267, 0.027952640533447266, 0.028445056915283203, 0.02834649658203125, 0.028201984405517577, 0.02775551986694336, 0.027809791564941407, 0.028102655410766602, 0.027828256607055665, 0.027844287872314452, 0.027746591567993164, 0.027877376556396483, 0.03007302474975586, 0.029748287200927735, 0.028857088088989256, 0.028288063049316407, 0.02814627265930176, 0.02789516830444336, 0.028232511520385743, 0.027945119857788085, 0.027805696487426756, 
0.028079200744628906, 0.02820947265625, 0.028395391464233397, 0.028562143325805665, 0.028288576126098634, 0.02798851203918457, 0.027824159622192382, 0.027700799942016602, 0.02780342483520508, 0.028018655776977538, 0.027871231079101562, 0.027931327819824218, 0.028008319854736327, 0.028118207931518556, 0.027992895126342773, 0.02795110321044922, 0.028184576034545897]",tokens/s,35.52180712238126,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,5168.80384,5444.07552,0.0,5058.330624,5057.441792,s,1,10.7883662109375,10.7883662109375,0.0,10.7883662109375,10.7883662109375,10.7883662109375,10.7883662109375,[10.7883662109375],,kWh,0.00010178368605418957,1.1220211151767043e-05,3.421919404197893e-05,0.00014722309124793554,,MB,1765.797888,5630.722048,0.0,5215.617024,5189.70368,s,10,4.6272205810546865,0.4627220581054686,0.002933348460908013,0.46348547363281245,0.46478533630371094,0.4657304214477539,0.46648648956298827,"[0.4548807678222656, 0.4619276428222656, 0.4625466613769531, 0.46346029663085936, 0.4618591003417969, 0.4645753173828125, 0.46359005737304687, 0.4635106506347656, 0.464194580078125, 0.46667550659179685]",tokens/s,553.2478850222648,kWh,1.3443957685986638e-05,1.4826183503564555e-06,8.953163728182203e-06,2.38797397645253e-05,tokens/kWh,10720384.833519101,MB,1768.689664,5649.596416,0.0,5234.491392,5189.70624,s,10,26.6988544921875,2.6698854492187505,0.01623390202968721,2.6686475830078127,2.685975439453125,2.6936388427734377,2.6997695654296874,"[2.680671630859375, 2.70130224609375, 2.677565185546875, 2.648965087890625, 2.66069189453125, 2.66480224609375, 2.64328857421875, 2.666698974609375, 2.6842724609375, 2.67059619140625]",tokens/s,23.596517977366695,kWh,7.880853706859928e-05,8.692779580051808e-06,5.225285745881693e-05,0.00013975417410746802,tokens/kWh,450791.5445269944,,s,630,26.696288806915295,0.04237506159827823,0.0006875272501914667,0.04226753616333008,0.042946719741821286,0.0432572078704834,0.04454908390045167,"[0.043845630645751955, 0.043096065521240234, 0.04273321533203125, 0.04331875228881836, 0.04256438446044922, 0.04278220748901367, 0.043227745056152345, 0.04312063980102539, 0.04290063858032227, 0.04279548645019531, 0.04283039855957031, 0.04293395233154297, 0.04280947113037109, 0.042624801635742185, 0.042678497314453126, 0.042888832092285153, 0.04221990585327148, 0.04204496002197266, 0.04197014236450195, 0.04241104125976562, 0.04267891311645508, 0.042832225799560544, 0.0424796142578125, 0.04218265533447266, 0.04206796646118164, 0.04219289779663086, 0.04257913589477539, 0.042644287109375, 0.04230963134765625, 0.042452735900878905, 0.042238079071044925, 0.042162303924560544, 0.04229529571533203, 0.042405376434326174, 0.042530529022216795, 0.042085151672363284, 0.04261014556884766, 0.042885311126708986, 0.043135326385498045, 0.04307558441162109, 0.042575870513916016, 0.042426368713378904, 0.04267168045043945, 0.04199270248413086, 
0.04202284622192383, 0.04201388931274414, 0.04262137603759766, 0.04251811218261719, 0.04245568084716797, 0.04218918228149414, 0.042181568145751955, 0.04212003326416015, 0.042046592712402346, 0.04200259017944336, 0.04188028717041015, 0.042518527984619144, 0.042684417724609375, 0.042732864379882815, 0.042420928955078124, 0.04356915283203125, 0.042431617736816404, 0.04227967834472656, 0.04240771102905273, 0.04386841583251953, 0.04280899047851563, 0.04259875106811523, 0.042546337127685546, 0.04230025482177734, 0.04227686309814453, 0.04217555236816406, 0.04204800033569336, 0.04227673721313477, 0.04257440185546875, 0.04228710556030273, 0.04215167999267578, 0.04236249542236328, 0.043010688781738284, 0.04308777618408203, 0.04273680114746094, 0.04269126510620117, 0.0434791374206543, 0.04703657531738281, 0.04324585723876953, 0.04322614288330078, 0.04309859085083008, 0.04259040069580078, 0.04270700836181641, 0.04255740737915039, 0.04275321578979492, 0.04289379119873047, 0.042774879455566406, 0.04294582366943359, 0.04299135971069336, 0.04308060836791992, 0.04296300888061524, 0.044090686798095705, 0.04295478439331055, 0.04244569778442383, 0.04279068756103516, 0.04261999893188476, 0.043163616180419924, 0.042781631469726564, 0.042805248260498044, 0.04301824188232422, 0.04269641494750977, 0.04297347259521484, 0.04272335815429688, 0.04283184051513672, 0.0433889274597168, 0.04292812728881836, 0.042876289367675784, 0.04283046340942383, 0.042523872375488284, 0.0434409294128418, 0.042829025268554685, 0.04300422286987305, 0.04284832000732422, 0.04269468688964844, 0.04251596832275391, 0.04247580718994141, 0.04233599853515625, 0.04272351837158203, 0.042819263458251954, 0.042594913482666016, 0.04293254470825195, 0.04321900939941406, 0.04412246322631836, 0.04259654235839844, 0.042616832733154295, 0.04257923126220703, 0.04213372802734375, 0.042869247436523435, 0.04213555145263672, 0.04220323181152344, 0.04276969528198242, 0.04311865615844727, 0.0426391372680664, 0.04289804840087891, 0.043010112762451175, 0.043079776763916014, 0.04272515106201172, 0.042555007934570316, 0.04308438491821289, 0.04264044952392578, 0.04245747375488281, 0.04216889572143555, 0.042145790100097655, 0.042240001678466796, 0.0421962890625, 0.04211539077758789, 0.04243904113769531, 0.04202700805664063, 0.042327838897705077, 0.04289318466186524, 0.043837791442871095, 0.04293632125854492, 0.04215193557739258, 0.042523681640625, 0.042173599243164064, 0.04205548858642578, 0.04177926254272461, 0.04220307159423828, 0.042160160064697264, 0.04192150497436523, 0.04393577575683594, 0.04235065460205078, 0.04212604904174805, 0.042032577514648437, 0.04209331130981445, 0.04239510345458984, 0.04240195083618164, 0.04256396865844726, 0.04222137451171875, 0.04231391906738281, 0.04196352005004883, 0.04192777633666992, 0.04187369537353516, 0.041923198699951175, 0.042385406494140625, 0.042260288238525394, 0.041995521545410155, 0.04207712173461914, 0.042008544921875, 0.04195126342773438, 0.0450621452331543, 0.04279417419433594, 0.042901920318603515, 0.04261929702758789, 0.042588001251220704, 0.04333356857299805, 0.04218003082275391, 0.04223644638061524, 0.04191567993164062, 0.041870113372802734, 0.041756126403808595, 0.042571361541748044, 0.04242681503295898, 0.04251062393188477, 0.042213600158691404, 0.041992191314697266, 0.0420843505859375, 0.04177920150756836, 0.04191231918334961, 0.041809921264648435, 0.04171558380126953, 0.041703296661376954, 0.04156646347045898, 0.041818111419677735, 0.0422369270324707, 0.04187238311767578, 0.04210483169555664, 
0.041645950317382815, 0.04178137588500976, 0.04154131317138672, 0.041676097869873044, 0.04175548934936523, 0.042102943420410155, 0.042221569061279295, 0.04229024124145508, 0.042849216461181644, 0.04225228881835937, 0.0431135368347168, 0.041861793518066404, 0.042194751739501955, 0.04192038345336914, 0.04194591903686523, 0.042100414276123044, 0.042199134826660156, 0.04189132690429687, 0.041718273162841796, 0.04167679977416992, 0.041836544036865236, 0.04177920150756836, 0.0426102409362793, 0.04207830429077149, 0.042452896118164066, 0.042719680786132815, 0.042033153533935545, 0.04225024032592774, 0.04222758483886719, 0.04198822402954101, 0.04182988739013672, 0.041990657806396485, 0.04184678268432617, 0.04170342254638672, 0.041848159790039065, 0.04168566513061524, 0.041748031616210934, 0.04168745422363281, 0.04205126571655273, 0.04226492691040039, 0.04174848175048828, 0.04326649475097656, 0.041941024780273437, 0.04166323089599609, 0.04192633438110351, 0.041744384765625, 0.04157347106933594, 0.041690017700195314, 0.04237721633911133, 0.04220927810668945, 0.041973758697509765, 0.041834495544433595, 0.04223097610473633, 0.042222400665283204, 0.04250124740600586, 0.042277374267578126, 0.042117088317871094, 0.04189225769042969, 0.04209209442138672, 0.04278931045532226, 0.04239769744873047, 0.04219289779663086, 0.042124961853027346, 0.04240544128417969, 0.042044063568115235, 0.04191446304321289, 0.041832481384277344, 0.04251996612548828, 0.04233276748657227, 0.04212940979003906, 0.04228265762329102, 0.04207369613647461, 0.042581790924072264, 0.044045215606689454, 0.042135616302490235, 0.04202201461791992, 0.042056415557861326, 0.0420968017578125, 0.04179763031005859, 0.04190003204345703, 0.04204336166381836, 0.04237705612182617, 0.042411552429199216, 0.04247353744506836, 0.0421231689453125, 0.04227756881713867, 0.04205567932128906, 0.0421662712097168, 0.04373708724975586, 0.04261686325073242, 0.04235011291503906, 0.04220099258422851, 0.042342113494873046, 0.042389823913574216, 0.04239206314086914, 0.041940990447998046, 0.042777950286865235, 0.042578590393066405, 0.042185985565185546, 0.04183014297485352, 0.04190268707275391, 0.04205318450927734, 0.04195135879516602, 0.04204758453369141, 0.04342806243896485, 0.04220927810668945, 0.04197715377807617, 0.042129310607910156, 0.04366131210327148, 0.04232185745239258, 0.04240470504760742, 0.04190412902832031, 0.04240588760375977, 0.04177510452270508, 0.04196511840820313, 0.04191020965576172, 0.04214220809936523, 0.04219903945922852, 0.04220489501953125, 0.0423768310546875, 0.04214803314208984, 0.0420145263671875, 0.042283679962158205, 0.042180606842041016, 0.042297344207763675, 0.041850879669189454, 0.04196352005004883, 0.042205055236816405, 0.04206387329101562, 0.04183903884887695, 0.04183967971801758, 0.04195094299316406, 0.04201145553588867, 0.0423579216003418, 0.04236793518066406, 0.04187340927124023, 0.04199423980712891, 0.04168294525146484, 0.04167375946044922, 0.04196860885620117, 0.04189593505859375, 0.04189923095703125, 0.04233705520629883, 0.04251603317260742, 0.04218310546875, 0.04226844787597656, 0.04280329513549805, 0.04417753601074219, 0.042563297271728515, 0.042452896118164066, 0.04209292984008789, 0.04270694351196289, 0.04248076629638672, 0.04224499130249024, 0.04232191848754883, 0.04232806396484375, 0.042160320281982425, 0.04458067321777344, 0.043003902435302735, 0.04233011245727539, 0.04236083221435547, 0.04199200057983398, 0.0419411506652832, 0.042489887237548825, 0.0425164794921875, 0.04228505706787109, 0.04205542373657226, 
0.044471744537353516, 0.04273926544189453, 0.0424436149597168, 0.04187567901611328, 0.042035167694091796, 0.04179462432861328, 0.042076927185058594, 0.04169855880737305, 0.04206668853759766, 0.04215110397338867, 0.042412864685058595, 0.042352638244628905, 0.04230553436279297, 0.042436607360839845, 0.04194508743286133, 0.041680896759033206, 0.04157628631591797, 0.0417179183959961, 0.04156630325317383, 0.04156192016601563, 0.041430561065673825, 0.041968193054199215, 0.04189184188842773, 0.04182387161254883, 0.04157068634033203, 0.04137567901611328, 0.04159904098510742, 0.04233420944213867, 0.04146755218505859, 0.04140252685546875, 0.0419837760925293, 0.04138585662841797, 0.041457889556884765, 0.041457984924316404, 0.041662017822265626, 0.04220479965209961, 0.04180188751220703, 0.042209953308105466, 0.042000385284423826, 0.04195510482788086, 0.041600929260253904, 0.041860511779785156, 0.04208332824707031, 0.04172995376586914, 0.04157603073120117, 0.04202876663208008, 0.042216129302978515, 0.0419359359741211, 0.04174124908447266, 0.042897407531738284, 0.04205305480957031, 0.04169580841064453, 0.04164812850952149, 0.04190534210205078, 0.041925472259521486, 0.0418568000793457, 0.04178553771972656, 0.04220867156982422, 0.04209692764282227, 0.04212054443359375, 0.04236355209350586, 0.04182457733154297, 0.041998336791992184, 0.04335865783691406, 0.04226268768310547, 0.041821792602539064, 0.04159529495239258, 0.041989311218261716, 0.04194796752929687, 0.041917728424072265, 0.04165027236938477, 0.04169350433349609, 0.04169132614135742, 0.041982078552246095, 0.0417894401550293, 0.04213145446777344, 0.04209187316894531, 0.04181379318237305, 0.042482303619384765, 0.041519359588623045, 0.04169728088378906, 0.04280924987792969, 0.04178953552246094, 0.041630752563476564, 0.04200886535644531, 0.04220787048339844, 0.04258617782592773, 0.042084320068359375, 0.042168350219726564, 0.04208230209350586, 0.0421212158203125, 0.04186272048950195, 0.04172006225585938, 0.04176092910766602, 0.041868671417236325, 0.0419334716796875, 0.04243865585327149, 0.04230915069580078, 0.04255289459228516, 0.04240828704833984, 0.04203577423095703, 0.04226662445068359, 0.042477344512939455, 0.042487552642822266, 0.04225059127807617, 0.04246486282348633, 0.042340896606445313, 0.04228227233886719, 0.042369728088378904, 0.04231951904296875, 0.05163772964477539, 0.04274422454833984, 0.04238921737670898, 0.04318684768676758, 0.04231081771850586, 0.0422852783203125, 0.042078399658203126, 0.04215456008911133, 0.042336254119873046, 0.04257484817504883, 0.04224883270263672, 0.04238691329956055, 0.04243548965454102, 0.04217139053344727, 0.04205875015258789, 0.04237420654296875, 0.04352000045776367, 0.04245708847045898, 0.042049537658691405, 0.04230758285522461, 0.042180831909179685, 0.04307331085205078, 0.04226662445068359, 0.04210483169555664, 0.04250419235229492, 0.042444801330566405, 0.04290895843505859, 0.041977825164794924, 0.041849601745605466, 0.0420843505859375, 0.04227686309814453, 0.04280428695678711, 0.042517440795898434, 0.04230918502807617, 0.042406337738037106, 0.043472286224365234, 0.04351155090332031, 0.04254348754882813, 0.04248214340209961, 0.04252684783935547, 0.04220710372924805, 0.04200431823730469, 0.042057281494140626, 0.04233260726928711, 0.04228726577758789, 0.0422064323425293, 0.04222953414916992, 0.0422185287475586, 0.04272329711914063, 0.04272342300415039, 0.04231158447265625, 0.043421119689941404, 0.041990718841552734, 0.041856319427490234, 0.04220297622680664, 0.04258819198608398, 0.04572761535644531, 
0.042509246826171875, 0.042401729583740236, 0.04268243026733398, 0.042293407440185546, 0.042387454986572266, 0.04250812911987305, 0.0427823371887207, 0.0426295051574707, 0.042864097595214846, 0.04255337524414062, 0.04255590438842773, 0.042450111389160154, 0.04225516891479492, 0.04239782333374023, 0.042665664672851565, 0.042506431579589846, 0.046583168029785155, 0.0430618896484375, 0.042194942474365234, 0.042567680358886716, 0.0427413444519043, 0.042762657165527344, 0.04358828735351562, 0.04275199890136719, 0.042823585510253906, 0.04254934310913086, 0.042428417205810545, 0.04273958587646484, 0.04233417510986328, 0.04194524765014648, 0.04252262496948242, 0.04250211334228516, 0.04282575988769531, 0.0427988166809082, 0.04258230209350586, 0.042584064483642575, 0.04318979263305664, 0.04522134399414063, 0.042810176849365236, 0.04273497772216797, 0.04264815902709961, 0.042340576171875, 0.042218849182128905, 0.04243523025512695, 0.042251873016357425, 0.04225680160522461, 0.041992191314697266, 0.04205977630615235, 0.04216592025756836, 0.04221987152099609, 0.04233347320556641, 0.04243529510498047, 0.04234239959716797, 0.04227686309814453, 0.042178558349609374, 0.042016735076904295, 0.04176079940795899, 0.04195113754272461, 0.04182361602783203, 0.04172463989257812, 0.04262092971801758, 0.04179558563232422, 0.041828353881835936, 0.04186521530151367, 0.04212521743774414, 0.04210483169555664, 0.04219878387451172, 0.041928993225097654, 0.0418540153503418, 0.04212822341918945, 0.042106880187988284, 0.04189404678344726, 0.042237377166748045, 0.04242489624023438, 0.042409568786621096, 0.04256809616088867, 0.04233155059814453, 0.043203136444091794, 0.042219425201416014, 0.04256371307373047, 0.042618881225585936, 0.041908222198486327, 0.04197580718994141, 0.042064960479736326, 0.04296988677978516]",tokens/s,23.598785754700394,,, 4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,888.6272,638.517248,0.0,260.046848,253.883392,s,1,8.1063505859375,8.1063505859375,0.0,8.1063505859375,8.1063505859375,8.1063505859375,8.1063505859375,[8.1063505859375],,kWh,2.2307581012470715e-05,2.453471619660418e-06,7.547506037985929e-06,3.230855867011706e-05,,MB,1197.21984,751.763456,0.0,343.932928,312.754176,s,16,0.18802291202545168,0.011751432001590728,0.0003932107752449667,0.011671087741851806,0.012107920169830321,0.012526600122451782,0.012900865983963013,"[0.01237065601348877, 0.011300288200378417, 0.011611424446105956, 0.011666367530822754, 0.01299443244934082, 0.01167580795288086, 0.011574015617370606, 0.011341152191162109, 0.01158505630493164, 0.011582271575927734, 0.011845184326171874, 0.011739456176757812, 0.011546303749084472, 0.011733183860778809, 0.011708959579467774, 
0.01174835205078125]",tokens/s,21784.579101963627,kWh,3.4663537898855505e-07,3.822086240254721e-08,2.2844337357008875e-07,6.132996149611911e-07,tokens/kWh,417414251.9495947,MB,1208.389632,776.92928,0.0,369.098752,313.412096,s,16,10.237614807128907,0.6398509254455567,0.009020818156885704,0.6407603759765625,0.6492899475097655,0.6501135559082031,0.6516580139160156,"[0.6292039184570313, 0.6278231811523437, 0.6450326538085938, 0.6494700317382812, 0.6400562133789063, 0.619185546875, 0.63930419921875, 0.629085693359375, 0.63702001953125, 0.6465849609375, 0.648677001953125, 0.6401679077148438, 0.6413528442382812, 0.64910986328125, 0.6520441284179688, 0.6434966430664063]",tokens/s,98.46043428964379,kWh,1.8093102338200042e-05,1.9953449318071566e-06,7.187882003928834e-06,2.727632927393603e-05,tokens/kWh,2309694.9507864984,,s,1008,10.228204399108881,0.010147028173719133,0.0002855753306472145,0.010167295932769774,0.010352002906799317,0.010433611059188842,0.011110925121307368,"[0.011118975639343262, 0.01171878433227539, 0.010147520065307617, 0.010025600433349609, 0.010050559997558594, 0.010089471817016601, 0.010200768470764161, 0.010006848335266114, 0.010266143798828125, 0.010023072242736816, 0.01002291202545166, 0.01017843246459961, 0.010164095878601074, 0.00996787166595459, 0.010042624473571777, 0.009949888229370117, 0.00993062400817871, 0.009908384323120117, 0.009882847785949707, 0.009728832244873047, 0.009756671905517577, 0.010051584243774414, 0.010440671920776367, 0.00986348819732666, 0.009830112457275391, 0.009830400466918946, 0.00986019229888916, 0.009956128120422363, 0.00994655990600586, 0.009920224189758301, 0.009878527641296387, 0.009809344291687012, 0.009914239883422852, 0.009816991806030273, 0.009807616233825684, 0.00982425594329834, 0.0099104642868042, 0.009903039932250977, 0.009868351936340333, 0.010100480079650878, 0.01003551959991455, 0.00989356803894043, 0.009795519828796386, 0.009797760009765625, 0.009787391662597657, 0.009783488273620605, 0.009867072105407715, 0.010084511756896973, 0.009961312294006347, 0.0099104642868042, 0.00989574432373047, 0.009975872039794921, 0.009875391960144043, 0.009886719703674317, 0.00985587215423584, 0.009899328231811524, 0.009855808258056641, 0.009789312362670898, 0.009814399719238281, 0.010008255958557128, 0.009771072387695312, 0.009715744018554687, 0.009741920471191406, 0.009354911804199218, 0.009695167541503907, 0.009781408309936523, 0.010012672424316407, 0.0097958402633667, 0.009852383613586426, 0.009718303680419921, 0.009915807723999023, 0.009787936210632325, 0.009732159614562988, 0.009711615562438965, 0.010186335563659669, 0.010751392364501953, 0.012026880264282227, 0.010252287864685058, 0.009975711822509765, 0.009895168304443359, 0.0098985595703125, 0.01011571216583252, 0.01005123233795166, 0.010126912117004394, 0.010033599853515625, 0.01021945571899414, 0.00991648006439209, 0.009857024192810059, 0.009897983551025391, 0.009754624366760254, 0.009791487693786622, 0.009835583686828613, 0.009959808349609375, 0.010148415565490723, 0.010043392181396485, 0.009945088386535645, 0.009893088340759278, 0.009788064002990723, 0.009768671989440918, 0.009685407638549805, 0.009684672355651855, 0.009707200050354003, 0.009730688095092773, 0.009787263870239258, 0.009748160362243652, 0.009804544448852539, 0.009753984451293945, 0.009834879875183106, 0.009965503692626953, 0.009875552177429199, 0.010106368064880371, 0.009898688316345215, 0.009950943946838379, 0.00995695972442627, 0.009920255661010742, 0.00982857608795166, 0.009889344215393066, 0.009997119903564453, 
0.010043456077575683, 0.01005731201171875, 0.010043807983398437, 0.010059712409973144, 0.010094592094421387, 0.010084223747253419, 0.010135904312133789, 0.01008777618408203, 0.009892448425292969, 0.010185279846191407, 0.010172032356262206, 0.010083711624145508, 0.01010086441040039, 0.010156800270080566, 0.010164128303527833, 0.010182144165039063, 0.0101725435256958, 0.010059712409973144, 0.01005568027496338, 0.01006345558166504, 0.010332384109497071, 0.010270624160766602, 0.01015830421447754, 0.010264415740966797, 0.010387455940246582, 0.010084704399108886, 0.01017625617980957, 0.010275360107421874, 0.010217375755310058, 0.0101725435256958, 0.010208895683288574, 0.010234304428100586, 0.010274368286132813, 0.010301183700561524, 0.010289792060852052, 0.010233247756958008, 0.010264287948608399, 0.010273632049560546, 0.010163392066955566, 0.01052348804473877, 0.010266752243041992, 0.010176095962524414, 0.010187040328979493, 0.010182656288146973, 0.010354911804199219, 0.010251520156860351, 0.010566176414489746, 0.010295552253723144, 0.010249983787536622, 0.01042841625213623, 0.010782367706298828, 0.010348896026611328, 0.010242143630981445, 0.010249152183532714, 0.010502847671508789, 0.01021571159362793, 0.010381312370300292, 0.010094592094421387, 0.010201087951660156, 0.010175871849060058, 0.010065983772277833, 0.0101396484375, 0.01010540771484375, 0.01011302375793457, 0.010153568267822266, 0.010140095710754395, 0.010108960151672364, 0.010264575958251953, 0.010340512275695801, 0.010264351844787597, 0.010206815719604492, 0.010054207801818848, 0.010325695991516113, 0.010261887550354004, 0.010317888259887695, 0.010333056449890137, 0.010249471664428711, 0.010320768356323243, 0.010201120376586914, 0.010204480171203614, 0.010160672187805175, 0.010131456375122071, 0.010188799858093261, 0.01017251205444336, 0.010108832359313966, 0.010164223670959472, 0.010084351539611817, 0.010202943801879883, 0.01027295970916748, 0.010198944091796875, 0.010244192123413086, 0.010145792007446289, 0.010259776115417481, 0.010187456130981445, 0.010168319702148437, 0.010096096038818359, 0.010375712394714355, 0.010208800315856933, 0.010196672439575196, 0.010725215911865234, 0.010185600280761719, 0.010266304016113281, 0.010312064170837403, 0.010196479797363281, 0.010226431846618653, 0.010210847854614258, 0.010256287574768066, 0.010248831748962402, 0.010272192001342774, 0.010170047760009765, 0.010232576370239258, 0.010149472236633301, 0.010142111778259277, 0.010210816383361816, 0.01043449592590332, 0.010228096008300781, 0.01026198387145996, 0.010297887802124024, 0.010630911827087402, 0.010293408393859864, 0.01026476764678955, 0.0102575044631958, 0.010301728248596192, 0.010322463989257813, 0.01033011245727539, 0.010283007621765136, 0.010222751617431641, 0.010233951568603515, 0.010195712089538574, 0.010179712295532226, 0.010215328216552735, 0.010259424209594727, 0.011761664390563965, 0.012142592430114747, 0.010226048469543458, 0.010266304016113281, 0.0102640962600708, 0.01020902442932129, 0.010154335975646973, 0.010291135787963868, 0.010217920303344726, 0.01065113639831543, 0.010248800277709962, 0.01019388771057129, 0.010267007827758788, 0.01022828769683838, 0.010102784156799317, 0.010122495651245117, 0.010417119979858398, 0.010708831787109374, 0.010276800155639648, 0.010168160438537598, 0.010106207847595215, 0.010119999885559082, 0.010096768379211425, 0.010090208053588868, 0.01011257553100586, 0.010101344108581543, 0.010057120323181153, 0.010205792427062989, 0.010031359672546387, 0.010022656440734863, 0.010000384330749512, 
0.010087712287902832, 0.010031840324401856, 0.010053631782531738, 0.010057855606079101, 0.010076031684875489, 0.009973695755004883, 0.010143808364868164, 0.01071836757659912, 0.010038111686706543, 0.009936896324157715, 0.009943039894104003, 0.010030559539794922, 0.009943552017211914, 0.009899392127990722, 0.009919136047363282, 0.009991328239440917, 0.010127296447753906, 0.010074943542480468, 0.010117216110229492, 0.010123264312744141, 0.010088447570800782, 0.010676480293273926, 0.010055168151855469, 0.010422528266906738, 0.009965696334838868, 0.010133376121520996, 0.010092543601989747, 0.010059967994689941, 0.01013923168182373, 0.0101112003326416, 0.010160127639770507, 0.010136608123779298, 0.010132351875305175, 0.010063967704772948, 0.00985916805267334, 0.010191295623779297, 0.010116352081298827, 0.010086720466613769, 0.010076512336730957, 0.010090687751770019, 0.010087360382080078, 0.01009552001953125, 0.010200736045837402, 0.010103136062622071, 0.009981727600097657, 0.01026249599456787, 0.010178688049316407, 0.010071743965148925, 0.010011039733886718, 0.010083776473999023, 0.009988703727722169, 0.010011775970458985, 0.010209471702575683, 0.009923263549804688, 0.009852928161621094, 0.009910271644592286, 0.009861184120178222, 0.009893823623657226, 0.009783391952514648, 0.010086655616760254, 0.009871007919311524, 0.009862272262573243, 0.010031968116760255, 0.009787008285522461, 0.009804224014282227, 0.009788895606994629, 0.0097259521484375, 0.009629471778869629, 0.009720159530639648, 0.009707839965820312, 0.009648192405700683, 0.009602272033691406, 0.009667103767395019, 0.009545280456542968, 0.009560480117797851, 0.009551360130310058, 0.009644672393798829, 0.009547616004943848, 0.009504863739013672, 0.009511136054992675, 0.00951244831085205, 0.009677311897277833, 0.009565247535705566, 0.009574655532836915, 0.009590815544128418, 0.009476767539978028, 0.009543680191040039, 0.009550975799560548, 0.00956112003326416, 0.009594592094421386, 0.009619359970092773, 0.009668448448181152, 0.009604928016662597, 0.009621696472167969, 0.009752960205078124, 0.009938495635986328, 0.010043840408325195, 0.009901280403137208, 0.01015503978729248, 0.010150624275207519, 0.010129343986511231, 0.010153183937072754, 0.010107775688171387, 0.010158080101013184, 0.010147295951843261, 0.010187264442443847, 0.0101396484375, 0.010178720474243164, 0.010264255523681641, 0.010166048049926758, 0.010107071876525878, 0.010194399833679198, 0.01019753646850586, 0.010221088409423828, 0.010647999763488769, 0.010352800369262696, 0.010362943649291993, 0.010370623588562012, 0.01033033561706543, 0.010402112007141114, 0.010179807662963867, 0.010305983543395996, 0.010213631629943848, 0.010233375549316406, 0.01025500774383545, 0.010213215827941895, 0.010175552368164063, 0.010207712173461914, 0.01018899154663086, 0.010686176300048828, 0.010424863815307618, 0.01044825553894043, 0.010193535804748535, 0.010164031982421875, 0.010065759658813477, 0.009938272476196289, 0.01001369571685791, 0.010045696258544921, 0.010065664291381836, 0.01010912036895752, 0.010177696228027344, 0.010029727935791016, 0.009969440460205078, 0.010004704475402831, 0.00994428825378418, 0.009925439834594726, 0.00989692783355713, 0.009855999946594238, 0.009810111999511718, 0.009793279647827148, 0.009859135627746582, 0.009918560028076171, 0.010182559967041016, 0.010156031608581542, 0.010070015907287597, 0.010192895889282226, 0.010051584243774414, 0.010010623931884765, 0.00994099235534668, 0.009845855712890626, 0.009618464469909669, 0.00989897632598877, 
0.009858624458312988, 0.009884320259094238, 0.009824031829833985, 0.009744671821594239, 0.009789152145385742, 0.009875040054321289, 0.00984438419342041, 0.009957792282104493, 0.010053024291992188, 0.010098976135253906, 0.010072575569152833, 0.010070176124572753, 0.01030947208404541, 0.010119327545166016, 0.010235903739929199, 0.010000384330749512, 0.010120736122131348, 0.010004704475402831, 0.009957920074462891, 0.009922271728515624, 0.009875231742858887, 0.009830623626708985, 0.009862688064575196, 0.009873888015747071, 0.009889375686645508, 0.00997212791442871, 0.009861120223999023, 0.009989184379577637, 0.009851327896118164, 0.009900544166564941, 0.009889792442321778, 0.00991641616821289, 0.009856032371520997, 0.009808671951293946, 0.00978172779083252, 0.009924351692199707, 0.00988595199584961, 0.009938655853271484, 0.009873632431030274, 0.00987331199645996, 0.009807071685791015, 0.009855648040771485, 0.00988684844970703, 0.010003328323364258, 0.010044416427612305, 0.01001471996307373, 0.010021056175231934, 0.010080960273742676, 0.010128928184509276, 0.010158687591552735, 0.010145440101623535, 0.010156512260437012, 0.01005072021484375, 0.010105567932128906, 0.010194720268249512, 0.010132863998413085, 0.010169183731079102, 0.010282336235046386, 0.01010256004333496, 0.010056575775146485, 0.010180607795715332, 0.009937088012695312, 0.010063936233520508, 0.010201215744018555, 0.010033568382263184, 0.010046815872192383, 0.010119839668273926, 0.010168416023254395, 0.010153056144714356, 0.01008249568939209, 0.01002575969696045, 0.010047327995300294, 0.009912320137023926, 0.009928031921386719, 0.010017248153686523, 0.010011072158813477, 0.010306336402893067, 0.010135968208312989, 0.01004310417175293, 0.010033568382263184, 0.009933247566223144, 0.010000415802001953, 0.01005123233795166, 0.010058112144470215, 0.010022848129272462, 0.010034367561340332, 0.010107775688171387, 0.010080191612243652, 0.010383359909057617, 0.01042950439453125, 0.010155200004577637, 0.010245887756347657, 0.010074111938476562, 0.01006991958618164, 0.009994336128234863, 0.009992192268371582, 0.010284735679626464, 0.010150208473205566, 0.010067935943603515, 0.009996128082275391, 0.010315967559814452, 0.010305536270141602, 0.010028863906860351, 0.009981599807739258, 0.010035743713378906, 0.010027008056640625, 0.010102463722229003, 0.010100319862365722, 0.010236512184143067, 0.010147968292236327, 0.01012940788269043, 0.010080256462097169, 0.010196224212646484, 0.010025728225708007, 0.010094335556030273, 0.010029120445251465, 0.010082143783569336, 0.010094688415527344, 0.010143775939941407, 0.010158399581909179, 0.01018051242828369, 0.010228927612304688, 0.010148320198059082, 0.010172767639160156, 0.010091296195983887, 0.01027455997467041, 0.010193056106567382, 0.01025385570526123, 0.010388031959533691, 0.010249759674072266, 0.01027939224243164, 0.01017958354949951, 0.0102488956451416, 0.010170687675476074, 0.010108511924743652, 0.010197407722473144, 0.010178751945495605, 0.010143487930297852, 0.0102740478515625, 0.010259103775024415, 0.010188960075378417, 0.010259936332702637, 0.010175007820129395, 0.01030348777770996, 0.010283007621765136, 0.010264127731323243, 0.01021174430847168, 0.010317855834960938, 0.010342399597167968, 0.010477760314941405, 0.01059615993499756, 0.010211487770080567, 0.010159968376159668, 0.010528736114501953, 0.010192928314208984, 0.010229567527770995, 0.010326208114624024, 0.010175840377807617, 0.010228384017944335, 0.01022976016998291, 0.010241984367370605, 0.01029081630706787, 0.010211775779724121, 
0.010448896408081054, 0.01022156810760498, 0.010344127655029296, 0.010239487648010253, 0.010236736297607421, 0.010293439865112304, 0.010297151565551757, 0.01026460838317871, 0.010241888046264648, 0.0102741756439209, 0.0103156156539917, 0.010162176132202149, 0.010231007575988769, 0.010247872352600098, 0.010149248123168945, 0.010194944381713868, 0.010185343742370605, 0.010145792007446289, 0.01016972827911377, 0.010272416114807129, 0.010194944381713868, 0.0101693115234375, 0.01021132755279541, 0.010414112091064453, 0.01012940788269043, 0.01023196792602539, 0.010279775619506836, 0.010171456336975098, 0.010266559600830078, 0.010373120307922363, 0.010186752319335938, 0.010203136444091796, 0.010401344299316407, 0.010296832084655762, 0.010296256065368652, 0.010266655921936036, 0.010282976150512695, 0.010143872261047363, 0.010196864128112793, 0.01021491241455078, 0.01023846435546875, 0.010205120086669921, 0.010098431587219238, 0.010379424095153808, 0.01011513614654541, 0.010228128433227538, 0.01011292839050293, 0.010346495628356933, 0.01032585620880127, 0.010218879699707032, 0.01022169589996338, 0.010119903564453124, 0.01014140796661377, 0.010141823768615723, 0.010166144371032715, 0.010194944381713868, 0.010167455673217774, 0.010221920013427734, 0.010252799987792969, 0.010198847770690918, 0.010073951721191406, 0.010132991790771484, 0.010246720314025879, 0.01009721565246582, 0.010216320037841797, 0.010183520317077636, 0.010078207969665527, 0.010369024276733398, 0.011003968238830566, 0.013030752182006836, 0.01027244758605957, 0.010238495826721192, 0.010231776237487793, 0.01019331169128418, 0.010283007621765136, 0.010227871894836425, 0.010235424041748048, 0.010178144454956056, 0.010246975898742676, 0.010250144004821778, 0.010273792266845704, 0.010304512023925782, 0.01035260772705078, 0.010340383529663086, 0.010329248428344727, 0.01032588768005371, 0.010320480346679688, 0.010637311935424805, 0.010522624015808106, 0.01033011245727539, 0.010314047813415527, 0.01022492790222168, 0.010274880409240723, 0.010254688262939453, 0.010070015907287597, 0.01001587200164795, 0.010035296440124512, 0.010068767547607422, 0.009980159759521484, 0.01002847957611084, 0.010035743713378906, 0.010010399818420411, 0.00994099235534668, 0.010012672424316407, 0.009885312080383301, 0.009941375732421875, 0.009797632217407226, 0.00986348819732666, 0.009858752250671387, 0.009805824279785156, 0.00981606388092041, 0.009868512153625488, 0.009777952194213867, 0.009789664268493653, 0.009854751586914063, 0.009899711608886719, 0.010018752098083497, 0.01002943992614746, 0.010194944381713868, 0.010184672355651856, 0.010782848358154297, 0.010358688354492187, 0.010178560256958008, 0.010188608169555665, 0.010116352081298827, 0.010126272201538085, 0.010201184272766114, 0.010216896057128905, 0.010258367538452148, 0.010162976264953614, 0.010135616302490234, 0.010081695556640626, 0.010103072166442871, 0.010074048042297364, 0.010252351760864259, 0.01144380760192871, 0.010277152061462402, 0.010196895599365234, 0.01027280044555664, 0.010188032150268554, 0.010476415634155273, 0.010246047973632813, 0.010447135925292969, 0.010290528297424316, 0.01018899154663086, 0.010125663757324219, 0.010228063583374023, 0.010196415901184083, 0.010182880401611328, 0.010241920471191407, 0.009974047660827637, 0.01029145622253418, 0.010178751945495605, 0.01022156810760498, 0.010087648391723632, 0.01018070411682129, 0.01019155216217041, 0.010256383895874023, 0.010268863677978515, 0.01034348773956299, 0.010230527877807618, 0.010178560256958008, 0.010209280014038086, 
0.01018611240386963, 0.010222335815429688, 0.01020902442932129, 0.010227840423583984, 0.010203136444091796, 0.01001471996307373, 0.010037247657775878, 0.010028032302856446, 0.01009990406036377, 0.010028863906860351, 0.010008576393127442, 0.009955327987670898, 0.010079936027526855, 0.010184384346008301, 0.010194815635681153, 0.010136128425598145, 0.010074560165405273, 0.010117183685302735, 0.010095840454101562, 0.010082079887390136, 0.010007455825805664, 0.010045215606689454, 0.010074111938476562, 0.010207008361816407, 0.010162431716918946, 0.010126463890075684, 0.010152640342712403, 0.010112735748291015, 0.01041862392425537, 0.010207008361816407, 0.010348768234252929, 0.010272768020629883, 0.01026358413696289, 0.010181599617004395, 0.010147839546203614, 0.010176511764526367, 0.010215295791625976, 0.010209407806396485, 0.010202336311340331, 0.010248991966247558, 0.010115072250366211, 0.010136927604675293, 0.010260128021240234, 0.010213472366333009, 0.010309920310974122, 0.010201087951660156, 0.010195263862609863, 0.0102357759475708, 0.01025830364227295, 0.010226240158081054, 0.010037280082702637, 0.010248127937316895, 0.010195648193359375, 0.010140735626220703, 0.010488767623901368, 0.010375167846679688, 0.01031123161315918, 0.010289312362670899, 0.010414560317993165, 0.010347455978393554, 0.010377728462219238, 0.010301823616027833, 0.010319744110107422, 0.010371199607849121, 0.010329567909240722, 0.010260128021240234, 0.010294143676757812, 0.010170368194580079, 0.010167360305786133, 0.010224448204040528, 0.010258655548095703, 0.010239904403686523, 0.010256192207336427, 0.010431967735290527, 0.01035654354095459, 0.0103056001663208, 0.010412896156311035, 0.010381312370300292, 0.01042851161956787, 0.010368415832519531, 0.010301952362060546, 0.010280960083007813, 0.010326047897338868, 0.010258399963378906, 0.010279104232788086, 0.010207200050354004, 0.010323488235473632, 0.010114432334899903, 0.010167231559753418, 0.010167360305786133, 0.010203424453735352, 0.01034716796875, 0.010335391998291015, 0.010458175659179687, 0.010312735557556152, 0.010304256439208985, 0.010248191833496094, 0.010171839714050293, 0.010307744026184082, 0.010277152061462402, 0.010549375534057617, 0.010293024063110352, 0.010315999984741211, 0.010286144256591797, 0.01028326416015625, 0.010321727752685547, 0.010310751914978027, 0.010289055824279785, 0.010233983993530274, 0.010214495658874511, 0.010279711723327636, 0.01025216007232666, 0.010364704132080078, 0.010141471862792968, 0.010319999694824219, 0.01035968017578125, 0.010362624168395996, 0.010475008010864258, 0.010324671745300293, 0.010363840103149414, 0.010379872322082519, 0.010402079582214355, 0.010223648071289063, 0.010207296371459961, 0.010258336067199707, 0.010266048431396484, 0.010261055946350097, 0.010273887634277343, 0.010339167594909668, 0.010351743698120117, 0.010372032165527343, 0.010339679718017579, 0.01036291217803955, 0.010285696029663087, 0.01042841625213623, 0.010244064331054688, 0.010301471710205078, 0.010324031829833984, 0.01027603244781494, 0.010246912002563476, 0.010223615646362304, 0.010249631881713867, 0.010234432220458984, 0.010528800010681152, 0.010483807563781738, 0.010330016136169434, 0.01032806396484375, 0.010270719528198241, 0.010302687644958496, 0.010349216461181641, 0.0103538236618042, 0.010711968421936035, 0.010450271606445313, 0.01037507152557373, 0.010285887718200683, 0.010233887672424316, 0.010774496078491212, 0.011421695709228515, 0.01023795223236084, 0.010586112022399903, 0.01074176025390625, 0.010184096336364747, 
0.010195136070251465, 0.010217375755310058, 0.010214079856872558, 0.01024124813079834, 0.01017046356201172, 0.01016387176513672, 0.010150688171386719, 0.0102390718460083, 0.01040278434753418, 0.010397695541381835, 0.010231136322021484, 0.010221311569213867, 0.010234784126281739, 0.01023788833618164, 0.01025654411315918, 0.010371040344238282, 0.010332863807678222, 0.010373184204101563, 0.010409472465515136, 0.010481504440307618, 0.010326623916625977, 0.010354496002197266, 0.010274975776672364, 0.010219072341918945, 0.010039775848388673, 0.010088704109191894, 0.010286848068237304, 0.010170368194580079, 0.010171648025512695, 0.010141504287719727, 0.01014675235748291, 0.010106240272521973, 0.010067999839782715, 0.010082559585571289, 0.010172863960266114, 0.010286656379699707, 0.010797599792480468, 0.013413279533386231, 0.010668959617614746, 0.010178496360778808, 0.010169856071472168, 0.010119711875915528, 0.010139679908752442, 0.010196991920471191, 0.010176287651062012, 0.010172736167907715, 0.010020671844482422, 0.009940095901489258, 0.010011872291564942, 0.009977248191833496, 0.009939295768737793, 0.01002905559539795, 0.009879551887512206, 0.010013792037963867, 0.010007455825805664, 0.010195103645324707, 0.01019478416442871, 0.01029689598083496, 0.010332608222961425, 0.010191200256347656, 0.010164031982421875, 0.011173727989196778, 0.01134550380706787, 0.010014143943786621, 0.010004575729370118, 0.009947711944580079, 0.009881919860839843, 0.009842559814453125, 0.009889920234680176, 0.009834303855895996, 0.009941216468811034, 0.009879679679870605, 0.009856351852416992, 0.009857536315917969, 0.009772224426269531, 0.009671680450439453, 0.009697088241577148]",tokens/s,98.55102231705696,,, 4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4254.9248,4725.866496,0.0,4347.396096,4328.833024,s,1,10.1469296875,10.1469296875,0.0,10.1469296875,10.1469296875,10.1469296875,10.1469296875,[10.1469296875],,kWh,8.911540633337153e-05,9.822733647001379e-06,3.0221413065995018e-05,0.00012915955304636793,,MB,1385.30816,5212.40576,0.0,4804.575232,4748.27776,s,10,3.353415740966797,0.3353415740966797,0.002830763496809025,0.3359424896240234,0.3385506988525391,0.3389105422973633,0.3391984170532226,"[0.33013101196289063, 0.3346091613769531, 0.3317735290527344, 0.33279052734375, 0.3358599853515625, 0.3360249938964844, 0.33715487670898436, 0.3373305358886719, 0.3392703857421875, 0.3384707336425781]",tokens/s,763.4007226500185,kWh,9.747006747915444e-06,1.0745657501540521e-06,6.462662577533251e-06,1.7284235075602748e-05,tokens/kWh,14811184.809754886,MB,1388.466176,5321.457664,0.0,4913.627136,4878.043648,s,10,18.212191162109374,1.8212191162109375,0.004827572697774101,1.8191776123046877,1.827504541015625,1.8289905517578124,1.8301793603515624,"[1.8162979736328124, 1.8251033935546874, 1.818055419921875, 1.8238345947265624, 1.8304765625, 1.8177127685546874, 1.8202998046875, 1.8162591552734375, 
1.8169771728515625, 1.82717431640625]",tokens/s,34.59221322641949,kWh,5.342437244375331e-05,5.892963448966416e-06,3.5353204208465884e-05,9.467054010118562e-05,tokens/kWh,665465.729176832,,s,630,18.20843064689638,0.028902270868089466,0.0012119222805392464,0.028918832778930665,0.029754111671447755,0.03004606399536133,0.03278376026153565,"[0.03177004814147949, 0.02955731201171875, 0.028858367919921874, 0.02851840019226074, 0.027850751876831056, 0.02754867172241211, 0.02753638458251953, 0.027578367233276366, 0.028203008651733398, 0.029016063690185546, 0.029050464630126952, 0.028816831588745116, 0.029090656280517577, 0.028483680725097656, 0.02849795150756836, 0.028086271286010742, 0.027801599502563477, 0.028544736862182618, 0.029761024475097656, 0.029354784011840822, 0.029095775604248048, 0.028499647140502928, 0.028977632522583008, 0.029050880432128907, 0.028440576553344726, 0.027906047821044923, 0.0275883846282959, 0.028242143630981445, 0.029832672119140625, 0.029891103744506837, 0.029163520812988283, 0.02832156753540039, 0.028291296005249024, 0.03003392028808594, 0.03014656066894531, 0.02898739242553711, 0.02931692886352539, 0.029257919311523436, 0.029247488021850586, 0.02872230339050293, 0.02824665641784668, 0.028120479583740234, 0.028841951370239257, 0.02945849609375, 0.029219648361206055, 0.028964351654052735, 0.029443904876708983, 0.029078208923339843, 0.028456960678100586, 0.02815385627746582, 0.02800230407714844, 0.028374208450317382, 0.02902300834655762, 0.029276191711425783, 0.02873520088195801, 0.02784079933166504, 0.02813542366027832, 0.02848348808288574, 0.029728511810302734, 0.029505887985229493, 0.029249536514282228, 0.029326847076416016, 0.029399551391601563, 0.03220675277709961, 0.029395040512084962, 0.028828832626342775, 0.02757923126220703, 0.027123647689819334, 0.028083904266357422, 0.028434528350830077, 0.027760223388671876, 0.027682880401611328, 0.027588735580444335, 0.02812774467468262, 0.028112895965576173, 0.027661504745483397, 0.02776927947998047, 0.028299455642700196, 0.02780998420715332, 0.027906047821044923, 0.028274688720703125, 0.027883520126342775, 0.02781795120239258, 0.028302560806274413, 0.02907804870605469, 0.028835744857788087, 0.02902396774291992, 0.028891679763793945, 0.028144960403442384, 0.027810623168945312, 0.02791993522644043, 0.028352960586547852, 0.028080127716064454, 0.028243967056274414, 0.028663808822631837, 0.030811775207519532, 0.0302325439453125, 0.029684127807617186, 0.029362016677856446, 0.029407392501831053, 0.02933145523071289, 0.02958745574951172, 0.029133920669555665, 0.028629919052124024, 0.029247488021850586, 0.02949734306335449, 0.028338176727294922, 0.02771968078613281, 0.027678752899169923, 0.0284704647064209, 0.029706687927246095, 0.02988172721862793, 0.029518592834472657, 0.029104352951049805, 0.028655071258544922, 0.02846352005004883, 0.028051584243774415, 0.02840332794189453, 0.027990400314331056, 0.049065662384033204, 0.027023231506347656, 0.029034143447875978, 0.02974595260620117, 0.02917328071594238, 0.02923535919189453, 0.028907487869262696, 0.03366329574584961, 0.03047235107421875, 0.028802879333496095, 0.0285534725189209, 0.02906924819946289, 0.028940288543701172, 0.02834432029724121, 0.027701248168945314, 0.028097631454467774, 0.02898012733459473, 0.02907916831970215, 0.02828940773010254, 0.02789580726623535, 0.028030975341796875, 0.028017887115478514, 0.02787942314147949, 0.02818332862854004, 0.029312160491943358, 0.029037055969238282, 0.029102432250976563, 0.028553216934204102, 0.028193855285644533, 
0.027881824493408203, 0.028227231979370118, 0.029346879959106446, 0.029349760055541994, 0.028366367340087892, 0.027639776229858398, 0.02810220718383789, 0.028253120422363283, 0.028112895965576173, 0.028084224700927734, 0.02901558494567871, 0.0298111686706543, 0.029353696823120116, 0.02922224044799805, 0.028330944061279298, 0.028077184677124025, 0.028137887954711914, 0.028527008056640626, 0.029323328018188478, 0.02897100830078125, 0.029474815368652343, 0.02949718475341797, 0.029005247116088866, 0.029147872924804686, 0.02975334358215332, 0.029123584747314454, 0.02845737648010254, 0.02769366455078125, 0.028088319778442384, 0.028704767227172853, 0.02875596809387207, 0.029290496826171877, 0.029501312255859374, 0.028831872940063476, 0.029509632110595704, 0.02961961555480957, 0.029037151336669922, 0.029675519943237305, 0.02915328025817871, 0.029683935165405274, 0.029357215881347657, 0.03265171051025391, 0.02995840072631836, 0.028925952911376954, 0.028827104568481445, 0.029544992446899412, 0.028612607955932616, 0.027246368408203124, 0.027302112579345703, 0.02760704040527344, 0.02852060890197754, 0.029222751617431642, 0.028579679489135743, 0.02761334419250488, 0.027280895233154297, 0.02804377555847168, 0.02812886428833008, 0.027892127990722656, 0.02814771270751953, 0.027674623489379883, 0.028676095962524413, 0.029519872665405275, 0.029224960327148438, 0.02853273582458496, 0.028516128540039064, 0.02798944091796875, 0.027879199981689452, 0.028999935150146483, 0.029727487564086913, 0.029016063690185546, 0.02873958396911621, 0.027637407302856444, 0.028309856414794922, 0.030441087722778322, 0.0302923526763916, 0.029014015197753908, 0.029320735931396485, 0.02987571144104004, 0.029895647048950195, 0.029648895263671874, 0.029173536300659178, 0.02855891227722168, 0.028868511199951173, 0.030233343124389647, 0.029853055953979492, 0.028887359619140626, 0.029366592407226562, 0.02937001609802246, 0.02905241584777832, 0.02918454360961914, 0.02913862419128418, 0.02927199935913086, 0.028176544189453125, 0.028082719802856447, 0.02814566421508789, 0.029540096282958984, 0.029859296798706053, 0.029446943283081055, 0.02875596809387207, 0.02950553512573242, 0.029699232101440428, 0.029166431427001954, 0.028903423309326173, 0.030087167739868165, 0.03285168075561523, 0.029694303512573243, 0.028843551635742187, 0.02815433692932129, 0.028008447647094727, 0.028094463348388672, 0.02776380729675293, 0.02816092872619629, 0.028049407958984376, 0.028196096420288086, 0.02897724723815918, 0.0291843204498291, 0.02885443115234375, 0.029200576782226564, 0.02886409568786621, 0.029180320739746093, 0.028979103088378907, 0.028541023254394532, 0.027439104080200196, 0.027725887298583985, 0.02868332862854004, 0.029538944244384767, 0.02894054412841797, 0.02797772789001465, 0.02775859260559082, 0.027602336883544923, 0.02832035255432129, 0.029104127883911132, 0.028778495788574218, 0.029462528228759766, 0.029470720291137696, 0.029100032806396486, 0.028841567993164063, 0.030443935394287108, 0.035036224365234375, 0.02868729591369629, 0.0283504638671875, 0.029900800704956054, 0.02959564781188965, 0.028652544021606444, 0.029189023971557617, 0.02941961669921875, 0.02958950424194336, 0.028628095626831055, 0.02955763244628906, 0.029542335510253905, 0.02910214424133301, 0.028231679916381838, 0.02815782356262207, 0.029218944549560547, 0.029035968780517576, 0.029295167922973632, 0.02936627197265625, 0.028807167053222657, 0.0296711368560791, 0.030003488540649412, 0.029646848678588866, 0.02964838409423828, 0.02926748847961426, 0.029231327056884766, 
0.029106943130493165, 0.02851840019226074, 0.028858335494995117, 0.03283769607543945, 0.029829055786132812, 0.02857740783691406, 0.027896192550659178, 0.028024288177490236, 0.028090911865234373, 0.028628992080688476, 0.027662336349487306, 0.028188255310058592, 0.02768479919433594, 0.02809084892272949, 0.028700063705444336, 0.027802207946777343, 0.02736947250366211, 0.02789295959472656, 0.028508960723876955, 0.027998207092285156, 0.02856879997253418, 0.027625759124755858, 0.02765180778503418, 0.02817100715637207, 0.028051488876342772, 0.027772031784057617, 0.027865407943725586, 0.028711488723754883, 0.029710336685180663, 0.02876345634460449, 0.027781824111938476, 0.027627519607543945, 0.02832313537597656, 0.029420223236083985, 0.029388799667358398, 0.029212480545043946, 0.0292388801574707, 0.02929929542541504, 0.029246591567993165, 0.02997657585144043, 0.029932416915893555, 0.029324384689331056, 0.028967552185058594, 0.029036832809448243, 0.030055999755859375, 0.029450687408447265, 0.029083648681640626, 0.02895382308959961, 0.02847772789001465, 0.0295897274017334, 0.02974764823913574, 0.029611072540283202, 0.029274911880493165, 0.028730623245239256, 0.029360895156860353, 0.029306880950927733, 0.029175296783447265, 0.029204992294311522, 0.029278207778930664, 0.029259679794311523, 0.02896291160583496, 0.029479167938232423, 0.029036575317382813, 0.028708576202392578, 0.029732864379882814, 0.029398624420166015, 0.03294822311401367, 0.029867839813232423, 0.029040864944458008, 0.027706783294677736, 0.027068992614746094, 0.028096511840820314, 0.028366847991943358, 0.028911455154418945, 0.029558944702148437, 0.028681535720825196, 0.027322656631469728, 0.027701663970947265, 0.028106752395629882, 0.028239839553833006, 0.02887068748474121, 0.029417695999145507, 0.029037343978881837, 0.028327007293701172, 0.027469728469848635, 0.02838937568664551, 0.028026559829711913, 0.028200544357299805, 0.028002527236938475, 0.028614463806152343, 0.0285150089263916, 0.027967487335205078, 0.02826755142211914, 0.02788150405883789, 0.02845359992980957, 0.027803712844848633, 0.0280864315032959, 0.02958336067199707, 0.029605119705200196, 0.02921548843383789, 0.0294072322845459, 0.02944819259643555, 0.029899999618530272, 0.02978019142150879, 0.029477439880371093, 0.02913603210449219, 0.02920956802368164, 0.029604799270629884, 0.029619136810302735, 0.028831520080566407, 0.029516000747680664, 0.029054527282714845, 0.02801299285888672, 0.0281395206451416, 0.028411872863769533, 0.02853276824951172, 0.029585407257080077, 0.02992291259765625, 0.029622175216674804, 0.029356544494628906, 0.029077503204345705, 0.028767967224121095, 0.029638944625854494, 0.03003392028808594, 0.02965878486633301, 0.029339424133300783, 0.02908831977844238, 0.028753536224365234, 0.02960367965698242, 0.03287036895751953, 0.029923360824584962, 0.028441728591918944, 0.02773593521118164, 0.028050432205200194, 0.02817228889465332, 0.028622848510742187, 0.029095935821533202, 0.028794879913330077, 0.02750223922729492, 0.02761337661743164, 0.02830294418334961, 0.029206464767456055, 0.028797760009765624, 0.028532543182373048, 0.02761849594116211, 0.027848512649536132, 0.02840060806274414, 0.027805728912353514, 0.027852800369262694, 0.02829516792297363, 0.028991487503051756, 0.0293703670501709, 0.028854272842407228, 0.02759212875366211, 0.0277509765625, 0.027936767578125, 0.02906284713745117, 0.029065536499023437, 0.028401376724243164, 0.027666719436645507, 0.028359872817993164, 0.030460735321044922, 0.030402559280395508, 0.029736640930175782, 
0.029134496688842774, 0.029231775283813478, 0.02906857681274414, 0.028383359909057618, 0.02871766471862793, 0.027420448303222655, 0.028801248550415038, 0.028780160903930666, 0.028649280548095703, 0.02897724723815918, 0.030394847869873048, 0.029533824920654296, 0.028655040740966798, 0.027595712661743165, 0.02750054359436035, 0.028804512023925782, 0.02987654495239258, 0.029278495788574218, 0.029147136688232423, 0.029703872680664063, 0.029681983947753905, 0.02930633544921875, 0.028993888854980467, 0.02929871940612793, 0.02915305519104004, 0.028952320098876952, 0.030128671646118165, 0.029552703857421876, 0.03210588836669922, 0.02944879913330078, 0.028563199996948244, 0.02880281639099121, 0.028264959335327147, 0.02757212829589844, 0.027670623779296875, 0.027527328491210937, 0.02826019287109375, 0.02908345603942871, 0.028698816299438476, 0.027584512710571288, 0.02759065628051758, 0.02790809631347656, 0.028243648529052735, 0.028757856369018554, 0.029156864166259764, 0.028928159713745117, 0.029430591583251953, 0.02897305679321289, 0.028297216415405273, 0.027876447677612305, 0.027960224151611326, 0.028499967575073244, 0.027682815551757813, 0.027893760681152343, 0.028526496887207032, 0.02773955154418945, 0.028209856033325195, 0.029057024002075195, 0.02964179229736328, 0.029565887451171877, 0.029013376235961914, 0.029913728713989257, 0.029327360153198243, 0.02859779167175293, 0.02831817626953125, 0.027596128463745116, 0.028641984939575194, 0.02966524887084961, 0.029534208297729493, 0.02853887939453125, 0.028006399154663086, 0.028925216674804688, 0.03010211181640625, 0.02967900848388672, 0.029370271682739257, 0.028912448883056642, 0.02792038345336914, 0.02856550407409668, 0.02892995262145996, 0.030169183731079102, 0.029775871276855468, 0.029261823654174804, 0.029005311965942384, 0.029069440841674805, 0.029980863571166992, 0.029673055648803712, 0.029737567901611327, 0.029198335647583007, 0.02897100830078125, 0.029057024002075195, 0.029582880020141603, 0.03211884689331055, 0.029147136688232423, 0.02846031951904297, 0.027697887420654297, 0.027885087966918944, 0.028352991104125976, 0.028851648330688477, 0.029515647888183595, 0.028973760604858397, 0.02792857551574707, 0.02781955146789551, 0.02912918472290039, 0.029400896072387696, 0.028452255249023437, 0.027707584381103517, 0.02823360061645508, 0.029260320663452147, 0.02954022407531738, 0.02847148895263672, 0.02872563171386719, 0.02875366401672363, 0.029419519424438476, 0.029122560501098634, 0.028255807876586915, 0.027680831909179686, 0.02866419219970703, 0.030228063583374022, 0.029621696472167967, 0.029305728912353515, 0.028821599960327147, 0.028109920501708983, 0.02910095977783203, 0.030103551864624024, 0.029800447463989257, 0.028933696746826172, 0.029706687927246095, 0.029026304244995117, 0.029446016311645506, 0.029431583404541016, 0.030105951309204102, 0.030003200531005858, 0.029421567916870117, 0.02910310363769531, 0.029535232543945314, 0.029419519424438476, 0.028901376724243165, 0.027729055404663087, 0.027479904174804688, 0.027491167068481447, 0.02749251174926758, 0.027922431945800782, 0.029416479110717774, 0.029783008575439453, 0.029648832321166992, 0.029902048110961914, 0.029571456909179686, 0.029051359176635743, 0.028383232116699218, 0.028180383682250978, 0.029373823165893556, 0.03010633659362793, 0.02998067283630371, 0.029626047134399414]",tokens/s,34.599357419492044,,, 
4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,2193.874944,2486.10816,0.0,2107.63776,1984.899072,s,1,8.646224609375,8.646224609375,0.0,8.646224609375,8.646224609375,8.646224609375,8.646224609375,[8.646224609375],,kWh,5.23868538958671e-05,5.771400339708608e-06,1.7365847226002984e-05,7.55241014615787e-05,,MB,2219.184128,2781.806592,0.0,2373.976064,2247.84384,s,10,1.5936590423583983,0.15936590423583982,0.0005326781635457881,0.15929850006103516,0.15965512542724608,0.1601829704284668,0.16060524642944335,"[0.1607108154296875, 0.15953782653808593, 0.1590572509765625, 0.15952720642089843, 0.15938755798339843, 0.15948480224609374, 0.15920944213867189, 0.15853050231933594, 0.15904937744140624, 0.1591642608642578]",tokens/s,1606.3661874697796,kWh,4.751470376276601e-06,5.239947012396316e-07,3.161127080870649e-06,8.436592158386882e-06,tokens/kWh,30344005.63567701,MB,2220.621824,2865.692672,0.0,2457.862144,2341.346816,s,10,15.49423681640625,1.549423681640625,0.01236509743194837,1.5456995849609374,1.5683415161132812,1.5730599792480469,1.5768347497558595,"[1.5434661865234376, 1.5443287353515625, 1.5384366455078125, 1.5373388671875, 1.5470704345703126, 1.547352783203125, 1.5412177734375, 1.5499539794921875, 1.5777784423828125, 1.56729296875]",tokens/s,40.660279526185974,kWh,4.6381450148306314e-05,5.1154417775501975e-06,2.863289835433217e-05,8.012979028018868e-05,tokens/kWh,786224.4463601966,,s,630,15.491415399551387,0.024589548253256176,0.0004737422126479425,0.024426400184631346,0.025178885650634766,0.025357644176483153,0.026356863498687745,"[0.025246559143066408, 0.024980703353881837, 0.025242399215698243, 0.02483305549621582, 0.0245831356048584, 0.02435686492919922, 0.024293535232543944, 0.02456150436401367, 0.024390783309936524, 0.024582719802856444, 0.025866783142089844, 0.025108192443847658, 0.024294496536254883, 0.028855264663696287, 0.024311040878295897, 0.024402687072753906, 0.02434252738952637, 0.024360383987426758, 0.02431648063659668, 0.02434867286682129, 0.024197120666503907, 0.024322080612182616, 0.024321216583251953, 0.024363103866577147, 0.024314111709594726, 0.02508025550842285, 0.024200992584228517, 0.024334560394287108, 0.02439129638671875, 0.02475881576538086, 0.02426358413696289, 0.024367456436157228, 0.024187200546264647, 0.024276479721069336, 0.02409347152709961, 0.02422809600830078, 0.024253215789794922, 0.024331104278564452, 0.024206911087036133, 0.02429142379760742, 0.024275583267211916, 0.02425040054321289, 0.024294240951538086, 0.024199552536010742, 0.024209280014038086, 0.024163040161132812, 0.02421785545349121, 0.024333311080932618, 0.02476214408874512, 0.02470832061767578, 0.024379135131835938, 0.024363008499145508, 0.024289472579956055, 0.024378368377685547, 0.024226015090942382, 0.024404640197753905, 0.024354751586914063, 0.02432598304748535, 0.02416860771179199, 0.024175872802734377, 0.024193504333496093, 0.02426233673095703, 0.024218208312988283, 0.02470889663696289, 0.024913536071777344, 0.024355424880981445, 0.024319072723388672, 0.024273792266845704, 0.024193056106567384, 0.024260608673095704, 0.02430771255493164, 0.026355136871337892, 0.02553094482421875, 0.024473600387573242, 0.024344415664672853, 0.02430521583557129, 0.024253023147583007, 0.024223583221435547, 0.024298784255981445, 0.024170944213867188, 0.024256959915161132, 0.024195072174072265, 0.024557567596435546, 0.02421753692626953, 0.025440319061279297, 0.024346752166748045, 0.024419551849365236, 0.02426128005981445, 0.024420352935791017, 0.024315967559814453, 0.02446329689025879, 
0.024358816146850586, 0.024410207748413085, 0.024747648239135743, 0.024295808792114258, 0.024354591369628906, 0.02437763214111328, 0.02426985549926758, 0.02425539207458496, 0.024275136947631837, 0.024423295974731446, 0.024351680755615234, 0.02431590461730957, 0.02454313659667969, 0.02541391944885254, 0.024926015853881836, 0.024374656677246094, 0.02435696029663086, 0.024295167922973634, 0.024400672912597655, 0.024401920318603516, 0.024367103576660155, 0.02434867286682129, 0.02442144012451172, 0.024625503540039062, 0.024443487167358398, 0.024422399520874022, 0.02434662437438965, 0.02434662437438965, 0.024366559982299803, 0.024351232528686522, 0.024363040924072266, 0.024345983505249025, 0.02486534309387207, 0.026291648864746095, 0.02482035255432129, 0.025149375915527343, 0.024415903091430664, 0.02431420707702637, 0.02429955291748047, 0.02474611282348633, 0.0244466552734375, 0.024166528701782226, 0.024339935302734376, 0.024136127471923827, 0.024332416534423827, 0.024182912826538085, 0.02467593574523926, 0.024615232467651366, 0.025097536087036132, 0.024392383575439453, 0.02434662437438965, 0.02425587272644043, 0.024248735427856445, 0.024324607849121094, 0.024461023330688475, 0.024379392623901368, 0.024429567337036134, 0.02445414352416992, 0.0245534725189209, 0.02434867286682129, 0.024426496505737305, 0.02429952049255371, 0.024362016677856445, 0.02429641532897949, 0.024276031494140624, 0.024260671615600585, 0.024255231857299806, 0.024567712783813478, 0.02432841682434082, 0.024442880630493165, 0.024250431060791014, 0.02430988883972168, 0.024248128890991212, 0.024258560180664062, 0.02427903938293457, 0.024190080642700194, 0.02414886474609375, 0.024328351974487305, 0.024237791061401368, 0.0242642879486084, 0.024236576080322265, 0.025944063186645508, 0.02510032081604004, 0.024481311798095703, 0.024275392532348634, 0.02509414482116699, 0.024266752243041992, 0.0245467529296875, 0.024344768524169922, 0.024527040481567383, 0.024225984573364258, 0.02432204818725586, 0.024236192703247072, 0.024282976150512694, 0.02428927993774414, 0.024223743438720705, 0.024262752532958985, 0.024350624084472656, 0.025370624542236327, 0.024645631790161132, 0.024297664642333985, 0.02501817512512207, 0.02478489685058594, 0.02561027145385742, 0.024242143630981445, 0.024387199401855467, 0.024252799987792967, 0.024368991851806642, 0.024156320571899415, 0.024268096923828125, 0.02414252853393555, 0.024193023681640623, 0.024156160354614258, 0.024132863998413086, 0.024048383712768556, 0.0240614070892334, 0.024151935577392578, 0.024394399642944337, 0.024209407806396483, 0.02418217658996582, 0.024140031814575195, 0.024144128799438478, 0.024543039321899413, 0.02436534309387207, 0.024712928771972655, 0.02415817642211914, 0.02441379165649414, 0.024355552673339845, 0.024379392623901368, 0.024393184661865235, 0.024424896240234376, 0.02432419204711914, 0.02442844772338867, 0.02434467124938965, 0.02449612808227539, 0.02439596748352051, 0.02439676856994629, 0.024520959854125977, 0.024545888900756836, 0.024653120040893553, 0.024873023986816407, 0.024525312423706053, 0.024930559158325195, 0.02446112060546875, 0.02452444839477539, 0.024256927490234375, 0.02434016036987305, 0.02430803108215332, 0.024275968551635742, 0.0242653751373291, 0.024227807998657227, 0.024289663314819337, 0.024149984359741212, 0.024317983627319337, 0.024371072769165038, 0.02430169677734375, 0.024164287567138672, 0.02420128059387207, 0.024242176055908202, 0.02443449592590332, 0.024387136459350586, 0.024819616317749024, 0.024572671890258788, 0.024516639709472657, 
0.024308000564575195, 0.024233951568603515, 0.02433612823486328, 0.02432601547241211, 0.02439993667602539, 0.02475372886657715, 0.02421811294555664, 0.02428313636779785, 0.024221216201782227, 0.024278656005859375, 0.02440892791748047, 0.024458463668823243, 0.024467679977416994, 0.025401920318603517, 0.024670015335083006, 0.024660160064697265, 0.02459020805358887, 0.024599679946899412, 0.024691551208496094, 0.0247031364440918, 0.024559135437011718, 0.024531423568725588, 0.02453913688659668, 0.024874176025390625, 0.024435712814331056, 0.024383295059204103, 0.02444623947143555, 0.024543584823608397, 0.024672607421875, 0.024671615600585936, 0.024601408004760742, 0.024595903396606444, 0.024658336639404296, 0.02469068717956543, 0.024567615509033202, 0.024674816131591795, 0.02454684829711914, 0.024537248611450194, 0.0245534725189209, 0.024532831192016602, 0.02458844757080078, 0.02448409652709961, 0.02450543975830078, 0.024470176696777344, 0.02459619140625, 0.024407808303833007, 0.024376960754394533, 0.024626272201538086, 0.024595808029174805, 0.024461055755615236, 0.02510041618347168, 0.024369760513305663, 0.024424448013305664, 0.024473600387573242, 0.02442038345336914, 0.024833343505859376, 0.024698944091796876, 0.02449468803405762, 0.024655487060546873, 0.02464192008972168, 0.025536672592163086, 0.024971200942993165, 0.02479315185546875, 0.02454732894897461, 0.024483840942382814, 0.02443878364562988, 0.02454332733154297, 0.02457740783691406, 0.024573631286621093, 0.024515424728393555, 0.024692607879638673, 0.024694175720214845, 0.02470528030395508, 0.024607200622558594, 0.02467148780822754, 0.024597375869750977, 0.024563743591308595, 0.024573631286621093, 0.024537248611450194, 0.024696832656860353, 0.024761888504028322, 0.025350624084472657, 0.02513068771362305, 0.025080095291137694, 0.024858591079711913, 0.024671775817871094, 0.02461955261230469, 0.025792768478393555, 0.0247825927734375, 0.024639488220214844, 0.024534879684448244, 0.02444304084777832, 0.02442630386352539, 0.024500415802001952, 0.024369152069091796, 0.024476959228515626, 0.024541120529174804, 0.024584991455078125, 0.02440575981140137, 0.024338304519653322, 0.024463743209838868, 0.024516511917114257, 0.0244716796875, 0.02452681541442871, 0.024399232864379884, 0.024306047439575196, 0.0242608642578125, 0.024313695907592775, 0.024574111938476563, 0.024700927734375, 0.02412348747253418, 0.024201120376586914, 0.02411667251586914, 0.024334911346435548, 0.024171775817871093, 0.02425916862487793, 0.024252864837646486, 0.024200927734375, 0.024188928604125977, 0.024252384185791016, 0.02418636894226074, 0.024271392822265626, 0.024359968185424803, 0.02487126350402832, 0.024457311630249022, 0.024449024200439453, 0.02427289581298828, 0.024389631271362306, 0.02734195137023926, 0.02472025680541992, 0.024532991409301756, 0.02444825553894043, 0.024443647384643555, 0.02447769546508789, 0.024649728775024415, 0.024551263809204103, 0.024307680130004884, 0.024291519165039063, 0.024276992797851563, 0.02494259262084961, 0.02440608024597168, 0.02461235237121582, 0.024854976654052733, 0.02467862319946289, 0.024991519927978517, 0.024319456100463866, 0.02418332862854004, 0.024120800018310545, 0.024226303100585937, 0.024477535247802735, 0.024207712173461914, 0.024223583221435547, 0.024186784744262696, 0.024238176345825195, 0.024193023681640623, 0.0241940803527832, 0.024247264862060545, 0.024186880111694335, 0.02421526336669922, 0.02421583938598633, 0.024213151931762697, 0.024205663681030273, 0.024167552947998047, 0.02474687957763672, 0.02444697570800781, 
0.024252576828002928, 0.024254304885864258, 0.024218687057495115, 0.025066112518310545, 0.02431622314453125, 0.024550943374633788, 0.02436569595336914, 0.024405792236328126, 0.024270912170410157, 0.024377344131469726, 0.024268800735473633, 0.024467647552490233, 0.024198783874511718, 0.024325727462768554, 0.024309600830078125, 0.024326911926269533, 0.024576000213623047, 0.024647680282592774, 0.024285184860229493, 0.024236032485961914, 0.02505449676513672, 0.02641119956970215, 0.02457596778869629, 0.024829727172851562, 0.024139904022216798, 0.024119583129882813, 0.024205408096313476, 0.024174591064453126, 0.024262208938598633, 0.024100351333618163, 0.02424928092956543, 0.02412531280517578, 0.024602752685546875, 0.024165376663208008, 0.024378368377685547, 0.024246271133422852, 0.024389631271362306, 0.024155391693115234, 0.02539289665222168, 0.02426108741760254, 0.024259103775024413, 0.024098848342895506, 0.024203231811523437, 0.024145248413085938, 0.024246944427490234, 0.02410675239562988, 0.02415382385253906, 0.024087072372436524, 0.024180736541748047, 0.024297439575195312, 0.024319520950317385, 0.024280704498291016, 0.024281984329223634, 0.02535148811340332, 0.025165504455566406, 0.024405216217041014, 0.024579872131347658, 0.024223167419433592, 0.02435744094848633, 0.024252416610717774, 0.02434252738952637, 0.024211456298828125, 0.024178688049316405, 0.024305599212646484, 0.02529804801940918, 0.0246824951171875, 0.02462211227416992, 0.02464348793029785, 0.02485862350463867, 0.02485862350463867, 0.024854528427124024, 0.02500387191772461, 0.025178335189819337, 0.026527008056640624, 0.025773792266845702, 0.025045951843261718, 0.025046335220336915, 0.02502931213378906, 0.02500169563293457, 0.02495257568359375, 0.025258207321166994, 0.024747743606567382, 0.02476608085632324, 0.024672672271728514, 0.025593311309814452, 0.024751903533935547, 0.024936607360839844, 0.024850784301757814, 0.024780288696289062, 0.02468534469604492, 0.02543577575683594, 0.025192832946777342, 0.025194368362426757, 0.025610080718994142, 0.025256223678588867, 0.025023679733276367, 0.025128992080688476, 0.02500912094116211, 0.025163583755493164, 0.025203744888305665, 0.0251114559173584, 0.025186368942260742, 0.02514227294921875, 0.025025535583496093, 0.02495052719116211, 0.024944896697998046, 0.024853887557983397, 0.025063072204589844, 0.024977535247802735, 0.02520969581604004, 0.02518383979797363, 0.02514358329772949, 0.025106399536132813, 0.02521913528442383, 0.02521062469482422, 0.02529929542541504, 0.025132543563842775, 0.025138912200927736, 0.025273120880126954, 0.025053184509277345, 0.027054271697998046, 0.026357568740844727, 0.025272319793701172, 0.02539891242980957, 0.02535862350463867, 0.025250175476074218, 0.025337568283081056, 0.02497331237792969, 0.02484547233581543, 0.0258056640625, 0.02461926460266113, 0.024588031768798826, 0.02433228874206543, 0.024162336349487303, 0.024098432540893555, 0.024121408462524415, 0.024106304168701173, 0.024234975814819336, 0.027041791915893554, 0.02483206367492676, 0.02463737678527832, 0.024584192276000977, 0.02450227165222168, 0.024779935836791993, 0.024331104278564452, 0.02427903938293457, 0.024563711166381837, 0.025356447219848633, 0.0245897274017334, 0.024365503311157225, 0.02420265579223633, 0.024189184188842774, 0.024309280395507813, 0.024111648559570313, 0.024133535385131837, 0.024127872467041015, 0.024207040786743163, 0.024034912109375, 0.024121728897094727, 0.02412169647216797, 0.02427449607849121, 0.024228288650512696, 0.024516063690185545, 0.024500127792358398, 
0.025204959869384765, 0.024859039306640626, 0.0245382080078125, 0.02447043228149414, 0.024432224273681642, 0.024629663467407227, 0.024705024719238283, 0.024913984298706053, 0.02523257637023926, 0.025219839096069337, 0.025083904266357423, 0.025038080215454103, 0.02522336006164551, 0.025292512893676757, 0.025165727615356445, 0.025095104217529297, 0.025092096328735353, 0.025612287521362305, 0.025286176681518554, 0.02520521545410156, 0.02549065589904785, 0.025444223403930665, 0.02547372817993164, 0.025364736557006835, 0.02517398452758789, 0.02521811294555664, 0.025170879364013674, 0.025108192443847658, 0.025094432830810545, 0.025233407974243165, 0.025078016281127928, 0.024940288543701172, 0.02507366371154785, 0.025114368438720704, 0.025034528732299804, 0.0249749755859375, 0.025289567947387695, 0.025075712203979493, 0.024958400726318358, 0.02485919952392578, 0.02512214469909668, 0.024824480056762695, 0.025102527618408203, 0.024988544464111327, 0.024968000411987306, 0.025106176376342774]",tokens/s,40.66768489199792,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1301.057536,1083.113472,0.0,706.740224,675.13344,s,1,8.02769287109375,8.02769287109375,0.0,8.02769287109375,8.02769287109375,8.02769287109375,8.02769287109375,[8.02769287109375],,kWh,3.416082585419341e-05,3.75901378485943e-06,1.1313064606000012e-05,4.923290424505285e-05,,MB,1338.507264,1403.977728,0.0,996.1472,942.731264,s,10,0.32046176147460936,0.032046176147460935,0.0002986242411204077,0.031976847648620604,0.032185202789306644,0.03253535423278808,0.03281547538757324,"[0.03288550567626953, 0.03196047973632812, 0.03190598487854004, 0.031801536560058595, 0.032107391357421876, 0.032062145233154295, 0.03206054306030273, 0.031993215560913085, 0.03177020835876465, 0.03191475105285645]",tokens/s,7988.472597230083,kWh,1.0090062612644241e-06,1.1127539921727499e-07,6.678242890483014e-07,1.7881059495300003e-06,tokens/kWh,143168250.21877983,MB,1356.914688,1412.366336,0.0,1004.535808,942.733824,s,10,13.1520458984375,1.3152045898437499,0.01372797262127614,1.311053955078125,1.3325357055664062,1.3414024963378908,1.3484959289550782,"[1.3132799072265624, 1.3078309326171875, 1.304052734375, 1.3305653076171875, 1.350269287109375, 1.311547607421875, 1.310560302734375, 1.306673583984375, 1.3040201416015624, 1.31324609375]",tokens/s,47.90129268594218,kWh,3.860332580165441e-05,4.257519750925985e-06,1.6431994946351634e-05,5.929284049893203e-05,tokens/kWh,1062522.8858977458,,s,630,13.149944005966175,0.02087292699359712,0.000397384396262056,0.02076681613922119,0.021191869163513184,0.0216299822807312,0.022143090381622318,"[0.020803615570068358, 0.02102272033691406, 0.020967424392700194, 0.020914176940917968, 0.02078268814086914, 0.02070515251159668, 0.020525600433349608, 0.020752384185791017, 0.02056300735473633, 0.020646848678588868, 0.020609024047851563, 0.02064787292480469, 0.02072991943359375, 0.02066022491455078, 0.020592639923095703, 0.020717567443847656, 0.021561344146728514, 0.021295103073120117, 0.020914176940917968, 0.020957183837890626, 0.020715520858764647, 0.020868576049804688, 0.02091881561279297, 0.020616735458374023, 0.020797920227050782, 0.020897184371948242, 0.020827968597412108, 0.020982175827026366, 0.020805376052856445, 0.020769407272338867, 0.020746240615844725, 0.02074790382385254, 0.02074457550048828, 0.020785120010375975, 0.020730047225952147, 0.02086425590515137, 0.021059648513793945, 0.020776992797851564, 0.02080758476257324, 0.0208056640625, 0.02102739143371582, 0.020893024444580077, 0.020856735229492187, 0.02086579132080078, 0.020836383819580077, 0.02096691131591797, 0.02086960029602051, 0.020856096267700196, 0.02085308837890625, 0.02098419189453125, 0.020908031463623047, 0.020965375900268556, 0.02168185615539551, 0.020817760467529298, 0.02070694351196289, 0.020800159454345702, 0.020863168716430663, 0.020792448043823242, 0.02071340751647949, 0.020703487396240235, 0.02085536003112793, 0.02086720085144043, 0.020762624740600585, 0.020867071151733398, 0.02082614326477051, 0.02055369567871094, 0.02063148880004883, 0.020639808654785156, 0.020736000061035157, 0.020754432678222655, 0.020614463806152342, 0.020672639846801757, 0.020644416809082033, 0.020746240615844725, 0.020658176422119142, 0.02069231986999512, 0.020676416397094728, 0.020724159240722656, 0.02065558433532715, 0.020628416061401367, 0.02057164764404297, 0.020597248077392577, 0.020625408172607423, 0.020717567443847656, 0.020641792297363282, 0.02064134407043457, 0.020813728332519533, 0.02062748718261719, 0.02073651123046875, 0.02087321662902832, 0.02090595245361328, 
0.02074012756347656, 0.02080499267578125, 0.020865055084228517, 0.020836959838867186, 0.020817920684814452, 0.02163644790649414, 0.02080732727050781, 0.02104422378540039, 0.020848096847534178, 0.020754655838012694, 0.020833856582641603, 0.0209783992767334, 0.020758560180664062, 0.020789247512817383, 0.020669567108154298, 0.020734848022460936, 0.0209039363861084, 0.0208035831451416, 0.020728063583374024, 0.02068454360961914, 0.02074844741821289, 0.020813663482666014, 0.02072985649108887, 0.020674560546875, 0.0207127685546875, 0.020679359436035157, 0.02077846336364746, 0.02068534469604492, 0.020719615936279297, 0.020764671325683593, 0.020813440322875975, 0.020822399139404296, 0.02069708824157715, 0.020731008529663086, 0.02071436882019043, 0.02080143928527832, 0.02072380828857422, 0.021032960891723632, 0.022084768295288087, 0.021023584365844727, 0.020783103942871094, 0.02090825653076172, 0.020676383972167967, 0.02065407943725586, 0.020543487548828124, 0.020557535171508788, 0.02057347106933594, 0.020583423614501953, 0.020663936614990233, 0.020803903579711912, 0.02055766487121582, 0.020647584915161134, 0.020502464294433594, 0.020701311111450196, 0.020656639099121094, 0.020600831985473633, 0.020508672714233397, 0.020559871673583984, 0.02069708824157715, 0.020666048049926757, 0.020547903060913086, 0.02057580757141113, 0.02057200050354004, 0.020648544311523437, 0.02055513572692871, 0.020718208312988283, 0.020772447586059572, 0.020740480422973634, 0.020709407806396483, 0.020699071884155273, 0.020598848342895507, 0.02057823944091797, 0.020733280181884764, 0.02078323173522949, 0.020533855438232423, 0.02054960060119629, 0.02059062385559082, 0.020626911163330076, 0.020720352172851564, 0.0205980167388916, 0.020513343811035156, 0.020663423538208006, 0.020732799530029298, 0.020684799194335936, 0.020565376281738282, 0.020602655410766602, 0.020541343688964844, 0.02082912063598633, 0.02059395217895508, 0.020698944091796876, 0.020650911331176757, 0.020665760040283202, 0.02083008003234863, 0.02077743911743164, 0.020668672561645507, 0.020789247512817383, 0.02072166442871094, 0.020929887771606447, 0.021078399658203125, 0.022013952255249023, 0.021489919662475584, 0.021057376861572264, 0.020739871978759764, 0.020908447265625, 0.02084614372253418, 0.020856927871704102, 0.02090867233276367, 0.020772863388061523, 0.020774751663208007, 0.02591926383972168, 0.023018911361694337, 0.020912864685058593, 0.020879392623901368, 0.020823904037475586, 0.02080191993713379, 0.020745439529418944, 0.021135520935058594, 0.020911775588989257, 0.02091107177734375, 0.020965375900268556, 0.02169036865234375, 0.021616384506225585, 0.021528831481933595, 0.02105753517150879, 0.020997280120849608, 0.020928352355957032, 0.02085785675048828, 0.02090297508239746, 0.020949951171875, 0.02089129638671875, 0.020957504272460938, 0.020938495635986328, 0.020939008712768555, 0.02085068893432617, 0.020867103576660155, 0.020839744567871094, 0.021121728897094728, 0.02084390449523926, 0.020898431777954102, 0.020811904907226564, 0.02091948890686035, 0.021083839416503908, 0.020925312042236327, 0.02085411262512207, 0.020980512619018555, 0.02100223922729492, 0.021044832229614258, 0.020879711151123047, 0.02097158432006836, 0.020872352600097656, 0.020938655853271485, 0.020904895782470703, 0.020973567962646485, 0.020914335250854493, 0.021046367645263672, 0.021612831115722656, 0.021154272079467774, 0.021102592468261717, 0.021004064559936524, 0.02105686378479004, 0.021160959243774414, 0.021361919403076173, 0.02235228729248047, 0.021129728317260742, 
0.021142879486083985, 0.020959903717041015, 0.020858879089355468, 0.020883455276489257, 0.02105446434020996, 0.020976512908935548, 0.021299327850341797, 0.02111692810058594, 0.020996095657348633, 0.020992000579833983, 0.020917375564575194, 0.020846559524536134, 0.02095110321044922, 0.021003103256225585, 0.02090332794189453, 0.020832191467285155, 0.020806304931640623, 0.021170175552368165, 0.02271027183532715, 0.021127296447753907, 0.020954399108886718, 0.020978271484375, 0.020914176940917968, 0.021139455795288087, 0.02106368064880371, 0.021132736206054686, 0.02128748893737793, 0.021555200576782226, 0.022030336380004883, 0.021534719467163087, 0.0239553279876709, 0.021263711929321288, 0.021317888259887695, 0.02102022361755371, 0.021119232177734374, 0.02120163154602051, 0.02128505516052246, 0.02160950469970703, 0.021902111053466795, 0.021782527923583983, 0.0218603515625, 0.021876735687255858, 0.02216691207885742, 0.021895807266235353, 0.02181318473815918, 0.021825599670410156, 0.02185558319091797, 0.021772159576416015, 0.021752607345581054, 0.02170591926574707, 0.021715776443481445, 0.021817344665527344, 0.021817344665527344, 0.021654624938964844, 0.021722047805786134, 0.02175587272644043, 0.021741567611694337, 0.021366239547729492, 0.021272863388061523, 0.021209344863891602, 0.021219327926635743, 0.021141504287719725, 0.020916223526000977, 0.020963327407836914, 0.02084000015258789, 0.02099580764770508, 0.020850528717041017, 0.020757024765014648, 0.020594303131103515, 0.02068332862854004, 0.02059833526611328, 0.02054640007019043, 0.02058527946472168, 0.020648895263671876, 0.02048204803466797, 0.020477951049804686, 0.020573919296264648, 0.020549919128417967, 0.020658176422119142, 0.02074777603149414, 0.02076691246032715, 0.0207108154296875, 0.020607904434204103, 0.020580352783203124, 0.02061296081542969, 0.02066201591491699, 0.020648351669311525, 0.020586496353149415, 0.021059135437011718, 0.020550079345703125, 0.020560991287231444, 0.020691871643066406, 0.02060073661804199, 0.02049648094177246, 0.021190656661987304, 0.020704639434814452, 0.020609184265136717, 0.020635200500488282, 0.02083113670349121, 0.020450496673583986, 0.020671295166015624, 0.02061520004272461, 0.020684768676757812, 0.020862943649291994, 0.020811807632446288, 0.02094895935058594, 0.020916255950927734, 0.021028863906860353, 0.02109775924682617, 0.02096988868713379, 0.021184383392333986, 0.02109075164794922, 0.021116352081298827, 0.02091270446777344, 0.0210882568359375, 0.021079296112060546, 0.020920799255371093, 0.020689184188842774, 0.0213087043762207, 0.02101113510131836, 0.021622079849243164, 0.02142486381530762, 0.02087936019897461, 0.020807071685791014, 0.020793760299682617, 0.02073139190673828, 0.020726463317871095, 0.02125004768371582, 0.020942720413208007, 0.020748416900634767, 0.020708799362182617, 0.020720191955566406, 0.020760576248168947, 0.02080143928527832, 0.021173856735229493, 0.02105094337463379, 0.021190784454345704, 0.020891584396362305, 0.020949440002441408, 0.020898208618164063, 0.0209932804107666, 0.021003040313720703, 0.021030080795288085, 0.02100511932373047, 0.021112831115722656, 0.021168127059936523, 0.0209849910736084, 0.021091039657592774, 0.020893375396728517, 0.020830656051635744, 0.02068396759033203, 0.020615999221801757, 0.020604703903198244, 0.020633823394775392, 0.020742143630981445, 0.020703231811523438, 0.020700319290161133, 0.020585311889648437, 0.020696992874145507, 0.02064188766479492, 0.02049228858947754, 0.020684223175048828, 0.020550207138061525, 0.020590591430664062, 
0.020690944671630858, 0.020584320068359373, 0.020574335098266602, 0.020724960327148437, 0.020566112518310548, 0.02079404830932617, 0.020940799713134766, 0.02094499206542969, 0.02079270362854004, 0.02078505516052246, 0.020875904083251955, 0.020819583892822267, 0.020882015228271485, 0.02067375946044922, 0.020548160552978516, 0.02066431999206543, 0.02069001579284668, 0.02077343940734863, 0.020811807632446288, 0.02072403144836426, 0.020684768676757812, 0.020637727737426757, 0.020763071060180664, 0.020762048721313476, 0.02056687927246094, 0.020629215240478515, 0.020628896713256836, 0.020533855438232423, 0.020657279968261718, 0.02068160057067871, 0.020766719818115235, 0.020760576248168947, 0.020736000061035157, 0.02069424057006836, 0.020801471710205077, 0.020730720520019532, 0.020692895889282227, 0.020665887832641602, 0.021047712326049805, 0.020658336639404296, 0.0207391357421875, 0.020646751403808592, 0.02066236877441406, 0.020700927734375, 0.020684703826904297, 0.020674911499023438, 0.020676607131958007, 0.02064102363586426, 0.02074025535583496, 0.020656736373901367, 0.021345888137817383, 0.020754112243652343, 0.020714208602905272, 0.021070016860961913, 0.02098361587524414, 0.02083430480957031, 0.02077004814147949, 0.020766464233398438, 0.020815008163452147, 0.020731071472167968, 0.020806304931640623, 0.020620288848876952, 0.02067059135437012, 0.02057744026184082, 0.020620800018310546, 0.02083452796936035, 0.02095235252380371, 0.02064627265930176, 0.020801887512207032, 0.020750335693359375, 0.020727455139160158, 0.020674911499023438, 0.02074336051940918, 0.02079155158996582, 0.02084716796875, 0.020736000061035157, 0.02075651168823242, 0.02071945571899414, 0.02062553596496582, 0.020670015335083006, 0.020748735427856446, 0.020679935455322266, 0.020771583557128905, 0.020645280838012696, 0.020669023513793947, 0.020709695816040038, 0.020816095352172853, 0.020520959854125977, 0.020619359970092774, 0.020574111938476563, 0.020596031188964845, 0.020681631088256835, 0.02088118362426758, 0.020633600234985353, 0.020629024505615233, 0.02059107208251953, 0.02065407943725586, 0.020609024047851563, 0.020727807998657227, 0.0206376953125, 0.020715648651123048, 0.020621183395385743, 0.02060054397583008, 0.020879648208618165, 0.020702335357666017, 0.020713632583618163, 0.020615583419799806, 0.0207076473236084, 0.02065580749511719, 0.020682111740112304, 0.020675104141235353, 0.02061552047729492, 0.020784255981445312, 0.020546495437622072, 0.02061516761779785, 0.020619264602661135, 0.02065119934082031, 0.021478208541870117, 0.020673952102661132, 0.020574815750122072, 0.020725568771362304, 0.020566207885742187, 0.020578271865844728, 0.02075651168823242, 0.02066329574584961, 0.020506784439086913, 0.020626272201538086, 0.02059222412109375, 0.020634016036987304, 0.02064588737487793, 0.020633312225341798, 0.020590240478515626, 0.020664960861206054, 0.020686368942260742, 0.020732383728027343, 0.020889312744140624, 0.020748575210571288, 0.020641792297363282, 0.020774911880493165, 0.02063974380493164, 0.020696416854858398, 0.02066703987121582, 0.02071343994140625, 0.02073193550109863, 0.020725759506225586, 0.021323776245117186, 0.020731807708740235, 0.020926559448242187, 0.020727584838867188, 0.020941696166992187, 0.0209136962890625, 0.02063817596435547, 0.02067865562438965, 0.020590272903442383, 0.0206595516204834, 0.020636159896850585, 0.020827775955200196, 0.02055459213256836, 0.020594688415527345, 0.020800607681274414, 0.020808736801147462, 0.020670143127441407, 0.020702751159667968, 0.020721632003784178, 
0.020671167373657227, 0.020701183319091796, 0.020762624740600585, 0.02085683250427246, 0.02127257537841797, 0.02107187271118164, 0.021112831115722656, 0.020774911880493165, 0.02068467140197754, 0.020879487991333007, 0.020747392654418946, 0.020759424209594726, 0.020760576248168947, 0.021456735610961914, 0.02203868865966797, 0.02136182403564453, 0.02090070343017578, 0.0208154239654541, 0.02091574478149414, 0.020660543441772462, 0.02063545608520508, 0.020566816329956054, 0.02078643226623535, 0.02131167984008789, 0.022221376419067383, 0.0209039363861084, 0.020841951370239257, 0.020844383239746092, 0.020768543243408204, 0.02071347236633301, 0.020675487518310547, 0.020710752487182616, 0.020646240234375, 0.020977983474731444, 0.020733631134033204, 0.02073334312438965, 0.020804319381713867, 0.02073382377624512, 0.020572128295898436, 0.020723487854003905, 0.02065171241760254, 0.02072822380065918, 0.02072198486328125, 0.02101807975769043, 0.021110687255859375, 0.020613536834716797, 0.020627840042114258]",tokens/s,47.908949248313625,,, 4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1852.98944,2718.892032,0.0,2340.421632,2285.568,s,1,8.9999931640625,8.9999931640625,0.0,8.9999931640625,8.9999931640625,8.9999931640625,8.9999931640625,[8.9999931640625],,kWh,6.0278640858329404e-05,6.639440169627309e-06,2.0128627214038852e-05,8.704670824199556e-05,,MB,1854.042112,3067.019264,0.0,2659.188736,2578.855936,s,10,0.8156142654418945,0.08156142654418945,0.0005569991454410323,0.08144235229492187,0.0822944351196289,0.08255667190551758,0.08276646133422852,"[0.08281890869140625, 0.08223616027832031, 0.08115347290039063, 0.08161580657958985, 0.08172006225585937, 0.0813210220336914, 0.08085462188720703, 0.08117372894287109, 0.08156368255615234, 0.08115679931640625]",tokens/s,3138.7386273988336,kWh,2.5467738779711973e-06,2.808641281398764e-07,1.6995375915130268e-06,4.527175597624101e-06,tokens/kWh,56547397.92606033,MB,1854.042112,3067.019264,0.0,2659.188736,2578.858496,s,10,14.036172607421873,1.4036172607421873,0.010475848935466391,1.3988833007812498,1.4191065185546874,1.4217946533203125,1.4239451611328124,"[1.4011680908203126, 1.3940382080078124, 1.39739501953125, 1.3950809326171876, 1.3964273681640624, 1.41350537109375, 1.4185091552734375, 1.4244827880859374, 1.395194091796875, 1.40037158203125]",tokens/s,44.88403054169314,kWh,4.119195426994443e-05,4.543112637616315e-06,2.2239787115484975e-05,6.797485402304572e-05,tokens/kWh,926813.3180343561,,s,630,14.03415205383302,0.022276431831480967,0.00047671132272992823,0.02214495944976807,0.022801987648010252,0.02292944688796997,0.023775007877349853,"[0.022978527069091797, 0.022815519332885743, 0.022460416793823244, 0.02232851219177246, 0.02240185546875, 0.022271999359130858, 0.022429088592529296, 0.0220446720123291, 0.022266016006469727, 0.02223967933654785, 0.022103071212768555, 0.022101184844970704, 0.022183712005615235, 0.02215692710876465, 
0.022425344467163086, 0.02214259147644043, 0.022202495574951173, 0.022119295120239257, 0.02210201644897461, 0.02205427169799805, 0.022106752395629883, 0.022091775894165038, 0.022079488754272462, 0.022080928802490234, 0.022153791427612306, 0.022102048873901367, 0.022066719055175783, 0.02200828742980957, 0.022009855270385743, 0.021925888061523437, 0.022019807815551757, 0.02198739242553711, 0.022035743713378905, 0.022084352493286132, 0.02203887939453125, 0.022038368225097655, 0.022169599533081053, 0.024182559967041016, 0.0225784969329834, 0.022270879745483398, 0.02220796775817871, 0.022065696716308595, 0.022237184524536133, 0.0223191032409668, 0.022058784484863283, 0.022103296279907227, 0.022455263137817382, 0.022120704650878908, 0.022232704162597657, 0.022047008514404297, 0.022084543228149414, 0.02201433563232422, 0.021987871170043947, 0.021950464248657226, 0.021961856842041015, 0.021985279083251954, 0.021957632064819335, 0.024041343688964843, 0.022814720153808594, 0.02228223991394043, 0.022108160018920898, 0.021980352401733398, 0.02208799934387207, 0.02360540771484375, 0.022427648544311524, 0.022067455291748046, 0.02206185531616211, 0.022094911575317382, 0.021919647216796876, 0.021948415756225585, 0.02192595291137695, 0.02208291244506836, 0.021905664443969727, 0.021883167266845704, 0.022052127838134764, 0.021996320724487303, 0.021906591415405272, 0.02196156883239746, 0.02208358383178711, 0.02186649513244629, 0.021835775375366212, 0.021950239181518554, 0.022014272689819335, 0.022022048950195314, 0.02198102378845215, 0.02191360092163086, 0.02196291160583496, 0.022441568374633788, 0.023889728546142578, 0.022132383346557618, 0.022121280670166017, 0.022049951553344726, 0.022013952255249023, 0.022124576568603515, 0.022024192810058595, 0.02197599983215332, 0.022042367935180666, 0.021999103546142578, 0.022010623931884767, 0.022022144317626953, 0.02206003189086914, 0.022157951354980467, 0.02198361587524414, 0.02245792007446289, 0.0230830078125, 0.022022592544555665, 0.02202009582519531, 0.022011072158813476, 0.0219881591796875, 0.021970943450927736, 0.02198262405395508, 0.021915679931640626, 0.02203500747680664, 0.02194780731201172, 0.021989183425903322, 0.021990175247192382, 0.022239231109619142, 0.022123519897460937, 0.021963775634765623, 0.02189516830444336, 0.022195520401000975, 0.022223104476928712, 0.022208511352539064, 0.022458816528320314, 0.02223014450073242, 0.022346624374389647, 0.02329187202453613, 0.02265056037902832, 0.022506464004516603, 0.023739360809326173, 0.022272895812988282, 0.02214249610900879, 0.022079391479492186, 0.022249887466430664, 0.02240323257446289, 0.022210559844970702, 0.022009952545166016, 0.022169504165649414, 0.02198441505432129, 0.02194313621520996, 0.021942176818847657, 0.021997663497924806, 0.022396928787231447, 0.021961759567260743, 0.02242681694030762, 0.022103296279907227, 0.02215609550476074, 0.02236579132080078, 0.022354047775268556, 0.023201791763305665, 0.02264860725402832, 0.022644960403442382, 0.02215337562561035, 0.022083423614501954, 0.021952512741088868, 0.022132736206054687, 0.021929983139038087, 0.0224333438873291, 0.0218853759765625, 0.021940223693847655, 0.022052288055419922, 0.022032960891723633, 0.021905248641967773, 0.021978368759155275, 0.022180992126464842, 0.022054624557495118, 0.021978303909301757, 0.022059904098510743, 0.021952512741088868, 0.022073183059692383, 0.021968128204345704, 0.02206528091430664, 0.022053407669067382, 0.02207155227661133, 0.02196623992919922, 0.022046655654907227, 0.02209244728088379, 0.022108160018920898, 
0.022075231552124024, 0.02203664016723633, 0.021963808059692384, 0.021941247940063476, 0.02196611213684082, 0.022060831069946288, 0.021969535827636718, 0.022040063858032227, 0.02202908706665039, 0.022134016036987305, 0.021971296310424805, 0.022476480484008788, 0.022200000762939452, 0.022075040817260742, 0.022130687713623046, 0.022262752532958983, 0.02209708786010742, 0.02211027145385742, 0.022113216400146483, 0.022112064361572266, 0.02208140754699707, 0.022130815505981446, 0.022038591384887694, 0.022140863418579102, 0.022145023345947267, 0.022013856887817384, 0.022320831298828125, 0.02235638427734375, 0.02229248046875, 0.02233344078063965, 0.022196224212646484, 0.022242399215698243, 0.022167648315429687, 0.022051647186279298, 0.022009471893310546, 0.02203094482421875, 0.022064224243164062, 0.022051519393920898, 0.021984832763671875, 0.022065216064453125, 0.022097663879394533, 0.022095680236816406, 0.02208675193786621, 0.022042335510253905, 0.021966848373413086, 0.02196672058105469, 0.02195686340332031, 0.022011775970458985, 0.021964351654052736, 0.021979583740234374, 0.022062591552734375, 0.022813215255737304, 0.02247235107421875, 0.022403392791748047, 0.02227737617492676, 0.022352640151977538, 0.022128480911254883, 0.022104223251342772, 0.0233123836517334, 0.022230016708374024, 0.02205183982849121, 0.022015615463256834, 0.022033887863159178, 0.02202716827392578, 0.021949920654296875, 0.021985439300537108, 0.021987712860107422, 0.02205414390563965, 0.022042560577392577, 0.021963647842407227, 0.022048704147338866, 0.02196403121948242, 0.022051584243774413, 0.022079488754272462, 0.022212831497192383, 0.021987327575683592, 0.021950464248657226, 0.02205900764465332, 0.021979135513305666, 0.022027679443359375, 0.021874784469604492, 0.02201036834716797, 0.022052608489990234, 0.022061311721801757, 0.02196611213684082, 0.021952991485595704, 0.022015615463256834, 0.021983903884887697, 0.021988704681396486, 0.021985919952392578, 0.02198089599609375, 0.02202448081970215, 0.022039968490600585, 0.022047359466552733, 0.022077407836914063, 0.022187807083129882, 0.021991296768188475, 0.02226211166381836, 0.021977088928222657, 0.022103168487548827, 0.022153823852539063, 0.022190559387207032, 0.02256057548522949, 0.02224332809448242, 0.022239231109619142, 0.022222848892211915, 0.02223414421081543, 0.022260704040527345, 0.022175743103027345, 0.022245376586914063, 0.02225152015686035, 0.022259775161743163, 0.02216441535949707, 0.02229574394226074, 0.022230335235595703, 0.022225055694580078, 0.022225248336791993, 0.02226585578918457, 0.022271999359130858, 0.02240675163269043, 0.022299039840698243, 0.022269760131835938, 0.022216096878051757, 0.02227280044555664, 0.022245376586914063, 0.022185983657836913, 0.02240208053588867, 0.02247923278808594, 0.022264415740966798, 0.022308095932006836, 0.02216783905029297, 0.02223369598388672, 0.022081472396850585, 0.022187711715698243, 0.022254911422729493, 0.022207424163818358, 0.022230239868164064, 0.022770240783691408, 0.022519712448120118, 0.022603071212768555, 0.022847999572753908, 0.023460351943969726, 0.022923040390014648, 0.022892576217651367, 0.02285385513305664, 0.022826080322265626, 0.022919231414794922, 0.022911584854125977, 0.02287001609802246, 0.022863712310791016, 0.022853504180908202, 0.02280415916442871, 0.02281942367553711, 0.022877792358398437, 0.023044416427612305, 0.022865280151367188, 0.022847679138183592, 0.02269059181213379, 0.022660928726196287, 0.022517696380615234, 0.022463680267333985, 0.022493120193481445, 0.022463455200195312, 
0.022406143188476564, 0.022219680786132814, 0.022166624069213867, 0.022152095794677733, 0.02211020851135254, 0.022551904678344725, 0.0222740478515625, 0.02225833511352539, 0.022163551330566408, 0.02307619285583496, 0.023293920516967773, 0.02234569549560547, 0.022149152755737304, 0.02244207954406738, 0.022043136596679686, 0.02245631980895996, 0.022114303588867186, 0.022130687713623046, 0.02207855987548828, 0.022048831939697266, 0.022025056838989258, 0.021992576599121093, 0.022133216857910157, 0.02195702362060547, 0.02205900764465332, 0.021935136795043945, 0.022060096740722655, 0.021941728591918945, 0.02191404724121094, 0.02198739242553711, 0.021944255828857423, 0.022066303253173828, 0.022012191772460936, 0.02185647964477539, 0.022050336837768556, 0.022180704116821288, 0.022054367065429688, 0.022268352508544923, 0.022316991806030275, 0.023569952011108397, 0.022217632293701172, 0.022148895263671874, 0.022025951385498045, 0.022292543411254882, 0.022003936767578124, 0.022048063278198242, 0.0221624641418457, 0.02220796775817871, 0.022541791915893554, 0.02244384002685547, 0.022375328063964844, 0.0222873592376709, 0.02214137649536133, 0.02209030342102051, 0.02202150344848633, 0.022061504364013673, 0.021999807357788087, 0.022095296859741213, 0.02201398468017578, 0.0220677433013916, 0.022068256378173827, 0.0221778564453125, 0.022344608306884766, 0.02225971221923828, 0.02280182456970215, 0.022444831848144532, 0.02221036720275879, 0.022185888290405274, 0.02207139205932617, 0.022278144836425783, 0.02274070358276367, 0.022136287689208986, 0.022289215087890626, 0.02249497604370117, 0.022813983917236328, 0.02289731216430664, 0.022854015350341796, 0.022910783767700196, 0.02284783935546875, 0.02290870475769043, 0.022862911224365234, 0.02295235252380371, 0.022854175567626953, 0.023072639465332032, 0.022934688568115234, 0.023055328369140624, 0.022999040603637694, 0.02296124839782715, 0.022803455352783202, 0.02287171173095703, 0.022907295227050782, 0.0233654727935791, 0.023399871826171877, 0.02294822311401367, 0.022939424514770507, 0.02272275161743164, 0.022579519271850586, 0.022491039276123045, 0.02231817626953125, 0.02212895965576172, 0.022753471374511718, 0.022361600875854492, 0.02215372848510742, 0.022312576293945313, 0.022427040100097655, 0.022563327789306642, 0.022598112106323242, 0.022527999877929687, 0.02244764709472656, 0.02245884895324707, 0.022398464202880858, 0.022442495346069336, 0.022425600051879883, 0.0227061767578125, 0.0224783992767334, 0.022564416885375978, 0.022391679763793946, 0.022404991149902343, 0.022395008087158202, 0.02466534423828125, 0.0234747200012207, 0.023050079345703123, 0.02240105628967285, 0.022503583908081055, 0.02238387107849121, 0.022268512725830077, 0.022565343856811523, 0.022193119049072264, 0.022148000717163087, 0.022278144836425783, 0.022078527450561523, 0.02200876808166504, 0.022025760650634767, 0.02253811264038086, 0.02223369598388672, 0.02216985511779785, 0.0221909122467041, 0.022447296142578125, 0.022636287689208983, 0.023883775711059572, 0.023789567947387694, 0.02242745590209961, 0.02249951934814453, 0.022461759567260743, 0.022617887496948243, 0.022195104598999024, 0.02221897506713867, 0.02207107162475586, 0.022140928268432617, 0.022814111709594728, 0.02238070487976074, 0.022190656661987305, 0.02232268714904785, 0.022104448318481445, 0.02206105613708496, 0.0301711368560791, 0.02364739227294922, 0.022293344497680664, 0.02216160011291504, 0.02218943977355957, 0.022217151641845703, 0.022212608337402344, 0.022146207809448242, 0.022253568649291993, 0.02207539176940918, 
0.022106016159057617, 0.022116031646728516, 0.022301279067993163, 0.022027711868286132, 0.02218227195739746, 0.022054912567138672, 0.0220437126159668, 0.02235078430175781, 0.022096960067749024, 0.022547487258911134, 0.022253471374511717, 0.022337535858154296, 0.022006847381591796, 0.022119359970092775, 0.022054880142211915, 0.022052896499633788, 0.02206924819946289, 0.02206515121459961, 0.022079488754272462, 0.022026239395141603, 0.021966400146484374, 0.02220691108703613, 0.022911136627197265, 0.022144895553588867, 0.022196191787719727, 0.022079488754272462, 0.022137855529785155, 0.022098943710327147, 0.022034431457519533, 0.02200371170043945, 0.0220897274017334, 0.022044927597045898, 0.022013696670532226, 0.022052160263061525, 0.022036767959594725, 0.022054912567138672, 0.022079904556274413, 0.021954559326171876, 0.02208768081665039, 0.022063104629516602, 0.0220897274017334, 0.022150304794311522, 0.022194656372070312, 0.022288063049316405, 0.022305471420288086, 0.02221993637084961, 0.022096736907958985, 0.022117855072021485, 0.02228278350830078, 0.02208780860900879, 0.02201900863647461, 0.02229088020324707, 0.022056543350219726, 0.022100448608398438, 0.02211680030822754, 0.022166784286499024, 0.02230928039550781, 0.022063455581665038, 0.022200319290161134, 0.02206835174560547, 0.022296768188476562, 0.022257919311523437, 0.02212441635131836, 0.022021984100341795, 0.02200214385986328, 0.022062047958374024, 0.022364799499511718, 0.02235811233520508, 0.022077312469482423, 0.022198400497436523, 0.022163455963134765, 0.022344831466674805, 0.022186368942260743, 0.022254079818725587, 0.022126399993896484, 0.02209401512145996, 0.02197427177429199, 0.02208345603942871, 0.02196361541748047, 0.022056991577148438, 0.022011808395385742, 0.022097312927246093, 0.02194486427307129, 0.022060863494873045, 0.021979488372802735, 0.022199935913085937, 0.022048255920410157, 0.022065183639526368, 0.022082399368286133, 0.022264896392822267, 0.022094688415527343, 0.0221463680267334, 0.022004512786865233, 0.02220460891723633, 0.02205398368835449, 0.022139616012573242, 0.0220361270904541, 0.02247100830078125, 0.02211008071899414, 0.022032800674438476, 0.02199216079711914, 0.022021120071411132, 0.02208153533935547, 0.02226380729675293, 0.022349151611328125, 0.022780576705932618, 0.022439968109130858, 0.0225382080078125, 0.022476383209228516, 0.022524320602416992, 0.022487039566040038, 0.022547584533691406, 0.02246950340270996, 0.022759424209594727, 0.022568832397460936, 0.022508960723876953, 0.022482688903808595, 0.02211292839050293, 0.022006080627441405, 0.022173696517944336, 0.022206047058105468, 0.022407455444335936, 0.022564096450805662, 0.02263324737548828]",tokens/s,44.89049267696471,,, 4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,808.792064,537.853952,0.0,159.383552,141.760512,s,1,7.6563134765625,7.6563134765625,0.0,7.6563134765625,7.6563134765625,7.6563134765625,7.6563134765625,[7.6563134765625],,kWh,1.79373740874856e-05,1.9668316973786543e-06,5.639448955985227e-06,2.5543654740849478e-05,,MB,1144.049664,607.059968,0.0,199.22944,184.771584,s,29,0.19300534534454347,0.006655356736018739,0.00011334521046294794,0.006623744010925293,0.006685113525390625,0.006855315113067626,0.007081322154998779,"[0.006968287944793701, 0.006623487949371338, 0.006574975967407227, 0.006549312114715576, 0.006638016223907471, 0.006614880084991455, 0.0066418561935424805, 0.007125279903411865, 0.006634367942810059, 0.006639647960662842, 0.006623744010925293, 0.006684927940368652, 0.006589568138122558, 0.006607135772705078, 0.006613344192504883, 0.006599296092987061, 0.006683296203613282, 0.0066440639495849605, 0.00661407995223999, 0.006594816207885742, 0.006597824096679687, 0.006613344192504883, 0.006680768013000488, 0.00660364818572998, 0.006643487930297852, 0.006685855865478515, 0.006675551891326905, 0.006609504222869873, 0.00663097620010376]",tokens/s,38465.25590649859,kWh,1.978375081271173e-07,2.1818001823691834e-08,1.1384039239050209e-07,3.334959023413112e-07,tokens/kWh,767625623.591623,MB,1154.94912,609.15712,0.0,201.326592,184.774144,s,29,10.026686584472657,0.34574781325767784,0.001936242525375772,0.34593734741210935,0.3482521545410156,0.3495382568359375,0.35064216064453124,"[0.3509805908203125, 0.34616665649414063, 0.34227490234375, 0.34410513305664064, 0.344109375, 0.34430694580078125, 0.34435601806640626, 0.3462296142578125, 0.3433265380859375, 0.34534359741210935, 0.3446510009765625, 0.34593777465820313, 0.34509738159179687, 0.3437035522460937, 0.34317913818359375, 0.3459658203125, 0.344365478515625, 0.3455206909179688, 0.34714877319335935, 0.3462953491210938, 0.3466032409667969, 0.34593734741210935, 0.3460842590332031, 0.3497719116210937, 0.3491877746582031, 0.3480182495117187, 0.34609002685546875, 0.34707467651367185, 0.3448547668457031]",tokens/s,182.2137337801398,kWh,9.914877986556458e-06,1.093428826163856e-06,3.7496588669900057e-06,1.4757965679710319e-05,tokens/kWh,4268881.048193129,,s,1827,10.013619136810306,0.005480908120859497,0.00010531538078863172,0.005462399959564209,0.005541747379302978,0.005588665628433227,0.005881040763854981,"[0.005810080051422119, 0.005703616142272949, 0.005703680038452149, 0.005683296203613282, 0.005652128219604492, 0.0056384320259094236, 0.005557439804077149, 0.005544223785400391, 0.005566976070404053, 0.005506847858428955, 0.005497056007385254, 0.00550710391998291, 0.0056394882202148435, 0.005673344135284424, 0.0055556797981262205, 0.005481279850006103, 0.005451200008392334, 0.005467711925506592, 0.005610496044158936, 0.00550707197189331, 0.005496831893920898, 0.0054651517868042, 0.005448863983154297, 0.005555935859680176, 0.005509183883666992, 0.0056273918151855465, 0.005583360195159912, 0.005475808143615723, 0.005519807815551758, 0.0054988799095153805, 0.0055168957710266114, 0.0054563841819763184, 0.005441343784332276, 0.00553718376159668, 0.005473055839538574, 0.005426688194274902, 0.005541920185089111, 0.005416416168212891, 0.005766143798828125, 0.0054579200744628905, 0.005432384014129639, 0.0054462399482727054, 0.005490752220153808, 0.005454112052917481, 0.005480447769165039, 0.005441535949707031, 0.005932831764221191, 0.005479648113250733, 0.006020095825195313, 0.0065977277755737305, 0.006096992015838623, 0.0054767999649047855, 
0.005443808078765869, 0.005546144008636475, 0.005490464210510254, 0.00548905611038208, 0.005468160152435303, 0.005477920055389405, 0.0054848318099975585, 0.005479712009429932, 0.005444320201873779, 0.005472576141357422, 0.005423935890197754, 0.0053311681747436526, 0.005439871788024902, 0.005711775779724121, 0.005418335914611817, 0.005464767932891846, 0.0054429759979248045, 0.005789504051208496, 0.005429535865783691, 0.005415487766265869, 0.005432672023773193, 0.005425920009613037, 0.005413631916046143, 0.005444384098052978, 0.005429471969604492, 0.005399680137634278, 0.00542790412902832, 0.0054048638343811035, 0.005402624130249023, 0.005437727928161621, 0.005434144020080567, 0.00548140811920166, 0.007245535850524902, 0.005472544193267822, 0.005700607776641845, 0.0054241280555725096, 0.005466239929199219, 0.005427072048187256, 0.0055168957710266114, 0.005440127849578857, 0.005453343868255615, 0.005523231983184815, 0.005454559803009033, 0.005506239891052246, 0.005455967903137207, 0.0054268798828125, 0.00553056001663208, 0.005437183856964112, 0.005435679912567139, 0.005508512020111084, 0.005420928001403809, 0.005383776187896728, 0.005436351776123047, 0.005408768177032471, 0.005469503879547119, 0.0054627199172973635, 0.005439328193664551, 0.005402944087982178, 0.005465951919555664, 0.005462016105651855, 0.005470335960388184, 0.005431168079376221, 0.005437024116516113, 0.005433760166168213, 0.005418655872344971, 0.0054148478507995606, 0.005435967922210693, 0.005473663806915283, 0.005423583984375, 0.005472256183624268, 0.005432928085327148, 0.005423200130462646, 0.005400512218475342, 0.005701888084411621, 0.005315839767456055, 0.005440063953399658, 0.005402624130249023, 0.00539244794845581, 0.005534880161285401, 0.005401088237762451, 0.005458176136016846, 0.005422783851623535, 0.00541926383972168, 0.005443520069122314, 0.005419360160827637, 0.005421088218688965, 0.005428544044494629, 0.0054280319213867185, 0.005421055793762207, 0.0054290881156921385, 0.0054290881156921385, 0.005406496047973633, 0.005424960136413574, 0.0054330239295959475, 0.0054301438331604, 0.005406720161437988, 0.005418848037719726, 0.005441696166992188, 0.00542310380935669, 0.005433343887329102, 0.005418240070343017, 0.005421919822692871, 0.005440959930419922, 0.00543177604675293, 0.005419007778167725, 0.005398528099060058, 0.0054609918594360355, 0.005475327968597412, 0.005439295768737793, 0.005418496131896973, 0.005410624027252197, 0.005427231788635254, 0.005392320156097412, 0.005393311977386474, 0.005410816192626953, 0.005404928207397461, 0.0054494719505310055, 0.0054308481216430666, 0.005388735771179199, 0.005433343887329102, 0.005418464183807373, 0.005430975914001465, 0.005373983860015869, 0.005522240161895752, 0.005427040100097656, 0.005455967903137207, 0.0054293122291564945, 0.005396736145019531, 0.005426368236541748, 0.005493311882019043, 0.005399775981903076, 0.005463039875030518, 0.0053901119232177734, 0.00540064001083374, 0.005424895763397217, 0.0054273920059204105, 0.005455359935760498, 0.0053309440612792965, 0.005423295974731446, 0.005474112033843994, 0.005429535865783691, 0.005448416233062744, 0.005427584171295166, 0.0054503679275512695, 0.00539628791809082, 0.005384191989898681, 0.0054254398345947264, 0.005434463977813721, 0.005443520069122314, 0.0054148159027099605, 0.005407392024993896, 0.005439712047576904, 0.005394527912139893, 0.005818367958068848, 0.005437215805053711, 0.005400447845458984, 0.0054126400947570805, 0.005496895790100097, 0.005448192119598389, 0.005429247856140137, 0.005404672145843506, 
0.005385663986206055, 0.005432064056396484, 0.005533504009246826, 0.005406367778778076, 0.005482944011688232, 0.005430304050445557, 0.005404672145843506, 0.00559222412109375, 0.005456831932067871, 0.0054505281448364255, 0.005473696231842041, 0.005423647880554199, 0.0054448962211608885, 0.0054135041236877445, 0.005408927917480468, 0.005437695980072021, 0.005500480175018311, 0.005529280185699463, 0.005446144104003906, 0.005446752071380615, 0.0054486079216003415, 0.005430592060089111, 0.005438144207000732, 0.005442944049835205, 0.005439680099487304, 0.005416607856750489, 0.00545462417602539, 0.005472256183624268, 0.005428864002227783, 0.005466495990753174, 0.005492256164550781, 0.005450208187103272, 0.005428351879119873, 0.005718463897705078, 0.005497280120849609, 0.005453824043273926, 0.005441023826599121, 0.005601439952850342, 0.005466464042663574, 0.005371903896331787, 0.005462016105651855, 0.005502975940704345, 0.0054596481323242185, 0.005472799777984619, 0.005477183818817138, 0.005458911895751953, 0.0054635519981384275, 0.005419648170471191, 0.00546127986907959, 0.005454432010650634, 0.005459551811218262, 0.005441952228546143, 0.005449120044708252, 0.005435872077941895, 0.005490496158599854, 0.005471776008605957, 0.005486720085144043, 0.005499711990356445, 0.005427296161651611, 0.005484000205993652, 0.005445600032806397, 0.0054520959854125976, 0.005455039978027344, 0.005459775924682617, 0.005409791946411133, 0.00557260799407959, 0.005485727787017823, 0.005534560203552246, 0.005451776027679443, 0.005438496112823486, 0.005470208168029785, 0.005441535949707031, 0.00543228816986084, 0.005456096172332764, 0.005431072235107422, 0.005418655872344971, 0.005505055904388428, 0.005451935768127442, 0.005463871955871582, 0.005429599761962891, 0.005424831867218018, 0.005455552101135254, 0.005445919990539551, 0.005408671855926514, 0.005422848224639893, 0.005434048175811767, 0.005403679847717285, 0.0054236159324646, 0.005400191783905029, 0.00540553617477417, 0.005412864208221436, 0.005428671836853027, 0.0055487041473388675, 0.0054098558425903324, 0.0053993921279907224, 0.005433343887329102, 0.005475808143615723, 0.0054605121612548825, 0.005562431812286377, 0.005502175807952881, 0.005489696025848389, 0.005455008029937744, 0.005437312126159668, 0.005457856178283691, 0.005652768135070801, 0.005601791858673095, 0.005492224216461182, 0.005468160152435303, 0.005437952041625976, 0.005467199802398682, 0.005464928150177002, 0.005426976203918457, 0.005449759960174561, 0.005429247856140137, 0.005433343887329102, 0.005480415821075439, 0.005420351982116699, 0.005446368217468261, 0.005445631980895996, 0.005442592144012451, 0.00542409610748291, 0.005429247856140137, 0.0054287037849426265, 0.0054297919273376466, 0.005435359954833984, 0.0054287037849426265, 0.005579328060150147, 0.005533376216888428, 0.005458079814910889, 0.005454336166381836, 0.005433055877685547, 0.005429183959960938, 0.005440991878509521, 0.005418879985809326, 0.005470016002655029, 0.005479040145874023, 0.005430784225463867, 0.0055092802047729495, 0.005440127849578857, 0.005494880199432373, 0.00543833589553833, 0.0055428800582885745, 0.0054906878471374515, 0.005462016105651855, 0.0054230718612670896, 0.0054672322273254395, 0.005425312042236328, 0.005467167854309082, 0.005473567962646485, 0.005444191932678223, 0.005445343971252441, 0.005415135860443115, 0.005392320156097412, 0.005428959846496582, 0.005441152095794678, 0.005411488056182861, 0.005471519947052002, 0.0054332160949707034, 0.005482463836669922, 0.005460864067077637, 0.005453343868255615, 
0.0054709439277648925, 0.005441504001617432, 0.0054280638694763185, 0.005447840213775635, 0.005362880229949951, 0.005430335998535156, 0.005426784038543701, 0.005457695960998535, 0.005459680080413819, 0.005485343933105469, 0.005416831970214844, 0.005408768177032471, 0.005452991962432862, 0.005452928066253662, 0.005387392044067383, 0.005427775859832763, 0.005498623847961426, 0.005386752128601074, 0.005424032211303711, 0.005401055812835693, 0.005516831874847412, 0.005423967838287354, 0.005425280094146729, 0.005599103927612305, 0.0054347200393676754, 0.005415584087371826, 0.005529248237609864, 0.005418432235717773, 0.005452191829681397, 0.005456543922424316, 0.005416287899017334, 0.005409215927124024, 0.00542742395401001, 0.005439455986022949, 0.005445504188537597, 0.005464064121246338, 0.005470367908477783, 0.0054268798828125, 0.005414239883422852, 0.005420191764831543, 0.005459616184234619, 0.005458047866821289, 0.0054085440635681156, 0.005486432075500488, 0.005480703830718994, 0.005467391967773437, 0.005436192035675049, 0.005459936141967774, 0.005468160152435303, 0.00544268798828125, 0.0054563841819763184, 0.005683584213256836, 0.005474431991577149, 0.005611519813537597, 0.0054967041015625, 0.005543263912200927, 0.005501599788665772, 0.005453887939453125, 0.005450719833374023, 0.005471199989318848, 0.005533696174621582, 0.005480415821075439, 0.005455904006958008, 0.005443456172943115, 0.005494912147521972, 0.005477536201477051, 0.005426015853881836, 0.005427296161651611, 0.005451807975769043, 0.005793312072753906, 0.005507199764251709, 0.005884352207183838, 0.005524735927581787, 0.0054852161407470705, 0.005485727787017823, 0.005534656047821045, 0.005462016105651855, 0.005483551979064942, 0.005454783916473389, 0.005410848140716552, 0.005449120044708252, 0.005425888061523438, 0.005453184127807618, 0.005576799869537354, 0.005458335876464843, 0.00542310380935669, 0.005586944103240967, 0.00545692777633667, 0.005448671817779541, 0.005484543800354004, 0.005484576225280762, 0.005585919857025146, 0.005456736087799072, 0.005429599761962891, 0.005433343887329102, 0.005418784141540527, 0.00543449592590332, 0.005393280029296875, 0.005426368236541748, 0.005961728096008301, 0.005475232124328613, 0.005431295871734619, 0.0054967360496521, 0.005464064121246338, 0.005463679790496826, 0.005469791889190674, 0.005517375946044922, 0.00547708797454834, 0.00545088005065918, 0.005428095817565918, 0.005554207801818848, 0.005421023845672607, 0.005441184043884277, 0.005482848167419434, 0.00561356782913208, 0.005517600059509277, 0.005466976165771484, 0.005462560176849365, 0.005517024040222168, 0.005470880031585693, 0.005422048091888428, 0.005448991775512695, 0.005493887901306152, 0.005489439964294434, 0.005421887874603271, 0.005420224189758301, 0.005447487831115722, 0.005428768157958984, 0.005410624027252197, 0.005478047847747803, 0.005343200206756592, 0.00547382402420044, 0.005461599826812744, 0.005431392192840576, 0.005401311874389648, 0.005435488224029541, 0.005435391902923584, 0.005389311790466309, 0.005430335998535156, 0.005426112174987793, 0.0054057598114013675, 0.005472224235534668, 0.005400544166564942, 0.005406527996063232, 0.005409247875213623, 0.00543123197555542, 0.005429024219512939, 0.005382239818572998, 0.00543120002746582, 0.005445600032806397, 0.005414976119995117, 0.0054412479400634765, 0.005418911933898925, 0.005417312145233154, 0.005407872200012207, 0.005421055793762207, 0.005404928207397461, 0.005404384136199951, 0.005423423767089844, 0.00545472002029419, 0.0054189438819885255, 0.005484511852264404, 
0.005416704177856445, 0.005434463977813721, 0.005426144123077393, 0.005439616203308105, 0.005497759819030762, 0.005430240154266358, 0.005404704093933106, 0.005459104061126709, 0.005442368030548096, 0.005482367992401123, 0.005492479801177979, 0.005476672172546387, 0.005484000205993652, 0.005458528041839599, 0.005470208168029785, 0.005486368179321289, 0.005470431804656982, 0.005453824043273926, 0.005478400230407715, 0.005500959873199463, 0.005493984222412109, 0.005438496112823486, 0.005476064205169677, 0.005470208168029785, 0.005420832157135009, 0.005456096172332764, 0.005469823837280273, 0.005474688053131104, 0.005462016105651855, 0.0055227518081665036, 0.005433055877685547, 0.005400576114654541, 0.005502272129058838, 0.005450431823730469, 0.005407936096191406, 0.005468031883239746, 0.005445663928985596, 0.00548960018157959, 0.005506815910339355, 0.005437664031982422, 0.005525504112243652, 0.005443583965301513, 0.005443136215209961, 0.005458367824554443, 0.0054579200744628905, 0.005447360038757324, 0.0054787201881408695, 0.005431295871734619, 0.005513120174407959, 0.0054477758407592775, 0.005429247856140137, 0.00545088005065918, 0.0054362878799438475, 0.0054534077644348145, 0.005474016189575195, 0.005448512077331543, 0.005402495861053467, 0.005451776027679443, 0.00550435209274292, 0.005463712215423584, 0.005507455825805664, 0.0054585919380187985, 0.0054617600440979, 0.00544979190826416, 0.0054438719749450686, 0.005459551811218262, 0.005454048156738282, 0.005415264129638672, 0.005510879993438721, 0.0054694080352783205, 0.005444191932678223, 0.005687488079071045, 0.005631999969482422, 0.005439487934112549, 0.005412576198577881, 0.005427648067474365, 0.0054754562377929685, 0.00542793607711792, 0.005484416007995606, 0.005630080223083496, 0.005443583965301513, 0.005470208168029785, 0.005440927982330322, 0.005448224067687989, 0.00549894380569458, 0.0054884800910949706, 0.005508607864379883, 0.0054967041015625, 0.005552927970886231, 0.005525504112243652, 0.005511168003082275, 0.0054967679977417, 0.005547616004943847, 0.0054966721534729, 0.005396096229553222, 0.005486303806304931, 0.005485311985015869, 0.0055008320808410645, 0.005451551914215088, 0.0054765758514404295, 0.005472320079803467, 0.005470143795013428, 0.005441823959350586, 0.0054617600440979, 0.005433311939239502, 0.005458047866821289, 0.0054967041015625, 0.005502975940704345, 0.005437183856964112, 0.00548035192489624, 0.005448031902313233, 0.005443488121032715, 0.005429344177246094, 0.005428671836853027, 0.005445919990539551, 0.005410208225250244, 0.005417312145233154, 0.00549561595916748, 0.005436543941497803, 0.005397280216217041, 0.005451583862304687, 0.005442624092102051, 0.005431968212127686, 0.00541103982925415, 0.005414303779602051, 0.00543775987625122, 0.005451968193054199, 0.00545308780670166, 0.005426176071166992, 0.00543068790435791, 0.0054280319213867185, 0.005437088012695312, 0.005433343887329102, 0.005418784141540527, 0.005463903903961182, 0.005427872180938721, 0.005475584030151367, 0.005521759986877441, 0.005496960163116455, 0.005477407932281494, 0.005485439777374267, 0.005465727806091308, 0.005482975959777832, 0.005472320079803467, 0.005457119941711426, 0.005472352027893066, 0.005464191913604736, 0.005587647914886475, 0.005518655776977539, 0.005470719814300537, 0.005516960144042969, 0.005525568008422852, 0.0055548157691955565, 0.00552342414855957, 0.0055447998046875, 0.005474815845489502, 0.005455967903137207, 0.0053489279747009275, 0.0054605121612548825, 0.005434783935546875, 0.005430111885070801, 0.005463808059692383, 
0.00547215986251831, 0.005399551868438721, 0.005449855804443359, 0.005455008029937744, 0.00548419189453125, 0.0054498882293701175, 0.005433440208435059, 0.005443391799926758, 0.0053944320678710935, 0.005435647964477539, 0.00547814416885376, 0.005447711944580078, 0.005418528079986573, 0.0054464321136474605, 0.005500703811645508, 0.005439360141754151, 0.0054577279090881345, 0.005449920177459717, 0.00540169620513916, 0.005460896015167236, 0.005435200214385987, 0.005437952041625976, 0.0054327998161315914, 0.005602943897247315, 0.005497439861297608, 0.005437632083892822, 0.005420928001403809, 0.00544982385635376, 0.00544547176361084, 0.005453855991363526, 0.005425119876861572, 0.005440703868865967, 0.005491615772247315, 0.005425055980682373, 0.005477727890014649, 0.005452576160430908, 0.005430399894714355, 0.005415552139282227, 0.005442944049835205, 0.005430016040802002, 0.006418432235717773, 0.00603875207901001, 0.006087456226348877, 0.00546611213684082, 0.005490111827850342, 0.005483168125152588, 0.005461343765258789, 0.005482272148132324, 0.005456672191619873, 0.005431295871734619, 0.005457791805267334, 0.0054432001113891605, 0.005435904026031494, 0.005453824043273926, 0.0054858880043029785, 0.005425568103790283, 0.005455455780029297, 0.00541971206665039, 0.005344255924224854, 0.005522047996520996, 0.005443967819213867, 0.005414912223815918, 0.005465919971466064, 0.005503168106079101, 0.005512256145477295, 0.005461152076721191, 0.005430431842803955, 0.0055558719635009765, 0.005455872058868408, 0.005423840045928955, 0.005450016021728516, 0.005464032173156739, 0.005472256183624268, 0.00551526403427124, 0.005453983783721924, 0.005481344223022461, 0.005749728202819824, 0.0054568958282470706, 0.005471231937408447, 0.005449376106262207, 0.005453216075897217, 0.005485055923461914, 0.005489088058471679, 0.005558176040649414, 0.005467775821685791, 0.005506815910339355, 0.005478271961212158, 0.005454559803009033, 0.005446976184844971, 0.005579584121704102, 0.00547430419921875, 0.0054778242111206054, 0.005460544109344483, 0.005479936122894287, 0.0054540162086486816, 0.00543503999710083, 0.005421440124511719, 0.005439008235931396, 0.005455967903137207, 0.00542300796508789, 0.005455935955047607, 0.005442463874816895, 0.005431072235107422, 0.005494175910949707, 0.005427807807922363, 0.005461056232452393, 0.005438399791717529, 0.005633312225341797, 0.005464799880981445, 0.005433343887329102, 0.005410816192626953, 0.005457471847534179, 0.005437727928161621, 0.005494944095611572, 0.005447679996490478, 0.005510335922241211, 0.005440320014953613, 0.005435679912567139, 0.005495808124542236, 0.00545251178741455, 0.005451551914215088, 0.005399648189544678, 0.005493120193481445, 0.005435935974121094, 0.005428575992584229, 0.005468832015991211, 0.005443583965301513, 0.0054287037849426265, 0.005478432178497314, 0.005425663948059082, 0.00545084810256958, 0.0054115839004516605, 0.005456319808959961, 0.005425983905792236, 0.005428127765655518, 0.005431295871734619, 0.005433152198791504, 0.00543775987625122, 0.005420928001403809, 0.005441120147705078, 0.005448448181152344, 0.005426623821258545, 0.005490528106689453, 0.005462399959564209, 0.005537792205810547, 0.005439487934112549, 0.00542310380935669, 0.005476352214813233, 0.005441088199615479, 0.005441760063171387, 0.005547743797302246, 0.0054297599792480465, 0.005479936122894287, 0.00541107177734375, 0.00544927978515625, 0.005446591854095459, 0.005426943778991699, 0.005539167881011963, 0.005458720207214355, 0.005440896034240723, 0.005474016189575195, 
0.005466623783111572, 0.0054841599464416505, 0.005457632064819336, 0.005434304237365723, 0.005420896053314209, 0.005442016124725342, 0.005437119960784912, 0.005412864208221436, 0.005435455799102784, 0.005429376125335694, 0.005409759998321533, 0.005486976146697998, 0.005439807891845703, 0.005409088134765625, 0.005445312023162842, 0.005508319854736328, 0.005437952041625976, 0.00543174409866333, 0.005492735862731934, 0.005445631980895996, 0.005419007778167725, 0.005459968090057373, 0.005424479961395264, 0.005348351955413819, 0.005430016040802002, 0.005439712047576904, 0.0054570879936218265, 0.005426015853881836, 0.0054390721321105956, 0.00543555212020874, 0.005476448059082031, 0.005447840213775635, 0.005447679996490478, 0.0054700798988342285, 0.005496352195739746, 0.005465951919555664, 0.005501567840576172, 0.005445759773254394, 0.005425151824951172, 0.0054291200637817385, 0.005588575839996338, 0.005442080020904541, 0.005451744079589844, 0.005406752109527588, 0.005418272018432617, 0.0054134721755981444, 0.005418528079986573, 0.005459743976593018, 0.005417183876037597, 0.005397088050842285, 0.005486911773681641, 0.005451583862304687, 0.0054065918922424315, 0.005449728012084961, 0.005435391902923584, 0.005437439918518067, 0.005449728012084961, 0.005437439918518067, 0.005439487934112549, 0.005445312023162842, 0.005496831893920898, 0.005443071842193604, 0.005432127952575683, 0.005390336036682129, 0.005431295871734619, 0.00540067195892334, 0.005400479793548584, 0.005425151824951172, 0.005426591873168946, 0.005395008087158203, 0.005423136234283447, 0.005486495971679687, 0.0054068160057067875, 0.005436992168426514, 0.0055136637687683105, 0.005414463996887207, 0.005417568206787109, 0.005418303966522217, 0.0055280961990356444, 0.005400000095367431, 0.005412960052490234, 0.00546457576751709, 0.00543503999710083, 0.005414463996887207, 0.005444352149963379, 0.005447648048400879, 0.0053673601150512696, 0.005465792179107666, 0.005473023891448975, 0.005504096031188965, 0.005477248191833496, 0.00544700813293457, 0.005521312236785889, 0.005469183921813964, 0.005486368179321289, 0.005502975940704345, 0.005510528087615967, 0.005467008113861084, 0.005543680191040039, 0.0055316481590271, 0.005483935832977295, 0.005510047912597656, 0.005497856140136719, 0.005521312236785889, 0.0055368318557739256, 0.005479743957519531, 0.00549724817276001, 0.00558406400680542, 0.005535711765289306, 0.005538368225097656, 0.005473567962646485, 0.005487872123718262, 0.005488383769989014, 0.0054943361282348635, 0.005515711784362793, 0.0054653120040893555, 0.005491136074066162, 0.0055055999755859375, 0.005473120212554932, 0.005548992156982422, 0.005494080066680908, 0.005468863964080811, 0.005479743957519531, 0.005503680229187012, 0.0055066561698913575, 0.005468575954437256, 0.005471776008605957, 0.005491007804870605, 0.005482880115509034, 0.005500703811645508, 0.005482656002044678, 0.0054925761222839355, 0.005423359870910645, 0.005476096153259277, 0.005469696044921875, 0.0054683837890625, 0.005509151935577393, 0.005431136131286621, 0.005513631820678711, 0.00547814416885376, 0.00542464017868042, 0.005441664218902588, 0.005452767848968506, 0.005461664199829102, 0.005455840110778809, 0.005453311920166016, 0.0054496641159057614, 0.005450111865997314, 0.005423327922821045, 0.005339200019836426, 0.005488639831542969, 0.005459328174591064, 0.005484672069549561, 0.005458432197570801, 0.005445631980895996, 0.005490623950958252, 0.005463488101959228, 0.005450272083282471, 0.005540031909942627, 0.005481919765472412, 0.005468255996704101, 
0.005466495990753174, 0.005535679817199707, 0.005459392070770263, 0.005435776233673095, 0.005440832138061524, 0.005444543838500977, 0.00545084810256958, 0.005605823993682861, 0.005470047950744629, 0.0054765758514404295, 0.005466784000396728, 0.005452640056610107, 0.005424032211303711, 0.005451104164123535, 0.005423776149749756, 0.0054254717826843265, 0.005465792179107666, 0.005480127811431885, 0.0054152321815490725, 0.005449151992797852, 0.005433792114257812, 0.005490816116333007, 0.005533696174621582, 0.005431295871734619, 0.005433343887329102, 0.00544758415222168, 0.005421152114868164, 0.005489664077758789, 0.005429503917694092, 0.005403391838073731, 0.005436448097229004, 0.00543228816986084, 0.005453343868255615, 0.005433087825775147, 0.005440224170684815, 0.00544595193862915, 0.005502719879150391, 0.005413887977600097, 0.005571135997772217, 0.005446208000183105, 0.005415775775909424, 0.00548144006729126, 0.005455455780029297, 0.005506976127624512, 0.005443071842193604, 0.005419648170471191, 0.005467552185058594, 0.005459936141967774, 0.005475327968597412, 0.005453824043273926, 0.005441664218902588, 0.005399295806884766, 0.005486303806304931, 0.005484064102172851, 0.0054583997726440426, 0.005480447769165039, 0.005500895977020264, 0.005496863842010498, 0.0054988799095153805, 0.0054617919921875, 0.005490592002868652, 0.005479072093963623, 0.0054505281448364255, 0.005481056213378906, 0.005466400146484375, 0.005449728012084961, 0.005496831893920898, 0.00546611213684082, 0.00547430419921875, 0.005464064121246338, 0.005462016105651855, 0.005451935768127442, 0.0054642558097839355, 0.005452703952789306, 0.005485055923461914, 0.005475808143615723, 0.005481535911560058, 0.005486239910125732, 0.0054618239402770994, 0.005482272148132324, 0.005478879928588867, 0.005435455799102784, 0.005486527919769287, 0.0054579200744628905, 0.005453824043273926, 0.005496831893920898, 0.005552127838134766, 0.005484672069549561, 0.005465439796447754, 0.005454559803009033, 0.005452864170074463, 0.005530303955078125, 0.005451488018035889, 0.005482848167419434, 0.005466239929199219, 0.005623807907104492, 0.005535615921020508, 0.0054633598327636715, 0.0056121921539306644, 0.005516384124755859, 0.005438399791717529, 0.005480447769165039, 0.005461535930633545, 0.005435872077941895, 0.005480447769165039, 0.005448991775512695, 0.005508031845092774, 0.005439455986022949, 0.005456960201263428, 0.00544982385635376, 0.005528223991394043, 0.005470208168029785, 0.00546611213684082, 0.005419007778167725, 0.005357664108276367, 0.005523007869720459, 0.00561023998260498, 0.005595136165618897, 0.005591040134429932, 0.005443583965301513, 0.005459743976593018, 0.005469855785369873, 0.005502912044525146, 0.005464704036712647, 0.005440544128417968, 0.0055775361061096195, 0.005485983848571777, 0.0055838079452514644, 0.005506527900695801, 0.00547430419921875, 0.005544288158416748, 0.005490623950958252, 0.005468224048614502, 0.0054579200744628905, 0.005472256183624268, 0.0055270719528198245, 0.0054380159378051756, 0.005463967800140381, 0.005468160152435303, 0.005447936058044433, 0.00647321605682373, 0.005478367805480957, 0.005509247779846191, 0.005677248001098633, 0.005510464191436768, 0.005556159973144531, 0.005501535892486573, 0.0054776320457458495, 0.00544652795791626, 0.005486591815948487, 0.0054579200744628905, 0.005453824043273926, 0.005455872058868408, 0.005449728012084961, 0.005543935775756836, 0.005502848148345947, 0.005441152095794678, 0.005544447898864746, 0.005494431972503662, 0.00545801591873169, 0.005467679977416992, 
0.005481184005737305, 0.005449728012084961, 0.0054620480537414555, 0.005476128101348877, 0.005601056098937988, 0.005439871788024902, 0.005426911830902099, 0.005453887939453125, 0.005459296226501465, 0.005442463874816895, 0.0054774398803710935, 0.0054486398696899415, 0.00547430419921875, 0.005428991794586182, 0.005490719795227051, 0.005439712047576904, 0.005359327793121338, 0.005448671817779541, 0.005445631980895996, 0.005515391826629639, 0.005437439918518067, 0.00545088005065918, 0.005468992233276368, 0.0054783358573913575, 0.005512191772460938, 0.005529952049255371, 0.005470592021942139, 0.005493023872375488, 0.0055131521224975584, 0.005483712196350098, 0.005485439777374267, 0.005487711906433106, 0.005594048023223877, 0.0058715839385986325, 0.005502463817596436, 0.00547382402420044, 0.0054568958282470706, 0.005475808143615723, 0.005468671798706055, 0.005432576179504395, 0.005493631839752197, 0.005475391864776611, 0.0054915199279785155, 0.005567647933959961, 0.0054915518760681155, 0.005459328174591064, 0.005466752052307129, 0.005477503776550293, 0.005474239826202393, 0.005456768035888672, 0.00546124792098999, 0.00548528003692627, 0.005459743976593018, 0.00555244779586792, 0.005556032180786132, 0.0054540162086486816, 0.005471456050872803, 0.005538591861724854, 0.005455872058868408, 0.005486591815948487, 0.005457632064819336, 0.005482751846313477, 0.005482783794403076, 0.005515039920806885, 0.005480415821075439, 0.0054939198493957515, 0.005460576057434082, 0.005468416213989258, 0.005465888023376465, 0.005483007907867431, 0.005518496036529541, 0.005488927841186523, 0.005508512020111084, 0.005534592151641845, 0.0054704642295837404, 0.005463840007781983, 0.005459936141967774, 0.0054759359359741215, 0.005491104125976563, 0.00543503999710083, 0.005458720207214355, 0.005502111911773681, 0.005514080047607422, 0.005447679996490478, 0.0054988799095153805, 0.0054701118469238285, 0.005461311817169189, 0.005471007823944092, 0.005442751884460449, 0.005481215953826904, 0.005445536136627197, 0.005464064121246338, 0.00550108814239502, 0.005455743789672851, 0.005449855804443359, 0.005485663890838623, 0.005477280139923096, 0.005478432178497314, 0.0054906558990478515, 0.005428319931030274, 0.005485472202301026, 0.005441184043884277, 0.00549513578414917, 0.005461599826812744, 0.005472320079803467, 0.005457568168640137, 0.005855936050415039, 0.005588992118835449, 0.005609407901763916, 0.005930751800537109, 0.006117695808410644, 0.005545504093170166, 0.005538464069366455, 0.005476223945617676, 0.005437376022338868, 0.005467360019683838, 0.00555452823638916, 0.0054382081031799315, 0.0054616317749023435, 0.005435328006744385, 0.005419392108917237, 0.0055129599571228025, 0.005463935852050781, 0.0054860801696777345, 0.005466752052307129, 0.005433504104614258, 0.005440896034240723, 0.005423327922821045, 0.005414559841156006, 0.005455743789672851, 0.005588863849639893, 0.005507487773895264, 0.0054349441528320315, 0.005403520107269287, 0.005441760063171387, 0.005410624027252197, 0.0053976960182189945, 0.005438240051269532, 0.005433343887329102, 0.005422976016998291, 0.005451903820037842, 0.005537568092346191, 0.005482367992401123, 0.005471903800964356, 0.0054481601715087894, 0.005416959762573242, 0.005441120147705078, 0.0054230718612670896, 0.005435840129852295, 0.005445631980895996, 0.005468160152435303, 0.005449408054351806, 0.005427840232849121, 0.005406400203704834, 0.0054271998405456545, 0.005421055793762207, 0.005449056148529053, 0.0054217281341552735, 0.005430943965911865, 0.0054316802024841305, 
0.005494688034057617, 0.005431359767913819, 0.005439680099487304, 0.005474080085754394, 0.005482528209686279, 0.005482143878936767, 0.005478752136230469, 0.005486368179321289, 0.005544159889221191, 0.00550707197189331, 0.005549280166625976, 0.005486847877502442, 0.0055342397689819335, 0.005482495784759522, 0.005459968090057373, 0.005432320117950439, 0.0054876160621643065, 0.00557260799407959, 0.005464000225067139, 0.005482463836669922, 0.005504640102386474, 0.005488448143005371, 0.005541632175445557, 0.005482592105865478, 0.005515647888183594, 0.005513631820678711, 0.005496128082275391, 0.005515999794006347, 0.005586944103240967, 0.005569600105285645, 0.005579520225524903, 0.005482687950134278, 0.005495071887969971, 0.005471968173980713, 0.005486591815948487, 0.005488383769989014, 0.005452032089233398, 0.005507199764251709, 0.005494495868682861, 0.005546271800994873, 0.00553107213973999, 0.0055136637687683105, 0.005492767810821533, 0.005499872207641601, 0.005555391788482666, 0.005423903942108154, 0.005564576148986817, 0.0055188159942626954, 0.005535583972930908, 0.005517343997955323, 0.005449920177459717, 0.005489280223846435, 0.005513055801391602, 0.005464064121246338, 0.00547430419921875, 0.005466400146484375, 0.005489471912384033, 0.005501215934753418, 0.0054462399482727054, 0.005561952114105225, 0.005482912063598633, 0.0054579200744628905, 0.005492479801177979, 0.005520703792572021, 0.005509856224060058, 0.005469888210296631, 0.00546668815612793, 0.005431327819824219, 0.0054514241218566895, 0.005490880012512207, 0.005470335960388184, 0.00542303991317749, 0.0055337600708007815, 0.005486720085144043, 0.00544163179397583, 0.00546995210647583, 0.005543807983398438, 0.0054457921981811526, 0.005484543800354004, 0.005512576103210449, 0.0054791679382324215, 0.0055409598350524905, 0.005537759780883789, 0.005505248069763184, 0.005494688034057617, 0.005470143795013428, 0.005503744125366211, 0.005476352214813233, 0.0054988799095153805, 0.005771423816680908, 0.005498720169067383, 0.005496831893920898, 0.005478464126586914, 0.005457856178283691, 0.0054925761222839355, 0.005470143795013428, 0.00543942403793335, 0.005472544193267822, 0.005443583965301513, 0.005523744106292724, 0.00544927978515625, 0.005451615810394287, 0.0054702720642089845, 0.005445695877075195, 0.005449920177459717, 0.0054330239295959475, 0.0054336638450622554, 0.005420735836029053, 0.005402463912963867, 0.005660831928253174, 0.005435391902923584, 0.005609471797943116, 0.005512479782104492, 0.005425888061523438, 0.005459968090057373, 0.005431295871734619, 0.005457376003265381, 0.005440192222595215, 0.0055195198059082035, 0.005482175827026367, 0.005491936206817627, 0.005409823894500732, 0.0054757437705993655, 0.005462272167205811, 0.00543939208984375, 0.005418848037719726, 0.005458271980285644, 0.005549088001251221, 0.005647295951843261, 0.005476384162902832, 0.00561356782913208, 0.005664896011352539, 0.005475903987884521, 0.005484864234924316, 0.005456128120422363, 0.005496895790100097, 0.005580480098724365, 0.005421216011047363, 0.005513055801391602, 0.005474368095397949, 0.005642176151275635, 0.005484543800354004, 0.005609471797943116, 0.005462016105651855, 0.0054579200744628905, 0.005458047866821289, 0.005469183921813964, 0.005487296104431152, 0.00553388786315918, 0.005461696147918701, 0.006893472194671631, 0.006023583889007568, 0.005584320068359375, 0.005554080009460449, 0.005498623847961426, 0.005490623950958252, 0.0054651198387145996, 0.005511104106903076, 0.005564159870147705, 0.005511712074279785, 0.005537824153900146, 
0.005648223876953125, 0.0055560641288757324, 0.005566463947296142, 0.005536799907684326, 0.0055502080917358395, 0.005729055881500244, 0.005595200061798096, 0.0055316481590271, 0.0055092802047729495, 0.005547264099121094, 0.005478400230407715, 0.005512639999389648, 0.005472832202911377, 0.005492735862731934, 0.006676000118255615, 0.005593567848205566, 0.005681568145751953, 0.0055701441764831544, 0.005533472061157226, 0.0054991040229797365, 0.0055409278869628905, 0.005529568195343017, 0.005626431941986084, 0.005542304039001465, 0.005513504028320313, 0.005582560062408448, 0.005509119987487793, 0.005545983791351319, 0.005603328227996827, 0.005479712009429932, 0.005550816059112549, 0.005545983791351319, 0.005498112201690674, 0.005517951965332031, 0.005637631893157959, 0.005496543884277344, 0.005567488193511963, 0.005490592002868652, 0.005505119800567627, 0.005508831977844238, 0.005628032207489014, 0.005490943908691406, 0.005469503879547119, 0.005532159805297852, 0.005588704109191894, 0.005484543800354004, 0.0055298562049865725, 0.005533055782318115, 0.005495456218719483, 0.005455552101135254, 0.005443903923034668, 0.0054988799095153805, 0.005500895977020264, 0.005455552101135254, 0.00553772783279419, 0.0055689277648925785, 0.005474527835845947, 0.00545366382598877, 0.0055725440979003905, 0.005473696231842041, 0.005544352054595947, 0.005449920177459717, 0.005455872058868408, 0.005478400230407715, 0.005535744190216065, 0.005472256183624268, 0.005429247856140137, 0.00546611213684082, 0.00547430419921875, 0.005453824043273926, 0.005502143859863281, 0.00548144006729126, 0.005502016067504883, 0.0055680317878723145, 0.005552608013153076, 0.005683199882507324, 0.005518943786621094, 0.005501152038574219, 0.006435103893280029, 0.005646431922912597, 0.005559360027313233, 0.0054952001571655276, 0.005488992214202881, 0.005525279998779297, 0.005474815845489502, 0.0055025601387023925, 0.005476191997528076, 0.0054479680061340335, 0.005555712223052979, 0.005468671798706055, 0.005484255790710449, 0.005474815845489502, 0.005463903903961182, 0.005538943767547607, 0.00555075216293335, 0.0054570879936218265, 0.005478623867034912, 0.005484960079193115, 0.0054603199958801266, 0.005505375862121582, 0.0055149121284484865, 0.0055419840812683105, 0.005525407791137696, 0.005517024040222168, 0.005515647888183594, 0.005482399940490722, 0.005453536033630371, 0.005467840194702148, 0.005474912166595459, 0.005555359840393066, 0.005503392219543457, 0.005470367908477783, 0.005458208084106445, 0.005519360065460205, 0.005464159965515137, 0.005486688137054443, 0.005463871955871582, 0.005471776008605957, 0.005466591835021972, 0.005434624195098877, 0.005476384162902832, 0.005466847896575928, 0.0054551677703857425, 0.005509024143218994, 0.005471231937408447, 0.0054534077644348145, 0.005468639850616455, 0.005465824127197266, 0.0055095357894897464, 0.005493887901306152, 0.005518879890441895, 0.005655168056488037, 0.0055196800231933595, 0.005508543968200684, 0.0055055999755859375, 0.0054963197708129885, 0.005477888107299805, 0.00548854398727417, 0.005461664199829102, 0.005469120025634765, 0.005514976024627686, 0.005441215991973877, 0.005519328117370606, 0.005547967910766602, 0.005459712028503418, 0.005471168041229248, 0.005442624092102051, 0.005823423862457275, 0.005477536201477051, 0.005436255931854248, 0.005453824043273926, 0.005478400230407715, 0.005449312210083008, 0.005564832210540772, 0.005445343971252441, 0.005435232162475586, 0.005466559886932373, 0.005467584133148193, 0.00546070384979248, 0.005464032173156739, 
0.005474175930023193, 0.005462016105651855, 0.005488639831542969, 0.005427008152008057, 0.005480639934539795, 0.005564159870147705, 0.005478655815124512, 0.005463263988494873, 0.005437503814697266, 0.005477248191833496, 0.005467423915863037, 0.005478047847747803, 0.005546688079833984, 0.005478879928588867, 0.0055064959526062015, 0.0055032958984375, 0.0054988799095153805, 0.005492735862731934, 0.005568511962890625, 0.005494783878326416, 0.005523776054382324, 0.005476223945617676, 0.005556128025054932, 0.005515391826629639, 0.005520959854125976, 0.005483903884887695, 0.005464928150177002, 0.0054581441879272465, 0.005469183921813964, 0.0054559998512268065, 0.005450399875640869, 0.005474016189575195, 0.005504767894744873, 0.0054973759651184085, 0.0054514241218566895, 0.00548089599609375, 0.005463967800140381, 0.00544326400756836, 0.005427072048187256, 0.005386847972869873, 0.005539423942565918, 0.0055433278083801265, 0.005487679958343506, 0.005463935852050781, 0.005408736228942871, 0.005464096069335938, 0.005515488147735595, 0.0054617919921875, 0.005486591815948487, 0.005449728012084961, 0.005484799861907959, 0.005447264194488525, 0.00542742395401001, 0.005457215785980225, 0.005497471809387207, 0.005443583965301513, 0.005522848129272461, 0.005534304141998291, 0.005467775821685791, 0.005456255912780762, 0.005482495784759522, 0.005451839923858642, 0.005455808162689209, 0.005439648151397705, 0.0054618558883666995, 0.005462016105651855, 0.005449984073638916, 0.005467520236968994, 0.0054316802024841305, 0.0054579200744628905, 0.005509312152862549, 0.005448991775512695, 0.0054605121612548825, 0.0055212478637695316, 0.00544159984588623, 0.005453919887542725, 0.005451935768127442, 0.005451615810394287, 0.00545356798171997, 0.005531904220581055, 0.005525279998779297, 0.0054683837890625, 0.005447679996490478, 0.005472256183624268, 0.005483871936798096, 0.00544220781326294, 0.00547430419921875, 0.005459199905395508, 0.005479296207427978, 0.005435264110565186, 0.005425151824951172, 0.0054941439628601076, 0.005442272186279297, 0.005617280006408692, 0.0054848318099975585, 0.005504255771636963, 0.006189824104309082, 0.005599232196807862, 0.0055353279113769535, 0.006015391826629639, 0.0058716158866882326, 0.005513055801391602, 0.00540499210357666, 0.005467743873596192, 0.005525279998779297, 0.005456768035888672, 0.005553055763244629, 0.0055, 0.005590176105499268, 0.005553855895996094, 0.005466239929199219, 0.005447904109954834, 0.005460095882415771, 0.005500448226928711, 0.005452896118164062, 0.005463808059692383, 0.005460031986236572, 0.005482495784759522, 0.005503200054168701, 0.0054802241325378415, 0.005461120128631592, 0.005470560073852539, 0.0054349441528320315, 0.005497824192047119, 0.005453536033630371, 0.005467872142791748, 0.005444159984588623, 0.005431456089019775, 0.005448768138885498, 0.005499584197998047, 0.005425248146057129, 0.005476352214813233, 0.005424479961395264, 0.005421472072601318, 0.005426976203918457, 0.005452479839324951, 0.0055677762031555176, 0.005425151824951172, 0.005431807994842529, 0.005514304161071778, 0.0054501757621765136, 0.005435935974121094, 0.005412831783294678, 0.005435391902923584, 0.005438560009002685, 0.005482751846313477, 0.005466720104217529, 0.005644351959228516, 0.005453824043273926, 0.005478400230407715, 0.005441535949707031, 0.005451776027679443, 0.005421055793762207, 0.005491936206817627, 0.005432096004486084, 0.005418240070343017, 0.0054382081031799315, 0.005456992149353027, 0.005437439918518067, 0.005464992046356201, 0.005437312126159668, 
0.005470335960388184, 0.005449728012084961, 0.005484384059906006, 0.005469888210296631]",tokens/s,182.4515167831683,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1846.915072,2466.11968,0.0,2080.374784,2078.348288,s,1,8.6422900390625,8.6422900390625,0.0,8.6422900390625,8.6422900390625,8.6422900390625,8.6422900390625,[8.6422900390625],,kWh,4.17317010542168e-05,4.596099367708189e-06,1.350612191602929e-05,5.983392233795428e-05,,MB,1928.138752,2663.251968,0.0,2248.146944,2179.679232,s,10,1.208913703918457,0.12089137039184569,0.0009536201386957759,0.12122719955444336,0.1212889778137207,0.12134521064758301,0.12139019691467286,"[0.12122982025146484, 0.12094310760498046, 0.12111007690429687, 0.12115478515625, 0.12127648162841796, 0.12122457885742187, 0.12124732971191406, 0.12127481842041016, 0.12140144348144531, 0.11805126190185547]",tokens/s,2117.603590481489,kWh,3.6628332810420258e-06,4.039125718312122e-07,2.423873466874879e-06,6.490619319748117e-06,tokens/kWh,39441536.68373431,MB,1935.253504,2663.251968,0.0,2248.146944,2179.681792,s,10,15.148650756835938,1.5148650756835937,0.015696019420022442,1.507517822265625,1.5432012695312498,1.5457697631835936,1.5478245581054686,"[1.5063739013671875, 1.501903564453125, 1.5055443115234375, 1.5148082275390624, 1.5075947265625, 1.509640625, 1.50744091796875, 1.504375732421875, 1.5426304931640624, 1.5483382568359374]",tokens/s,41.58786218737718,kWh,5.658015418270318e-05,6.240601774275239e-06,3.0158708154723668e-05,9.297946411170209e-05,tokens/kWh,677568.9729112025,,s,630,15.146537839889527,0.0240421235553802,0.000516650247039382,0.02390230369567871,0.024491538810729983,0.02458247022628784,0.025239078311920165,"[0.02462726402282715, 0.024084575653076173, 0.02393408012390137, 0.023857952117919922, 0.023866432189941406, 0.023880031585693358, 0.02384752082824707, 0.023758848190307616, 0.023791519165039063, 0.024016992568969726, 0.023803936004638673, 0.023707263946533202, 0.02444051170349121, 0.024983776092529296, 0.024062400817871095, 0.024041471481323243, 0.02384486389160156, 0.023966880798339845, 0.023880575180053713, 0.02375676727294922, 0.02381376075744629, 0.02380633544921875, 0.023760000228881837, 0.023765888214111328, 0.02373222351074219, 0.023795711517333985, 0.02370969581604004, 0.0237076473236084, 0.02369331169128418, 0.02372140884399414, 0.023894111633300782, 0.023972320556640624, 0.02408243179321289, 0.02397177505493164, 0.02393075180053711, 0.023944543838500976, 0.02393788719177246, 0.023857152938842774, 0.024080608367919924, 0.02380975914001465, 0.02390239906311035, 0.023748479843139648, 0.023785472869873047, 0.023752544403076174, 0.023852607727050782, 0.023935615539550783, 0.023885568618774413, 0.023822559356689452, 0.023779327392578126, 0.02376633644104004, 0.02377120018005371, 0.024059551239013672, 0.024246944427490234, 0.02416374397277832, 0.02382736015319824, 0.023932992935180666, 0.023694559097290038, 
0.02384931182861328, 0.02395155143737793, 0.02374470329284668, 0.023738367080688477, 0.023936351776123046, 0.023881536483764648, 0.024119199752807616, 0.02380726432800293, 0.02380771255493164, 0.023841791152954102, 0.023824064254760743, 0.02377350425720215, 0.02385465621948242, 0.0238350715637207, 0.023842815399169923, 0.023777280807495117, 0.023818239212036133, 0.023705312728881836, 0.023716127395629883, 0.023670783996582033, 0.023758752822875977, 0.023650400161743163, 0.023787519454956055, 0.02389811134338379, 0.023842208862304686, 0.023773792266845704, 0.023775039672851564, 0.023706911087036132, 0.02374457550048828, 0.02380588722229004, 0.023708192825317383, 0.024016447067260742, 0.02377756881713867, 0.02377987289428711, 0.02372198486328125, 0.023738367080688477, 0.023744512557983398, 0.02392678451538086, 0.024153440475463868, 0.023859872817993164, 0.02386934471130371, 0.0238144645690918, 0.023812992095947266, 0.023769376754760742, 0.023711456298828124, 0.023836767196655274, 0.023704383850097658, 0.023721055984497072, 0.0237512321472168, 0.023912319183349608, 0.023740703582763673, 0.023754304885864257, 0.023785919189453126, 0.023990463256835938, 0.02415113639831543, 0.024029760360717772, 0.023996768951416017, 0.024002559661865236, 0.024045568466186523, 0.02393401527404785, 0.023909311294555664, 0.023910175323486327, 0.023890144348144533, 0.023816192626953125, 0.023801855087280274, 0.023805952072143553, 0.02394870376586914, 0.023857759475708007, 0.02387116813659668, 0.024330335617065428, 0.02419068717956543, 0.023923519134521485, 0.023848352432250978, 0.023914783477783204, 0.023732351303100585, 0.02367820739746094, 0.02408755111694336, 0.023795711517333985, 0.023863296508789062, 0.0238787841796875, 0.023782272338867188, 0.02369331169128418, 0.023810047149658203, 0.023840736389160157, 0.023746591567993164, 0.02375257682800293, 0.02371187210083008, 0.023822336196899413, 0.02373638343811035, 0.024124832153320314, 0.02552272033691406, 0.023851007461547852, 0.02401875114440918, 0.024047103881835938, 0.023925439834594726, 0.023914112091064452, 0.02420159912109375, 0.023805952072143553, 0.023801088333129883, 0.02391116714477539, 0.023810047149658203, 0.024352767944335937, 0.023744319915771483, 0.023889663696289063, 0.02371945571899414, 0.023703615188598634, 0.023802719116210937, 0.02375209617614746, 0.023831136703491212, 0.023809696197509767, 0.024242431640625, 0.023793407440185547, 0.023953760147094726, 0.023754751205444336, 0.023723264694213868, 0.02373868751525879, 0.023749055862426757, 0.023742464065551756, 0.02368671989440918, 0.023721952438354493, 0.023908607482910155, 0.023922655105590822, 0.024010623931884766, 0.023693216323852538, 0.02391641616821289, 0.024016639709472657, 0.023791872024536132, 0.023904640197753905, 0.02378972816467285, 0.023851072311401367, 0.02382998466491699, 0.02392684745788574, 0.024803327560424804, 0.02447551918029785, 0.024365055084228517, 0.024030719757080078, 0.02392947196960449, 0.02384486389160156, 0.023828224182128908, 0.023723615646362304, 0.023792287826538087, 0.023820032119750978, 0.023791103363037108, 0.023767391204833985, 0.023685440063476563, 0.023816064834594728, 0.023873760223388673, 0.024247711181640624, 0.023863008499145508, 0.023884672164916992, 0.02392268753051758, 0.023830528259277343, 0.023768831253051757, 0.023736576080322265, 0.023805952072143553, 0.023883295059204102, 0.023844415664672852, 0.024048095703125, 0.02459283256530762, 0.02447158432006836, 0.024166336059570314, 0.023988256454467772, 0.024000511169433594, 0.023786592483520507, 
0.023712671279907227, 0.023754911422729494, 0.023881568908691406, 0.023799808502197265, 0.023916543960571288, 0.024336383819580077, 0.024162303924560546, 0.023985504150390625, 0.023851680755615234, 0.023899520874023437, 0.024058496475219727, 0.02385500717163086, 0.023861343383789063, 0.02386934471130371, 0.023808095932006838, 0.024016895294189454, 0.023769088745117187, 0.02381977653503418, 0.0238636474609375, 0.023720096588134766, 0.023764703750610353, 0.02712550354003906, 0.023914880752563476, 0.023947423934936523, 0.024119327545166016, 0.024325855255126955, 0.025223424911499023, 0.024805376052856445, 0.024090112686157225, 0.02401740837097168, 0.023940959930419923, 0.025073568344116212, 0.024211551666259764, 0.023975328445434572, 0.023908960342407228, 0.023900159835815428, 0.02381558418273926, 0.023785280227661132, 0.02382464027404785, 0.02392095947265625, 0.023838783264160158, 0.023965408325195312, 0.02404153633117676, 0.023912832260131835, 0.023844127655029298, 0.023838592529296876, 0.023849727630615235, 0.024219135284423828, 0.0238372802734375, 0.023767040252685546, 0.02369532775878906, 0.02387936019897461, 0.02382195281982422, 0.02385113525390625, 0.023923295974731446, 0.023881631851196287, 0.02388796806335449, 0.024010751724243166, 0.023850719451904298, 0.02386463928222656, 0.023959583282470703, 0.024176992416381837, 0.02411734390258789, 0.024058368682861327, 0.023984128952026368, 0.023967744827270508, 0.023911712646484375, 0.023907039642333986, 0.023910400390625, 0.02388787269592285, 0.02390220832824707, 0.023965696334838867, 0.024063840866088867, 0.023912288665771483, 0.023882047653198242, 0.023930240631103515, 0.02392127990722656, 0.02379305648803711, 0.023953119277954103, 0.0237042236328125, 0.02375196838378906, 0.02381532859802246, 0.024037023544311524, 0.024008480072021485, 0.023871904373168946, 0.023878944396972655, 0.023867488861083985, 0.02384339141845703, 0.023889503479003905, 0.024011167526245117, 0.023783456802368163, 0.023775199890136718, 0.02386934471130371, 0.023891231536865235, 0.02477145576477051, 0.02409775924682617, 0.023986175537109376, 0.023949247360229492, 0.024309823989868164, 0.023867136001586915, 0.024146175384521483, 0.02408038330078125, 0.02385305595397949, 0.02374457550048828, 0.02402854347229004, 0.023902015686035158, 0.023798528671264647, 0.023834623336791993, 0.023969472885131834, 0.02396816062927246, 0.023840864181518553, 0.02378019142150879, 0.02381100845336914, 0.023748735427856445, 0.02388777542114258, 0.023752704620361328, 0.023834400177001953, 0.023840576171875, 0.023820703506469726, 0.023826431274414063, 0.0238504638671875, 0.023853599548339845, 0.024067487716674805, 0.02386390495300293, 0.023842815399169923, 0.023777280807495117, 0.024061952590942383, 0.023883775711059572, 0.023973440170288084, 0.02403318405151367, 0.024187423706054687, 0.024071807861328124, 0.02401113510131836, 0.02403692817687988, 0.024057695388793945, 0.023877599716186523, 0.023872127532958986, 0.023870655059814453, 0.023953823089599608, 0.024721824645996093, 0.02433433532714844, 0.023909887313842772, 0.023915008544921876, 0.023808128356933595, 0.023850208282470704, 0.023841440200805666, 0.023791072845458984, 0.023826847076416014, 0.023947391510009765, 0.023791616439819335, 0.02409676742553711, 0.024183935165405273, 0.024046464920043944, 0.02391859245300293, 0.024153312683105468, 0.023904384613037108, 0.023804576873779296, 0.0252454719543457, 0.02418227195739746, 0.024096832275390626, 0.024805728912353515, 0.025522495269775392, 0.024403776168823242, 0.02459872055053711, 
0.02396531105041504, 0.02375913619995117, 0.023885919570922853, 0.023814144134521483, 0.023975807189941405, 0.02371366310119629, 0.023938335418701173, 0.023863296508789062, 0.02380627250671387, 0.023722143173217775, 0.023655040740966797, 0.02372185516357422, 0.0238919677734375, 0.023665952682495116, 0.0238384952545166, 0.025371583938598632, 0.02449148750305176, 0.0241177921295166, 0.024025087356567384, 0.024066047668457033, 0.023854848861694335, 0.023830591201782228, 0.023762815475463866, 0.023871616363525392, 0.02376028823852539, 0.02381023979187012, 0.02371545600891113, 0.023860191345214842, 0.02371788787841797, 0.023658496856689453, 0.02370150375366211, 0.023758848190307616, 0.02381996726989746, 0.023767360687255858, 0.023661983489990233, 0.02396015930175781, 0.02392032051086426, 0.023733888626098633, 0.023685663223266602, 0.023666847229003907, 0.024147424697875976, 0.023717824935913085, 0.023638175964355468, 0.02365020751953125, 0.023654399871826173, 0.02362393569946289, 0.023619104385375976, 0.023650976181030275, 0.023672927856445314, 0.02373360061645508, 0.023680992126464843, 0.023812288284301757, 0.023835039138793944, 0.023654495239257813, 0.023751712799072264, 0.023741376876831054, 0.02448975944519043, 0.02388540840148926, 0.02380044746398926, 0.023694496154785156, 0.02380681610107422, 0.023787519454956055, 0.023658496856689453, 0.02367695999145508, 0.023684320449829103, 0.023692031860351563, 0.023725311279296876, 0.023679744720458983, 0.023641216278076173, 0.02356671905517578, 0.02384492874145508, 0.023634368896484376, 0.023723007202148438, 0.023630815505981444, 0.02364828872680664, 0.023584768295288085, 0.023513088226318358, 0.023589887619018556, 0.023614463806152345, 0.02367283248901367, 0.023734272003173826, 0.02362508773803711, 0.02369590377807617, 0.023603296279907225, 0.02364326477050781, 0.023765888214111328, 0.023781375885009767, 0.02389360046386719, 0.024071935653686524, 0.023990623474121092, 0.023791936874389647, 0.023928831100463867, 0.023623647689819335, 0.023664031982421875, 0.0236364803314209, 0.023627904891967772, 0.02370560073852539, 0.023656288146972657, 0.023653728485107422, 0.02376278305053711, 0.02367296028137207, 0.0236942081451416, 0.02382758331298828, 0.02396601676940918, 0.023906143188476562, 0.02428998374938965, 0.024358911514282225, 0.024268800735473633, 0.02434048080444336, 0.024213504791259766, 0.024198591232299806, 0.024214080810546875, 0.024338176727294922, 0.02439193534851074, 0.02450432014465332, 0.02453094482421875, 0.024414207458496092, 0.024501440048217773, 0.02440275192260742, 0.025110559463500978, 0.0245882568359375, 0.024505792617797853, 0.024445152282714842, 0.02451263999938965, 0.024575712203979493, 0.02457651138305664, 0.02469478416442871, 0.024600576400756836, 0.02495078468322754, 0.02450432014465332, 0.024452991485595703, 0.024342655181884765, 0.02442019271850586, 0.024427743911743165, 0.024392480850219726, 0.02450147247314453, 0.02438153648376465, 0.024345439910888673, 0.0244715518951416, 0.02451456069946289, 0.02443059158325195, 0.024567583084106445, 0.02447792053222656, 0.02450227165222168, 0.024547679901123047, 0.024333568572998048, 0.024535455703735352, 0.02448588752746582, 0.024354368209838866, 0.024420799255371092, 0.024341856002807617, 0.02429404830932617, 0.02433024024963379, 0.024292736053466796, 0.024304256439208985, 0.024423967361450194, 0.024346176147460936, 0.02449862480163574, 0.024578527450561525, 0.02447065544128418, 0.02454412841796875, 0.024526847839355468, 0.0245185604095459, 0.024399967193603517, 0.024387584686279298, 
0.02439743995666504, 0.024499647140502928, 0.02448585510253906, 0.024640480041503907, 0.024492000579833983, 0.024522783279418946, 0.02445516777038574, 0.024467456817626954, 0.02445516777038574, 0.024448415756225587, 0.024377952575683592, 0.024442720413208007, 0.02459657669067383, 0.024449087142944335, 0.024403968811035157, 0.02449955177307129, 0.02451487922668457, 0.025091232299804686, 0.02449065589904785, 0.024449024200439453, 0.024459264755249024, 0.024606719970703125, 0.0245166072845459, 0.024551424026489257, 0.024614368438720703, 0.02453763198852539, 0.024751264572143553, 0.024379295349121095, 0.02442038345336914, 0.024357120513916017, 0.024384159088134766, 0.024389631271362306, 0.024581344604492187, 0.02427961540222168, 0.024160192489624022, 0.02419875144958496, 0.024111616134643556, 0.024107200622558594, 0.02409062385559082, 0.024309152603149413, 0.025116479873657227, 0.02450921630859375, 0.024311071395874025, 0.02438003158569336, 0.024340543746948242, 0.02439567947387695, 0.024423871994018555, 0.024236127853393553, 0.024414464950561522, 0.024447328567504884, 0.02429747200012207, 0.024412160873413087, 0.02445516777038574, 0.024397504806518554, 0.02429545593261719, 0.024332576751708985, 0.024261791229248045, 0.024313951492309572, 0.024356672286987305, 0.024414592742919922, 0.02423788833618164, 0.02436735916137695, 0.02444339179992676, 0.02432204818725586, 0.02433625602722168, 0.024367231369018555, 0.024266752243041992, 0.024388671875, 0.024435327529907225, 0.02439219284057617, 0.02459123229980469, 0.024583391189575195, 0.024339391708374025, 0.024374048233032228, 0.024364351272583008, 0.0244967041015625, 0.02457318305969238, 0.02533260726928711, 0.033556415557861326, 0.024376928329467775]",tokens/s,41.59366362528396,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking 
context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4758.048768,6022.889472,0.0,5637.144576,5630.431232,s,1,10.645783203125,10.645783203125,0.0,10.645783203125,10.645783203125,10.645783203125,10.645783203125,[10.645783203125],,kWh,0.00010006543464166195,1.1030222106192207e-05,3.340724894798641e-05,0.00014450290569584056,,MB,1771.413504,6402.473984,0.0,5987.36896,5889.96608,s,10,4.525478393554687,0.4525478393554687,0.0028786470292687626,0.45333647155761714,0.4550501342773437,0.455488735961914,0.4558396173095703,"[0.44518426513671877, 0.4516409301757813, 0.45452658081054687, 0.45342489624023435, 0.4508680725097656, 0.45174948120117187, 0.453248046875, 0.4549526672363281, 0.45395611572265626, 0.45592733764648435]",tokens/s,565.6860507048324,kWh,1.3111647800543071e-05,1.4459869212438372e-06,8.755043235912822e-06,2.331267795769973e-05,tokens/kWh,10981149.41854838,MB,1779.441664,6402.473984,0.0,5987.36896,5889.96864,s,10,28.717907470703125,2.8717907470703126,0.013521227430442189,2.8722371826171873,2.88121884765625,2.8919276611328124,2.9004947119140625,"[2.902636474609375, 2.872265380859375, 2.847655029296875, 2.866521484375, 2.860639892578125, 2.86499560546875, 2.878218994140625, 2.878839111328125, 2.872208984375, 2.873926513671875]",tokens/s,21.93753150861361,kWh,6.732961361861964e-05,7.426542878989397e-06,4.463930503508916e-05,0.00011939546153269823,tokens/kWh,527658.2475687027,,s,630,28.715477203369154,0.04558012254503038,0.0007199102228892166,0.045474401473999025,0.046092403793334964,0.046588317680358884,0.04809122558593751,"[0.04696012878417969, 0.04581043243408203, 0.04589990234375, 0.04599305725097656, 0.045725696563720705, 0.04570601654052735, 0.045688289642333985, 0.04556380844116211, 0.04562188720703125, 0.045871105194091794, 0.04638924789428711, 0.04576399993896484, 0.04783705520629883, 0.04577155303955078, 0.04600191879272461, 0.045582462310791015, 0.04571692657470703, 0.045845119476318356, 0.04578508758544922, 0.04612505722045898, 0.0461844482421875, 0.04599363327026367, 0.04581411361694336, 0.04819504165649414, 0.04839475250244141, 0.046034976959228514, 0.046514175415039063, 0.045823486328125, 0.04561948776245117, 0.045897216796875, 0.045910751342773434, 0.04574617767333984, 0.04671897506713867, 0.047443870544433595, 0.046122718811035156, 0.04595955276489258, 0.046045185089111325, 0.04599321746826172, 
0.04584524917602539, 0.04594793701171875, 0.04603184127807617, 0.0460431022644043, 0.04578483200073242, 0.0460497932434082, 0.046062656402587894, 0.045944862365722657, 0.04619305419921875, 0.045999969482421875, 0.045996479034423825, 0.046139392852783206, 0.045797374725341795, 0.045658111572265625, 0.045946880340576174, 0.04639744186401367, 0.04598342514038086, 0.045685054779052735, 0.045885025024414064, 0.04569724655151367, 0.04568902587890625, 0.045819839477539065, 0.04594403076171875, 0.0457531852722168, 0.046013504028320315, 0.04664934539794922, 0.04535043334960937, 0.04589616012573242, 0.04572979354858398, 0.045690399169921875, 0.04559468841552734, 0.045445438385009765, 0.04552422332763672, 0.04679148864746094, 0.04553539276123047, 0.04542972946166992, 0.045603744506835936, 0.04577443313598633, 0.04600073623657226, 0.04575827026367187, 0.04543862533569336, 0.04566460800170898, 0.045527008056640624, 0.045669536590576175, 0.04530470275878906, 0.04615577697753906, 0.04580460739135742, 0.045492702484130856, 0.045263233184814455, 0.04607936096191406, 0.045361503601074216, 0.04532057571411133, 0.0452935676574707, 0.04555980682373047, 0.046495552062988284, 0.045637825012207034, 0.04563983917236328, 0.045395488739013674, 0.045531455993652346, 0.04554342269897461, 0.04567859268188477, 0.045535232543945314, 0.047341182708740236, 0.04602272033691406, 0.04543110275268555, 0.04530176162719726, 0.04503744125366211, 0.045246593475341795, 0.045520896911621096, 0.04569216156005859, 0.045435649871826175, 0.04585881423950195, 0.045451263427734374, 0.04520755386352539, 0.045303199768066404, 0.04538019180297852, 0.045122718811035155, 0.04536310577392578, 0.04534368133544922, 0.04545238494873047, 0.0454172477722168, 0.045109375, 0.04545497512817383, 0.04552947235107422, 0.04527308654785156, 0.04534806442260742, 0.04518112182617187, 0.045050464630126956, 0.04634211349487305, 0.045844287872314454, 0.04586524963378906, 0.04547379302978516, 0.045242080688476564, 0.044882209777832034, 0.04505369567871094, 0.044996864318847654, 0.04472217559814453, 0.04471398544311524, 0.04554547119140625, 0.0454205436706543, 0.04515020751953125, 0.04516864013671875, 0.044980224609375, 0.0448144645690918, 0.045072254180908204, 0.04553433609008789, 0.046215808868408204, 0.04564608001708984, 0.04549017715454102, 0.045336353302001954, 0.045287841796875, 0.0450107536315918, 0.04487356948852539, 0.044773536682128905, 0.04497971343994141, 0.04527360153198242, 0.04551679992675781, 0.04534272003173828, 0.04510515213012695, 0.04518083190917969, 0.04487107086181641, 0.04470854568481445, 0.04484697723388672, 0.04506227111816406, 0.045400062561035154, 0.04520140838623047, 0.04511651229858398, 0.045445343017578126, 0.04523881530761719, 0.044961952209472654, 0.045045120239257816, 0.044905086517333985, 0.045385726928710936, 0.045309280395507814, 0.04519139099121094, 0.0451374397277832, 0.04509916687011719, 0.04490316772460937, 0.044781566619873044, 0.04507823944091797, 0.045423969268798825, 0.04536620712280273, 0.045219486236572265, 0.04508614349365234, 0.04493590545654297, 0.044902591705322265, 0.04493926239013672, 0.04478480148315429, 0.04483158493041992, 0.04529532623291015, 0.046067424774169925, 0.04703622436523437, 0.04568153762817383, 0.04512051010131836, 0.04525120162963867, 0.04492531204223633, 0.044916736602783204, 0.04488380813598633, 0.045473247528076174, 0.04533926391601562, 0.04518918228149414, 0.045244415283203124, 0.045162494659423826, 0.045170463562011716, 0.046466686248779296, 0.04487184143066406, 0.04514144134521485, 
0.04536217498779297, 0.04504576110839844, 0.04494950485229492, 0.045041664123535156, 0.04493878555297852, 0.04483430480957031, 0.0448458251953125, 0.045208961486816406, 0.04729443359375, 0.045886337280273436, 0.05021507263183594, 0.04554025650024414, 0.04512166213989258, 0.04519987106323242, 0.04484128189086914, 0.04510627365112305, 0.045519775390625, 0.046432254791259765, 0.04650572967529297, 0.04568703842163086, 0.04545459365844726, 0.044956321716308596, 0.04513596725463867, 0.045557758331298825, 0.04589561462402344, 0.045921951293945315, 0.045652320861816406, 0.045247776031494144, 0.045357856750488285, 0.04525056076049805, 0.04522310256958008, 0.045388481140136716, 0.04519500732421875, 0.0459738883972168, 0.04549427032470703, 0.04550041580200195, 0.04510521697998047, 0.0454634895324707, 0.04570924758911133, 0.04555923080444336, 0.04519382476806641, 0.045846561431884765, 0.045788257598876954, 0.04557030487060547, 0.045298336029052734, 0.045078079223632814, 0.04502751922607422, 0.04681296157836914, 0.04586681747436523, 0.04585718536376953, 0.045453311920166016, 0.04524054336547852, 0.04503260803222656, 0.04507712173461914, 0.04510924911499024, 0.045082401275634766, 0.04528963088989258, 0.04562255859375, 0.0454716796875, 0.04517359924316406, 0.04523212814331055, 0.04532374572753906, 0.045383617401123046, 0.04521635055541992, 0.045832191467285156, 0.046237056732177734, 0.04598233413696289, 0.045625377655029296, 0.045363166809082034, 0.04520771026611328, 0.045027168273925784, 0.044896190643310546, 0.04553859329223633, 0.04559939193725586, 0.04565414428710937, 0.04549836730957031, 0.04534684753417969, 0.04547171020507813, 0.045283134460449216, 0.04502956771850586, 0.044943359375, 0.04501299285888672, 0.04558992004394531, 0.04547427368164063, 0.04536742401123047, 0.04546294403076172, 0.04556832122802734, 0.04521807861328125, 0.04557619094848633, 0.04557388687133789, 0.04539616012573242, 0.045170654296875, 0.04526684951782227, 0.04564566421508789, 0.04555401611328125, 0.04530124664306641, 0.045150047302246095, 0.04513040161132813, 0.0452567024230957, 0.04505712127685547, 0.04576553726196289, 0.04562739181518555, 0.04584243011474609, 0.045418495178222655, 0.045309505462646484, 0.045109695434570315, 0.04511948776245117, 0.04494540786743164, 0.045161567687988284, 0.045511489868164064, 0.04679116821289062, 0.04538972854614258, 0.04520924758911133, 0.045191230773925783, 0.04498080062866211, 0.0448474235534668, 0.045268192291259765, 0.045722335815429685, 0.04523212814331055, 0.04497983932495117, 0.04507427215576172, 0.04499836730957031, 0.044843841552734375, 0.044818687438964847, 0.04478096008300781, 0.04611721420288086, 0.04555980682373047, 0.04527040100097656, 0.04501689529418945, 0.04719443130493164, 0.045346782684326174, 0.04505984115600586, 0.04967913436889648, 0.04513177490234375, 0.04562944030761719, 0.045518848419189455, 0.04556390380859375, 0.04506009674072266, 0.04500275039672851, 0.044967937469482425, 0.04482457733154297, 0.044818431854248046, 0.04568070220947266, 0.04668739318847656, 0.04523702239990234, 0.04507827377319336, 0.04497638320922852, 0.04539984130859375, 0.04503984069824219, 0.04509286499023438, 0.04564310455322266, 0.04685481643676758, 0.045499935150146484, 0.04532844924926758, 0.045382049560546874, 0.04512768173217773, 0.04483071899414062, 0.04499251174926758, 0.04530963134765625, 0.045580001831054685, 0.04570694351196289, 0.047442111968994144, 0.0456748161315918, 0.04541183853149414, 0.045290401458740234, 0.045565280914306644, 0.04537615966796875, 0.04549555206298828, 
0.045869823455810546, 0.04552499389648437, 0.04537753677368164, 0.04529484939575195, 0.045095680236816406, 0.04653104019165039, 0.05541839981079102, 0.04589750289916992, 0.045833248138427735, 0.045829952239990236, 0.04530176162719726, 0.044954753875732424, 0.0449189453125, 0.044874591827392576, 0.0448961296081543, 0.0449699821472168, 0.045911487579345704, 0.04575872039794922, 0.04561983871459961, 0.04660601425170898, 0.04556390380859375, 0.04514627075195313, 0.04520687866210937, 0.044972225189208986, 0.045297279357910156, 0.046013118743896485, 0.04578713607788086, 0.04538163375854492, 0.04566016006469727, 0.04557209777832031, 0.046878719329833986, 0.04567244720458984, 0.04739072036743164, 0.045848575592041016, 0.04561830520629883, 0.04533737564086914, 0.04520969772338867, 0.04540620803833008, 0.04510259246826172, 0.046120960235595705, 0.04552956771850586, 0.045383712768554685, 0.04581545639038086, 0.04545571136474609, 0.045295616149902344, 0.04511539077758789, 0.04500067138671875, 0.04531203079223633, 0.04543283081054687, 0.045120609283447265, 0.04532112121582031, 0.04544921493530273, 0.045379585266113284, 0.04576227188110352, 0.045547809600830075, 0.04527718353271484, 0.045107200622558595, 0.04524031829833984, 0.045590526580810545, 0.04583980941772461, 0.046377536773681644, 0.04608982467651367, 0.045521312713623044, 0.04516659164428711, 0.045162494659423826, 0.04501708984375, 0.045246463775634765, 0.04493484878540039, 0.046855457305908205, 0.04594992065429687, 0.04627369689941406, 0.045575008392333985, 0.045363201141357425, 0.04525379180908203, 0.044890975952148436, 0.04509225463867188, 0.045478496551513675, 0.045676544189453126, 0.04541987228393555, 0.04524508666992187, 0.04515430450439453, 0.04510639953613281, 0.044984767913818356, 0.04525910568237305, 0.04584771347045898, 0.04586783981323242, 0.0455456314086914, 0.04586687850952149, 0.0457625617980957, 0.04591225433349609, 0.0455843505859375, 0.04569817733764649, 0.04539984130859375, 0.0458306884765625, 0.046300575256347655, 0.04616012954711914, 0.04595600128173828, 0.045924190521240235, 0.04565132904052734, 0.045514911651611326, 0.04563811111450195, 0.045570049285888675, 0.045954334259033204, 0.04593942260742188, 0.04568473434448242, 0.04560486221313476, 0.046020606994628906, 0.045871105194091794, 0.04566185760498047, 0.04571171188354492, 0.04587142562866211, 0.046115615844726565, 0.04599491119384766, 0.04602624130249024, 0.045806079864501956, 0.04578847885131836, 0.04551308822631836, 0.04558265686035156, 0.04539116668701172, 0.04565673446655273, 0.04564310455322266, 0.04603359985351563, 0.04627167892456055, 0.04574700927734375, 0.0455266227722168, 0.045498783111572266, 0.04550656127929688, 0.04533657455444336, 0.04549427032470703, 0.04571059036254883, 0.04599631881713867, 0.04678879928588867, 0.04530380630493164, 0.0451932144165039, 0.045184864044189456, 0.04565971374511719, 0.0449705924987793, 0.04513792037963867, 0.04567372894287109, 0.04556256103515625, 0.045570110321044924, 0.04536892700195312, 0.04564009475708008, 0.04541030502319336, 0.04555887985229492, 0.0451798095703125, 0.04510515213012695, 0.045602432250976564, 0.04582191848754883, 0.045408512115478514, 0.04766531372070312, 0.04676979064941406, 0.04556224060058594, 0.05015894317626953, 0.045173534393310545, 0.045123455047607425, 0.04553670501708985, 0.045767230987548826, 0.0456519660949707, 0.045125633239746096, 0.04500384140014648, 0.04499756622314453, 0.04508671951293945, 0.04501721572875977, 0.04567596817016602, 0.045824447631835935, 0.045954334259033204, 
0.04562607955932617, 0.04531430435180664, 0.04523187255859375, 0.04524851226806641, 0.04496588897705078, 0.04547379302978516, 0.04548198318481445, 0.04579875183105469, 0.0464147834777832, 0.04539791870117187, 0.045055809020996096, 0.045142017364501956, 0.045074432373046876, 0.045174785614013675, 0.045090721130371096, 0.04549846267700195, 0.04575436782836914, 0.04570671844482422, 0.04550643157958984, 0.04534748840332031, 0.04558028793334961, 0.04541171264648437, 0.04536089706420898, 0.045548416137695315, 0.04579459381103516, 0.04607664108276367, 0.04565606307983398, 0.04670793533325195, 0.04553398513793945, 0.04537753677368164, 0.045176830291748044, 0.04560806274414062, 0.04513075256347656, 0.046392864227294925, 0.04597545623779297, 0.045624992370605466, 0.04524905776977539, 0.04500227355957031, 0.04547452926635742, 0.04528294372558594, 0.04533081436157226, 0.046566688537597656, 0.0455687370300293, 0.045758464813232425, 0.04536873626708984, 0.04511977767944336, 0.04497414398193359, 0.0451583023071289, 0.045214271545410155, 0.04508364868164062, 0.04531280136108398, 0.045699073791503904, 0.047110145568847656, 0.046153728485107424, 0.045309150695800784, 0.04885084915161133, 0.045730079650878906, 0.045561473846435545, 0.04516876983642578, 0.04513673782348633, 0.04574617767333984, 0.0461589126586914, 0.046789569854736326, 0.04545241546630859, 0.04535398483276367, 0.045289344787597656, 0.04516864013671875, 0.045032478332519534, 0.04535395050048828, 0.045905311584472655, 0.04558089447021484, 0.04545536041259766, 0.04552908706665039, 0.045176830291748044, 0.04532223892211914, 0.04574755096435547, 0.044975967407226564, 0.04542537689208984, 0.04632361602783203, 0.04583852767944336, 0.045486080169677735, 0.04536070251464844, 0.0457795181274414, 0.04577062225341797, 0.04569414520263672, 0.04511363220214844, 0.04575862503051758, 0.04577471923828125, 0.04541491317749023, 0.045201183319091794]",tokens/s,21.939388140346946,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run 
self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 3 more 
times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 805, in _apply param_applied = fn(param) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.55 GiB is free. Process 223697 has 13.19 GiB memory in use. Of the allocated memory 13.09 GiB is allocated by PyTorch, and 880.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", 
line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, 
Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,7394.029568,7808.679936,0.0,7430.209536,7414.23104,s,1,11.6087958984375,11.6087958984375,0.0,11.6087958984375,11.6087958984375,11.6087958984375,11.6087958984375,[11.6087958984375],,kWh,0.00013641785669166968,1.5039798885584721e-05,4.55058697379912e-05,0.0001969635253152456,,MB,1705.480192,8574.140416,0.0,8166.309888,8044.111872,s,10,7.060970458984374,0.7060970458984375,0.0011264210878099625,0.7062753601074219,0.7073542114257813,0.7075199401855469,0.7076525231933594,"[0.7037373046875, 0.7047920532226563, 0.7063265991210937, 0.70622412109375, 0.7055739135742187, 0.7057803955078125, 0.7065865478515625, 0.7073173828125, 0.7076856689453125, 0.7069464721679688]",tokens/s,362.5563957349032,kWh,2.063900946333206e-05,2.2761256128991876e-06,1.3666640562932467e-05,3.658177563916372e-05,tokens/kWh,6998020.06674415,MB,1711.550464,8741.912576,0.0,8334.082048,8265.686528,s,10,31.494411376953124,3.1494411376953124,0.00693604709210387,3.148900146484375,3.1596987304687496,3.16015576171875,3.16052138671875,"[3.13799658203125, 3.140945068359375, 3.146510009765625, 3.147779296875, 3.15002099609375, 3.145820068359375, 3.15088232421875, 3.16061279296875, 3.1542470703125, 3.15959716796875]",tokens/s,20.003548961737994,kWh,9.221209574208425e-05,1.0171450241856433e-05,6.134283611126853e-05,0.0001637263820952092,tokens/kWh,384788.3230166572,,s,630,31.49077796554566,0.04998536185007246,0.0006632363913626596,0.049945600509643556,0.05066940574645996,0.050867183113098145,0.051667712059021,"[0.05168320083618164, 0.049449089050292966, 0.04885289764404297, 0.0493548469543457, 0.0495302734375, 0.04914006423950195, 0.049039070129394534, 0.04963369750976562, 0.0493645133972168, 0.04901686477661133, 0.049473663330078126, 0.04911539077758789, 0.04906383895874023, 0.04924854278564453, 0.04908806228637695, 0.04898409652709961, 0.04977824020385742, 0.04954304122924805, 0.049492416381835935, 0.0499431037902832, 0.04966185760498047, 0.0500777587890625, 0.049903617858886716, 0.05008588790893555, 0.04970454406738281, 0.04927667236328125, 0.049652385711669925, 0.04968243026733398, 0.049604606628417966, 0.049390911102294925, 0.049656513214111325, 0.049495166778564456, 0.04974585723876953, 0.04943369674682617, 0.04935795211791992, 0.04980809783935547, 0.050355358123779295, 0.05007241439819336, 0.04991785430908203, 0.04990512084960937, 0.05016844940185547, 0.050405376434326174, 0.05003059387207031, 0.05042966461181641, 0.05003276824951172, 0.050092193603515626, 0.04993628692626953, 0.049860702514648435, 0.05023110580444336, 0.05003811264038086, 0.050068321228027346, 0.05019443130493164, 0.04966604614257813, 0.05013708877563477, 0.049903617858886716, 0.05021868896484375, 0.05029849624633789, 0.05022895812988281, 0.05016572952270508, 0.050237472534179685, 0.05043503952026367, 0.050751487731933595, 0.05056211090087891, 0.051413311004638675, 0.048828414916992184, 0.0487014389038086, 0.049326080322265625, 0.04962236785888672, 0.049164958953857425, 0.048774784088134765, 0.04921705627441406, 0.0492470703125, 0.04969062423706055, 0.04956774520874024, 0.049743072509765625, 0.04940639877319336, 0.049250656127929685, 0.04896982574462891, 0.04936080169677735, 0.0496978874206543, 0.049466209411621095, 0.049850433349609376, 0.05040304183959961, 0.049602848052978515, 0.04924415969848633, 0.04963328170776367, 0.049186302185058595, 
0.04946380615234375, 0.0497336311340332, 0.050283905029296874, 0.04994521713256836, 0.05007155227661133, 0.04970086288452148, 0.05020876693725586, 0.04981939315795898, 0.04973571014404297, 0.0503216323852539, 0.0497437744140625, 0.049973182678222657, 0.04958163070678711, 0.04990012741088867, 0.050135040283203126, 0.050151424407958986, 0.04996860885620117, 0.049838623046875, 0.050157569885253904, 0.05041766357421875, 0.05004083251953125, 0.04992409515380859, 0.049962718963623046, 0.05043228912353516, 0.05007561492919922, 0.05003459167480469, 0.05007782363891602, 0.050067455291748046, 0.05017804718017578, 0.05008377456665039, 0.050085281372070314, 0.05063033676147461, 0.04981196975708008, 0.050105857849121097, 0.05012092971801758, 0.05027065658569336, 0.05068012619018555, 0.050851104736328125, 0.05056790542602539, 0.05162979125976563, 0.05000409698486328, 0.04920950317382813, 0.049186431884765625, 0.04907807922363281, 0.04903308868408203, 0.049737728118896485, 0.04938159942626953, 0.04901020812988281, 0.04925740814208984, 0.04916633605957031, 0.049844223022460936, 0.049870849609375, 0.049565696716308595, 0.050019710540771485, 0.049555103302001954, 0.04916118240356445, 0.04984832000732422, 0.049721343994140625, 0.04975526428222656, 0.05010316848754883, 0.04927644729614258, 0.049721088409423825, 0.04964425659179687, 0.04961280059814453, 0.04936294555664063, 0.049697822570800784, 0.04952163314819336, 0.04970025634765625, 0.05017660903930664, 0.05008793640136719, 0.04959555053710937, 0.04969270324707031, 0.049738559722900394, 0.05038022232055664, 0.050340415954589844, 0.05045033645629883, 0.05035964965820312, 0.050590465545654294, 0.050323455810546876, 0.050014270782470706, 0.04992812728881836, 0.05027123260498047, 0.04977875137329101, 0.04986566543579102, 0.049756160736083986, 0.049796768188476566, 0.05049792098999024, 0.05000188827514648, 0.05035007858276367, 0.04976844787597656, 0.05038035202026367, 0.05016361618041992, 0.05038723373413086, 0.050202880859375, 0.05039228820800781, 0.05042460632324219, 0.05066870498657226, 0.05028268814086914, 0.051010208129882814, 0.05084979248046875, 0.05076287841796875, 0.05012700653076172, 0.0512710075378418, 0.049090496063232424, 0.04951679992675781, 0.04945558547973633, 0.04943814468383789, 0.049021503448486325, 0.04946649551391601, 0.049407905578613284, 0.049207454681396486, 0.04904995346069336, 0.04907260894775391, 0.04974777603149414, 0.04933036804199219, 0.04933603286743164, 0.04995510482788086, 0.04976435089111328, 0.04981145477294922, 0.04953702545166016, 0.05057126235961914, 0.05037583923339844, 0.049504287719726564, 0.04955215835571289, 0.04959958267211914, 0.04964771270751953, 0.04965868759155274, 0.04903071975708008, 0.04960918426513672, 0.04929536056518555, 0.04955955123901367, 0.049377281188964846, 0.050165374755859374, 0.05013900756835937, 0.04975395202636719, 0.049668033599853514, 0.05018454360961914, 0.049858943939208984, 0.05026201629638672, 0.05123891067504883, 0.05153177642822265, 0.049754112243652344, 0.05034393692016602, 0.050206718444824217, 0.04997024154663086, 0.05053740692138672, 0.050261344909667965, 0.049944225311279296, 0.04981622314453125, 0.05009779357910156, 0.04978316879272461, 0.050166110992431644, 0.05036812973022461, 0.0500184326171875, 0.050315521240234376, 0.0504087028503418, 0.05038156890869141, 0.05053849411010742, 0.05050540924072266, 0.05120150375366211, 0.05035452651977539, 0.05086259078979492, 0.05051801681518555, 0.05039888000488281, 0.050487648010253905, 0.052324352264404295, 0.04948918533325195, 
0.04959305572509766, 0.04871097564697266, 0.049199806213378904, 0.04910489654541016, 0.048879520416259765, 0.04945110321044922, 0.04939324951171875, 0.049335807800292966, 0.04993040084838867, 0.049849086761474606, 0.049909759521484375, 0.049565696716308595, 0.049239486694335935, 0.049609119415283204, 0.04965353775024414, 0.049692031860351565, 0.05013401412963867, 0.05034947204589844, 0.049850975036621094, 0.050032638549804685, 0.049874942779541014, 0.049838081359863284, 0.04965478515625, 0.04956671905517578, 0.0498603515625, 0.049715297698974606, 0.049645729064941406, 0.050179904937744144, 0.04979321670532227, 0.04965964889526367, 0.050245407104492185, 0.049625152587890624, 0.05020099258422851, 0.05016115188598633, 0.05034649658203125, 0.05062364959716797, 0.051006305694580076, 0.050896385192871096, 0.05030553436279297, 0.04994598388671875, 0.04986124801635742, 0.04945500946044922, 0.049932224273681644, 0.049505695343017575, 0.04991769790649414, 0.05014220809936523, 0.05047235107421875, 0.050067840576171876, 0.050509727478027344, 0.04970323181152344, 0.05035827255249024, 0.05038639831542969, 0.05028668975830078, 0.050316928863525394, 0.05061510467529297, 0.050245311737060545, 0.050704704284667966, 0.05068790435791016, 0.05046691131591797, 0.05086931228637695, 0.05076678466796875, 0.05190063858032227, 0.049793441772460936, 0.04917452621459961, 0.049192958831787106, 0.049698783874511716, 0.049203166961669924, 0.049156158447265626, 0.049235969543457034, 0.04935411071777344, 0.04877961730957031, 0.04930384063720703, 0.04952988815307617, 0.049759105682373045, 0.04952892684936523, 0.04979097747802735, 0.04943772888183594, 0.04947452926635742, 0.04963129425048828, 0.05036435317993164, 0.05003468704223633, 0.04993356704711914, 0.049604736328125, 0.04950080108642578, 0.050116127014160156, 0.049705440521240235, 0.04948096084594727, 0.049388286590576175, 0.049547264099121094, 0.04937932968139649, 0.050132896423339846, 0.04970476913452149, 0.05012713623046875, 0.05027814483642578, 0.049877246856689456, 0.05024739074707031, 0.05021699142456055, 0.05025740814208984, 0.049842945098876955, 0.04978870391845703, 0.05033801651000976, 0.05048115158081055, 0.050333278656005856, 0.050006046295166015, 0.04995663833618164, 0.05006396865844726, 0.05048713684082031, 0.050049312591552736, 0.05020044708251953, 0.050106273651123044, 0.04979926300048828, 0.05048844909667969, 0.049881057739257814, 0.0503276481628418, 0.05059052658081055, 0.05036441421508789, 0.05011215972900391, 0.05012847900390625, 0.05019315338134766, 0.05079859161376953, 0.05048524856567383, 0.05068310546875, 0.04982156753540039, 0.05033667373657227, 0.051605312347412106, 0.04896380615234375, 0.04879833602905274, 0.049352191925048826, 0.049162464141845705, 0.048858463287353514, 0.049103809356689454, 0.049084415435791014, 0.04901852798461914, 0.04932460784912109, 0.04978665542602539, 0.04935270309448242, 0.04948787307739258, 0.04986044692993164, 0.049793182373046876, 0.04946137619018555, 0.050263935089111325, 0.04962876892089844, 0.05004534530639648, 0.050525726318359374, 0.04994486236572265, 0.04952083206176758, 0.04993843078613281, 0.04989257431030274, 0.0495624008178711, 0.0495043830871582, 0.049554847717285154, 0.04959689712524414, 0.04959436798095703, 0.049516544342041016, 0.04962508773803711, 0.05018009567260742, 0.05046886444091797, 0.04995686340332031, 0.05008115386962891, 0.05004352188110352, 0.05002239990234375, 0.05038489532470703, 0.050528160095214845, 0.05065532684326172, 0.05026153564453125, 0.05076425552368164, 0.05024764633178711, 
0.050147361755371093, 0.049797119140625, 0.05073100662231445, 0.0502108154296875, 0.05001203155517578, 0.05018342590332031, 0.050582401275634764, 0.05033552169799805, 0.05099708938598633, 0.0503702392578125, 0.050588352203369144, 0.050392894744873046, 0.0503152961730957, 0.049957023620605466, 0.05094790267944336, 0.05131660842895508, 0.050506046295166016, 0.05030912017822266, 0.0510648307800293, 0.05047091293334961, 0.052408065795898434, 0.04942515182495117, 0.04926892852783203, 0.04899225616455078, 0.04948342514038086, 0.049359199523925784, 0.04941619110107422, 0.048947200775146485, 0.0490967025756836, 0.04946739196777344, 0.04978857421875, 0.049746273040771484, 0.0495118408203125, 0.049785438537597655, 0.04974393463134766, 0.04991993713378906, 0.049698432922363284, 0.04990163040161133, 0.05032787322998047, 0.05064908981323242, 0.04998963165283203, 0.04991385650634766, 0.049635326385498044, 0.049430526733398435, 0.04948112106323242, 0.04969123077392578, 0.04928220748901367, 0.0493105583190918, 0.04966998291015625, 0.049825824737548825, 0.050092159271240236, 0.05050281524658203, 0.04983075332641602, 0.04965580749511719, 0.04962428665161133, 0.05804316711425781, 0.04798463821411133, 0.05063065719604492, 0.050865951538085936, 0.050402622222900394, 0.050439071655273435, 0.050539775848388674, 0.050244350433349606, 0.05008118438720703, 0.050098175048828124, 0.05058969497680664, 0.050180606842041016, 0.050018398284912106, 0.05020390319824219, 0.05087433624267578, 0.05069903945922852, 0.05045248031616211, 0.05017910385131836, 0.05162902450561523, 0.051059711456298826, 0.0504060173034668, 0.050356609344482425, 0.05069823837280273, 0.05055692672729492, 0.05040947341918945, 0.0508223991394043, 0.05053094482421875, 0.0504136962890625, 0.05153209686279297, 0.049323936462402344, 0.04909423828125, 0.04907251358032227, 0.04957187271118164, 0.04921286392211914, 0.049113502502441404, 0.04911324691772461, 0.049653759002685545, 0.049375232696533204, 0.048998401641845706, 0.04972857666015625, 0.04980012893676758, 0.04990083312988281, 0.05034467315673828, 0.05004886245727539, 0.049880577087402345, 0.04987152099609375, 0.05073715209960938, 0.05083955383300781, 0.04918272018432617, 0.049307647705078124, 0.04960870361328125, 0.049941856384277346, 0.04980188751220703, 0.04909414291381836, 0.050019840240478515, 0.04952576065063476, 0.04927078247070313, 0.0496448974609375, 0.05005583953857422, 0.04970003128051758, 0.05021974563598633, 0.050708606719970704, 0.05086819076538086, 0.05094604873657226, 0.05078764724731445, 0.05039583969116211, 0.05083955383300781, 0.050427902221679685, 0.0500401611328125, 0.04979369735717774, 0.05041766357421875, 0.0500398063659668, 0.0499681282043457, 0.04983388900756836, 0.049969249725341794, 0.04980121612548828, 0.050480510711669924, 0.050145919799804685, 0.05003984069824219, 0.05050601577758789, 0.05055088043212891, 0.05075209426879883, 0.05067571258544922, 0.05071567916870117, 0.05059804916381836, 0.050629440307617186, 0.050864158630371095, 0.050666942596435546, 0.05044483184814453, 0.05060310363769531, 0.05083843231201172, 0.05221532821655273, 0.04985289764404297, 0.04919091033935547, 0.049172576904296876, 0.04880783843994141, 0.04886483383178711, 0.049252704620361326, 0.04947158432006836, 0.04931958389282227, 0.04891683197021485, 0.04982374572753906, 0.0498172492980957, 0.04980259323120117, 0.04993040084838867, 0.04983280181884766, 0.04987670516967774, 0.05052444839477539, 0.050282497406005856, 0.05050572967529297, 0.05065100860595703, 0.05048297500610351, 0.05008623886108399, 
0.04994867324829102, 0.049654975891113284, 0.049722175598144534, 0.05009612655639648, 0.04937907028198242, 0.04958428955078125, 0.04928931045532227, 0.049626590728759766, 0.04940240097045898, 0.05013087844848633, 0.050161727905273436, 0.04994611358642578, 0.05017036819458008, 0.050716670989990234, 0.05066547012329101, 0.05095804977416992, 0.05059104156494141, 0.050522113800048826, 0.05029372787475586, 0.05053849411010742, 0.05005516815185547, 0.05006444931030273, 0.05069670486450195, 0.05029833602905273, 0.05002953720092773, 0.04989033508300781, 0.050113502502441405, 0.05043404769897461, 0.05066854476928711, 0.050648063659667966, 0.05026816177368164, 0.050444446563720706, 0.0508941421508789, 0.05104611206054688, 0.05186191940307617, 0.0512639045715332, 0.05083340835571289, 0.05086566543579102, 0.050049182891845706, 0.05027875137329101, 0.05039308929443359]",tokens/s,20.005856974676483,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1012.629504,867.106816,0.0,488.636416,482.553856,s,1,8.0609853515625,8.0609853515625,0.0,8.0609853515625,8.0609853515625,8.0609853515625,8.0609853515625,[8.0609853515625],,kWh,2.8416175170870397e-05,3.1269823558455027e-06,8.546673504000735e-06,4.0089831030716635e-05,,MB,1239.527424,1024.393216,0.0,616.562688,582.974464,s,10,0.27054739379882814,0.027054739379882814,0.000286009729749811,0.026956192016601564,0.027177914237976077,0.027537452983856202,0.027825083980560304,"[0.02789699172973633, 0.02697020721435547, 0.026962175369262695, 0.026941280364990234, 0.026929088592529297, 0.02695020866394043, 0.02691299247741699, 0.027000160217285157, 0.027098016738891603, 0.02688627243041992]",tokens/s,9462.29776622261,kWh,8.612702605701969e-07,9.498283774394124e-08,5.70487182053024e-07,1.5267402803671622e-06,tokens/kWh,167677504.3483068,MB,1250.967552,1036.976128,0.0,629.1456,597.192192,s,10,11.528859130859376,1.1528859130859375,0.013783837027398216,1.1535343017578126,1.1710545288085938,1.1736548278808594,1.175735067138672,"[1.1483973388671875, 1.157601318359375, 1.1445784912109376, 1.128986328125, 1.14946728515625, 1.1581435546875, 1.135988525390625, 1.176255126953125, 1.1589644775390624, 1.1704766845703125]",tokens/s,54.645476438659465,kWh,3.303405783776308e-05,3.64319500948955e-06,1.3625607508146755e-05,5.0302860355399364e-05,tokens/kWh,1252413.8698056713,,s,630,11.52320091629028,0.01829079510522267,0.0005419719436823987,0.018167872428894043,0.018848256111145018,0.01896842737197876,0.019749512386322023,"[0.017847360610961913, 0.017965568542480468, 0.018098560333251953, 0.018417472839355468, 0.018214399337768555, 0.017998592376708984, 0.01790777587890625, 0.017827775955200194, 0.0179652156829834, 0.01795055961608887, 0.01780121612548828, 0.017927391052246094, 0.01788934326171875, 0.017883871078491213, 0.0178155517578125, 0.0178985595703125, 0.017884096145629882, 0.01797324752807617, 0.017950016021728514, 0.01790979194641113, 0.01780784034729004, 0.017891519546508788, 0.018081792831420897, 0.017856224060058594, 0.017899808883666993, 0.01845043182373047, 0.01801366424560547, 0.017934879302978515, 0.01805516815185547, 0.01799782371520996, 0.018082847595214845, 0.018039775848388673, 0.01966044807434082, 0.017974815368652343, 0.017895584106445313, 0.017866432189941408, 0.017930944442749022, 0.01852035140991211, 0.01975663948059082, 0.01851798439025879, 0.018310815811157226, 0.01817884826660156, 0.018152671813964842, 0.018063455581665038, 0.01804342460632324, 0.01812224006652832, 0.018064031600952147, 0.018214752197265625, 0.01839302444458008, 0.018623903274536134, 0.01873107147216797, 0.01918617630004883, 0.01857356834411621, 0.019163135528564454, 0.01863270378112793, 0.018753856658935548, 0.018578111648559572, 
0.01836953544616699, 0.018874368667602538, 0.018527456283569336, 0.01844918441772461, 0.01835212707519531, 0.01816166305541992, 0.01779408073425293, 0.018195423126220703, 0.018294143676757812, 0.01831590461730957, 0.01845452880859375, 0.018568864822387697, 0.018526559829711915, 0.01859993553161621, 0.019179519653320314, 0.01928544044494629, 0.019657279968261717, 0.027027456283569336, 0.018192384719848635, 0.018225151062011717, 0.01872822380065918, 0.0186265926361084, 0.018621280670166017, 0.01853775978088379, 0.01842848014831543, 0.01823744010925293, 0.018426912307739258, 0.01848828887939453, 0.01853545570373535, 0.01840224075317383, 0.018439359664916992, 0.018068319320678712, 0.017979391098022462, 0.017968544006347655, 0.018022815704345704, 0.017965248107910156, 0.018108415603637695, 0.018155263900756835, 0.018168064117431642, 0.018169183731079102, 0.018137760162353515, 0.018288639068603514, 0.01851372718811035, 0.017970815658569336, 0.01798201560974121, 0.017944063186645508, 0.017856096267700194, 0.017904544830322267, 0.01791328048706055, 0.01800454330444336, 0.017966495513916016, 0.018004575729370118, 0.017900800704956053, 0.01806822395324707, 0.017909311294555665, 0.017951391220092774, 0.017894720077514647, 0.017844703674316405, 0.01778700828552246, 0.018076992034912108, 0.017996288299560546, 0.017791040420532228, 0.017952768325805665, 0.01793132781982422, 0.017882047653198244, 0.01782956886291504, 0.0178035831451416, 0.019271072387695314, 0.018202495574951173, 0.017990976333618163, 0.01796780776977539, 0.01807974433898926, 0.017747488021850586, 0.01781705665588379, 0.018241600036621095, 0.017855424880981446, 0.01778188705444336, 0.017891616821289064, 0.017903936386108397, 0.01791209602355957, 0.01796089553833008, 0.017844287872314454, 0.01784377670288086, 0.01777299118041992, 0.01791391944885254, 0.01780726432800293, 0.017915136337280275, 0.017904415130615234, 0.017864704132080078, 0.01780735969543457, 0.01807360076904297, 0.01973206329345703, 0.01810883140563965, 0.01802444839477539, 0.017922048568725587, 0.018001920700073244, 0.01811849594116211, 0.01831056022644043, 0.017926496505737306, 0.017848735809326173, 0.017967103958129883, 0.0180097599029541, 0.01923072052001953, 0.018368864059448244, 0.018095327377319337, 0.01816655921936035, 0.019165184020996092, 0.018245567321777345, 0.018174016952514648, 0.01821615982055664, 0.018201248168945312, 0.018282623291015626, 0.018214527130126952, 0.01828006362915039, 0.01837727928161621, 0.018311359405517577, 0.018215999603271485, 0.01814358329772949, 0.01814588737487793, 0.018251232147216797, 0.018322175979614257, 0.018707584381103516, 0.018612672805786133, 0.01845065689086914, 0.018374656677246092, 0.018312671661376952, 0.018589792251586915, 0.0183089599609375, 0.018165567398071288, 0.018274879455566405, 0.018030624389648437, 0.017982751846313476, 0.017631263732910157, 0.018114559173583983, 0.017987712860107422, 0.017986719131469726, 0.017875295639038086, 0.017837984085083008, 0.017864511489868163, 0.017746591567993165, 0.017846271514892577, 0.017750015258789064, 0.017909696578979492, 0.017872575759887696, 0.017852800369262695, 0.01794867134094238, 0.018079456329345704, 0.018014047622680666, 0.01780928039550781, 0.017920576095581054, 0.017790048599243165, 0.017822240829467772, 0.017856895446777345, 0.0178155517578125, 0.01784796714782715, 0.017878816604614257, 0.017840703964233397, 0.01779302406311035, 0.017887231826782226, 0.017923807144165037, 0.01791209602355957, 0.01782748794555664, 0.017881439208984374, 0.01780940818786621, 
0.01780748748779297, 0.01786662483215332, 0.01779302406311035, 0.01781068801879883, 0.017801984786987305, 0.0178155517578125, 0.01781372833251953, 0.017810783386230468, 0.017942975997924805, 0.017960704803466798, 0.017930496215820314, 0.01803468894958496, 0.01803376007080078, 0.018080671310424803, 0.018059263229370116, 0.018062528610229493, 0.018297664642333983, 0.01805516815185547, 0.017989599227905273, 0.01802025604248047, 0.017859872817993165, 0.01798659133911133, 0.017884992599487306, 0.017903615951538086, 0.017889280319213868, 0.018126623153686523, 0.017938655853271486, 0.017945632934570313, 0.017970144271850588, 0.018077695846557617, 0.017988927841186525, 0.017742656707763673, 0.018051103591918947, 0.018124799728393554, 0.017960960388183594, 0.01790959930419922, 0.017903776168823243, 0.0178767032623291, 0.01802217674255371, 0.017947071075439452, 0.018091903686523438, 0.01807993507385254, 0.01834815979003906, 0.018573183059692383, 0.01877180862426758, 0.018969823837280273, 0.019212928771972657, 0.018620832443237305, 0.01856121635437012, 0.018681568145751955, 0.018653472900390624, 0.01852182388305664, 0.01857535934448242, 0.018933855056762695, 0.018614015579223632, 0.018431264877319335, 0.01828134346008301, 0.01890265655517578, 0.018216703414916994, 0.017981407165527343, 0.017869152069091798, 0.017961280822753906, 0.017862752914428712, 0.018077407836914063, 0.01820857620239258, 0.018133472442626954, 0.01807513618469238, 0.01807606315612793, 0.01815353584289551, 0.018077280044555662, 0.018405567169189452, 0.01795430374145508, 0.018092544555664062, 0.01833590316772461, 0.01806540870666504, 0.01794867134094238, 0.017969152450561524, 0.017860639572143556, 0.017983455657958985, 0.01801817512512207, 0.018135072708129883, 0.01819251251220703, 0.018352256774902344, 0.01823535919189453, 0.018014080047607423, 0.018019519805908202, 0.01818502426147461, 0.018704383850097657, 0.018280031204223633, 0.018313631057739258, 0.018201728820800782, 0.018191232681274414, 0.018130239486694337, 0.018250240325927734, 0.017895040512084962, 0.018139520645141603, 0.018149375915527344, 0.018091264724731444, 0.018579519271850586, 0.01864569664001465, 0.018605215072631836, 0.01853036880493164, 0.01865603256225586, 0.018605728149414063, 0.018774143218994142, 0.01872630310058594, 0.01864089584350586, 0.01865715217590332, 0.018502592086791992, 0.018467967987060546, 0.01826700782775879, 0.018110368728637697, 0.018014303207397463, 0.018208383560180664, 0.018530176162719725, 0.018700639724731447, 0.01865513610839844, 0.01886832046508789, 0.018845855712890627, 0.01875948715209961, 0.01873871994018555, 0.018651359558105467, 0.01852569580078125, 0.018545600891113283, 0.018413568496704103, 0.018370559692382812, 0.018452192306518556, 0.018827552795410155, 0.018204864501953126, 0.018177183151245117, 0.018178720474243164, 0.01827599906921387, 0.018310752868652344, 0.018363136291503906, 0.01823744010925293, 0.018147327423095702, 0.018008064270019532, 0.01806278419494629, 0.018116575241088867, 0.018268768310546874, 0.01821696090698242, 0.018316896438598632, 0.018288543701171875, 0.01829052734375, 0.018329599380493163, 0.018292512893676758, 0.018185184478759765, 0.01821878433227539, 0.018216543197631836, 0.018342079162597655, 0.01813711929321289, 0.018206943511962892, 0.018097248077392578, 0.01813811111450195, 0.018569215774536133, 0.018210912704467775, 0.018083295822143554, 0.017761568069458007, 0.018022239685058592, 0.018148223876953126, 0.018013248443603514, 0.018005056381225584, 0.017940351486206055, 0.01801158332824707, 
0.017914527893066405, 0.017978303909301756, 0.017871488571166994, 0.017955167770385742, 0.017845504760742186, 0.01787571144104004, 0.017794912338256835, 0.0178832950592041, 0.017758367538452148, 0.017870687484741212, 0.01778819274902344, 0.017893407821655275, 0.017856800079345703, 0.017801631927490236, 0.017811456680297853, 0.017819648742675782, 0.018138303756713867, 0.0181276798248291, 0.017788703918457032, 0.017926048278808594, 0.017818111419677735, 0.0178175048828125, 0.017885087966918945, 0.017841440200805664, 0.017838016510009765, 0.017836639404296875, 0.017834175109863282, 0.017908927917480468, 0.01787593650817871, 0.017882591247558595, 0.017869440078735352, 0.0180031681060791, 0.01787059211730957, 0.018051872253417967, 0.017983488082885742, 0.01807360076904297, 0.01803468894958496, 0.018167680740356445, 0.018030208587646486, 0.018162176132202147, 0.0182108154296875, 0.018097375869750975, 0.01805392074584961, 0.018116416931152343, 0.018124544143676757, 0.018297279357910156, 0.018188608169555663, 0.018069183349609375, 0.0182476806640625, 0.018274303436279296, 0.01851408004760742, 0.018390815734863283, 0.018491104125976564, 0.018698591232299805, 0.018628543853759765, 0.018549951553344726, 0.01841379165649414, 0.018676959991455078, 0.018732959747314454, 0.01862723159790039, 0.01875289535522461, 0.018737279891967773, 0.01876838493347168, 0.01877363204956055, 0.018750112533569337, 0.018716384887695312, 0.01902387237548828, 0.018593311309814453, 0.018416095733642578, 0.01841391944885254, 0.018680767059326173, 0.01831923294067383, 0.01841375923156738, 0.018487136840820314, 0.018619199752807618, 0.01859993553161621, 0.01855404853820801, 0.018333631515502928, 0.018309696197509766, 0.018161088943481445, 0.018082687377929688, 0.01805267143249512, 0.018177824020385744, 0.018102880477905273, 0.01814352035522461, 0.0182205753326416, 0.018330015182495118, 0.018433055877685546, 0.018299936294555664, 0.01845840072631836, 0.01828976058959961, 0.018436704635620117, 0.01864896011352539, 0.018886560440063475, 0.01880873680114746, 0.01873369598388672, 0.019148704528808593, 0.01936191940307617, 0.018957311630249024, 0.01887945556640625, 0.01884774398803711, 0.018966720581054686, 0.019066688537597656, 0.018999551773071287, 0.01892076873779297, 0.01883795166015625, 0.01885968017578125, 0.01884156799316406, 0.018962175369262695, 0.018983552932739258, 0.01885753631591797, 0.018927743911743164, 0.01887433624267578, 0.018902944564819335, 0.01884819221496582, 0.018890752792358398, 0.01884172821044922, 0.01893987274169922, 0.01887753677368164, 0.018735200881958007, 0.018895296096801757, 0.01890883255004883, 0.019278175354003908, 0.01883135986328125, 0.018630847930908204, 0.018636255264282225, 0.018491519927978515, 0.018380224227905275, 0.018254623413085938, 0.018124799728393554, 0.018321407318115233, 0.018394176483154296, 0.018676671981811523, 0.02060006332397461, 0.018762527465820314, 0.01835935974121094, 0.01820355224609375, 0.01824492835998535, 0.018021055221557617, 0.01797324752807617, 0.01799123191833496, 0.017961408615112303, 0.018250751495361327, 0.01817523193359375, 0.01816694450378418, 0.018086496353149413, 0.018104320526123048, 0.018155519485473632, 0.018067455291748045, 0.01803264045715332, 0.0181014404296875, 0.01826464080810547, 0.018415456771850587, 0.01834239959716797, 0.018528160095214845, 0.019941120147705077, 0.018479360580444335, 0.018435840606689454, 0.018409727096557617, 0.018386943817138672, 0.018308832168579103, 0.01834217643737793, 0.01822719955444336, 0.01819011116027832, 
0.018178272247314452, 0.018233407974243165, 0.01822015953063965, 0.018954111099243165, 0.0185599365234375, 0.01820163154602051, 0.01804617691040039, 0.01811164855957031, 0.01818009567260742, 0.018444992065429686, 0.018081792831420897, 0.018343103408813476, 0.018559711456298828, 0.018257919311523436, 0.01816307258605957, 0.018051712036132813, 0.017911808013916015, 0.017897472381591797, 0.01784182357788086, 0.01811008071899414, 0.01802511978149414, 0.017948352813720703, 0.017948991775512697, 0.018045984268188476, 0.019989503860473632, 0.020513856887817383, 0.0179866886138916, 0.01797088050842285, 0.017952863693237304, 0.018054399490356445, 0.017857280731201172, 0.01798748779296875, 0.01801955223083496, 0.01791200065612793, 0.018180192947387694, 0.01808211135864258, 0.018082080841064455, 0.01839308738708496, 0.01838719940185547, 0.01801116752624512, 0.017988319396972655, 0.01820467185974121, 0.01803878402709961, 0.017936384201049805, 0.0180633602142334, 0.018329599380493163, 0.018397184371948243, 0.018448383331298827, 0.018386175155639648, 0.018914047241210936, 0.0184333438873291, 0.01854316711425781, 0.018796672821044923, 0.01878790473937988, 0.018891199111938477, 0.018832895278930666, 0.018921215057373045, 0.019004383087158204, 0.018935840606689455, 0.018824960708618162, 0.01885593605041504, 0.018804767608642577, 0.019825664520263672, 0.018900192260742188, 0.018848831176757813, 0.019108543395996092, 0.018970624923706055, 0.01886591911315918, 0.018827520370483398, 0.018851839065551757, 0.018843839645385742, 0.01869923210144043, 0.019450719833374024, 0.01880473518371582, 0.018903039932250978, 0.018827104568481447, 0.018771711349487304, 0.018823583602905272, 0.01901468849182129, 0.019045343399047853, 0.018797760009765626]",tokens/s,54.67230889894254,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) 
File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in 
start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,3128.594432,4369.350656,0.0,3990.880256,3908.719616,s,1,10.4420439453125,10.4420439453125,0.0,10.4420439453125,10.4420439453125,10.4420439453125,10.4420439453125,[10.4420439453125],,kWh,9.952271615411518e-05,1.0970786173134378e-05,3.213197014997893e-05,0.00014262547247722847,,MB,3179.76576,4725.866496,0.0,4318.035968,4274.555904,s,10,2.15499136352539,0.215499136352539,0.0018707195778059705,0.2153262405395508,0.21774974975585937,0.2177758834838867,0.2177967904663086,"[0.21505363464355468, 0.21228501892089843, 0.2140268096923828, 0.21306515502929688, 0.21755984497070313, 0.21780201721191406, 0.21680245971679687, 0.21774394226074217, 0.21520895385742186, 0.2154435272216797]",tokens/s,1187.9397956435653,kWh,6.569238207130856e-06,7.244694912347458e-07,4.345781254399489e-06,1.1639488952765092e-05,tokens/kWh,21994092.78524933,MB,3181.510656,4727.963648,0.0,4320.13312,4274.558464,s,10,31.113200927734376,3.1113200927734375,0.011244953616570474,3.113736328125,3.122223779296875,3.1230088623046877,3.1236369287109373,"[3.1237939453125, 3.09076123046875, 3.120275634765625, 3.11411083984375, 3.11336181640625, 3.12204931640625, 3.111911376953125, 3.089753173828125, 3.112337158203125, 3.114846435546875]",tokens/s,20.248639844652455,kWh,9.059633147994427e-05,9.992893618104263e-06,4.7238454457402324e-05,0.00014782767955545087,tokens/kWh,426171.879241116,,s,630,31.109904785156225,0.04938080124627977,0.000835842498500185,0.0493242073059082,0.049889156723022465,0.05036906566619873,0.05271712337493898,"[0.050149375915527344, 0.04960255813598633, 0.049223678588867184, 0.049248096466064456, 0.04950236892700195, 0.04911513519287109, 0.049127422332763675, 0.0488158073425293, 0.04885440063476563, 0.048605567932128904, 0.04854950332641601, 0.048665534973144534, 0.04873011016845703, 0.04868505477905274, 0.048955230712890624, 0.04934672164916992, 0.0490332145690918, 0.049210784912109375, 0.049258655548095706, 0.0493633918762207, 0.04905136108398438, 0.04917686462402344, 0.04921343994140625, 0.04940367889404297, 0.049215038299560546, 0.04973814392089844, 0.04935091018676758, 0.049517696380615234, 0.049541919708251954, 0.04939952087402344, 0.049651615142822264, 0.05923273468017578, 0.04958560180664062, 0.04955100631713867, 0.04976732635498047, 0.049180255889892575, 0.049305313110351565, 0.049527103424072266, 0.051222305297851566, 0.04941817474365234, 0.049658622741699215, 0.049356704711914064, 0.04970819091796875, 0.04909347152709961, 0.048977344512939454, 0.04958812713623047, 0.049162910461425784, 0.04922745513916016, 0.04953878402709961, 0.04921750259399414, 0.049543807983398434, 0.0494919662475586, 0.049498111724853515, 0.04976796722412109, 0.04963375854492187, 0.0495022087097168, 0.04985651016235351, 0.049616897583007816, 0.04959151840209961, 
0.04933817672729492, 0.049281089782714844, 0.05339161682128906, 0.04937599945068359, 0.05022768020629883, 0.05065283203125, 0.04936495971679687, 0.04943500900268555, 0.04931955337524414, 0.05036684799194336, 0.04907827377319336, 0.0490618896484375, 0.04904550552368164, 0.049036960601806644, 0.04872943878173828, 0.049646080017089846, 0.04898867034912109, 0.04922329711914063, 0.04921148681640625, 0.04924358367919922, 0.048995166778564456, 0.04921548843383789, 0.04952678298950195, 0.04953260803222656, 0.04918048095703125, 0.049299392700195316, 0.04897439956665039, 0.04863385772705078, 0.048349246978759766, 0.04802348709106445, 0.05030646514892578, 0.048337505340576174, 0.04769792175292969, 0.047695358276367186, 0.04773529434204102, 0.04779008102416992, 0.047806015014648436, 0.04799327850341797, 0.04776291275024414, 0.047788097381591794, 0.04769760131835937, 0.04806057739257812, 0.048637664794921875, 0.04900764846801758, 0.0489114875793457, 0.04876569747924805, 0.04917814254760742, 0.049754592895507814, 0.048790687561035155, 0.048982433319091793, 0.048955169677734375, 0.04910147094726563, 0.04911513519287109, 0.04884889602661133, 0.04893062210083008, 0.04897811126708984, 0.04975820922851563, 0.04922531127929688, 0.04927324676513672, 0.04926822280883789, 0.04948633575439453, 0.05095337677001953, 0.04936908721923828, 0.04988809585571289, 0.050270206451416014, 0.05026611328125, 0.04969065475463867, 0.05055897521972656, 0.04971142578125, 0.04948550415039062, 0.049285118103027346, 0.048964897155761716, 0.04893360137939453, 0.04931961441040039, 0.049000991821289065, 0.04892649459838867, 0.0489411849975586, 0.04884048080444336, 0.0492237777709961, 0.05177897644042969, 0.05037836837768555, 0.049218399047851566, 0.04993584060668945, 0.04935504150390625, 0.04946736145019531, 0.04968694305419922, 0.04941209411621094, 0.04929740905761719, 0.05240217590332031, 0.049468639373779294, 0.049781536102294924, 0.049604606628417966, 0.050033889770507815, 0.049854782104492186, 0.049400287628173827, 0.04923350524902344, 0.05019894409179688, 0.04957990264892578, 0.04941222381591797, 0.04980099105834961, 0.04969903945922852, 0.049514495849609375, 0.049391616821289064, 0.049532928466796876, 0.049648990631103514, 0.04930342483520508, 0.0493985595703125, 0.049202430725097654, 0.04937564849853516, 0.04940630340576172, 0.04948582458496094, 0.04926639938354492, 0.04946255874633789, 0.049007614135742186, 0.049229248046875, 0.04903129577636719, 0.04914432144165039, 0.04912691116333008, 0.04929548645019531, 0.049279296875, 0.04929945755004883, 0.04935475158691406, 0.04937331390380859, 0.0496044807434082, 0.04962508773803711, 0.04948604965209961, 0.049409568786621096, 0.04950451278686523, 0.04951859283447266, 0.049454559326171876, 0.0501096305847168, 0.04978176116943359, 0.04959033584594726, 0.0495230712890625, 0.04948144149780274, 0.04957443237304687, 0.049635326385498044, 0.04928291320800781, 0.049447296142578125, 0.049078048706054686, 0.049040992736816405, 0.04887091064453125, 0.04872608184814453, 0.04888246536254883, 0.04887065505981445, 0.04868588638305664, 0.04913151931762695, 0.049627391815185544, 0.04941107177734375, 0.04949679946899414, 0.04949151992797852, 0.04958214569091797, 0.04959686279296875, 0.049813472747802734, 0.04971110534667969, 0.04948787307739258, 0.05071392059326172, 0.0496126708984375, 0.04928185653686523, 0.04931584167480469, 0.0491635856628418, 0.04920595169067383, 0.05293875122070312, 0.04927283096313476, 0.04934617614746094, 0.049819072723388674, 0.049590496063232424, 0.04945993423461914, 
0.04967628860473633, 0.05015347290039063, 0.05013708877563477, 0.04957548904418945, 0.04981190490722656, 0.049833984375, 0.049641311645507814, 0.049589599609375, 0.049722175598144534, 0.04976579284667969, 0.04926115036010742, 0.049848384857177734, 0.04929119873046875, 0.04913529586791992, 0.0486803207397461, 0.0488988151550293, 0.04873350524902344, 0.04910147094726563, 0.04879894256591797, 0.04860208129882813, 0.048641216278076174, 0.048738815307617187, 0.048580894470214846, 0.04883164978027344, 0.04911222457885742, 0.05549923324584961, 0.04984969711303711, 0.04945417785644531, 0.04946432113647461, 0.04919385528564453, 0.04902912139892578, 0.04897177505493164, 0.048602977752685544, 0.04851087951660156, 0.048020927429199216, 0.04816979217529297, 0.04881817626953125, 0.04976566314697266, 0.04858544158935547, 0.04849059295654297, 0.048694271087646485, 0.049328094482421876, 0.04877376174926758, 0.04895571136474609, 0.04914790344238281, 0.04882406234741211, 0.04889622497558594, 0.04931545639038086, 0.04988150405883789, 0.04891856002807617, 0.04871571350097656, 0.050221088409423825, 0.05005923080444336, 0.049063167572021484, 0.04925724792480469, 0.04925439834594727, 0.04931379318237305, 0.04923712158203125, 0.049132415771484375, 0.04917862319946289, 0.0493383674621582, 0.049205310821533205, 0.049237728118896484, 0.049473567962646486, 0.049311935424804686, 0.04948323059082031, 0.05007535934448242, 0.04911299133300781, 0.04906665420532227, 0.04938294219970703, 0.049068416595458984, 0.049285472869873045, 0.04951244735717773, 0.04940595245361328, 0.0493889274597168, 0.04963391876220703, 0.04950425720214844, 0.05011369705200195, 0.049813472747802734, 0.049547264099121094, 0.04952963256835938, 0.04965795135498047, 0.051337215423583986, 0.049657310485839844, 0.04940035247802734, 0.04998758316040039, 0.04980710220336914, 0.05008127975463867, 0.05046102523803711, 0.04986006546020508, 0.04949593734741211, 0.04975289535522461, 0.04933849716186523, 0.048993377685546874, 0.04902979278564453, 0.04944121551513672, 0.04934371185302734, 0.0492367057800293, 0.04909203338623047, 0.04938172912597656, 0.05195705413818359, 0.05027824020385742, 0.04982470321655273, 0.049847518920898434, 0.049476318359375, 0.04949612808227539, 0.049704959869384766, 0.04916239929199219, 0.04971491241455078, 0.05121036911010742, 0.04974943923950195, 0.04988166427612305, 0.04925030517578125, 0.050479103088378906, 0.049409278869628905, 0.04923843383789062, 0.052410720825195316, 0.04976617431640625, 0.049283294677734374, 0.049530879974365234, 0.04989091110229492, 0.049821216583251955, 0.049914752960205075, 0.04929945755004883, 0.04884380722045899, 0.04886601638793946, 0.048675201416015626, 0.04886105728149414, 0.04917814254760742, 0.0495211181640625, 0.04943667221069336, 0.049211040496826175, 0.0492973747253418, 0.04940224075317383, 0.04926790237426758, 0.04959519958496094, 0.049374496459960934, 0.050574302673339844, 0.049357982635498045, 0.04965232086181641, 0.04955750274658203, 0.05030044937133789, 0.04917510223388672, 0.04931916809082031, 0.049205921173095704, 0.049085502624511716, 0.04854880142211914, 0.04873420715332031, 0.0486195182800293, 0.04886495971679688, 0.04915555191040039, 0.053026302337646485, 0.04979321670532227, 0.04954608154296875, 0.04900191879272461, 0.04900409698486328, 0.048597023010253905, 0.048649185180664065, 0.048475807189941406, 0.048425086975097655, 0.04867583847045898, 0.048919551849365236, 0.048508926391601564, 0.04850400161743164, 0.04892300796508789, 0.0485994873046875, 0.04864409637451172, 
0.04883635330200195, 0.04919132614135742, 0.04960345458984375, 0.049320865631103515, 0.04920038223266601, 0.04923270416259766, 0.04952585601806641, 0.04944579315185547, 0.049498111724853515, 0.04945724868774414, 0.04982979202270508, 0.04972771072387695, 0.04965923309326172, 0.04942278289794922, 0.04943769454956055, 0.049238113403320315, 0.04915702438354492, 0.05146787261962891, 0.04941865539550781, 0.04913971328735352, 0.04959436798095703, 0.04928224182128906, 0.049201984405517575, 0.04984201431274414, 0.04949225616455078, 0.04941945648193359, 0.049299968719482425, 0.049479873657226565, 0.04988896179199219, 0.05025823974609375, 0.049631233215332034, 0.049579105377197265, 0.04975094223022461, 0.049649185180664065, 0.049352638244628905, 0.049222175598144534, 0.04977983856201172, 0.049716094970703124, 0.049432575225830076, 0.049487648010253904, 0.049532672882080075, 0.049495838165283204, 0.04955775833129883, 0.049140159606933596, 0.04909619140625, 0.04916502380371094, 0.04909161758422852, 0.05037088012695313, 0.04959187316894531, 0.04932438278198242, 0.04922550582885742, 0.048699169158935546, 0.04857263946533203, 0.04835036849975586, 0.0480401611328125, 0.048071296691894534, 0.04807398223876953, 0.0478864631652832, 0.0477968635559082, 0.04871526336669922, 0.047880702972412106, 0.04779008102416992, 0.047702014923095705, 0.04786182403564453, 0.04869286346435547, 0.0482852783203125, 0.048425697326660154, 0.0485560302734375, 0.04868710327148437, 0.048639553070068356, 0.04844588851928711, 0.04914803314208984, 0.04901583862304688, 0.04925753784179687, 0.05124995040893555, 0.04903014373779297, 0.04905779266357422, 0.04910899353027344, 0.05103529739379883, 0.04935356903076172, 0.04958617782592773, 0.049037311553955076, 0.04937318420410156, 0.0490618896484375, 0.049631233215332034, 0.04930303955078125, 0.04959283065795898, 0.04948563385009765, 0.04963248062133789, 0.0487147216796875, 0.04865987014770508, 0.04873583984375, 0.048953857421875, 0.04889263916015625, 0.049090049743652345, 0.04889014434814453, 0.04899225616455078, 0.04871894454956055, 0.04906390380859375, 0.04911391830444336, 0.049727615356445314, 0.049127422332763675, 0.04939785766601563, 0.04964291381835938, 0.05087862396240234, 0.04977699279785156, 0.04964966583251953, 0.04962918472290039, 0.04942233657836914, 0.04969472122192383, 0.05077436828613281, 0.049870849609375, 0.049876991271972655, 0.050147232055664064, 0.04977849578857422, 0.04960192108154297, 0.04962406539916992, 0.04972329711914063, 0.049510017395019534, 0.04932140731811523, 0.049636096954345704, 0.04966947174072266, 0.050131103515625, 0.05200966262817383, 0.049486846923828126, 0.04921446228027344, 0.0528422737121582, 0.04955158233642578, 0.04936816024780273, 0.048890785217285154, 0.04919270324707031, 0.051128574371337894, 0.0493007698059082, 0.04904364776611328, 0.04918966293334961, 0.04946051025390625, 0.04997964859008789, 0.049418560028076174, 0.049232864379882814, 0.04956649780273437, 0.04939484786987305, 0.04959135818481445, 0.049694656372070316, 0.049544448852539065, 0.04909267044067383, 0.048872127532958984, 0.04862144088745117, 0.04847171020507812, 0.048196063995361325, 0.04825619125366211, 0.04829062271118164, 0.04843244934082031, 0.048290496826171876, 0.048649982452392576, 0.04923980712890625, 0.049360958099365235, 0.04916592025756836, 0.049218399047851566, 0.049285118103027346, 0.04967555236816406, 0.049375232696533204, 0.04958899307250977, 0.04930044937133789, 0.049124351501464845, 0.0498524169921875, 0.04929299163818359, 0.04894534301757812, 0.04886950302124023, 
0.04893491363525391, 0.048639999389648435, 0.04849158477783203, 0.048758975982666014, 0.048978561401367186, 0.05046700668334961, 0.049582080841064455, 0.050132991790771485, 0.04927897644042969, 0.04920304107666015, 0.049299488067626955, 0.04902076721191406, 0.048994464874267576, 0.04874431991577149, 0.04864131164550781, 0.048710208892822265, 0.04857427215576172, 0.04873622512817383, 0.04929753494262695, 0.04912761688232422, 0.0489323844909668, 0.04909545516967773, 0.049091934204101566, 0.04916252899169922, 0.04927321624755859, 0.049192512512207034, 0.049358463287353514, 0.04933868789672852, 0.04921395111083984, 0.04903436660766602, 0.049895519256591796, 0.04951724624633789, 0.04949020767211914, 0.04952431869506836, 0.049418464660644534, 0.04953724670410156, 0.04866230392456055, 0.048234657287597654, 0.04877807998657226, 0.0520711669921875, 0.04949967956542969, 0.0491445426940918, 0.049324031829833984, 0.04936703872680664, 0.049153217315673826, 0.04949484634399414, 0.049170433044433595, 0.04914729690551758, 0.04967484664916992, 0.049213409423828125, 0.04951033782958984, 0.04935507202148438, 0.04930310440063477, 0.05007177734375, 0.04949139022827148, 0.04973625564575195, 0.049500160217285157, 0.049622337341308595, 0.04934064102172851, 0.04949244689941406, 0.04994867324829102, 0.049410049438476565, 0.04942598342895508, 0.049460670471191404, 0.04969526290893555, 0.0501049919128418, 0.05339321517944336, 0.04979859161376953]",tokens/s,20.250785219394096,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in 
track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = 
Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights 
self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in 
start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = 
launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' 
object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7160.369152,10246.5536,0.0,9860.808704,9797.323264,s,1,12.4646513671875,12.4646513671875,0.0,12.4646513671875,12.4646513671875,12.4646513671875,12.4646513671875,[12.4646513671875],,kWh,0.00015677204867916618,1.7285749644863794e-05,5.235254188200311e-05,0.00022641034020603307,,MB,3021.86496,10626.138112,0.0,10211.033088,10096.835072,s,10,7.4292164916992185,0.7429216491699219,0.006665599289488292,0.745841552734375,0.7476935180664063,0.7480543090820313,0.7483429418945312,"[0.724827880859375, 0.7396456298828125, 0.7405772705078125, 0.7466072387695313, 0.742830810546875, 0.746778564453125, 0.7450758666992188, 0.7468447875976563, 0.7484151000976562, 0.7476133422851563]",tokens/s,344.5854623916706,kWh,2.1602723358332317e-05,2.3816632340083317e-06,1.4358027359428244e-05,3.834241395176889e-05,tokens/kWh,6676679.259736323,MB,3032.02304,10628.235264,0.0,10213.13024,10096.837632,s,10,34.91660595703125,3.491660595703125,0.0033925189934705976,3.4927337646484373,3.4947266357421873,3.495775061035156,3.496613801269531,"[3.4855791015625, 3.49337255859375, 3.48746923828125, 3.492094970703125, 3.4898974609375, 3.488794189453125, 3.49424609375, 3.493835205078125, 3.496823486328125, 3.49449365234375]",tokens/s,18.042990798569733,kWh,0.00010236015592375187,1.1291553674374102e-05,6.807223302917203e-05,0.00018172394262729795,tokens/kWh,346679.6894738754,,s,630,34.91343323135378,0.055418147986275805,0.0006959057097443701,0.05533124923706055,0.05586218299865722,0.05606899890899658,0.05975755733489991,"[0.059810592651367185, 0.05535657501220703, 0.0547476806640625, 0.05444992065429687, 0.05470790481567383, 0.05490345764160156, 0.05468796920776367, 0.05472988891601562, 0.05507977676391602, 0.05479401779174805, 0.05489459228515625, 0.055245025634765625, 0.05528985595703125, 0.05506028747558594, 0.05479443359375, 0.05519564819335938, 0.055344993591308594, 0.055715999603271484, 0.05568716812133789, 0.05533235168457031, 0.05511743927001953, 0.054827743530273435, 0.05509545516967773, 0.05528307342529297, 0.054994815826416014, 0.05482358551025391, 0.05517081451416016, 0.05533116912841797, 0.055174720764160155, 0.05498720169067383, 0.055162113189697264, 0.05522713470458984, 0.055041152954101565, 0.05533935928344726, 0.05553129577636719, 0.055470848083496095, 0.05608243179321289, 0.05520147323608399, 0.05537814331054688, 0.055203937530517576, 0.05506662368774414, 0.05509939193725586, 0.055314430236816405, 0.0550645751953125, 0.055076862335205076, 0.055154399871826174, 0.05548812866210937, 0.055177886962890624, 0.05527142333984375, 0.05525299072265625, 0.05571379089355469, 0.055384063720703126, 0.055431167602539064, 0.05564166259765625, 0.0559354248046875, 0.05698355102539063, 0.05543494415283203, 0.055296321868896485, 0.05552556610107422, 0.05544121551513672, 0.055314590454101566, 0.0552973747253418, 
0.05561395263671875, 0.06015087890625, 0.05583145523071289, 0.060641281127929686, 0.05434777450561523, 0.05456076812744141, 0.054830142974853516, 0.05461062240600586, 0.05482726287841797, 0.054506942749023436, 0.054423454284667966, 0.05496284866333008, 0.05493145751953125, 0.05515484619140625, 0.05510550308227539, 0.05534502410888672, 0.055166400909423825, 0.055607872009277345, 0.0559529914855957, 0.05621596908569336, 0.05531587219238281, 0.05480303955078125, 0.05487411117553711, 0.055242752075195314, 0.05494784164428711, 0.054831104278564455, 0.05476988983154297, 0.0553592643737793, 0.0550830078125, 0.055107040405273436, 0.05523817443847656, 0.05526630401611328, 0.05529942321777344, 0.05511552047729492, 0.05580278396606445, 0.055976993560791014, 0.0559769287109375, 0.05561328125, 0.05553782272338867, 0.05533695983886719, 0.05522998428344727, 0.05529420852661133, 0.054949310302734374, 0.05505513763427734, 0.05508822250366211, 0.055069503784179685, 0.05525715255737305, 0.05507279968261719, 0.05524684906005859, 0.05543731307983398, 0.05561548614501953, 0.05531033706665039, 0.05558476638793945, 0.056702945709228514, 0.05574844741821289, 0.055836673736572265, 0.05557449722290039, 0.05536979293823242, 0.05537603378295899, 0.05580595016479492, 0.05545574569702148, 0.055244800567626956, 0.05542214584350586, 0.05564067077636719, 0.05984815979003906, 0.055327262878417965, 0.05478364944458008, 0.05463904190063477, 0.054753185272216794, 0.054605056762695316, 0.054919937133789065, 0.05473689651489258, 0.05467136001586914, 0.054658912658691404, 0.054960289001464845, 0.055211742401123046, 0.0551297607421875, 0.05536377716064453, 0.055152992248535156, 0.05522774505615234, 0.055478431701660155, 0.05620336151123047, 0.05551769638061523, 0.05546393585205078, 0.055375873565673826, 0.055070465087890624, 0.05469388961791992, 0.05496403121948242, 0.054822689056396486, 0.05524127960205078, 0.05493353652954101, 0.054821952819824216, 0.05528678512573242, 0.05529395294189453, 0.05508915328979492, 0.05522431945800781, 0.055467742919921875, 0.05540678405761719, 0.05549900817871094, 0.055487712860107424, 0.055968704223632815, 0.055785152435302736, 0.055363582611083983, 0.055112735748291015, 0.05523545455932617, 0.055029537200927736, 0.05522428894042969, 0.05522467041015625, 0.05528371047973633, 0.05503398513793945, 0.0553675537109375, 0.05529587173461914, 0.05572211074829102, 0.05554332733154297, 0.05551929473876953, 0.05546435165405274, 0.05592195129394531, 0.055712001800537106, 0.055587200164794924, 0.05573436737060547, 0.05577072143554687, 0.055640350341796874, 0.05535702514648438, 0.05533135986328125, 0.05527292633056641, 0.05603587341308594, 0.05527923202514649, 0.05988800048828125, 0.05533542251586914, 0.054779903411865234, 0.05456486511230469, 0.05463606262207031, 0.05469404983520508, 0.0548969612121582, 0.054681598663330076, 0.05465087890625, 0.054891777038574216, 0.05491987228393555, 0.05507648086547851, 0.055183616638183594, 0.05540883255004883, 0.05553091049194336, 0.05514057540893555, 0.055603584289550784, 0.056338432312011716, 0.055769088745117185, 0.05530121612548828, 0.05514915084838867, 0.05509059143066406, 0.05475439834594727, 0.05477340698242188, 0.05491513442993164, 0.055296096801757816, 0.05507276916503906, 0.0548568000793457, 0.0553803825378418, 0.055325183868408206, 0.05571123123168945, 0.055304702758789064, 0.055506240844726565, 0.05566550445556641, 0.055871456146240235, 0.05641551971435547, 0.05564422225952149, 0.055618080139160156, 0.05527961730957031, 0.05513391876220703, 
0.055040287017822265, 0.055154048919677734, 0.05499763107299805, 0.05513420867919922, 0.055207744598388675, 0.055675071716308595, 0.055246688842773437, 0.05523062515258789, 0.0553779182434082, 0.055812255859375, 0.05579964828491211, 0.05577078247070313, 0.055601310729980466, 0.05614543914794922, 0.05582710266113281, 0.05646649551391601, 0.05563283157348633, 0.055828479766845705, 0.05554975891113281, 0.05529209518432617, 0.05580710220336914, 0.055226814270019534, 0.05589651107788086, 0.05868134307861328, 0.05517478561401367, 0.055093631744384766, 0.054977985382080076, 0.0546965103149414, 0.05466316986083984, 0.054870014190673826, 0.05466067123413086, 0.05475577545166015, 0.05480243301391602, 0.05534678268432617, 0.055248481750488285, 0.05538659286499024, 0.055275871276855466, 0.055431167602539064, 0.05532211303710938, 0.055388671875, 0.055846847534179685, 0.055816001892089843, 0.05523891067504883, 0.05505583953857422, 0.05500096130371094, 0.05541545486450195, 0.05511526489257813, 0.05484511947631836, 0.055020351409912106, 0.05495808029174805, 0.05489625549316406, 0.05531276702880859, 0.05552537536621094, 0.0551649284362793, 0.05524889755249023, 0.055638015747070314, 0.055810047149658204, 0.055506240844726565, 0.05549663925170899, 0.0556387825012207, 0.05575475311279297, 0.05542828750610351, 0.0552374382019043, 0.05540220642089844, 0.055295936584472655, 0.05524924850463867, 0.05497446441650391, 0.05515468978881836, 0.05541888046264649, 0.05560281753540039, 0.05534352111816406, 0.05573769760131836, 0.05566847991943359, 0.05609113693237305, 0.055734657287597654, 0.055773025512695314, 0.05554185485839844, 0.05591427230834961, 0.055543102264404294, 0.05546912002563477, 0.05566454315185547, 0.05576287841796875, 0.05541638565063477, 0.055193248748779296, 0.055226848602294924, 0.05562406539916992, 0.05886025619506836, 0.055049087524414064, 0.05479430389404297, 0.054664031982421875, 0.05460089492797852, 0.0544284782409668, 0.05441654586791992, 0.055093727111816405, 0.05475571060180664, 0.054980609893798826, 0.05517311859130859, 0.05549260711669922, 0.055458976745605466, 0.055274337768554685, 0.055204864501953124, 0.05532160186767578, 0.05551103973388672, 0.055836673736572265, 0.05570553588867187, 0.0554005126953125, 0.055232192993164064, 0.05497417449951172, 0.0547845458984375, 0.05481887817382813, 0.05504000091552735, 0.05489459228515625, 0.05509894561767578, 0.05507859039306641, 0.05535628890991211, 0.055283103942871094, 0.05569785690307617, 0.05535737609863281, 0.05542102432250977, 0.05558476638793945, 0.05583222579956055, 0.05557408142089844, 0.05558524703979492, 0.055425342559814454, 0.055458976745605466, 0.05536854553222656, 0.05526732635498047, 0.05508643341064453, 0.05512006378173828, 0.05498313522338867, 0.0551649284362793, 0.05525299072265625, 0.055787521362304686, 0.05554995346069336, 0.055570430755615234, 0.055497825622558596, 0.05587251281738281, 0.05577481460571289, 0.05577043151855469, 0.05558953475952148, 0.05589360046386719, 0.05592550277709961, 0.05560076904296875, 0.05547455978393555, 0.0553963508605957, 0.05570560073852539, 0.05524860763549805, 0.05585539245605469, 0.05519551849365235, 0.059991905212402344, 0.05547126388549805, 0.05485548782348633, 0.05660790252685547, 0.05451785659790039, 0.0545164794921875, 0.054831104278564455, 0.05487152099609375, 0.05507740783691406, 0.054967681884765626, 0.05499353790283203, 0.05509225463867187, 0.05562374496459961, 0.055368606567382815, 0.0550645751953125, 0.055175167083740234, 0.05583420944213867, 0.05582275390625, 0.05570764923095703, 
0.05544345474243164, 0.05543526458740235, 0.055074272155761717, 0.054843936920166016, 0.05482851028442383, 0.055245342254638674, 0.05511577606201172, 0.05502975845336914, 0.05509939193725586, 0.0551005744934082, 0.05537980651855469, 0.055618560791015625, 0.055475486755371096, 0.05543395233154297, 0.05554156875610351, 0.05578361511230469, 0.05567670440673828, 0.05607766342163086, 0.055484958648681644, 0.05551712036132812, 0.055226238250732425, 0.05544963073730469, 0.05534566497802734, 0.055226367950439455, 0.0550010871887207, 0.055433216094970705, 0.055272800445556644, 0.05545846557617187, 0.05543936157226562, 0.05575475311279297, 0.055656448364257816, 0.055656448364257816, 0.0557916145324707, 0.05598348617553711, 0.055760673522949215, 0.05566083145141602, 0.05560377502441406, 0.055911712646484375, 0.0553131217956543, 0.055477470397949216, 0.055591712951660155, 0.05556838226318359, 0.0555601921081543, 0.05518950271606445, 0.0600719985961914, 0.05534848022460938, 0.05494451141357422, 0.054642078399658206, 0.05463100814819336, 0.05454227066040039, 0.05504000091552735, 0.05481068801879883, 0.05481372833251953, 0.055032958984375, 0.05527536010742187, 0.05508652877807617, 0.05548704147338867, 0.05538816070556641, 0.05523455810546875, 0.05506047821044922, 0.05543955230712891, 0.05655881500244141, 0.05652131271362305, 0.05560729598999024, 0.05513785552978516, 0.05506467056274414, 0.05493532943725586, 0.05507241439819336, 0.05509622573852539, 0.05501270294189453, 0.05509356689453125, 0.055089569091796874, 0.05542291259765625, 0.05545097732543945, 0.055271839141845705, 0.05528102493286133, 0.05526383972167969, 0.055871646881103514, 0.05573030471801758, 0.055910400390625, 0.05569472122192383, 0.05600473785400391, 0.05504179382324219, 0.05518320083618164, 0.05521500778198242, 0.0554700813293457, 0.055211391448974606, 0.05517964935302734, 0.055255233764648436, 0.05527097702026367, 0.055331329345703124, 0.05544723129272461, 0.0553988151550293, 0.05586115264892578, 0.055787521362304686, 0.05573782348632812, 0.05590185546875, 0.056073089599609376, 0.055721023559570315, 0.0553807373046875, 0.05602844619750977, 0.05550339126586914, 0.055680416107177735, 0.05542956924438477, 0.05539894485473633, 0.0554700813293457, 0.05560681533813477, 0.058896224975585935, 0.05566230392456055, 0.05480809783935547, 0.05470912170410156, 0.05480243301391602, 0.05464883041381836, 0.054886398315429685, 0.05482710266113281, 0.054646686553955076, 0.05471548843383789, 0.05660764694213867, 0.055414302825927735, 0.05526166534423828, 0.05516191864013672, 0.055087200164794924, 0.05598704147338867, 0.05589424133300781, 0.05630543899536133, 0.05563910293579102, 0.05544847869873047, 0.055191585540771484, 0.05496387100219727, 0.054878559112548825, 0.05514368057250976, 0.05498336029052735, 0.05480044937133789, 0.0551649284362793, 0.055316478729248046, 0.05539779281616211, 0.05523721694946289, 0.05516463851928711, 0.0567606086730957, 0.055662368774414064, 0.05558294296264649, 0.05606399917602539, 0.055766719818115235, 0.05577878570556641, 0.05546425628662109, 0.05551721572875976, 0.056054302215576175, 0.05555401611328125, 0.055279552459716795, 0.05526534271240234, 0.05518905639648437, 0.055768863677978515, 0.055351966857910155, 0.05551491165161133, 0.05527068710327149, 0.055935935974121095, 0.05569446563720703, 0.05577804946899414, 0.055803680419921876, 0.056271198272705075, 0.05599603271484375, 0.05580543899536133, 0.055575328826904295, 0.05593097686767578, 0.05535948944091797, 0.055398048400878905, 0.055144798278808596, 0.0555601921081543, 
0.05537334442138672, 0.055400096893310546, 0.05962771224975586, 0.05537411117553711, 0.05511948776245117, 0.05465248107910156, 0.05460416030883789, 0.0548994255065918, 0.05503372955322266, 0.0549370231628418, 0.05490041732788086, 0.05474371337890625, 0.05513993453979492, 0.055470718383789065, 0.055330814361572264, 0.05545369720458984, 0.05529135894775391, 0.05521257781982422, 0.05561955261230469, 0.05615824127197266, 0.055623680114746096, 0.05531615829467774, 0.05534342575073242, 0.055318145751953124, 0.054894977569580075, 0.05500233459472656, 0.055005985260009764, 0.055192897796630856, 0.05504889678955078, 0.05502361679077149, 0.05526287841796875, 0.055607521057128906, 0.05545792007446289, 0.055314430236816405, 0.05563596725463867, 0.055952896118164064, 0.05574099349975586, 0.05563299179077148, 0.05559587097167969, 0.05595046234130859, 0.05552012634277344, 0.05529763031005859, 0.05526713562011719, 0.05536380767822266, 0.054972606658935545, 0.055105728149414064, 0.05518710327148438, 0.055613536834716794, 0.055622943878173826, 0.05550908660888672, 0.05541155242919922, 0.05589606475830078, 0.05578099060058594, 0.05566108703613281, 0.055779071807861326, 0.05571596908569336, 0.05639782333374024, 0.055932926177978515, 0.05562777709960937, 0.05546188735961914, 0.05557788848876953, 0.05540668869018555, 0.05530220794677734, 0.05544198226928711, 0.05584281539916992]",tokens/s,18.04463043852797,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 111970 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch 
raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) 
ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3894, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 85, in _process_model_before_weight_loading model, has_been_replaced = replace_with_awq_linear( File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear _, has_been_replaced = replace_with_awq_linear( File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear _, has_been_replaced = replace_with_awq_linear( File 
""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear _, has_been_replaced = replace_with_awq_linear( [Previous line repeated 1 more time] File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 166, in replace_with_awq_linear model._modules[name] = target_cls( File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemm.py"", line 131, in __init__ assert out_features % (32 // self.w_bit) == 0 AssertionError " 4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1542.725632,1546.584064,0.0,1168.113664,1154.613248,s,1,8.49350390625,8.49350390625,0.0,8.49350390625,8.49350390625,8.49350390625,8.49350390625,[8.49350390625],,kWh,4.117946988750039e-05,4.534977710168172e-06,1.3313066206013802e-05,5.902751380368236e-05,,MB,1532.3136,1777.270784,0.0,1369.440256,1323.44832,s,10,0.7752161865234374,0.07752161865234375,0.0008892316428154286,0.07745612716674805,0.07800702743530273,0.07897365608215332,0.0797469589996338,"[0.07994028472900391, 0.0769590072631836, 0.07756575775146485, 0.07749362945556641, 0.07741862487792969, 0.07645967864990234, 0.07779222106933593, 0.07695645141601562, 0.07706829071044922, 0.07756224060058593]",tokens/s,3302.304627410669,kWh,2.483743855932555e-06,2.73911777137436e-07,1.6543563140676495e-06,4.41201194713764e-06,tokens/kWh,58023414.95609138,MB,1535.668224,1798.242304,0.0,1390.411776,1377.26208,s,10,14.564807617187501,1.4564807617187499,0.012264741907122768,1.4549331665039062,1.4655761474609377,1.475623583984375,1.483661533203125,"[1.447943603515625, 1.4488612060546875, 1.4510828857421876, 1.45186474609375, 1.4580015869140626, 1.4856710205078125, 1.4610059814453125, 1.4633433837890626, 1.46022216796875, 1.43681103515625]",tokens/s,43.254948266982645,kWh,4.219580011198176e-05,4.653848486550735e-06,2.0553828119132958e-05,6.740347671766544e-05,tokens/kWh,934669.8874879943,,s,630,14.562426942825304,0.023114963401310026,0.0011230676053886127,0.022984591484069825,0.023376509284973146,0.023573486328125,0.02451666646957398,"[0.02337593650817871, 0.0228723201751709, 0.02276927947998047, 0.022599359512329102, 0.022639583587646485, 0.022726367950439454, 0.02265497589111328, 0.02265907287597656, 0.022983999252319337, 0.02302227210998535, 0.023000608444213866, 0.022987232208251954, 0.022992416381835936, 0.02297494316101074, 0.02308710479736328, 0.02312598419189453, 0.02322640037536621, 0.02326118469238281, 0.02336355209350586, 0.023187007904052735, 0.02308691215515137, 0.023191904067993162, 0.02300124740600586, 0.023142559051513672, 0.02305830383300781, 0.02309065628051758, 0.022999711990356445, 0.022925216674804686, 0.02297612762451172, 0.022876127243041992, 0.022929567337036133, 0.022913536071777343, 0.022762399673461914, 0.02286262321472168, 0.023071935653686523, 0.023212640762329102, 0.023346912384033202, 0.023265439987182616, 0.023296512603759766, 
0.02336992073059082, 0.023311552047729493, 0.02293209648132324, 0.022820768356323243, 0.022863967895507813, 0.022796287536621093, 0.022816768646240236, 0.02287820816040039, 0.022775264739990236, 0.022740831375122072, 0.022685407638549804, 0.02294268798828125, 0.022877216339111328, 0.022834144592285156, 0.023060480117797853, 0.022962175369262695, 0.022964319229125976, 0.02293494415283203, 0.022858240127563476, 0.0229182071685791, 0.022844160079956054, 0.02286947250366211, 0.022984895706176758, 0.023154943466186524, 0.022935552597045897, 0.022816543579101563, 0.022840896606445314, 0.022944639205932617, 0.022894079208374024, 0.022872159957885742, 0.02289628791809082, 0.022957727432250975, 0.022783039093017578, 0.02294700813293457, 0.022874080657958984, 0.022942527770996094, 0.022834463119506834, 0.02288902473449707, 0.02274492835998535, 0.022851423263549806, 0.02287593650817871, 0.022805023193359374, 0.02280243110656738, 0.02275328063964844, 0.02290483283996582, 0.023350400924682616, 0.023073408126831056, 0.022804000854492187, 0.022870752334594728, 0.022829055786132812, 0.022861024856567384, 0.022836095809936525, 0.02292665672302246, 0.023068351745605467, 0.023069599151611327, 0.02309459114074707, 0.023175615310668946, 0.023474431991577147, 0.023201791763305665, 0.023103551864624025, 0.023156639099121093, 0.0231506233215332, 0.023220224380493162, 0.023183359146118163, 0.023052352905273438, 0.022943008422851564, 0.02309596824645996, 0.02295568084716797, 0.02279360008239746, 0.02298588752746582, 0.02301670455932617, 0.022811199188232423, 0.022668960571289063, 0.02285955238342285, 0.02282080078125, 0.02313279914855957, 0.024370975494384765, 0.023226591110229493, 0.022960128784179686, 0.02322329521179199, 0.022854496002197265, 0.0229803524017334, 0.02316739273071289, 0.023166431427001952, 0.023052831649780274, 0.02288844871520996, 0.022986751556396484, 0.023299808502197265, 0.023183839797973633, 0.022881376266479493, 0.02290777587890625, 0.022827072143554686, 0.023562240600585937, 0.02286988830566406, 0.02276355171203613, 0.022849824905395506, 0.022986207962036133, 0.022966623306274414, 0.022844800949096678, 0.022899072647094728, 0.022976768493652343, 0.02327142333984375, 0.02331443214416504, 0.02314854431152344, 0.023322816848754882, 0.023074623107910155, 0.022945823669433593, 0.02283839988708496, 0.022856544494628907, 0.023005184173583985, 0.023003135681152344, 0.022984575271606446, 0.02290496063232422, 0.02305023956298828, 0.02305788803100586, 0.023017311096191408, 0.02287481689453125, 0.023076160430908203, 0.022993600845336915, 0.02317679977416992, 0.02306038475036621, 0.023136768341064453, 0.023179264068603517, 0.023119295120239258, 0.02306105613708496, 0.023126016616821288, 0.023142463684082033, 0.023147455215454103, 0.023860160827636718, 0.02317478370666504, 0.023144575119018556, 0.023148672103881836, 0.02309872055053711, 0.023065439224243166, 0.022890592575073244, 0.022982751846313477, 0.02287392044067383, 0.022978559494018554, 0.022839296340942384, 0.022915071487426757, 0.022804479598999023, 0.022814495086669922, 0.022771999359130858, 0.022900672912597658, 0.022976512908935546, 0.022961536407470704, 0.022882944107055665, 0.022976255416870116, 0.023054592132568358, 0.02299660873413086, 0.023850208282470704, 0.023499551773071288, 0.023224319458007812, 0.023085216522216796, 0.022882144927978517, 0.02271392059326172, 0.02318777656555176, 0.022712575912475587, 0.022955904006958006, 0.023119871139526366, 0.022779903411865234, 0.022771135330200195, 0.022722272872924804, 0.023109952926635743, 
0.022785856246948243, 0.02279702377319336, 0.022779903411865234, 0.022923263549804687, 0.02268505668640137, 0.022676095962524415, 0.023016992568969726, 0.023009664535522462, 0.02286367988586426, 0.022856191635131837, 0.02268547248840332, 0.022783647537231444, 0.022747488021850587, 0.022990720748901367, 0.02276335906982422, 0.022884639739990234, 0.02290480041503906, 0.022716255187988282, 0.022917024612426756, 0.025352319717407226, 0.02406825637817383, 0.023017471313476562, 0.022939647674560547, 0.023185407638549805, 0.022837247848510742, 0.023500864028930663, 0.023045984268188477, 0.02293071937561035, 0.022890560150146483, 0.022833919525146483, 0.02279167938232422, 0.02279270362854004, 0.022773439407348633, 0.02314067268371582, 0.0228121280670166, 0.022825504302978517, 0.022836671829223634, 0.022905471801757813, 0.023947200775146484, 0.024237503051757814, 0.023089727401733397, 0.023217248916625976, 0.02305683135986328, 0.023052768707275392, 0.02288640022277832, 0.02291472053527832, 0.022946048736572265, 0.02317283248901367, 0.023240095138549806, 0.023355871200561523, 0.023154560089111327, 0.02306255912780762, 0.023037567138671874, 0.02321830368041992, 0.023339263916015623, 0.023220096588134766, 0.02331660842895508, 0.023150463104248047, 0.0233123836517334, 0.02324287986755371, 0.023373983383178712, 0.023221376419067383, 0.02344799995422363, 0.023371904373168946, 0.023096864700317382, 0.023118463516235352, 0.02306892776489258, 0.02286755180358887, 0.02303104019165039, 0.022936319351196287, 0.02305449676513672, 0.02289254379272461, 0.02287311935424805, 0.022838239669799806, 0.022768735885620117, 0.022746015548706054, 0.022833120346069335, 0.022777055740356444, 0.022899520874023437, 0.02327552032470703, 0.02287161636352539, 0.023136703491210938, 0.023207935333251953, 0.02320191955566406, 0.02312716865539551, 0.023067392349243165, 0.023160831451416015, 0.023248895645141602, 0.023248096466064454, 0.0235467529296875, 0.023404447555541993, 0.023469760894775392, 0.02346015930175781, 0.02334671974182129, 0.02331286430358887, 0.023235647201538086, 0.022999807357788084, 0.02305766487121582, 0.023177568435668944, 0.023159391403198244, 0.0236844482421875, 0.02314931106567383, 0.022984607696533203, 0.022900928497314454, 0.022994752883911132, 0.022939647674560547, 0.023172384262084962, 0.023284000396728517, 0.022935840606689455, 0.023328927993774413, 0.023029312133789063, 0.023019968032836916, 0.023623680114746092, 0.02327398490905762, 0.023121599197387696, 0.023060192108154298, 0.022912639617919922, 0.0229385929107666, 0.022853023529052736, 0.023147232055664064, 0.022777727127075195, 0.022757375717163086, 0.0228002872467041, 0.022846944808959962, 0.022687711715698243, 0.02271504020690918, 0.02285977554321289, 0.023002527236938478, 0.0230380802154541, 0.02281929588317871, 0.02287311935424805, 0.02289148712158203, 0.022845439910888672, 0.022833152770996092, 0.023181312561035155, 0.02304332733154297, 0.023036672592163087, 0.023112768173217772, 0.023235519409179686, 0.028821504592895508, 0.02369049644470215, 0.02336582374572754, 0.023416608810424806, 0.023180063247680665, 0.023355295181274414, 0.023199840545654295, 0.02408995246887207, 0.02315523147583008, 0.02322649574279785, 0.023013408660888673, 0.02327142333984375, 0.023065696716308592, 0.04848057556152344, 0.02325721549987793, 0.022942144393920897, 0.0231014404296875, 0.02313852882385254, 0.022983552932739258, 0.022958112716674806, 0.022935487747192382, 0.023325632095336914, 0.02326540756225586, 0.022957952499389648, 0.023332639694213866, 
0.023236671447753907, 0.0232509765625, 0.022951616287231445, 0.0230382080078125, 0.022960319519042968, 0.023162879943847657, 0.023000864028930663, 0.02290915107727051, 0.022943296432495118, 0.023144544601440428, 0.023014944076538087, 0.02294304084777832, 0.022931455612182617, 0.022923967361450196, 0.022955968856811525, 0.023376224517822265, 0.02324799919128418, 0.02322697639465332, 0.022962175369262695, 0.022934816360473634, 0.022895328521728514, 0.02304614448547363, 0.02301247978210449, 0.023050752639770508, 0.02296460723876953, 0.022960351943969726, 0.029336639404296875, 0.023007743835449217, 0.02288377571105957, 0.022813472747802734, 0.02288025665283203, 0.02275481605529785, 0.02290892791748047, 0.022696447372436524, 0.02289232063293457, 0.02292348861694336, 0.022808576583862306, 0.02324390411376953, 0.022875007629394532, 0.022988800048828126, 0.02289004707336426, 0.022872512817382812, 0.023019039154052734, 0.02288844871520996, 0.02276736068725586, 0.02457059288024902, 0.022874111175537108, 0.02292086410522461, 0.022814624786376952, 0.022804927825927735, 0.022854816436767577, 0.022891359329223634, 0.022920192718505858, 0.02288342475891113, 0.023138208389282225, 0.02308255958557129, 0.023251392364501952, 0.02331363105773926, 0.0232774715423584, 0.02352627182006836, 0.023633184432983397, 0.023511775970458983, 0.0243056640625, 0.023411903381347656, 0.023240703582763672, 0.023163904190063478, 0.023212928771972657, 0.02328057670593262, 0.023088640213012695, 0.023326911926269532, 0.023109312057495116, 0.0232825927734375, 0.023053600311279298, 0.02311827278137207, 0.024211328506469728, 0.023583072662353516, 0.0237922248840332, 0.02358268737792969, 0.02338822364807129, 0.023433183670043944, 0.023676351547241212, 0.023712255477905272, 0.023502464294433593, 0.023333087921142578, 0.023361759185791017, 0.023233535766601563, 0.023204864501953124, 0.02297964859008789, 0.023082975387573243, 0.022985471725463866, 0.0231364803314209, 0.022999040603637694, 0.023185407638549805, 0.023185440063476562, 0.023343072891235352, 0.023379072189331055, 0.023451679229736327, 0.023409439086914063, 0.023543487548828124, 0.0234520320892334, 0.023334144592285156, 0.023358240127563476, 0.023336864471435546, 0.023412799835205077, 0.02345599937438965, 0.02353036880493164, 0.023509536743164063, 0.0236527042388916, 0.023326719284057617, 0.023236320495605468, 0.022974752426147462, 0.023013343811035158, 0.022898687362670898, 0.022997024536132813, 0.02287740707397461, 0.023017696380615234, 0.02301804733276367, 0.022821983337402343, 0.02282588768005371, 0.02288844871520996, 0.022883903503417968, 0.02270867156982422, 0.022660224914550782, 0.022754175186157227, 0.022674495697021485, 0.02278495979309082, 0.022846687316894532, 0.022923263549804687, 0.023967872619628905, 0.02315945625305176, 0.022968063354492186, 0.022994943618774414, 0.02350921630859375, 0.023500831604003906, 0.023023616790771483, 0.023031679153442383, 0.023061824798583985, 0.02417292785644531, 0.023533376693725586, 0.02342233657836914, 0.023140607833862306, 0.0231976318359375, 0.02328985595703125, 0.023386560440063476, 0.02368707275390625, 0.023027807235717773, 0.022915071487426757, 0.02307891273498535, 0.023126016616821288, 0.023053407669067383, 0.022919551849365234, 0.02294432067871094, 0.022805503845214844, 0.0227906551361084, 0.022791776657104492, 0.023489343643188478, 0.027436479568481446, 0.023142688751220702, 0.02330793571472168, 0.023106143951416015, 0.02308924865722656, 0.023268608093261717, 0.02329267120361328, 0.023144447326660156, 0.024950464248657225, 
0.02336185646057129, 0.023273664474487303, 0.022911903381347656, 0.024384639739990235, 0.02286732864379883, 0.022967744827270507, 0.02281705665588379, 0.022784704208374022, 0.022882303237915038, 0.02307030487060547, 0.022860191345214845, 0.02284774398803711, 0.022797311782836914, 0.022838016510009766, 0.022743040084838868, 0.02287820816040039, 0.02278825569152832, 0.023799232482910156, 0.022923679351806642, 0.022775808334350587, 0.022845439910888672, 0.023152639389038086, 0.023003135681152344, 0.022939647674560547, 0.023152639389038086, 0.022841344833374022, 0.022761472702026365, 0.023048383712768555, 0.022982463836669922, 0.022978559494018554, 0.022791360855102537, 0.022876991271972656, 0.022796287536621093, 0.022812671661376953, 0.02292643165588379, 0.02308336067199707, 0.02277827262878418, 0.022908319473266603, 0.02288252830505371, 0.02325337600708008, 0.022784255981445314, 0.022810016632080078, 0.02369571113586426, 0.023177248001098633, 0.022845407485961915, 0.022749183654785156, 0.022749343872070314, 0.022617952346801758, 0.022651136398315428, 0.022681343078613282, 0.022603967666625976, 0.022742847442626953, 0.02259542465209961, 0.022501535415649414, 0.022633663177490236, 0.022536544799804686, 0.0228353271484375, 0.02269164848327637, 0.022741247177124023, 0.02271571159362793, 0.02271673583984375, 0.022686368942260744, 0.022636287689208983, 0.022687744140625, 0.022933216094970704, 0.0228887996673584, 0.022800703048706055, 0.022812416076660156, 0.023159008026123046, 0.022841119766235353, 0.02279187202453613, 0.023216032028198243, 0.022848159790039062, 0.022879232406616212, 0.022786943435668946, 0.022773759841918945, 0.022806528091430665, 0.02273689651489258, 0.022940895080566407, 0.022823711395263672, 0.022765567779541016, 0.02285875129699707, 0.022852031707763672, 0.022818784713745117, 0.022704736709594726, 0.022758943557739258, 0.022662912368774414, 0.022827743530273437, 0.022662784576416014, 0.022700416564941407, 0.022691295623779296, 0.02273539161682129, 0.02283033561706543, 0.02265983963012695, 0.02272051239013672, 0.02277097511291504, 0.02277459144592285, 0.0227542724609375]",tokens/s,43.262019612080614,,, 4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1545.527296,1546.584064,0.0,1168.113664,1154.613248,s,1,8.5124970703125,8.5124970703125,0.0,8.5124970703125,8.5124970703125,8.5124970703125,8.5124970703125,[8.5124970703125],,kWh,4.064591162913681e-05,4.4762609561822e-06,1.3347788456019583e-05,5.846996104133859e-05,,MB,1650.245632,1777.270784,0.0,1369.440256,1323.44832,s,10,0.7711464309692383,0.07711464309692383,0.0010966688767949236,0.07673311996459961,0.07833795166015625,0.07897385406494141,0.07948257598876954,"[0.07960975646972657, 0.07690499114990235, 0.07630387115478515, 0.07656124877929688, 0.0779483871459961, 0.07723251342773438, 0.07819664001464843, 0.07608220672607421, 0.0761595230102539, 
0.07614729309082031]",tokens/s,3319.732669685559,kWh,2.497256980805001e-06,2.7540118011858825e-07,1.6537216458632824e-06,4.426379806786871e-06,tokens/kWh,57835073.16915752,MB,1656.844288,1798.242304,0.0,1390.411776,1377.26208,s,10,15.17874658203125,1.517874658203125,0.008304287893454717,1.5187279052734375,1.5292023071289063,1.5295946838378907,1.5299085852050782,"[1.500471923828125, 1.511712646484375, 1.529987060546875, 1.516548583984375, 1.5182763671875, 1.519807861328125, 1.5225753173828125, 1.511072265625, 1.5291151123046876, 1.519179443359375]",tokens/s,41.50540340042308,kWh,4.4015917780858925e-05,4.8546132297960675e-06,2.1319602288336758e-05,7.019013329899176e-05,tokens/kWh,897562.0509457695,,s,630,15.17503857803344,0.024087362822275315,0.0004091950367639072,0.024088511466979982,0.024378726768493654,0.02466751842498779,0.025500861911773684,"[0.02437936019897461, 0.023972448348999024, 0.023807743072509765, 0.023576576232910155, 0.023678432464599608, 0.023594079971313478, 0.02373017692565918, 0.023701248168945314, 0.023993471145629882, 0.023794559478759764, 0.02387104034423828, 0.023673280715942383, 0.024012256622314453, 0.023677471160888672, 0.023459968566894532, 0.023271295547485353, 0.023171072006225587, 0.023131904602050782, 0.0233221435546875, 0.023519264221191407, 0.02350150489807129, 0.02366873550415039, 0.02379385566711426, 0.023779136657714844, 0.02360495948791504, 0.023713632583618163, 0.023409088134765624, 0.023418880462646483, 0.023478111267089843, 0.023436864852905272, 0.023394048690795897, 0.023476512908935546, 0.023571104049682618, 0.02390185546875, 0.024166656494140626, 0.023975231170654296, 0.023651008605957032, 0.023588064193725587, 0.02362057685852051, 0.02380780792236328, 0.024567808151245117, 0.024211456298828125, 0.02431795120239258, 0.024098880767822267, 0.024003711700439453, 0.024068927764892577, 0.02411097526550293, 0.024008832931518554, 0.024016895294189454, 0.02410495948791504, 0.024246271133422852, 0.024086528778076172, 0.02418182373046875, 0.02414419174194336, 0.02423075294494629, 0.024092447280883788, 0.024194400787353517, 0.024008832931518554, 0.02395529556274414, 0.023833280563354493, 0.023826656341552736, 0.023807775497436522, 0.02379136085510254, 0.02450127983093262, 0.023922975540161134, 0.02467500877380371, 0.02388787269592285, 0.02380384063720703, 0.023787776947021486, 0.023928640365600586, 0.024093727111816406, 0.024026079177856444, 0.02394028854370117, 0.024025920867919923, 0.02408857536315918, 0.02396134376525879, 0.024058176040649415, 0.025439647674560546, 0.024270751953125, 0.023966239929199218, 0.02395350456237793, 0.024376800537109375, 0.024062496185302734, 0.024071392059326173, 0.023978784561157228, 0.023937023162841797, 0.02371788787841797, 0.02389740753173828, 0.024273151397705077, 0.024228288650512696, 0.024637439727783202, 0.024116800308227538, 0.024220096588134767, 0.02408768081665039, 0.02391334342956543, 0.02405743980407715, 0.023980159759521485, 0.02400284767150879, 0.023922016143798828, 0.0240994873046875, 0.023992191314697264, 0.023870719909667968, 0.024066944122314454, 0.02400886344909668, 0.023969215393066408, 0.023800159454345705, 0.023639488220214843, 0.023583423614501952, 0.023668672561645506, 0.02391244888305664, 0.02393071937561035, 0.02365827178955078, 0.023666847229003907, 0.023613664627075197, 0.024123231887817384, 0.02472326469421387, 0.023953344345092772, 0.023730592727661134, 0.02372825622558594, 0.023877504348754883, 0.02366454315185547, 0.023640159606933595, 0.023654495239257813, 0.02391155242919922, 
0.02352617645263672, 0.023592832565307618, 0.024359743118286134, 0.024160255432128908, 0.02406768035888672, 0.02438800048828125, 0.024246271133422852, 0.024163871765136718, 0.024242656707763672, 0.024176639556884767, 0.024131584167480468, 0.024176607131958006, 0.024121376037597658, 0.02495699119567871, 0.024242111206054687, 0.024242271423339845, 0.02427280044555664, 0.024328191757202147, 0.0241889591217041, 0.024252384185791016, 0.024429664611816407, 0.024268960952758788, 0.02427907180786133, 0.02436751937866211, 0.0242936954498291, 0.024228031158447266, 0.024309215545654298, 0.024185184478759766, 0.02429132843017578, 0.02474185562133789, 0.02475391960144043, 0.024153791427612304, 0.024226400375366212, 0.02410041618347168, 0.02412384033203125, 0.024010751724243166, 0.024054847717285156, 0.024081344604492187, 0.023986175537109376, 0.023957311630249025, 0.02391059112548828, 0.02365545654296875, 0.024316896438598634, 0.025343360900878905, 0.025043487548828125, 0.024055904388427734, 0.024252416610717774, 0.02393907165527344, 0.02415001678466797, 0.023953407287597657, 0.02395136070251465, 0.023917631149291994, 0.02395849609375, 0.023862367630004884, 0.024007776260375976, 0.023930431365966797, 0.02397536087036133, 0.023990175247192384, 0.024030080795288088, 0.024069568634033204, 0.024582496643066408, 0.02712704086303711, 0.02558470344543457, 0.024188671112060547, 0.02418294334411621, 0.024673696517944335, 0.024170751571655272, 0.02562713623046875, 0.024176544189453125, 0.02420524787902832, 0.024092832565307618, 0.02418649673461914, 0.024152448654174805, 0.023966815948486327, 0.02401353645324707, 0.024149248123168945, 0.024132543563842774, 0.024024511337280275, 0.02394767951965332, 0.023879615783691407, 0.023731903076171876, 0.023832895278930663, 0.023824607849121094, 0.02382374382019043, 0.023801759719848634, 0.02409766387939453, 0.02383856010437012, 0.023736320495605468, 0.023572032928466796, 0.023383743286132814, 0.023509759902954102, 0.02342092704772949, 0.023650304794311523, 0.02354956817626953, 0.023607519149780272, 0.02384707260131836, 0.02376495933532715, 0.024141599655151367, 0.02406387138366699, 0.024033855438232422, 0.024305471420288084, 0.024108608245849608, 0.02413609504699707, 0.024121376037597658, 0.024161983489990234, 0.024288576126098634, 0.02433967971801758, 0.02418262481689453, 0.024206975936889648, 0.024233375549316406, 0.024211904525756837, 0.024222272872924805, 0.024344480514526368, 0.024336383819580077, 0.0243056640625, 0.024225088119506837, 0.024264991760253905, 0.024332223892211916, 0.024271327972412108, 0.024199167251586915, 0.024156160354614258, 0.024262655258178712, 0.024088191986083984, 0.024072576522827148, 0.023984128952026368, 0.02393622398376465, 0.024133920669555664, 0.024082944869995116, 0.02464543914794922, 0.02407606315612793, 0.024086751937866212, 0.024125631332397462, 0.024178144454956054, 0.024011104583740235, 0.023903520584106445, 0.0240032958984375, 0.023894208908081055, 0.02380985641479492, 0.023814144134521483, 0.0238983039855957, 0.02390415954589844, 0.02405571174621582, 0.024071615219116212, 0.024039680480957032, 0.02411552047729492, 0.024031007766723633, 0.02454550361633301, 0.025049087524414062, 0.02415964889526367, 0.024158815383911132, 0.024141727447509767, 0.024043615341186524, 0.023995807647705078, 0.02444553565979004, 0.02415420722961426, 0.02451241683959961, 0.024352767944335937, 0.024180543899536132, 0.024037567138671875, 0.024211616516113282, 0.024256223678588866, 0.024266880035400392, 0.024215551376342775, 0.02430975914001465, 
0.024406112670898438, 0.024133344650268555, 0.024111328125, 0.024120447158813476, 0.02457676887512207, 0.024182880401611328, 0.02421366310119629, 0.02419046401977539, 0.024178592681884766, 0.024291776657104493, 0.02434230422973633, 0.024350303649902344, 0.02434480094909668, 0.024109312057495117, 0.0240231990814209, 0.024041471481323243, 0.024094720840454102, 0.023973184585571287, 0.02374928092956543, 0.023680992126464843, 0.024297536849975584, 0.023281280517578124, 0.023413055419921874, 0.023461919784545898, 0.02351241683959961, 0.02389219284057617, 0.02336591911315918, 0.024352224349975585, 0.024123872756958008, 0.024131040573120117, 0.023820703506469726, 0.023779520034790037, 0.023820127487182617, 0.024006591796875, 0.024205408096313476, 0.024202720642089844, 0.023976608276367186, 0.023904287338256835, 0.023658592224121092, 0.02369113540649414, 0.02392678451538086, 0.024000192642211916, 0.02394688034057617, 0.024320703506469726, 0.023963647842407225, 0.023733375549316406, 0.023901248931884767, 0.023914304733276368, 0.024053760528564453, 0.02391244888305664, 0.023955520629882814, 0.023927967071533204, 0.024025888442993165, 0.02399807929992676, 0.023912832260131835, 0.023953407287597657, 0.0243786563873291, 0.024249055862426757, 0.024205312728881836, 0.02465996742248535, 0.024195072174072265, 0.024284799575805663, 0.024426399230957033, 0.024235519409179687, 0.024265632629394532, 0.024172607421875, 0.024182783126831055, 0.02410700798034668, 0.024204896926879882, 0.02420368003845215, 0.024188512802124022, 0.024514976501464843, 0.024215295791625978, 0.024199424743652345, 0.024125375747680665, 0.02424835205078125, 0.024082527160644532, 0.024076223373413086, 0.02411315155029297, 0.024195072174072265, 0.02416640090942383, 0.02411315155029297, 0.024307968139648438, 0.024267776489257813, 0.02432080078125, 0.024238016128540037, 0.02412950325012207, 0.02425369644165039, 0.024361440658569336, 0.02434079933166504, 0.024786815643310547, 0.02611782455444336, 0.025200735092163085, 0.02437379264831543, 0.02435478401184082, 0.02418262481689453, 0.024242176055908202, 0.025047040939331053, 0.0242192325592041, 0.024297664642333985, 0.02423740768432617, 0.024193119049072266, 0.024201824188232423, 0.024145984649658205, 0.024189056396484374, 0.024174591064453126, 0.024176223754882813, 0.02422006416320801, 0.02425651168823242, 0.02454732894897461, 0.024207359313964845, 0.02424025535583496, 0.02429939270019531, 0.02433433532714844, 0.02412860870361328, 0.024162879943847658, 0.024125791549682616, 0.024258495330810547, 0.02417465591430664, 0.024231807708740234, 0.024237375259399414, 0.024224576950073243, 0.02415001678466797, 0.02400377655029297, 0.024060735702514647, 0.024006431579589843, 0.024006559371948243, 0.023999935150146486, 0.023795711517333985, 0.023837568283081055, 0.02384492874145508, 0.023586879730224608, 0.02353753662109375, 0.023535360336303712, 0.023472639083862306, 0.023443008422851564, 0.023470272064208986, 0.023346431732177736, 0.023503711700439453, 0.023560096740722656, 0.02437459182739258, 0.024555999755859374, 0.02450351905822754, 0.0245032958984375, 0.02405171203613281, 0.024139776229858398, 0.024138816833496092, 0.02406822395324707, 0.024126272201538086, 0.02411929512023926, 0.024207359313964845, 0.024084320068359377, 0.024305503845214845, 0.024704639434814452, 0.024228384017944336, 0.0243507194519043, 0.02428108787536621, 0.024401920318603516, 0.024278976440429687, 0.02440096092224121, 0.024318815231323242, 0.02438159942626953, 0.024391679763793944, 0.02433807945251465, 0.02430191993713379, 
0.024373279571533204, 0.02414588737487793, 0.024207359313964845, 0.025393152236938478, 0.024238336563110353, 0.02380303955078125, 0.023583135604858398, 0.023893375396728516, 0.02386207962036133, 0.023749919891357423, 0.023941856384277344, 0.023820480346679686, 0.023523136138916014, 0.023402496337890624, 0.023322080612182616, 0.023273439407348634, 0.023329343795776367, 0.023592960357666014, 0.023605024337768555, 0.024938720703125, 0.023723264694213868, 0.02356710433959961, 0.023899520874023437, 0.023808063507080077, 0.02392531204223633, 0.02349056053161621, 0.02355187225341797, 0.023472095489501955, 0.023436511993408203, 0.023344064712524416, 0.023465728759765624, 0.023649631500244142, 0.023675264358520506, 0.023783967971801757, 0.024000511169433594, 0.02392460823059082, 0.023875104904174806, 0.025107040405273437, 0.023931968688964845, 0.02396883201599121, 0.02390412712097168, 0.024176607131958006, 0.02415363121032715, 0.02396726417541504, 0.023992767333984377, 0.024110687255859374, 0.024029760360717772, 0.024022464752197267, 0.023944128036499025, 0.02412748718261719, 0.024258687973022462, 0.024623104095458984, 0.02415555191040039, 0.025356224060058594, 0.024563776016235352, 0.024957536697387695, 0.024266752243041992, 0.024252031326293947, 0.024188608169555665, 0.02425107192993164, 0.024180736541748047, 0.02429120063781738, 0.02444300842285156, 0.024160480499267577, 0.02402895927429199, 0.0242872314453125, 0.02403296089172363, 0.024048063278198244, 0.023918464660644533, 0.02369536018371582, 0.02364825630187988, 0.02384486389160156, 0.023961599349975587, 0.023967744827270508, 0.02413132858276367, 0.02396771240234375, 0.023744800567626952, 0.023764991760253908, 0.023938688278198242, 0.024058143615722657, 0.02416854476928711, 0.024066047668457033, 0.02403443145751953, 0.024102783203125, 0.024017919540405275, 0.023957504272460937, 0.024164159774780272, 0.024223936080932616, 0.024253856658935546, 0.02418662452697754, 0.024176544189453125, 0.02424928092956543, 0.024848512649536133, 0.02424323272705078, 0.024302080154418947, 0.02416828727722168, 0.02404812812805176, 0.024203264236450195, 0.024151647567749023, 0.024191488265991212, 0.024165311813354493, 0.02417056083679199, 0.02404035186767578, 0.02413884735107422, 0.024492704391479492, 0.024123647689819335, 0.024172544479370117, 0.02415001678466797, 0.024235551834106445, 0.027676288604736327, 0.025770368576049803, 0.024306272506713866, 0.024530208587646485, 0.024254623413085936, 0.024817472457885743, 0.024364927291870116, 0.02446953582763672, 0.02440220832824707, 0.025464895248413087, 0.024067968368530274, 0.02406985664367676, 0.024103263854980468, 0.023832576751708984, 0.024138912200927735, 0.02516873550415039, 0.02427622413635254, 0.024103679656982423, 0.02383078384399414, 0.02396953582763672, 0.02398745536804199, 0.0238654727935791, 0.02381452751159668, 0.023568639755249022, 0.02420047950744629, 0.025280256271362305, 0.024034048080444338, 0.0248853759765625, 0.025515552520751952, 0.02379225540161133, 0.023761856079101563, 0.02365951919555664, 0.02346780776977539, 0.02368943977355957, 0.023474271774291993, 0.023490272521972656, 0.023414079666137694, 0.023658784866333007, 0.024224096298217774, 0.024170400619506836, 0.0241298885345459, 0.024070144653320313, 0.024008703231811524, 0.024016895294189454, 0.024153472900390625, 0.024132192611694334, 0.024057376861572267, 0.024082944869995116, 0.02396988868713379, 0.02408844757080078, 0.02396342468261719, 0.024019168853759765, 0.02382236862182617, 0.023940160751342775, 0.024053983688354492, 
0.024126176834106446, 0.024201215744018553, 0.024000511169433594, 0.023973888397216796, 0.02429132843017578, 0.02409062385559082, 0.02409267234802246, 0.02434662437438965, 0.024172544479370117, 0.023940095901489256, 0.024015775680541994, 0.024057855606079103, 0.023969120025634765]",tokens/s,41.51554520012578,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 27384 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,6726.176768,7525.564416,0.0,7147.094016,7138.9184,s,1,11.4454169921875,11.4454169921875,0.0,11.4454169921875,11.4454169921875,11.4454169921875,11.4454169921875,[11.4454169921875],,kWh,0.00013004618234999722,1.4337578162869243e-05,4.364336824799753e-05,0.000188027128760864,,MB,1571.659776,8236.498944,0.0,7828.668416,7715.649536,s,10,7.225968872070313,0.7225968872070312,0.004365442437216515,0.7243254699707031,0.7271126220703125,0.7274005187988282,0.7276308361816406,"[0.71417431640625, 0.7217698364257813, 0.7167904052734375, 0.7242161254882813, 0.7270486450195313, 0.7276884155273438, 0.7265798950195312, 0.724434814453125, 0.7244411010742188, 0.7188253173828125]",tokens/s,354.2777508902463,kWh,2.1206718300000676e-05,2.3387232408683474e-06,1.4104039061001359e-05,3.764948060187038e-05,tokens/kWh,6799562.594424802,MB,1579.339776,8383.299584,0.0,7975.469056,7906.518528,s,10,30.671578369140633,3.0671578369140624,0.005155731992710853,3.0669029541015624,3.0737444580078126,3.075187292480469,3.076341560058594,"[3.063729248046875, 3.05644482421875, 3.0667685546875, 3.066685791015625, 3.06440234375, 3.06835498046875, 3.067037353515625, 3.068101318359375, 3.076630126953125, 3.073423828125]",tokens/s,20.540188457789228,kWh,8.943397612999658e-05,9.865027055089127e-06,5.941510308759398e-05,0.00015871410627267971,tokens/kWh,396940.142748008,,s,630,30.667361335754393,0.048678351326594274,0.0005182079638856422,0.04866662406921387,0.04917172470092774,0.04932351245880127,0.05109099552154541,"[0.051337215423583986, 0.04890745544433594, 0.0479754867553711, 0.04796390533447266, 0.04803337478637695, 0.04811407852172851, 0.04793462371826172, 0.047780288696289065, 0.048269695281982425, 0.04780563354492187, 0.048038753509521484, 0.047972255706787106, 0.04797353744506836, 0.048315486907958984, 0.048239776611328125, 
0.04808492660522461, 0.04820659255981445, 0.04831379318237305, 0.04803334426879883, 0.04864284896850586, 0.04904982376098633, 0.04881734466552735, 0.04834137725830078, 0.04871939086914062, 0.04845865631103516, 0.048361377716064455, 0.04835932922363281, 0.04830003356933594, 0.04814828872680664, 0.04830579376220703, 0.04815539169311524, 0.04835123062133789, 0.04865993499755859, 0.048832126617431644, 0.04874924850463867, 0.048369056701660154, 0.04870332717895508, 0.04861231994628906, 0.0488524169921875, 0.04904198455810547, 0.04909260940551758, 0.04888723373413086, 0.05062508773803711, 0.04871987152099609, 0.04917855834960937, 0.04908044815063477, 0.04886111831665039, 0.04890192031860351, 0.04880815887451172, 0.048549121856689456, 0.04845030212402344, 0.04878540802001953, 0.04871148681640625, 0.048750526428222654, 0.04869350433349609, 0.04862963104248047, 0.04882979202270508, 0.04900742340087891, 0.04926393508911133, 0.049119583129882814, 0.048978271484375, 0.04898812866210937, 0.049258495330810545, 0.050716670989990234, 0.04861244964599609, 0.047969184875488284, 0.047816703796386716, 0.04761932754516601, 0.04799465560913086, 0.04792623901367187, 0.04797439956665039, 0.04782284927368164, 0.04793958282470703, 0.047834270477294924, 0.04819235229492187, 0.048043617248535155, 0.04801507186889648, 0.04802630233764649, 0.04819353485107422, 0.04816502380371094, 0.04828553771972656, 0.04819529724121094, 0.048646430969238284, 0.04853964614868164, 0.04842496109008789, 0.04851900863647461, 0.04810947036743164, 0.047901153564453125, 0.04805814361572266, 0.04834099197387695, 0.048414913177490235, 0.04822809600830078, 0.04825676727294922, 0.04831059265136719, 0.04826259231567383, 0.04821049499511719, 0.04847555160522461, 0.048619968414306644, 0.04860956954956055, 0.048510848999023436, 0.048623615264892575, 0.04864604949951172, 0.04878550338745117, 0.048936958312988284, 0.048788833618164065, 0.048876190185546876, 0.04879359817504883, 0.04890009689331055, 0.04869734573364258, 0.04889136123657226, 0.048579265594482425, 0.04861244964599609, 0.04873484802246094, 0.04895347213745117, 0.04878745651245117, 0.048895999908447264, 0.0485494384765625, 0.04864863967895508, 0.049067489624023436, 0.048812576293945316, 0.04926873779296875, 0.04898787307739258, 0.04899238586425781, 0.04885638427734375, 0.04934441757202149, 0.04918163299560547, 0.051087520599365235, 0.04877091217041016, 0.04802716827392578, 0.047852001190185546, 0.047874431610107425, 0.04789388656616211, 0.04806300735473633, 0.04790691375732422, 0.04790857696533203, 0.04794806289672852, 0.04842291259765625, 0.048447296142578124, 0.04839657592773437, 0.04839759826660156, 0.04820851135253906, 0.04842214584350586, 0.048571136474609374, 0.04841267013549805, 0.048484447479248044, 0.04891366577148438, 0.04909872055053711, 0.04882092666625976, 0.0485618896484375, 0.04847030258178711, 0.048345088958740234, 0.048418113708496094, 0.04834998321533203, 0.04833065414428711, 0.04802105712890625, 0.048185150146484376, 0.04825356674194336, 0.04840614318847656, 0.04863209533691406, 0.04876502227783203, 0.048617473602294924, 0.0486932487487793, 0.04850483322143555, 0.04866457748413086, 0.04898569488525391, 0.049041824340820314, 0.04927078247070313, 0.049057151794433596, 0.04896422576904297, 0.04908031845092774, 0.04910220718383789, 0.04905023956298828, 0.048707103729248045, 0.04873401641845703, 0.048712352752685546, 0.04892614364624023, 0.048783935546875, 0.04888595199584961, 0.04882153701782226, 0.048690849304199216, 0.04896764755249024, 0.049171360015869144, 
0.048996353149414064, 0.04913071823120117, 0.04927372741699219, 0.04922278213500977, 0.0489664306640625, 0.04928006362915039, 0.049327041625976564, 0.05166169738769531, 0.04873011016845703, 0.04817251205444336, 0.04821456146240234, 0.04800511932373047, 0.048293888092041014, 0.047983905792236325, 0.048022239685058594, 0.048080894470214845, 0.04817657470703125, 0.04842879867553711, 0.048306880950927736, 0.04818732833862305, 0.048265407562255856, 0.048715774536132815, 0.048306270599365236, 0.04833238220214844, 0.04828192138671875, 0.04856012725830078, 0.04902822494506836, 0.04876172637939453, 0.04864156723022461, 0.04847625732421875, 0.04850307083129883, 0.048631584167480466, 0.04875433731079101, 0.048339839935302734, 0.0484035530090332, 0.04832447814941406, 0.048593505859375, 0.0486907844543457, 0.048489086151123045, 0.048395999908447264, 0.048621631622314455, 0.048462047576904296, 0.048413726806640626, 0.048589790344238284, 0.04860313415527344, 0.04868627166748047, 0.04889190292358398, 0.04910976028442383, 0.04903430557250977, 0.04896819305419922, 0.049180831909179684, 0.04898646545410156, 0.048983776092529296, 0.0486607666015625, 0.04867891311645508, 0.04869529724121094, 0.048715774536132815, 0.048645633697509766, 0.04857062530517578, 0.04860879898071289, 0.048863967895507815, 0.04887142562866211, 0.04866867065429688, 0.04902707290649414, 0.04908982467651367, 0.049072319030761716, 0.04938108825683594, 0.04915488052368164, 0.049121280670166016, 0.04919311904907227, 0.05109241485595703, 0.04869721603393555, 0.04802764892578125, 0.048070655822753904, 0.04806576156616211, 0.04838604736328125, 0.04822844696044922, 0.04841337585449219, 0.04816915130615235, 0.04813804626464844, 0.04844748687744141, 0.04836457443237305, 0.04822073745727539, 0.048267166137695314, 0.04842342376708984, 0.04849382400512695, 0.048315135955810544, 0.048137569427490236, 0.04844543838500977, 0.048966304779052734, 0.04897792053222656, 0.04883865737915039, 0.04874649429321289, 0.04839219284057617, 0.04818124771118164, 0.04829740905761719, 0.04845209503173828, 0.0483326416015625, 0.04827936172485352, 0.04829430389404297, 0.04845676803588867, 0.04834400177001953, 0.04841062545776367, 0.048353279113769534, 0.04862566375732422, 0.04882339096069336, 0.04864441680908203, 0.04885974502563477, 0.0490967025756836, 0.049036865234375, 0.049050048828125, 0.04918201446533203, 0.04918137741088867, 0.04894425582885742, 0.04888051223754883, 0.04877878570556641, 0.048525791168212894, 0.04853952026367187, 0.048650367736816406, 0.04861692810058594, 0.048700000762939455, 0.04850067138671875, 0.04852912139892578, 0.04878950500488281, 0.0486640625, 0.048788257598876957, 0.048822048187255856, 0.04870576095581055, 0.04908425521850586, 0.0491333122253418, 0.04921782302856445, 0.04912140655517578, 0.04888345718383789, 0.05115887832641602, 0.0489697265625, 0.048191104888916016, 0.0480321273803711, 0.047865856170654295, 0.04808041763305664, 0.04803606414794922, 0.0481446418762207, 0.048205825805664064, 0.047917057037353515, 0.04813107299804688, 0.04844044876098633, 0.04828339385986328, 0.04810732650756836, 0.04827987289428711, 0.04821171188354492, 0.04808291244506836, 0.048451583862304685, 0.048551551818847655, 0.04882908630371094, 0.04904550552368164, 0.04890560150146484, 0.04854556655883789, 0.04856422424316406, 0.04831318283081055, 0.04836268615722656, 0.04827190399169922, 0.048259361267089844, 0.048601089477539064, 0.04828492736816406, 0.048226688385009764, 0.048621952056884764, 0.048347137451171876, 0.048830463409423826, 0.04877721786499024, 
0.048773120880126954, 0.048748542785644534, 0.048656383514404294, 0.04906777572631836, 0.04908841705322266, 0.04927667236328125, 0.048976158142089846, 0.0488135986328125, 0.04905859375, 0.0488611831665039, 0.04900044631958008, 0.04878287887573242, 0.048852832794189456, 0.04865087890625, 0.04931174468994141, 0.0489345588684082, 0.04872022247314453, 0.04881123352050781, 0.04872476959228516, 0.048920574188232424, 0.0490885124206543, 0.04903055953979492, 0.0490379524230957, 0.049557472229003904, 0.049495742797851565, 0.04920556640625, 0.04908854293823242, 0.049436641693115235, 0.05170995330810547, 0.04914585494995117, 0.04816691207885742, 0.04819257736206055, 0.04774803161621094, 0.04834406280517578, 0.04811983871459961, 0.0480467529296875, 0.04791843032836914, 0.04809417724609375, 0.04802348709106445, 0.048019519805908205, 0.0482979850769043, 0.04832441711425781, 0.048093376159667967, 0.04824883270263672, 0.0481341438293457, 0.04832255935668945, 0.048336894989013675, 0.048795326232910156, 0.04898643112182617, 0.04898988723754883, 0.048775489807128904, 0.04847411346435547, 0.048345088958740234, 0.048387134552001956, 0.048140384674072265, 0.04812275314331055, 0.04810134506225586, 0.048387935638427734, 0.04823011016845703, 0.04837599945068359, 0.04852556610107422, 0.048643840789794925, 0.04840041732788086, 0.04840665435791015, 0.04871177673339844, 0.048633281707763674, 0.048925342559814455, 0.04911299133300781, 0.049102783203125, 0.04939795303344727, 0.04926451110839844, 0.048971294403076175, 0.04914742279052734, 0.049080894470214846, 0.04887948989868164, 0.048615966796875, 0.048573982238769534, 0.04876537704467773, 0.04873023986816406, 0.0487784309387207, 0.049079200744628904, 0.04896681594848633, 0.04886592102050781, 0.04859699249267578, 0.0488590087890625, 0.0489463996887207, 0.04892079925537109, 0.049498817443847654, 0.04955104064941406, 0.04975033569335938, 0.049458335876464844, 0.051140960693359376, 0.04872947311401367, 0.04797817611694336, 0.04787222290039062, 0.04774918365478516, 0.04783967971801758, 0.04783536148071289, 0.048130046844482424, 0.04829183959960937, 0.04821958541870117, 0.048263744354248045, 0.04830729675292969, 0.048347198486328125, 0.04826780700683594, 0.04844166564941406, 0.04839014434814453, 0.048411808013916015, 0.048536350250244144, 0.048356960296630856, 0.048691551208496095, 0.04892409515380859, 0.04893727874755859, 0.0487407341003418, 0.04873622512817383, 0.04838790512084961, 0.04823823928833008, 0.048075328826904296, 0.04831436920166016, 0.04874230575561524, 0.048739681243896486, 0.04853631973266601, 0.04841600036621094, 0.04865081787109375, 0.048736446380615236, 0.048662559509277344, 0.04869526290893555, 0.04870348739624023, 0.048653438568115236, 0.048949535369873044, 0.04881782531738281, 0.04903523254394531, 0.04885295867919922, 0.04942335891723633, 0.049190814971923826, 0.04900048065185547, 0.04908448028564453, 0.04870963287353516, 0.04873625564575195, 0.048874561309814456, 0.0488969612121582, 0.048775169372558595, 0.04883014297485352, 0.04880374526977539, 0.04849926376342773, 0.0488642578125, 0.04913443374633789, 0.04904912185668946, 0.04916374588012695, 0.04917500686645508, 0.049557727813720705, 0.04928316879272461, 0.04916604614257813, 0.04910335922241211, 0.05163417434692383, 0.049086463928222655, 0.04826057434082031, 0.04834560012817383, 0.04815187072753906, 0.048159358978271484, 0.04841891098022461, 0.04825696182250976, 0.04817887878417969, 0.047958145141601564, 0.04802585601806641, 0.04824883270263672, 0.04841471862792969, 0.048449535369873044, 
0.04828979110717774, 0.04844342422485352, 0.04837577438354492, 0.048645694732666014, 0.048814208984375, 0.049041728973388675, 0.049430335998535156, 0.049503711700439455, 0.04905344009399414, 0.04879049682617188, 0.04870943832397461, 0.048801822662353514, 0.048360862731933595, 0.048489215850830075, 0.0485294075012207, 0.04857833480834961, 0.04853168106079102, 0.04859494400024414, 0.04878716659545899, 0.04882460784912109, 0.04888576126098633, 0.04879359817504883, 0.049061313629150394, 0.04906246566772461, 0.04927897644042969, 0.049209342956542966, 0.04924415969848633, 0.0493873291015625, 0.04915577697753906, 0.04910540771484375, 0.048963584899902345, 0.04887673568725586, 0.048931648254394534, 0.04866217422485351, 0.04908886337280274, 0.04897766494750976, 0.0486761589050293, 0.0489944953918457, 0.048715808868408206, 0.04884515380859375, 0.048724063873291014, 0.048820606231689455, 0.04909660720825195, 0.049225727081298826, 0.04928409576416016, 0.04932675170898437, 0.049434112548828124, 0.04906409454345703, 0.04916678237915039, 0.05058281707763672, 0.04844412612915039, 0.04788019180297851, 0.04783513641357422, 0.04787353515625, 0.04844819259643555, 0.04811161422729492, 0.04827654266357422, 0.048287841796875, 0.04855878448486328, 0.048553951263427736, 0.04829523086547852, 0.04845228958129883, 0.048476158142089845, 0.04845568084716797, 0.048508384704589846, 0.04835382461547852, 0.04833280181884766, 0.04857241439819336, 0.049391616821289064, 0.04919039916992188, 0.04842502212524414, 0.04864031982421875, 0.04833292770385742, 0.0486379508972168, 0.048684894561767576, 0.04843244934082031, 0.048780128479003905, 0.04887756729125976, 0.04882227325439453, 0.0486868782043457, 0.048774784088134765, 0.04886383819580078, 0.04867862319946289, 0.048809310913085935, 0.04889484786987305, 0.04877257537841797, 0.04877331161499023, 0.04908867263793945, 0.04931955337524414, 0.0491383056640625, 0.049188705444335935, 0.049000350952148435, 0.049030784606933595, 0.048853633880615234, 0.04892047882080078, 0.04897980880737305, 0.048981727600097655, 0.0489700813293457, 0.048946369171142576, 0.04880486297607422, 0.0490332145690918, 0.04877715301513672, 0.04898556900024414, 0.04901744079589844, 0.04884038543701172, 0.048922943115234374, 0.0490590705871582, 0.049330944061279296, 0.049225631713867186, 0.049332351684570314, 0.049290592193603516, 0.049166976928710936]",tokens/s,20.543012915346488,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,8197.865472,11078.139904,0.0,10699.669504,10468.411392,s,1,13.45709765625,13.45709765625,0.0,13.45709765625,13.45709765625,13.45709765625,13.45709765625,[13.45709765625],,kWh,0.0001882311869374765,2.0753804868426186e-05,6.216949417997553e-05,0.00027115448598587824,,MB,3945.213952,11596.136448,0.0,11188.30592,10923.769856,s,10,7.943306579589843,0.7943306579589844,0.0047716555835403416,0.794206756591797,0.7988052307128907,0.800322964477539,0.8015371514892579,"[0.784219970703125, 0.7924779052734375, 0.7932594604492188, 0.7914694213867187, 0.8018406982421875, 0.795154052734375, 0.7906920776367188, 0.7984679565429688, 0.79746826171875, 0.7982567749023437]",tokens/s,322.28392223685097,kWh,2.3125247469863842e-05,2.550328321795675e-06,1.5457726041385905e-05,4.113330183304542e-05,tokens/kWh,6223667.651069438,MB,3945.213952,11598.2336,0.0,11190.403072,10923.772416,s,10,35.77825537109375,3.577825537109375,0.005760191824826535,3.5798658447265623,3.58402451171875,3.5842446777343753,3.584420810546875,"[3.56743115234375, 3.57274609375, 3.56941748046875, 3.576769287109375, 3.5807265625, 3.58446484375, 3.579005126953125, 3.582830322265625, 3.580888916015625, 3.5839755859375]",tokens/s,17.60846059891994,kWh,0.00010474067334097117,1.1553485363672723e-05,6.941428630061068e-05,0.00018570844500525457,tokens/kWh,339241.43836440734,,s,630,35.7751304016113,0.05678592127239893,0.0007543399283584048,0.056694927215576174,0.05728260116577148,0.057606292915344234,0.06109767765045166,"[0.06104678344726563, 0.0568721923828125, 0.05640176010131836, 0.05626144027709961, 0.05611859130859375, 0.05637529754638672, 0.05663616180419922, 0.05645075225830078, 0.05637263870239258, 0.055994464874267576, 0.056225761413574216, 0.056209632873535156, 0.056580894470214846, 0.05636288070678711, 0.05614960098266602, 0.05589443206787109, 0.056635391235351565, 0.05762870407104492, 0.057264095306396486, 0.05636710357666016, 0.05656576156616211, 0.055984127044677735, 0.05602886581420898, 0.056226112365722655, 0.05611635208129883, 0.055866432189941403, 0.05608995056152344, 0.05687753677368164, 0.05641372680664063, 0.056235488891601564, 0.056392318725585935, 0.05638387298583984, 0.056239776611328125, 0.056869216918945316, 0.05690572738647461, 0.05664153671264648, 0.057019489288330075, 0.05674662399291992, 0.05665203094482422, 0.05625654220581055, 0.05695283126831055, 0.05671315383911133, 0.05638931274414063, 0.0564854736328125, 0.05685702514648437, 0.05692851257324219, 0.05634467315673828, 0.056270305633544924, 0.056752670288085935, 0.05655551910400391, 0.05676614379882813, 0.05675980758666992, 0.05677753448486328, 0.05695078277587891, 0.05695484924316406, 0.056772640228271484, 0.05698771286010742, 0.0569117431640625, 0.05710665512084961, 0.056638721466064454, 
0.056545631408691406, 0.05644927978515625, 0.05694668960571289, 0.061118465423583984, 0.05682390213012695, 0.056452320098876956, 0.05634838485717773, 0.0563988151550293, 0.05616643142700195, 0.056104896545410156, 0.05622784042358398, 0.05613078308105469, 0.05603615951538086, 0.056377342224121094, 0.05621062469482422, 0.05617958450317383, 0.05663532638549805, 0.056128990173339846, 0.05669507217407226, 0.057019775390625, 0.056853374481201174, 0.056842239379882815, 0.05651865768432617, 0.056268798828125, 0.05628326416015625, 0.05656768035888672, 0.05633187103271484, 0.05597564697265625, 0.05704518508911133, 0.05634905624389648, 0.05662665557861328, 0.05606671905517578, 0.05609187316894531, 0.05721548843383789, 0.05687283325195312, 0.05723587036132813, 0.0570511360168457, 0.056614753723144534, 0.0567353286743164, 0.05706787109375, 0.05688956832885742, 0.05684633636474609, 0.056371200561523435, 0.05677056121826172, 0.05670502471923828, 0.056721153259277346, 0.05662508773803711, 0.05642601776123047, 0.056516895294189455, 0.05644339370727539, 0.05677423858642578, 0.057073982238769534, 0.057460830688476565, 0.057112575531005856, 0.0568642578125, 0.05698934555053711, 0.056973697662353516, 0.05718255996704102, 0.05684348678588867, 0.05676668930053711, 0.05667295837402344, 0.056600574493408204, 0.05713670349121094, 0.05698809432983398, 0.0563507194519043, 0.056704513549804686, 0.06123471832275391, 0.056406463623046875, 0.05642419052124024, 0.05622332763671875, 0.05585580825805664, 0.05534080123901367, 0.05591475296020508, 0.05597814559936523, 0.05584880065917969, 0.05613363265991211, 0.0564901123046875, 0.056629119873046876, 0.05653094482421875, 0.056430591583251956, 0.056668159484863284, 0.05641625595092774, 0.05682902526855469, 0.05763782501220703, 0.057277793884277346, 0.0568408317565918, 0.05624422454833984, 0.05599027252197265, 0.056594432830810545, 0.0564381103515625, 0.056349342346191406, 0.05624422454833984, 0.05653299331665039, 0.05644902420043945, 0.056457183837890626, 0.056401950836181644, 0.05581545639038086, 0.05628339385986328, 0.05695945739746094, 0.057270271301269535, 0.0570880012512207, 0.05728179168701172, 0.056937217712402344, 0.05663536071777344, 0.05686483383178711, 0.056522239685058595, 0.05658998489379883, 0.05645779037475586, 0.056541439056396484, 0.05628623962402344, 0.05660976028442383, 0.056639072418212894, 0.05644236755371094, 0.05639251327514649, 0.056340576171875, 0.0569826545715332, 0.05729475021362305, 0.05692924880981445, 0.05711872100830078, 0.05713100814819336, 0.05705657577514649, 0.056982208251953125, 0.056909534454345705, 0.05672323226928711, 0.056666622161865236, 0.0566640625, 0.05710147094726563, 0.056546142578125, 0.05630271911621094, 0.06100582504272461, 0.05661036682128906, 0.05628768157958984, 0.05673779296875, 0.05624406433105469, 0.05603958511352539, 0.05575833511352539, 0.05679679870605469, 0.05667830276489258, 0.05613471984863281, 0.05644278335571289, 0.05633337783813477, 0.05675523376464844, 0.05673334503173828, 0.056715198516845707, 0.05645344161987305, 0.057624671936035154, 0.057440193176269534, 0.05730710220336914, 0.05688115310668945, 0.05669491195678711, 0.0566803207397461, 0.05634857559204102, 0.05636105728149414, 0.056712799072265625, 0.05644739151000976, 0.056225727081298825, 0.056428062438964845, 0.05630966567993164, 0.05642303848266601, 0.05644902420043945, 0.05600403213500976, 0.05680620956420898, 0.05697417449951172, 0.05722000122070312, 0.0572042236328125, 0.05759027099609375, 0.05688857650756836, 0.05688361740112305, 0.056439136505126955, 
0.056610816955566405, 0.056444190979003904, 0.05655376052856445, 0.05657545471191406, 0.056676609039306644, 0.056799968719482424, 0.056530879974365233, 0.05680953598022461, 0.0569420166015625, 0.05674860763549805, 0.05662105560302735, 0.05690163040161133, 0.05698294448852539, 0.0571602897644043, 0.057434112548828124, 0.057304672241210934, 0.057018302917480466, 0.05678873443603515, 0.05637174224853515, 0.05689785766601563, 0.05678681564331055, 0.05676851272583008, 0.056643585205078124, 0.061511806488037106, 0.05690806579589844, 0.05637315368652344, 0.05581177520751953, 0.05602105712890625, 0.05600457763671875, 0.05583638381958008, 0.05635913467407227, 0.05643299102783203, 0.05620240020751953, 0.05673455810546875, 0.05678444671630859, 0.05643513488769531, 0.05648384094238281, 0.05654937744140625, 0.056948734283447267, 0.057040897369384766, 0.05761843109130859, 0.0573004150390625, 0.06046937561035156, 0.055427391052246096, 0.056199295043945316, 0.05657782363891602, 0.05653484725952149, 0.056378944396972656, 0.056253280639648434, 0.05630771255493164, 0.056156158447265625, 0.05681884765625, 0.05671539306640625, 0.05658108901977539, 0.05676134490966797, 0.05705804824829101, 0.057016063690185546, 0.05690521621704102, 0.05795926284790039, 0.05729462432861328, 0.0568034553527832, 0.05668044662475586, 0.05618460845947266, 0.05676012802124023, 0.05664790344238281, 0.05636140823364258, 0.05643648147583008, 0.0566569595336914, 0.057178848266601565, 0.056612350463867187, 0.05652880096435547, 0.057064254760742186, 0.05716793441772461, 0.057013694763183596, 0.057591457366943356, 0.057766334533691406, 0.056961441040039064, 0.05681478500366211, 0.05708803176879883, 0.057058078765869144, 0.05647974395751953, 0.056899585723876954, 0.056586238861083986, 0.05675740814208984, 0.05673625564575195, 0.056762432098388674, 0.061968479156494144, 0.05691775894165039, 0.05628553771972656, 0.05667216110229492, 0.05638143920898438, 0.0562973747253418, 0.05626211166381836, 0.05600934219360352, 0.05566009521484375, 0.05706172943115234, 0.05659247970581055, 0.056512161254882814, 0.05624457550048828, 0.056426048278808594, 0.0566071662902832, 0.056559616088867185, 0.057923583984375, 0.057030654907226565, 0.056578048706054686, 0.056915969848632814, 0.056608352661132816, 0.05645897674560547, 0.05648454284667969, 0.0563974723815918, 0.056538753509521485, 0.05657276916503906, 0.05652044677734375, 0.05669900894165039, 0.056610721588134766, 0.05668665695190429, 0.05687020874023437, 0.05690851211547852, 0.05689142227172852, 0.05677257537841797, 0.057021888732910156, 0.057382720947265625, 0.05727104187011719, 0.05697884750366211, 0.05680550384521484, 0.05643312072753906, 0.05671321487426758, 0.0567275505065918, 0.05687295913696289, 0.0566743049621582, 0.05671456146240234, 0.05666681671142578, 0.056784511566162106, 0.05662553787231445, 0.05688323211669922, 0.05715350341796875, 0.05705900955200195, 0.057166145324707034, 0.05758566284179688, 0.05748735809326172, 0.057451583862304687, 0.056947647094726564, 0.056587905883789064, 0.056691104888916016, 0.05690774536132812, 0.05718412780761719, 0.0569672966003418, 0.05698121643066406, 0.05945577621459961, 0.06121228790283203, 0.05647536087036133, 0.055841663360595706, 0.05600246429443359, 0.05631372833251953, 0.056010753631591796, 0.05613881683349609, 0.05623440170288086, 0.05650076675415039, 0.05663049697875976, 0.05665158462524414, 0.05648249435424805, 0.05704297637939453, 0.05653260803222656, 0.05639228820800781, 0.05634051132202148, 0.0567459831237793, 0.05739110565185547, 
0.05751193618774414, 0.05670220947265625, 0.056228126525878906, 0.056389633178710936, 0.05665840148925781, 0.05666332626342773, 0.056339168548583986, 0.05604556655883789, 0.055875007629394534, 0.05612550354003906, 0.056530784606933594, 0.05790995025634765, 0.05705929565429688, 0.056667457580566405, 0.05789971160888672, 0.05653094482421875, 0.05789878463745117, 0.0575860481262207, 0.05715494537353515, 0.05646768188476563, 0.05639817428588867, 0.05661481475830078, 0.056576160430908205, 0.05664751815795899, 0.05665792083740234, 0.056357982635498044, 0.056955360412597654, 0.05654739379882812, 0.0569409294128418, 0.05670041656494141, 0.056617343902587894, 0.05679731369018555, 0.057148799896240235, 0.05721152114868164, 0.057178112030029295, 0.056971263885498044, 0.0571575698852539, 0.05744566345214844, 0.05716662216186524, 0.05695632171630859, 0.05683465576171875, 0.057059326171875, 0.0566514892578125, 0.05656598281860352, 0.05727648162841797, 0.06091088104248047, 0.056832736968994144, 0.05597990417480469, 0.05595904159545898, 0.05570624160766602, 0.05648793411254883, 0.05630771255493164, 0.05653299331665039, 0.05626857757568359, 0.056344799041748043, 0.05629132843017578, 0.05647324752807617, 0.05673731231689453, 0.05681840133666992, 0.056403743743896485, 0.06030684661865234, 0.05637936019897461, 0.05786697769165039, 0.05786563110351563, 0.05717007827758789, 0.05704771041870117, 0.05639382553100586, 0.056266273498535156, 0.05608047866821289, 0.05670310211181641, 0.05669494247436523, 0.05628313446044922, 0.05648121643066406, 0.056586017608642576, 0.05678982543945312, 0.05686687850952148, 0.05688636779785156, 0.05643142318725586, 0.05708595275878906, 0.0570511360168457, 0.05706547164916992, 0.057059326171875, 0.05731068801879883, 0.056686241149902346, 0.056722305297851563, 0.05636640167236328, 0.05653369522094726, 0.05657964706420898, 0.05673740768432617, 0.05680748748779297, 0.05657881546020508, 0.056899585723876954, 0.05698332977294922, 0.0567946891784668, 0.056885089874267575, 0.056742721557617185, 0.057048095703125, 0.05748630523681641, 0.05718425750732422, 0.05711183929443359, 0.05693008041381836, 0.056983871459960936, 0.05702105712890625, 0.056809249877929686, 0.05679740905761719, 0.056641216278076174, 0.05702892684936523, 0.05739724731445312, 0.06168371200561523, 0.05670912170410156, 0.05594879913330078, 0.055761409759521485, 0.056280830383300784, 0.055890174865722654, 0.05582640075683594, 0.056550590515136716, 0.056293663024902345, 0.056046142578125, 0.057369792938232425, 0.057072574615478516, 0.0571226577758789, 0.05672700881958008, 0.057131584167480466, 0.05670665740966797, 0.05688361740112305, 0.05783347320556641, 0.057761791229248044, 0.05732352066040039, 0.0566640625, 0.056202529907226566, 0.05634860610961914, 0.05648873519897461, 0.056469024658203124, 0.0563707504272461, 0.056112033843994144, 0.0563507194519043, 0.056303390502929686, 0.056387680053710934, 0.055979839324951174, 0.05595577621459961, 0.05664767837524414, 0.057320831298828125, 0.05727910232543945, 0.05697945785522461, 0.057112575531005856, 0.05705833435058594, 0.056715904235839845, 0.05672585678100586, 0.05713510513305664, 0.0566927375793457, 0.05650022506713867, 0.05670502471923828, 0.05694460678100586, 0.056833953857421876, 0.05698537445068359, 0.05672175979614258, 0.05672367858886719, 0.056839969635009764, 0.05722304153442383, 0.057079936981201174, 0.05722534561157227, 0.057433982849121094, 0.05732352066040039, 0.057427967071533206, 0.05719244766235351, 0.05709388732910156, 0.056669471740722656, 0.05651068878173828, 
0.057086719512939456, 0.05701551818847656, 0.05685532760620117, 0.06136342239379883, 0.05695318222045898, 0.05648854446411133, 0.056069984436035156, 0.05587558364868164, 0.05600249481201172, 0.05608262252807617, 0.055869312286376954, 0.05610934448242187, 0.055835582733154296, 0.05591939163208008, 0.05662656021118164, 0.056871551513671875, 0.05648588943481445, 0.05651827239990234, 0.05707321548461914, 0.05778515243530274, 0.05780889511108398, 0.057587039947509765, 0.057114334106445314, 0.05664863967895508, 0.05661859130859375, 0.056586334228515625, 0.05643500900268555, 0.05655299377441406, 0.056560096740722654, 0.05659852981567383, 0.056406017303466796, 0.056774654388427735, 0.05695484924316406, 0.0563546257019043, 0.05677008056640625, 0.057082561492919924, 0.0573908462524414, 0.058119552612304684, 0.057770881652832035, 0.05704499053955078, 0.05688649749755859, 0.05655590438842773, 0.05680374526977539, 0.0568682861328125, 0.056583934783935544, 0.0566544303894043, 0.056807647705078124, 0.05668364715576172, 0.05710435104370117, 0.05686947250366211, 0.05674220657348633, 0.05661859130859375, 0.05717203140258789, 0.05745904159545898, 0.05728988647460938, 0.05798963165283203, 0.05721942520141601, 0.056764415740966793, 0.05706137466430664, 0.05724063873291016, 0.05680633544921875, 0.056799232482910154, 0.05685164642333984, 0.05673654556274414, 0.056977344512939454, 0.05699388885498047]",tokens/s,17.60999870378179,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() 
File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = 
nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch 
raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, 
worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = 
Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4917.706752,7227.768832,0.0,6849.298432,6444.4416,s,1,11.1887177734375,11.1887177734375,0.0,11.1887177734375,11.1887177734375,11.1887177734375,11.1887177734375,[11.1887177734375],,kWh,0.00012569621067492942,1.385766822740644e-05,4.088281048403308e-05,0.00018043668938636895,,MB,2799.86176,7559.118848,0.0,7151.28832,6822.664192,s,10,4.020862365722656,0.4020862365722656,0.003901422977759983,0.4032820281982422,0.4047774627685547,0.40488236846923825,0.40496629302978515,"[0.39109088134765624, 0.4041576232910156, 0.4045347595214844, 0.40344390869140623, 0.404754150390625, 0.40246356201171873, 0.40498727416992186, 0.4005614929199219, 0.40312014770507815, 0.4017485656738281]",tokens/s,636.679340686634,kWh,1.1629275077404182e-05,1.2824702195391937e-06,7.747196368695065e-06,2.065894166563844e-05,tokens/kWh,12391728.68307185,MB,2799.86176,7561.216,0.0,7153.385472,6822.666752,s,10,26.55316528320312,2.655316528320312,0.022885641151044064,2.650044189453125,2.68346796875,2.6880259765625,2.6916723828125,"[2.6769189453125, 2.654231201171875, 2.692583984375, 2.672745361328125, 2.631368896484375, 2.645857177734375, 2.629407470703125, 2.627463623046875, 2.640133544921875, 2.682455078125]",tokens/s,23.725984954363334,kWh,7.841556979218403e-05,8.649395373571439e-06,5.191951803130231e-05,0.00013898448319705776,tokens/kWh,453288.01137229166,,s,630,26.55058037567139,0.042143778374081564,0.0007510202816334307,0.04209764862060547,0.042799008941650396,0.04308290672302246,0.04451943481445315,"[0.043050369262695315, 0.042723968505859376, 0.04230339050292969, 0.042414176940917966, 0.043726081848144534, 0.04287907028198242, 0.042371681213378906, 0.042048927307128905, 0.042234046936035156, 0.04298908615112305, 0.04257068634033203, 0.04478297424316406, 0.04221948623657226, 0.04212393569946289, 0.04221116638183594, 0.042196640014648436, 0.04222224044799805, 0.04278403091430664, 0.04237097549438477, 0.042602977752685546, 0.04230297470092773, 0.04273017501831055, 0.04251798248291016, 0.042531360626220704, 0.042122337341308595, 0.04204022216796875, 0.04191436767578125, 0.042202911376953124, 0.04231516647338867, 0.04235756683349609, 0.042188800811767575, 0.04215555191040039, 0.042215648651123046, 0.04220691299438477, 0.042168033599853515, 0.042410144805908205, 0.04224892807006836, 0.04223344039916992, 0.04226271820068359, 0.042250431060791016, 0.04224844741821289, 0.04248899078369141, 0.04231388854980469, 0.04243289566040039, 0.04228921508789062, 0.042982654571533205, 0.04274975967407227, 0.04255020904541015, 0.04237334442138672, 0.04236265563964844, 0.04246028900146484, 0.04233919906616211, 0.04254540634155273, 0.04230857467651367, 0.04268435287475586, 0.04262588882446289, 0.0425615348815918, 0.04299139022827148, 0.042422367095947267, 0.04227699279785156, 0.04309401702880859, 0.04243046569824219, 0.04290707015991211, 0.043122432708740235, 0.04241129684448242, 0.0425618896484375, 0.04256422424316406, 0.04281935882568359, 0.04258793640136719, 0.04271558380126953, 0.04284128189086914, 0.04260265731811524, 0.04257244873046875, 0.042379264831542966, 0.04228300857543945, 0.042031105041503904, 0.04188150405883789, 0.04207974243164062, 0.04235939025878906, 0.042176513671875, 0.042008575439453126, 0.04190617752075195, 0.04215155029296875, 0.0420665283203125, 0.04206979370117187, 0.04171712112426758, 0.04163238525390625, 0.04157030487060547, 0.04158464050292969, 0.04142899322509765, 0.04173366546630859, 0.04183087921142578, 0.0420843505859375, 0.04209267044067383, 
0.04279897689819336, 0.04198323059082031, 0.04194153594970703, 0.04198329544067383, 0.041847713470458986, 0.04185878372192383, 0.0421255989074707, 0.04196761703491211, 0.042017951965332034, 0.041939807891845704, 0.04199532699584961, 0.041931137084960934, 0.042439231872558596, 0.04205567932128906, 0.04214492797851563, 0.04246409606933594, 0.0419488639831543, 0.042539329528808595, 0.042090496063232424, 0.04209183883666992, 0.04198060989379883, 0.04171305465698242, 0.042199649810791016, 0.042076160430908206, 0.042016864776611325, 0.04171084976196289, 0.04182819366455078, 0.04194319915771484, 0.04198467254638672, 0.042016769409179686, 0.04209616088867187, 0.04237673568725586, 0.043112449645996094, 0.04229097747802734, 0.042266849517822266, 0.04307558441162109, 0.04255744171142578, 0.04215193557739258, 0.04266995239257813, 0.04195500946044922, 0.042191295623779296, 0.04217657470703125, 0.04233824157714844, 0.04226233673095703, 0.0424071044921875, 0.04232908630371094, 0.04239680099487304, 0.042457984924316405, 0.042534912109375, 0.042688190460205076, 0.043727169036865236, 0.042883071899414066, 0.04316515350341797, 0.04265407943725586, 0.04280335998535156, 0.04683366394042969, 0.04271820831298828, 0.04327731323242188, 0.04293759918212891, 0.04272800064086914, 0.04279929733276367, 0.04272556686401367, 0.04281734466552734, 0.04264934539794922, 0.04271539306640625, 0.04273356628417969, 0.04729967880249023, 0.04230646514892578, 0.04247539138793945, 0.04347711944580078, 0.04261273574829102, 0.04236697769165039, 0.042323455810546876, 0.04280166244506836, 0.042396705627441404, 0.04268569564819336, 0.04256739044189453, 0.04236675262451172, 0.04247328186035156, 0.042234272003173826, 0.042272735595703125, 0.042342430114746095, 0.04218991851806641, 0.042171104431152344, 0.042248382568359374, 0.043146656036376956, 0.04248636627197266, 0.042452991485595705, 0.04235468673706055, 0.042555328369140624, 0.042743873596191403, 0.04249599838256836, 0.04244275283813476, 0.04252671813964844, 0.04344627380371094, 0.04331520080566406, 0.04274319839477539, 0.04243251037597656, 0.042345054626464845, 0.04277248001098633, 0.04229321670532227, 0.04238700866699219, 0.04232758331298828, 0.04252377700805664, 0.04239545440673828, 0.042446849822998046, 0.042323680877685545, 0.04352643203735351, 0.04284415817260742, 0.04274892807006836, 0.0422691535949707, 0.042570270538330075, 0.0426618881225586, 0.04286054229736328, 0.042395038604736326, 0.04224470520019531, 0.04248166275024414, 0.0430458869934082, 0.04288963317871094, 0.04252531051635742, 0.042584030151367185, 0.04259430313110352, 0.04363673782348633, 0.042551296234130856, 0.04229232025146484, 0.042142623901367186, 0.042255775451660156, 0.04233071899414063, 0.042167713165283206, 0.0423983039855957, 0.04231577682495117, 0.04255923080444336, 0.04224230575561523, 0.042426368713378904, 0.04289945602416992, 0.042590206146240234, 0.04287510299682617, 0.04308889770507812, 0.04238415908813477, 0.04267766571044922, 0.04229180908203125, 0.04269055938720703, 0.04225772857666016, 0.04254175949096679, 0.042409984588623044, 0.04238729476928711, 0.04258627319335938, 0.04204553604125977, 0.041775009155273435, 0.0416313591003418, 0.04174256134033203, 0.041781406402587894, 0.041734142303466795, 0.041836544036865236, 0.04170867156982422, 0.04156502532958985, 0.04154985427856445, 0.04154982376098633, 0.04301548767089844, 0.04262368011474609, 0.042694496154785155, 0.042135711669921874, 0.042176513671875, 0.042164222717285156, 0.04244198226928711, 0.04195817565917969, 0.04172796630859375, 
0.041543678283691404, 0.04157574462890625, 0.041379745483398435, 0.04132479858398438, 0.04139471817016602, 0.041940990447998046, 0.04202489471435547, 0.04152873611450195, 0.042224288940429684, 0.04172185516357422, 0.04159801483154297, 0.041406944274902345, 0.04151350402832031, 0.04144918441772461, 0.04143894577026367, 0.04147577667236328, 0.041364288330078124, 0.04150614547729492, 0.04143894577026367, 0.04143404769897461, 0.04128768157958984, 0.041420799255371094, 0.041457279205322266, 0.041255294799804686, 0.04150579071044922, 0.04203766250610352, 0.04175455856323242, 0.04232233428955078, 0.0415830078125, 0.042880767822265624, 0.041592479705810544, 0.04178579330444336, 0.04185075378417969, 0.04178137588500976, 0.04175807952880859, 0.041553825378417966, 0.041799617767333985, 0.04163836669921875, 0.04159718322753906, 0.04169276809692383, 0.04152099227905273, 0.042199710845947265, 0.04173206329345703, 0.04155136108398438, 0.04149654388427734, 0.04235318374633789, 0.04148601531982422, 0.0413570556640625, 0.04136783981323242, 0.041614719390869144, 0.041599903106689456, 0.041766368865966796, 0.04191267013549805, 0.042381057739257814, 0.04323331069946289, 0.04211507034301758, 0.04473187255859375, 0.04230319976806641, 0.04167353439331055, 0.04179148864746094, 0.04196556854248047, 0.04187136077880859, 0.042141792297363284, 0.042209121704101564, 0.042469375610351565, 0.04298144149780273, 0.042426368713378904, 0.04247283172607422, 0.04238809585571289, 0.04270284652709961, 0.042602497100830077, 0.04253081512451172, 0.04262070465087891, 0.042135326385498044, 0.042272289276123046, 0.042019519805908206, 0.04195673751831055, 0.04172867202758789, 0.04188179016113281, 0.04255097579956055, 0.04166892623901367, 0.04165222549438476, 0.042495391845703126, 0.04205424118041992, 0.04177305603027344, 0.04156415939331055, 0.04155324935913086, 0.0418939208984375, 0.041517696380615234, 0.041315391540527345, 0.04148857498168945, 0.04197030258178711, 0.04140390396118164, 0.041644161224365234, 0.041810176849365235, 0.041982433319091794, 0.0451929931640625, 0.04190185546875, 0.0417624626159668, 0.04150944137573242, 0.041498207092285154, 0.04141507339477539, 0.04143718338012695, 0.041506431579589845, 0.041320926666259764, 0.041235488891601564, 0.04125350570678711, 0.04121392059326172, 0.043397407531738284, 0.041603073120117184, 0.04145356750488281, 0.04136934280395508, 0.041266910552978514, 0.04120835113525391, 0.04130944061279297, 0.041593601226806644, 0.04162518310546875, 0.043474720001220706, 0.042169567108154296, 0.04163631820678711, 0.04159337615966797, 0.041350177764892575, 0.04148083114624023, 0.04142489624023438, 0.041363807678222654, 0.04153753662109375, 0.043300254821777344, 0.04158723068237305, 0.041689151763916014, 0.04187136077880859, 0.041844608306884766, 0.04157392120361328, 0.04181667327880859, 0.041529342651367186, 0.041578495025634765, 0.04151705551147461, 0.04137955093383789, 0.04145148849487305, 0.041877120971679685, 0.041554622650146485, 0.04151910400390625, 0.041532672882080075, 0.04144355010986328, 0.041202014923095706, 0.041445022583007814, 0.0413721923828125, 0.041332736968994144, 0.04141449737548828, 0.04128924942016601, 0.04290828704833984, 0.04304921722412109, 0.041860992431640626, 0.04144329452514649, 0.04167782211303711, 0.04138713455200195, 0.0413917121887207, 0.041256736755371094, 0.041412479400634764, 0.041476638793945315, 0.04153139114379883, 0.043845630645751955, 0.04167084884643555, 0.04147897720336914, 0.04163273620605469, 0.04160428619384766, 0.04209561538696289, 0.04138275146484375, 
0.04224012756347656, 0.04136025619506836, 0.0413306884765625, 0.04162736129760742, 0.04194079971313477, 0.041578975677490235, 0.04140630340576172, 0.04205923080444336, 0.041492191314697266, 0.041439361572265625, 0.04139299011230469, 0.043151168823242186, 0.04190399932861328, 0.04269571304321289, 0.04172079849243164, 0.041441280364990236, 0.04161280059814453, 0.04169779205322265, 0.04142489624023438, 0.04139436721801758, 0.04137100982666016, 0.04162534332275391, 0.04133763122558594, 0.04151395034790039, 0.04177606582641601, 0.04171980667114258, 0.04170137786865234, 0.04142489624023438, 0.0414343376159668, 0.041517345428466794, 0.04150505447387695, 0.04149174499511719, 0.041393089294433597, 0.041406688690185545, 0.04138166427612305, 0.04145529556274414, 0.04275436782836914, 0.04150406265258789, 0.04141030502319336, 0.04116144180297852, 0.04178761672973633, 0.04143513488769531, 0.041267200469970705, 0.04135273742675781, 0.04127324676513672, 0.04118710327148437, 0.04124716949462891, 0.041252670288085935, 0.04137337493896484, 0.041220638275146486, 0.041243038177490234, 0.04116265487670898, 0.04134912109375, 0.04136140823364258, 0.041383167266845704, 0.041341022491455076, 0.04177945709228516, 0.04209913635253906, 0.042399105072021485, 0.042089054107666016, 0.042247840881347656, 0.042213119506835935, 0.04214435195922851, 0.041944225311279296, 0.04212966537475586, 0.042162784576416014, 0.04201267242431641, 0.041987998962402344, 0.04198102569580078, 0.04316057586669922, 0.04195270538330078, 0.041963680267333985, 0.04219865417480469, 0.04232886505126953, 0.04232191848754883, 0.041973758697509765, 0.04295068740844726, 0.041785953521728515, 0.04151897430419922, 0.04136153411865234, 0.04135116958618164, 0.04126924896240235, 0.04138598251342773, 0.041348991394042967, 0.04158272171020508, 0.04145971298217774, 0.041809921264648435, 0.04164966583251953, 0.04158310317993164, 0.04150886535644531, 0.041793537139892575, 0.04179574584960938, 0.04170940780639648, 0.04191641616821289, 0.04182204818725586, 0.04187561416625977, 0.04194713592529297, 0.041569599151611326, 0.04247372817993164, 0.04175302505493164, 0.04176105499267578, 0.041620193481445314, 0.0416286735534668, 0.04220848083496094, 0.042175521850585936, 0.042108097076416016, 0.042175041198730466, 0.04249599838256836, 0.04202060699462891, 0.0424901123046875, 0.042512577056884764, 0.042028255462646484, 0.04193679809570312, 0.04195603179931641, 0.04208025741577148, 0.042063648223876954, 0.041883777618408204, 0.04206396865844726, 0.04194246292114258, 0.04239023971557617, 0.04175881576538086, 0.041809566497802736, 0.04310230255126953, 0.04223590469360351, 0.04257177734375, 0.04195942306518555, 0.04192371368408203, 0.04188051223754883, 0.04179142379760742, 0.04176428985595703, 0.04182278442382813, 0.042210399627685545, 0.041821086883544925, 0.04180377578735352, 0.041867263793945314, 0.04162355041503906, 0.04171494293212891, 0.04165219116210937, 0.04181628799438476, 0.042844352722167967, 0.04187590408325195, 0.04186928176879883, 0.042772510528564456, 0.04235449600219727, 0.04203683090209961, 0.04184124755859375, 0.042022911071777344, 0.041992416381835936, 0.04208351898193359, 0.0420533447265625, 0.04233919906616211, 0.04211312103271484, 0.04234640121459961, 0.042450111389160154, 0.042805248260498044, 0.042377662658691403, 0.042434944152832034, 0.042485759735107424, 0.042358016967773436, 0.04210492706298828, 0.04296540832519531, 0.04227827072143555, 0.04251091384887695, 0.04224646377563476, 0.042145790100097655, 0.042063072204589845, 0.041952030181884765, 
0.04200243377685547, 0.04235059356689453, 0.0461578254699707, 0.05191017532348633, 0.04245078277587891, 0.04257036972045898, 0.0424136962890625, 0.04256192016601563, 0.04248950576782227, 0.04231439971923828, 0.04212047958374023, 0.04206131362915039, 0.04213238525390625, 0.04233420944213867, 0.0422542724609375, 0.042117183685302734, 0.04207001495361328, 0.04223696136474609, 0.04243555068969727, 0.042176097869873044, 0.04202950286865234, 0.04280112075805664, 0.04214076614379883, 0.042224544525146485, 0.04330495834350586, 0.04399932861328125, 0.0425634880065918, 0.042362846374511716, 0.042598430633544924, 0.04242544174194336, 0.04326902389526367, 0.04240153503417969, 0.04241843032836914, 0.042504417419433595, 0.04223680114746094]",tokens/s,23.728294865346015,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,11125.399552,12225.282048,0.0,11846.811648,11814.785024,s,1,14.380548828125,14.380548828125,0.0,14.380548828125,14.380548828125,14.380548828125,14.380548828125,[14.380548828125],,kWh,0.0002051947944083281,2.2627169896284204e-05,6.826366572199039e-05,0.0002960856300266027,,MB,2025.725952,13158.514688,0.0,12750.68416,12632.68864,s,10,13.033351928710939,1.303335192871094,0.0030176019811829413,1.30350146484375,1.3065957397460939,1.307156756591797,1.3076055700683595,"[1.2995831298828124, 1.2985087890625, 
1.3007293701171876, 1.302571044921875, 1.301576904296875, 1.304431884765625, 1.3059696044921876, 1.3077177734375, 1.3057923583984374, 1.3064710693359376]",tokens/s,196.4191570980771,kWh,3.8096743162915723e-05,4.20160009565377e-06,2.54181314456009e-05,6.77164747041704e-05,tokens/kWh,3780468.506642948,MB,2029.989888,13431.144448,0.0,13023.31392,12936.694272,s,10,48.8005390625,4.88005390625,0.005832230603029691,4.881221435546875,4.8853884765625,4.8875189453125,4.8892233203125,"[4.876849609375, 4.868734375, 4.87260205078125, 4.8784443359375, 4.88436376953125, 4.8896494140625, 4.8816982421875, 4.88074462890625, 4.88253759765625, 4.8849150390625]",tokens/s,12.909693460417397,kWh,0.00014267798116250106,1.5738480656990593e-05,9.457529788239882e-05,0.0002529917597018905,tokens/kWh,249019.96837460322,,s,630,48.79621814727783,0.07745431451948863,0.000700290987406876,0.07746585845947265,0.07827880630493164,0.07849911270141602,0.079734945602417,"[0.07999199676513671, 0.07639737701416016, 0.0753826904296875, 0.07479312133789062, 0.07575341033935547, 0.0764651870727539, 0.07665968322753906, 0.0768532485961914, 0.07630847930908204, 0.07698841857910156, 0.07695516967773437, 0.07716275024414063, 0.07774384307861328, 0.07737599945068359, 0.07691878509521484, 0.07713520050048828, 0.07645046234130859, 0.07700032043457031, 0.07659142303466797, 0.07694099426269531, 0.07653619384765625, 0.0762734375, 0.07755375671386719, 0.076770751953125, 0.07753183746337891, 0.07749779510498046, 0.07766278076171874, 0.07761100769042968, 0.07717241668701172, 0.07700220489501954, 0.07674348449707032, 0.07761087799072265, 0.07719747161865234, 0.07699481964111328, 0.0770184326171875, 0.07723455810546875, 0.07734054565429688, 0.07797164916992187, 0.0777541732788086, 0.07811862182617188, 0.07836924743652343, 0.07762032318115235, 0.07716547393798828, 0.07764582061767578, 0.07754752349853515, 0.07748198699951171, 0.0777359390258789, 0.07761920166015625, 0.07794656372070312, 0.07780178833007813, 0.07827865600585937, 0.07825408172607422, 0.07833920288085937, 0.07853555297851562, 0.07837286376953125, 0.07831324768066407, 0.07835465240478516, 0.0784336929321289, 0.07835298919677734, 0.07825408172607422, 0.07778902435302734, 0.07820899200439453, 0.0784975357055664, 0.07987193298339844, 0.07667190551757813, 0.07625631713867187, 0.07632972717285157, 0.0766343002319336, 0.07690774536132812, 0.07665510559082031, 0.07655043029785157, 0.07668736267089844, 0.07524352264404296, 0.07507465362548828, 0.07630726623535156, 0.07828070068359375, 0.07675299072265625, 0.07656175994873046, 0.07668745422363281, 0.07698694610595704, 0.07668313598632813, 0.07702950286865234, 0.07690444946289063, 0.07701280212402344, 0.07711353302001953, 0.07697612762451173, 0.07727308654785156, 0.07753727722167969, 0.07758233642578125, 0.07675494384765626, 0.07746150207519531, 0.07737526702880859, 0.077283203125, 0.0769682846069336, 0.07701248168945313, 0.07717938995361329, 0.07757004547119141, 0.07732838439941406, 0.07774329376220702, 0.07753401947021485, 0.07750617980957031, 0.07739568328857421, 0.07751337432861329, 0.07776025390625, 0.077275390625, 0.07776268768310547, 0.07724227142333985, 0.07694902038574218, 0.0773155517578125, 0.0774432601928711, 0.07782275390625, 0.07796694183349609, 0.07782841491699219, 0.07773776245117188, 0.07749049377441407, 0.07822745513916016, 0.07783958435058594, 0.07782479858398438, 0.07766630554199219, 0.07747980499267579, 0.07783414459228516, 0.07776483154296875, 0.0777534408569336, 0.07757059478759766, 0.07849132537841796, 0.078110595703125, 
0.07980032348632812, 0.07654768371582031, 0.07603446197509765, 0.07628185272216798, 0.07658003234863281, 0.07645081329345703, 0.07648976135253906, 0.0760607681274414, 0.07629593658447266, 0.07701805114746094, 0.07666687774658203, 0.07742259216308593, 0.0774052505493164, 0.07685584259033203, 0.07745782470703125, 0.07658905792236329, 0.07712735748291015, 0.07725001525878906, 0.0773139877319336, 0.07686441802978515, 0.07624076843261719, 0.07762528228759766, 0.0772589111328125, 0.07702326202392579, 0.07736729431152344, 0.07758016204833984, 0.07744278717041016, 0.07758854675292969, 0.07741871643066406, 0.07684722900390625, 0.0766541748046875, 0.07709458923339843, 0.07702191925048828, 0.07731199645996094, 0.07689353942871094, 0.07730838775634766, 0.07757008361816406, 0.07766851043701171, 0.0775167999267578, 0.07765113830566406, 0.07742047882080078, 0.07821340942382812, 0.07727513885498047, 0.07757839965820312, 0.07765776062011719, 0.0775145263671875, 0.07760908508300782, 0.07752921295166015, 0.07760288238525391, 0.07750646209716797, 0.0776971206665039, 0.07751136016845703, 0.07765606689453125, 0.07830937957763671, 0.07824384307861328, 0.07803903961181641, 0.07749836730957031, 0.07750857543945312, 0.07770278167724609, 0.07808975982666015, 0.07785513305664063, 0.07825456237792969, 0.07838105773925781, 0.08045798492431641, 0.07623375701904297, 0.07686857604980468, 0.07663744354248046, 0.07620275115966797, 0.07719843292236328, 0.07636243438720704, 0.07667107391357422, 0.07659942626953126, 0.07678912353515625, 0.077056640625, 0.07693708801269532, 0.07681433868408204, 0.07693507385253906, 0.07678793334960937, 0.07703961944580077, 0.07701612854003906, 0.07697299194335938, 0.07710304260253906, 0.07665251159667968, 0.076912353515625, 0.07689810943603516, 0.07646265411376953, 0.07650220489501953, 0.07807469177246094, 0.07724851226806641, 0.07734067535400391, 0.07729151916503907, 0.07719926452636719, 0.07692502593994141, 0.07712694549560548, 0.07704214477539062, 0.07756006622314453, 0.07746489715576171, 0.07730451202392578, 0.07748812866210937, 0.07787725067138672, 0.07751475524902343, 0.07837411499023438, 0.07815452575683594, 0.0776253433227539, 0.07746681976318359, 0.07766838073730468, 0.07727372741699219, 0.0777811508178711, 0.07739507293701171, 0.0775582733154297, 0.07752880096435547, 0.07803766632080078, 0.07813484954833984, 0.07805734252929687, 0.07792406463623047, 0.0781566390991211, 0.07818150329589844, 0.0773333740234375, 0.07787462615966798, 0.07836492919921875, 0.07816019439697265, 0.07784857940673828, 0.07799967956542969, 0.07864979553222656, 0.0785264663696289, 0.07839129638671875, 0.07941907501220703, 0.07615885162353515, 0.07597049713134765, 0.076746337890625, 0.0766207046508789, 0.07650828552246093, 0.07702153778076172, 0.07669200134277344, 0.07639449310302734, 0.07783360290527344, 0.07718876647949219, 0.07700959777832031, 0.07788572692871094, 0.07724441528320312, 0.07688745880126953, 0.07706463623046875, 0.07700415802001953, 0.076847900390625, 0.07689215850830078, 0.07704985809326172, 0.07701708984375, 0.07694131469726563, 0.0773199691772461, 0.077070556640625, 0.07760662078857422, 0.07824559783935547, 0.07758086395263672, 0.07715824127197266, 0.07735052490234375, 0.07673420715332031, 0.07698307037353516, 0.07718860626220703, 0.07749427032470703, 0.07783475494384766, 0.07745327758789063, 0.0772874526977539, 0.07737958526611328, 0.07795027160644531, 0.07798985290527344, 0.07757091522216797, 0.07777471923828125, 0.0775167999267578, 0.07736115264892578, 0.07806928253173828, 
0.07760867309570313, 0.07763785552978515, 0.07787904357910157, 0.07806031799316407, 0.07811446380615235, 0.07806601715087891, 0.07823715209960938, 0.07812969970703125, 0.07876812744140625, 0.0780206069946289, 0.07763148498535156, 0.07852236938476563, 0.0781979217529297, 0.07832457733154297, 0.07829708862304688, 0.07835852813720703, 0.07830290985107421, 0.07818067169189453, 0.07819798278808594, 0.07949008178710938, 0.07630947113037109, 0.07654399871826172, 0.0767279052734375, 0.07679571533203125, 0.07683452606201172, 0.07671817779541015, 0.07648336029052734, 0.077061279296875, 0.07672918701171876, 0.07650646209716797, 0.07709343719482421, 0.07784397125244141, 0.07778755187988282, 0.07696380615234374, 0.07658518218994141, 0.07724646759033203, 0.07698767852783203, 0.07731196594238281, 0.07696665954589844, 0.07759257507324219, 0.07744921875, 0.07677267456054687, 0.07754201507568359, 0.07744723510742188, 0.07741379547119141, 0.07773040008544922, 0.07749427032470703, 0.0771904296875, 0.07752777862548828, 0.07761212921142578, 0.07724739074707031, 0.07736524963378906, 0.07733964538574219, 0.07755852508544922, 0.07733478546142578, 0.07791206359863281, 0.07779235076904296, 0.07842243194580079, 0.07779993438720703, 0.07784595489501953, 0.07818070220947265, 0.07769865417480469, 0.07761984252929688, 0.0779447021484375, 0.07817842864990235, 0.07819058990478515, 0.0782171859741211, 0.07852655792236328, 0.07856278228759765, 0.07823407745361328, 0.07835823822021484, 0.0785223388671875, 0.07849353790283203, 0.07790435028076172, 0.07783968353271484, 0.0778779525756836, 0.07808159637451172, 0.07837535858154297, 0.07807708740234375, 0.0783532485961914, 0.07782505798339844, 0.07865647888183594, 0.07957488250732422, 0.07621440124511719, 0.07670076751708985, 0.07663410949707031, 0.07657545471191406, 0.07680223846435547, 0.07727922821044922, 0.07618150329589844, 0.07667021179199218, 0.07735491180419922, 0.07680857849121094, 0.07676509094238282, 0.07759315490722657, 0.07722975921630859, 0.07697644805908203, 0.07703961944580077, 0.07710924530029296, 0.07649638366699218, 0.0771690902709961, 0.07691004943847657, 0.07650128173828125, 0.07735337829589843, 0.07755152130126954, 0.07696534729003907, 0.07768256378173828, 0.07798441314697266, 0.07759574127197266, 0.0774395523071289, 0.07758182525634766, 0.0771654052734375, 0.07714406585693359, 0.077501953125, 0.07731158447265625, 0.0775052490234375, 0.0774487075805664, 0.0777808609008789, 0.07801439666748047, 0.07813555145263672, 0.07817833709716797, 0.07813308715820312, 0.07761564636230468, 0.07741667175292968, 0.07760691070556641, 0.07781692504882813, 0.07778934478759765, 0.07775465393066407, 0.0776475830078125, 0.07755648040771485, 0.07742864227294922, 0.07769430541992188, 0.07852256011962891, 0.07822188568115235, 0.07859110260009766, 0.0778944320678711, 0.07767782592773438, 0.07771631622314454, 0.07738572692871094, 0.07762739562988281, 0.07833372497558594, 0.07864956665039062, 0.0778156509399414, 0.07772994995117187, 0.07781075286865234, 0.08009136199951172, 0.07658531188964844, 0.07652806091308594, 0.07612006378173829, 0.07688972473144531, 0.07701129913330078, 0.07674781036376953, 0.07650816345214843, 0.07465984344482422, 0.07679587554931641, 0.07729328155517579, 0.07730207824707032, 0.0775393295288086, 0.07709852600097657, 0.07691107177734376, 0.07715020751953125, 0.0775816650390625, 0.07750624084472656, 0.07719216156005859, 0.07756739044189453, 0.0770456314086914, 0.0769625244140625, 0.07702877044677735, 0.077357666015625, 0.07743385314941406, 0.077591552734375, 
0.07781715393066406, 0.07729427337646484, 0.07706009674072266, 0.07716659545898437, 0.07661347198486328, 0.0773625259399414, 0.0777347183227539, 0.07736844635009765, 0.07698883056640625, 0.07718147277832031, 0.07757817840576171, 0.07745331573486328, 0.07797350311279297, 0.07805084991455079, 0.07749430084228516, 0.07738582611083984, 0.07759702301025391, 0.0771618881225586, 0.07743472290039062, 0.07792511749267578, 0.07750569915771484, 0.07709782409667969, 0.07816397094726563, 0.07770870208740234, 0.07772220611572266, 0.07810368347167969, 0.0782610855102539, 0.07814726257324218, 0.07851404571533203, 0.07828015899658203, 0.07789055633544922, 0.07801446533203125, 0.07845273590087891, 0.0780738525390625, 0.07861622619628907, 0.07850019073486328, 0.07801036834716797, 0.08047411346435547, 0.07664435577392578, 0.07627571105957032, 0.0767070083618164, 0.07681497955322265, 0.07661353302001953, 0.07698255920410156, 0.076943359375, 0.07654605102539062, 0.07659878540039063, 0.07819725036621093, 0.07687570953369141, 0.07705811309814453, 0.07754259490966797, 0.07717356872558594, 0.07697612762451173, 0.07704576110839843, 0.07674620819091797, 0.07683945465087891, 0.07684710693359376, 0.07660323333740235, 0.07662403106689453, 0.07783382415771484, 0.07744713592529297, 0.07716643524169922, 0.07719967651367188, 0.07778870391845703, 0.07753139495849609, 0.07767501068115235, 0.07744921875, 0.07753727722167969, 0.0773855972290039, 0.07753536224365234, 0.07713942718505859, 0.07718351745605469, 0.0776495361328125, 0.07766668701171875, 0.0770169906616211, 0.07768271636962891, 0.07738492584228515, 0.07785763549804688, 0.07750656127929688, 0.07743852996826171, 0.07776227569580078, 0.07817084503173828, 0.0773570556640625, 0.07717887878417969, 0.07799993896484375, 0.07849116516113282, 0.07814995574951172, 0.07773423767089843, 0.07813734436035157, 0.0781740493774414, 0.07786454772949218, 0.07808016204833984, 0.07775379180908203, 0.07755260467529297, 0.07823072052001953, 0.07785104370117188, 0.07732265472412109, 0.07851213073730469, 0.07898316955566406, 0.07861820983886719, 0.08027935791015625, 0.07674899291992188, 0.07671097564697266, 0.0763544921875, 0.07640838623046875, 0.07664070129394532, 0.0769269790649414, 0.07666204833984375, 0.07673474884033203, 0.07739167785644531, 0.07709513854980468, 0.07702365112304688, 0.07747328186035156, 0.07767005157470704, 0.07717501068115235, 0.07696835327148438, 0.0770390396118164, 0.07671459197998047, 0.07682067108154297, 0.07663177490234375, 0.07666000366210937, 0.07721453094482422, 0.0771725082397461, 0.07710147094726562, 0.07824111938476562, 0.07744989013671875, 0.07724031829833984, 0.07784770965576172, 0.07711782073974609, 0.07745763397216797, 0.0771361312866211, 0.07749836730957031, 0.07702063751220703, 0.0776115493774414, 0.07762710571289062, 0.07722217559814454, 0.07788748931884766, 0.07796940612792969, 0.07777030181884766, 0.0775269775390625, 0.07794739532470703, 0.0780389404296875, 0.07736329650878906, 0.07764166259765624, 0.07728125, 0.07769916534423828, 0.07785676574707032, 0.0781496353149414, 0.07755554962158204, 0.07767382049560546, 0.07878489685058594, 0.07850972747802734, 0.0783081283569336, 0.07806361389160156, 0.07807718658447266, 0.07850879669189453, 0.07802432250976563, 0.07747212982177734, 0.07762671661376953, 0.07868073272705078, 0.07849779510498046, 0.07806742095947265, 0.07846736145019531]",tokens/s,12.910836616446788,,, 
4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.747328,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,3549.26592,4490.985472,0.0,4112.515072,3975.832064,s,1,9.7483212890625,9.7483212890625,0.0,9.7483212890625,9.7483212890625,9.7483212890625,9.7483212890625,[9.7483212890625],,kWh,8.036910985833382e-05,8.857929561269235e-06,2.728752183000019e-05,0.00011651456124960323,,MB,1460.297728,4688.11776,0.0,4280.287232,4101.544448,s,10,3.156000061035156,0.31560000610351563,0.0014197231289403167,0.3160218505859375,0.31709080505371096,0.317126579284668,0.3171551986694336,"[0.3167412109375, 0.3138393859863281, 0.3138292846679688, 0.31702584838867187, 
0.31340982055664063, 0.3163779296875, 0.3148656005859375, 0.317162353515625, 0.3170828552246094, 0.315665771484375]",tokens/s,811.1533429946543,kWh,9.521249536693518e-06,1.0498812832248867e-06,6.354906517612905e-06,1.6926037337531308e-05,tokens/kWh,15124626.92211797,MB,1471.7952,4698.60352,0.0,4290.772992,4101.547008,s,10,23.867243652343753,2.3867243652343753,0.012184962469175376,2.3887918701171875,2.3986919921874996,2.401724658203125,2.404150791015625,"[2.367410888671875, 2.395916748046875, 2.40475732421875, 2.392780517578125, 2.396465576171875, 2.39801806640625, 2.38480322265625, 2.3697744140625, 2.381960693359375, 2.375356201171875]",tokens/s,26.396009911187807,kWh,6.981038930372296e-05,7.700269786290388e-06,4.4233967286787135e-05,0.0001217446263768005,tokens/kWh,517476.63839399815,,s,630,23.864826679229736,0.03788067726861863,0.0006657831223007035,0.037784208297729494,0.03847223854064941,0.03878150005340576,0.04094101547241213,"[0.038617599487304685, 0.03872358322143555, 0.037953056335449216, 0.037543998718261716, 0.037867935180664065, 0.03717907333374024, 0.037150432586669925, 0.037335296630859376, 0.03702819061279297, 0.03713616180419922, 0.0369870719909668, 0.03721971130371094, 0.0374477424621582, 0.03710940933227539, 0.03709014511108399, 0.03710985565185547, 0.03709952163696289, 0.03725423812866211, 0.03704892730712891, 0.03720428848266601, 0.037548030853271484, 0.04331907272338867, 0.03881929779052734, 0.03794800186157227, 0.037546142578125, 0.037746688842773435, 0.03780022430419922, 0.03706403350830078, 0.03736614227294922, 0.03753104019165039, 0.037730911254882815, 0.03717324829101563, 0.03718143844604492, 0.037617664337158206, 0.03740671920776367, 0.03711555099487305, 0.037111328125, 0.03732592010498047, 0.037770782470703125, 0.03729347229003906, 0.03736441421508789, 0.03736912155151367, 0.03717171096801758, 0.03729792022705078, 0.03718406295776367, 0.03712614440917969, 0.03739836883544922, 0.03773004913330078, 0.037638526916503906, 0.038455329895019534, 0.037384193420410154, 0.03752959823608398, 0.037615615844726565, 0.03749273681640625, 0.03767494583129883, 0.03751119995117187, 0.03742108917236328, 0.037959678649902344, 0.03755324935913086, 0.03740348815917969, 0.03733612823486328, 0.03756748962402344, 0.037478111267089845, 0.03877478408813476, 0.038752254486083985, 0.03825158309936524, 0.03790124893188476, 0.03872972869873047, 0.0377239990234375, 0.03765852737426758, 0.0377182731628418, 0.037520992279052735, 0.037626270294189454, 0.037565982818603516, 0.03745840072631836, 0.03787980651855469, 0.03753372955322266, 0.037676193237304687, 0.03769631958007812, 0.037738494873046875, 0.03762688064575195, 0.0381102066040039, 0.03781836700439453, 0.0382033920288086, 0.037920768737792966, 0.037814273834228515, 0.03786342239379883, 0.03765862274169922, 0.037577953338623044, 0.03753209686279297, 0.037644607543945316, 0.038102783203125, 0.03795142364501953, 0.03868636703491211, 0.0385338249206543, 0.03799193572998047, 0.03815030288696289, 0.03820899200439453, 0.03796057510375977, 0.038338558197021484, 0.038309856414794924, 0.03872108840942383, 0.03796543884277344, 0.037763679504394534, 0.03762128067016601, 0.0375437126159668, 0.0380568962097168, 0.03797401428222656, 0.03777536010742188, 0.03787776184082031, 0.037235904693603515, 0.03707900619506836, 0.03728265762329101, 0.0376258544921875, 0.03771187210083008, 0.03774816131591797, 0.04137631988525391, 0.04167679977416992, 0.03841843032836914, 0.03982089614868164, 0.03804367828369141, 0.037476318359375, 0.03778704071044922, 
0.0378798713684082, 0.03732575988769531, 0.037689342498779296, 0.03847212982177734, 0.03798425674438476, 0.03778559875488281, 0.03756582260131836, 0.037290622711181644, 0.03724924850463867, 0.037281566619873044, 0.03740671920776367, 0.03721609497070313, 0.037505184173583984, 0.038109184265136715, 0.037601280212402347, 0.039042560577392575, 0.037776161193847656, 0.03816624069213867, 0.03829555130004883, 0.037814273834228515, 0.03790233612060547, 0.0380489616394043, 0.0377943344116211, 0.038835582733154295, 0.03781929779052735, 0.038152191162109376, 0.03845452880859375, 0.0378858871459961, 0.037880638122558596, 0.03789004898071289, 0.038950782775878906, 0.038082687377929685, 0.03777705764770508, 0.037869918823242185, 0.038012928009033206, 0.03855142211914062, 0.03824617767333984, 0.03829699325561523, 0.038497119903564456, 0.0383499526977539, 0.03813475036621094, 0.03916185760498047, 0.03792812728881836, 0.03803398513793945, 0.03862268829345703, 0.03772848129272461, 0.03784352111816406, 0.03800243377685547, 0.038145759582519534, 0.03871120071411133, 0.038163070678710935, 0.03809075164794922, 0.0383559684753418, 0.038142654418945314, 0.03818726348876953, 0.03794563293457031, 0.03791030502319336, 0.038182910919189454, 0.04315955352783203, 0.03846553421020508, 0.03833446502685547, 0.03824844741821289, 0.03798339080810547, 0.03790934371948242, 0.038156192779541014, 0.03909231948852539, 0.039147872924804684, 0.038664127349853514, 0.03829971313476563, 0.03805952072143555, 0.03842697525024414, 0.03809296035766602, 0.038006591796875, 0.03784268951416016, 0.038163105010986326, 0.037844768524169924, 0.03772415924072266, 0.03810508728027344, 0.038084606170654296, 0.03816556930541992, 0.03803823852539062, 0.03806639862060547, 0.03829759979248047, 0.0382006721496582, 0.038531425476074216, 0.038784961700439456, 0.0377470703125, 0.03788508987426758, 0.038133983612060544, 0.037431934356689456, 0.03786735916137695, 0.037932350158691404, 0.03779875183105469, 0.038152065277099606, 0.03748806381225586, 0.03740947341918945, 0.03728326416015625, 0.03738886260986328, 0.041082878112792966, 0.03774044799804688, 0.03744992065429688, 0.03774044799804688, 0.037119998931884765, 0.037713920593261716, 0.03743648147583008, 0.03794387054443359, 0.037676929473876956, 0.037638656616210936, 0.03759308624267578, 0.03818096160888672, 0.03784489440917969, 0.03870876693725586, 0.037744670867919924, 0.03777171325683594, 0.037821857452392575, 0.03805654525756836, 0.03766886520385742, 0.037908672332763675, 0.037782817840576174, 0.03795817565917969, 0.03777536010742188, 0.037881118774414066, 0.03751510238647461, 0.03768511962890625, 0.03766284942626953, 0.03817356872558594, 0.0380252799987793, 0.0383361587524414, 0.037826847076416016, 0.041443134307861326, 0.03885075378417969, 0.03840409469604492, 0.03844694519042969, 0.0385431022644043, 0.03819356918334961, 0.03812351989746094, 0.03804774475097656, 0.038340606689453126, 0.03795337677001953, 0.038086273193359374, 0.03827561569213867, 0.038223167419433594, 0.03793171310424805, 0.03806412887573242, 0.03798204803466797, 0.03826704025268555, 0.0381030387878418, 0.03815407943725586, 0.03791622543334961, 0.03775859069824219, 0.037835742950439455, 0.03760879898071289, 0.037692062377929686, 0.03873283386230469, 0.03832880020141602, 0.04035395050048828, 0.03817654418945313, 0.0377534065246582, 0.03865740966796875, 0.03832076644897461, 0.037902015686035156, 0.03796118545532227, 0.038216545104980466, 0.03884051132202149, 0.037921600341796875, 0.037763679504394534, 0.03792323303222656, 
0.03783190536499023, 0.037643039703369144, 0.037549823760986326, 0.03732710266113281, 0.03789823913574219, 0.03803948974609375, 0.03767529678344726, 0.03773417663574219, 0.03813580703735352, 0.037889278411865235, 0.037743358612060546, 0.03740172958374023, 0.037454719543457034, 0.037591007232666014, 0.037291969299316406, 0.038241790771484374, 0.037403167724609374, 0.03739039993286133, 0.03775692749023438, 0.03721388626098633, 0.03747257614135742, 0.037443584442138675, 0.0375316162109375, 0.03803548812866211, 0.03744480133056641, 0.038703102111816406, 0.03810508728027344, 0.037483840942382815, 0.037300289154052736, 0.0371569938659668, 0.03723929595947266, 0.037803680419921874, 0.03711008071899414, 0.038775905609130856, 0.037886913299560544, 0.03817267227172851, 0.037919807434082034, 0.03752758407592773, 0.03761859130859375, 0.03767091369628906, 0.03775078582763672, 0.037946750640869144, 0.037646976470947266, 0.03753894424438477, 0.037775646209716796, 0.03806787109375, 0.03756060791015625, 0.03763267135620117, 0.03757670211791992, 0.03806208038330078, 0.037574657440185545, 0.0376360969543457, 0.0377077751159668, 0.03757231903076172, 0.037464351654052735, 0.03748044967651367, 0.04123817443847656, 0.03886656188964844, 0.0382696647644043, 0.04059369659423828, 0.03824099349975586, 0.038230785369873045, 0.03819692611694336, 0.03825078582763672, 0.03838390350341797, 0.038672351837158205, 0.038297630310058596, 0.03860684967041016, 0.03810844802856445, 0.03784076690673828, 0.03787235260009766, 0.03802096176147461, 0.03816016006469727, 0.038226238250732424, 0.03812473678588867, 0.0381921272277832, 0.03843686294555664, 0.03818700790405274, 0.03804361724853516, 0.03817475128173828, 0.03830579376220703, 0.037822463989257815, 0.03791782379150391, 0.03818175888061524, 0.03799766540527344, 0.03878390502929688, 0.038100990295410156, 0.037904384613037106, 0.03861811065673828, 0.03797670364379883, 0.03751766586303711, 0.03755011367797852, 0.03784294509887695, 0.0380428466796875, 0.03786771011352539, 0.037630207061767576, 0.03742345428466797, 0.037429054260253905, 0.03752569580078125, 0.03743539047241211, 0.03781340789794922, 0.0378639030456543, 0.037566848754882816, 0.037720062255859374, 0.03824639892578125, 0.037609088897705076, 0.03836467361450195, 0.03868137741088867, 0.03813167953491211, 0.03810275268554687, 0.0378737907409668, 0.03767728042602539, 0.03799660873413086, 0.03741689682006836, 0.03741856002807617, 0.037669376373291014, 0.03745177459716797, 0.03738800048828125, 0.037082977294921875, 0.037361793518066407, 0.037642017364501956, 0.03993449783325195, 0.03873382568359375, 0.037572608947753904, 0.037260478973388675, 0.03743827056884766, 0.03787324905395508, 0.0376569938659668, 0.037889057159423825, 0.03755510330200195, 0.03805356979370117, 0.03737004852294922, 0.03751955032348633, 0.037236160278320315, 0.037839424133300784, 0.039479297637939455, 0.039799041748046875, 0.038045440673828125, 0.038239551544189454, 0.03769606399536133, 0.03753792190551758, 0.03748659133911133, 0.03770163345336914, 0.03797401428222656, 0.038391807556152346, 0.03812870407104492, 0.038083518981933594, 0.037506977081298826, 0.03755401611328125, 0.03746432113647461, 0.037614784240722655, 0.03924870300292969, 0.03814723205566406, 0.038222625732421876, 0.03811334228515625, 0.037918846130371095, 0.03823756790161133, 0.03756304168701172, 0.03733196640014649, 0.037604190826416015, 0.03753334426879883, 0.03730803298950195, 0.037832672119140626, 0.03788671875, 0.037920768737792966, 0.0376297607421875, 0.03797011184692383, 
0.03770272064208984, 0.03800371170043945, 0.0376888313293457, 0.03760172653198242, 0.037492385864257814, 0.03768681716918945, 0.03734611129760742, 0.038117374420166016, 0.037599231719970705, 0.03743660736083984, 0.037603614807128906, 0.03790492630004883, 0.037222400665283206, 0.037493824005126956, 0.03749369430541992, 0.037302528381347656, 0.03739798355102539, 0.03750121688842773, 0.03728384017944336, 0.03743539047241211, 0.0373043212890625, 0.037326751708984376, 0.037342529296875, 0.03733174514770508, 0.03816243362426758, 0.03712598419189453, 0.03721846389770508, 0.037197662353515626, 0.03713248062133789, 0.03705238342285156, 0.03700924682617188, 0.03694607925415039, 0.03760332870483398, 0.03952844619750977, 0.038485790252685545, 0.037216190338134766, 0.03725446319580078, 0.03763504028320312, 0.038108959197998046, 0.03759535980224609, 0.0373043212890625, 0.037365665435791014, 0.037642433166503904, 0.037257118225097655, 0.03714451217651367, 0.03718560028076172, 0.037294078826904296, 0.03863663864135742, 0.037733280181884765, 0.03736598587036133, 0.03816147232055664, 0.037501152038574216, 0.03825305557250976, 0.03767055892944336, 0.037455806732177736, 0.03760547256469727, 0.03816851043701172, 0.037613121032714844, 0.03762252807617188, 0.037803359985351566, 0.03807100677490234, 0.03868374252319336, 0.03753871917724609, 0.037634048461914066, 0.037715713500976564, 0.03779759979248047, 0.03749327850341797, 0.03747020721435547, 0.03794659042358398, 0.03826182556152344, 0.03788684844970703, 0.037776222229003904, 0.03784089660644531, 0.03779286575317383, 0.0376135368347168, 0.03750105667114258, 0.038009662628173825, 0.038217727661132815, 0.037838848114013675, 0.03881369781494141, 0.03828940963745117, 0.038168575286865236, 0.03909542465209961, 0.03793913650512695, 0.03784783935546875, 0.037752639770507815, 0.037763423919677734, 0.037986305236816405, 0.037621440887451174, 0.037833023071289065, 0.03790796661376953, 0.0378064956665039, 0.037529502868652344, 0.03749292755126953, 0.037625598907470706, 0.037867774963378904, 0.03746390533447266, 0.03738159942626953, 0.03745248031616211, 0.03741247940063477, 0.03793139266967773, 0.037332672119140625, 0.03730422210693359, 0.03790233612060547, 0.037267871856689457, 0.03803750228881836, 0.037482494354248046, 0.03720191955566406, 0.03759718322753906, 0.03794739151000977, 0.038240734100341796, 0.03747856140136719, 0.037015552520751956, 0.037326847076416016, 0.03701964950561523, 0.037101566314697264, 0.03706633758544922, 0.036865825653076174, 0.03696902465820313, 0.037007423400878904, 0.03992153549194336, 0.03817279815673828, 0.03718739318847656, 0.039765918731689456, 0.03724921417236328, 0.037152862548828124, 0.037263233184814455, 0.037769153594970704, 0.03732704162597656, 0.037426624298095706, 0.03738886260986328, 0.03772012710571289, 0.03724889755249024, 0.03720198440551758, 0.037117023468017575, 0.03705654525756836, 0.03707571029663086, 0.03721539306640625, 0.03715580749511719, 0.03727360153198242, 0.03806604766845703, 0.03782463836669922, 0.037758945465087894, 0.038458881378173826, 0.03807696151733399, 0.0378768310546875, 0.03747318267822266, 0.03795558547973633, 0.03839385604858398, 0.038473217010498044, 0.03883817672729492, 0.037865184783935545, 0.038353790283203126, 0.03849385452270508, 0.03846793746948242, 0.03832012939453125, 0.03799859237670898, 0.03807436752319336, 0.03877856063842773, 0.03746416091918945, 0.03774998474121094, 0.037790271759033205, 0.03757817459106445, 0.037577728271484374, 0.03746326446533203, 0.037720577239990234, 
0.037671329498291016, 0.03754940795898438, 0.037439998626708985, 0.03759516906738281, 0.03735551834106445, 0.03742105484008789, 0.037384193420410154]",tokens/s,26.39868323654358,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,5168.439296,5444.07552,0.0,5058.330624,5057.441792,s,1,10.6743974609375,10.6743974609375,0.0,10.6743974609375,10.6743974609375,10.6743974609375,10.6743974609375,[10.6743974609375],,kWh,0.00010233887626665517,1.1280437470752896e-05,3.427419408599808e-05,0.00014789350782340614,,MB,1758.347264,5630.722048,0.0,5215.617024,5189.70368,s,10,4.949172912597656,0.4949172912597656,0.0016341862306250671,0.49452246093749996,0.49645894775390625,0.49748504333496096,0.4983059197998047,"[0.4985111389160156, 0.4929393005371094, 0.4936651611328125, 0.493388427734375, 0.49486505126953123, 0.49417987060546875, 0.49359725952148437, 0.4960290832519531, 0.4962309265136719, 0.49576669311523436]",tokens/s,517.2581449889859,kWh,1.5003764791459465e-05,1.6546517807521216e-06,9.964174637999701e-06,2.6622591210211284e-05,tokens/kWh,9615893.43346148,MB,1763.708928,5649.596416,0.0,5234.491392,5189.70624,s,10,44.32415234375001,4.432415234375001,0.028441163786745383,4.439959716796875,4.461984130859375,4.463679321289062,4.465035473632812,"[4.4045810546875, 4.3909091796875, 4.3846337890625, 4.4337421875, 4.4575263671875, 4.46537451171875, 4.461607421875, 4.44617724609375, 4.4546181640625, 4.424982421875]",tokens/s,14.21346978311327,kWh,0.00012996879559270532,1.4336088265125936e-05,7.39782536269995e-05,0.00021828313748483075,tokens/kWh,288615.9724746401,,s,630,44.31952363586423,0.07034845021565755,0.0008575088060482197,0.07029094696044921,0.07120974197387696,0.07179899291992188,0.07347708526611328,"[0.0712586898803711, 0.0696890869140625, 0.06930271911621094, 0.06936579132080078, 0.06947782135009765, 0.06959161376953125, 0.07036927795410156, 0.0717127685546875, 0.0700600357055664, 0.0699228515625, 0.06981350708007812, 0.07023478698730469, 0.06987548828125, 0.07077699279785156, 0.07194022369384766, 0.0700640640258789, 0.06960649871826172, 0.0699626235961914, 0.0759422378540039, 0.07054287719726562, 0.0704428482055664, 0.06980850982666016, 0.0700173110961914, 0.06996086120605469, 0.06974720001220704, 0.06972831726074219, 0.07003750610351563, 0.07023577880859375, 0.06939417266845703, 0.06980255889892578, 0.07000275421142578, 0.06991622161865234, 0.07019894409179687, 0.07032089233398438, 0.0710164794921875, 0.06987152099609376, 0.06982201385498046, 0.06899267578125, 0.06873603057861329, 0.0687573471069336, 0.0690483169555664, 0.06956441497802734, 0.06927680206298828, 0.0689775390625, 0.06885577392578125, 0.06883126068115235, 0.0691845474243164, 0.07079011535644532, 0.0696792984008789, 0.07047958374023437, 0.06941267395019532, 0.06966502380371094, 0.06923677062988282, 0.06927974700927735, 0.06952345275878906, 0.06951321411132813, 0.07006208038330078, 
0.0692674560546875, 0.06897254180908204, 0.06963200378417969, 0.0691457290649414, 0.06955088043212891, 0.06985942077636718, 0.07059171295166015, 0.0698388442993164, 0.06975363159179687, 0.06916505432128907, 0.06895820617675781, 0.06900224304199219, 0.06983161926269531, 0.0699637451171875, 0.06987785339355469, 0.07060467529296875, 0.0696332778930664, 0.0694444808959961, 0.06894355010986328, 0.06872710418701172, 0.06940812683105468, 0.06883152008056641, 0.0685223388671875, 0.06864076995849609, 0.06863686370849609, 0.0696398696899414, 0.0697688980102539, 0.0702099838256836, 0.07011961364746094, 0.06997177886962891, 0.07021907043457032, 0.07001904296875, 0.06994198608398437, 0.06946611022949219, 0.06906377410888671, 0.06995241546630859, 0.06949581146240234, 0.06903446197509766, 0.0688051528930664, 0.06871587371826172, 0.0688604507446289, 0.069451904296875, 0.06977494049072265, 0.06968537902832031, 0.07042486572265624, 0.06982994842529297, 0.06977606201171875, 0.06989933013916015, 0.06992377471923829, 0.07033650970458985, 0.06975433349609375, 0.06968803405761718, 0.06962566375732422, 0.06977126312255859, 0.06978582763671876, 0.069838623046875, 0.07030191802978515, 0.0697925796508789, 0.06982911682128906, 0.07009471893310547, 0.0700909423828125, 0.06978937530517579, 0.06990921783447265, 0.07126834869384766, 0.0707092514038086, 0.06991257476806641, 0.07010086059570313, 0.06975049591064453, 0.06972866821289063, 0.07054131317138672, 0.06986348724365235, 0.07014425659179688, 0.0698873291015625, 0.07276783752441406, 0.07008617401123046, 0.07035276794433594, 0.06983535766601562, 0.07020333099365235, 0.07064358520507813, 0.07244000244140625, 0.07053868865966798, 0.07027171325683594, 0.06967193603515626, 0.06943734741210937, 0.06973945617675781, 0.06971539306640626, 0.07142189025878906, 0.06953638458251953, 0.06891919708251953, 0.06889481353759766, 0.07093411254882813, 0.0700051498413086, 0.06965657806396484, 0.06917696380615235, 0.06975113677978516, 0.06899919891357421, 0.06936351776123047, 0.06879782104492188, 0.06895088195800782, 0.06982185363769532, 0.0690770263671875, 0.06849507141113281, 0.06869058990478516, 0.0690540771484375, 0.0696673583984375, 0.0692959976196289, 0.06908038330078126, 0.06983353424072265, 0.06968934631347656, 0.06914832305908203, 0.06859139251708984, 0.06932537841796875, 0.06873465728759766, 0.06871071624755859, 0.069032958984375, 0.06921868896484375, 0.06942374420166016, 0.06888857269287109, 0.06944563293457032, 0.06930131530761718, 0.06874345397949219, 0.06920028686523437, 0.06845670318603515, 0.0694662094116211, 0.06941478729248046, 0.06891295623779296, 0.06942118072509766, 0.07043635559082032, 0.06925167846679688, 0.06912000274658203, 0.06919884490966798, 0.06958787536621094, 0.07124662780761719, 0.07027526092529297, 0.07014739227294922, 0.06965299224853516, 0.06968486022949219, 0.06981417846679687, 0.06967533111572266, 0.06996963500976562, 0.06994502258300782, 0.07009970855712891, 0.07014911651611327, 0.07084134674072265, 0.07003337860107423, 0.0706879653930664, 0.07022879791259766, 0.0698753890991211, 0.07012793731689453, 0.07042806243896485, 0.07047606658935547, 0.06974256134033203, 0.0697050552368164, 0.06996018981933594, 0.0696099853515625, 0.06967638397216797, 0.07001292419433594, 0.07061901092529296, 0.07020771026611328, 0.06985369873046875, 0.06986962890625, 0.0698388442993164, 0.07012351989746093, 0.07062326049804687, 0.0716553955078125, 0.07097110748291016, 0.07042076873779297, 0.06988301086425781, 0.06992985534667968, 0.06984294128417969, 0.07073587036132813, 
0.0706723861694336, 0.07041843414306641, 0.071225341796875, 0.07019929504394531, 0.07015321350097656, 0.06992569732666015, 0.07053919982910156, 0.07104758453369141, 0.07045865631103515, 0.07022035217285157, 0.07025414276123047, 0.07018540954589844, 0.07032364654541015, 0.07239328002929687, 0.07092428588867188, 0.07134931182861329, 0.0706519012451172, 0.07032473754882812, 0.07077750396728516, 0.07062300872802735, 0.07068262481689454, 0.07234703826904297, 0.07052349090576172, 0.07040608215332031, 0.07132335662841797, 0.07048016357421875, 0.07028297424316406, 0.07028765106201172, 0.07078060913085937, 0.07054300689697265, 0.07010745239257812, 0.07016925048828125, 0.07002207946777343, 0.0698191375732422, 0.070229248046875, 0.07055213165283203, 0.07100640106201171, 0.07234559631347656, 0.07085794830322266, 0.07097116851806641, 0.07076761627197266, 0.07055564880371094, 0.07042867279052735, 0.07322217559814453, 0.07066793823242187, 0.07076914978027343, 0.07022367858886719, 0.07013910675048828, 0.07340930938720704, 0.0706777572631836, 0.07068873596191406, 0.0717995834350586, 0.07148258972167969, 0.07014419555664063, 0.07093740844726562, 0.07119439697265625, 0.07083350372314454, 0.07084822082519532, 0.07059862518310547, 0.07044960021972656, 0.07065039825439454, 0.07061033630371094, 0.07049686431884766, 0.07072972869873047, 0.07090306854248046, 0.070914306640625, 0.07043321228027344, 0.0702889633178711, 0.07058889770507812, 0.07078246307373047, 0.07081830596923828, 0.07029299163818359, 0.0703012466430664, 0.07081670379638672, 0.07031734466552735, 0.07049407958984374, 0.07040191650390625, 0.07064265441894531, 0.07104313659667968, 0.07075833892822266, 0.07089766693115235, 0.07065599822998046, 0.0704163818359375, 0.07110649871826172, 0.07083833312988282, 0.07060275268554687, 0.07059661102294922, 0.07350476837158203, 0.07081779479980468, 0.07121305847167969, 0.07062732696533203, 0.07053475189208984, 0.07048416137695312, 0.07053059387207031, 0.07051129913330079, 0.07046367645263672, 0.0701475830078125, 0.07015206146240234, 0.07068511962890625, 0.07032422637939453, 0.07051058959960937, 0.070847900390625, 0.0706030044555664, 0.07023999786376953, 0.07089759826660157, 0.07074473571777344, 0.07097548675537109, 0.07065599822998046, 0.07053702545166016, 0.07030531311035157, 0.07059728240966796, 0.07059257507324218, 0.07068051147460938, 0.0704411849975586, 0.0708831024169922, 0.0710635528564453, 0.07075389099121093, 0.07033596801757812, 0.07021692657470703, 0.07068029022216797, 0.0721082534790039, 0.07326841735839844, 0.07058812713623047, 0.07154707336425781, 0.0707624282836914, 0.07193218994140625, 0.07024230194091798, 0.07063600158691406, 0.07046518707275391, 0.07095536041259766, 0.07228160095214843, 0.07098390197753907, 0.07117878723144531, 0.07076553344726562, 0.07111759948730469, 0.07141785430908203, 0.07068876647949218, 0.07064985656738282, 0.07065548706054688, 0.07082444763183594, 0.0711720962524414, 0.07102976226806641, 0.070607421875, 0.07070941162109375, 0.07051929473876953, 0.0709026870727539, 0.07128768157958984, 0.07066524505615235, 0.07185305786132813, 0.07052694702148438, 0.0714820785522461, 0.07169840240478516, 0.07053446197509766, 0.07101081848144532, 0.07103510284423828, 0.07136444854736328, 0.07179827117919922, 0.0705000991821289, 0.0706710433959961, 0.07039734649658203, 0.07045817565917968, 0.0703563232421875, 0.07079993438720703, 0.07056803131103516, 0.0723927001953125, 0.0712069091796875, 0.07030982208251953, 0.07083987426757812, 0.0702529296875, 0.07056192016601562, 0.07309516906738281, 
0.07183916473388671, 0.07074617767333985, 0.07101696014404296, 0.07073580932617188, 0.07028141021728515, 0.07497510528564454, 0.07050444793701172, 0.07032240295410157, 0.07056361389160157, 0.07043209838867187, 0.07030563354492188, 0.07037216186523437, 0.07075430297851562, 0.07140966033935547, 0.07032653045654297, 0.0706352310180664, 0.07038159942626954, 0.07021772766113281, 0.07049436950683594, 0.07050633239746094, 0.07110451507568359, 0.07184130859375, 0.07191744232177734, 0.0711358413696289, 0.0702762908935547, 0.07009158325195312, 0.07016038513183594, 0.07013906860351563, 0.07022176361083984, 0.07065074920654296, 0.07037747192382812, 0.07017676544189454, 0.07035286712646484, 0.07037728118896484, 0.07091426849365234, 0.07033430480957031, 0.07055289459228516, 0.07048041534423828, 0.07035062408447265, 0.0707110366821289, 0.07022262573242187, 0.07058432006835938, 0.0725110092163086, 0.07071571350097657, 0.07045305633544922, 0.07050685119628906, 0.07011923217773437, 0.07008070373535157, 0.07024845123291015, 0.07074995422363281, 0.07009657287597656, 0.07052140808105468, 0.07069900512695312, 0.07027507019042968, 0.07040025329589844, 0.07024742126464843, 0.07057279968261719, 0.07062937927246093, 0.07221603393554688, 0.07077555084228515, 0.07061241912841797, 0.06997782135009765, 0.07065238189697266, 0.07043292999267578, 0.06984102630615234, 0.06979366302490235, 0.07008051300048829, 0.07063961791992188, 0.06983679962158203, 0.06996707153320313, 0.07015001678466797, 0.07031430053710938, 0.07026448059082031, 0.07026169586181641, 0.07026278686523438, 0.07007974243164063, 0.0699788818359375, 0.07217766571044922, 0.07026278686523438, 0.07030745697021484, 0.07033500671386719, 0.07030563354492188, 0.07159340667724609, 0.07007843017578125, 0.07043325042724609, 0.07050054168701173, 0.07152611541748047, 0.07038998413085938, 0.07079920196533203, 0.0702242202758789, 0.07037481689453125, 0.0712093734741211, 0.07070310211181641, 0.07044915008544922, 0.07016242980957031, 0.07082530975341797, 0.07116047668457032, 0.07018003082275391, 0.07016941070556641, 0.07136051177978515, 0.07065599822998046, 0.07099139404296875, 0.07106813049316406, 0.07109632110595702, 0.07134003448486329, 0.0708023681640625, 0.07055462646484376, 0.07148441314697265, 0.07144636535644532, 0.07111695861816406, 0.07032572937011719, 0.07040605163574219, 0.07189686584472656, 0.07036172485351562, 0.07012783813476563, 0.07038361358642578, 0.07354521942138671, 0.07024486541748047, 0.07040409851074218, 0.07098915100097657, 0.07449571228027344, 0.07154742431640625, 0.07439750671386719, 0.070595458984375, 0.07013552093505859, 0.07014390563964844, 0.07086271667480469, 0.07226953887939454, 0.07032182312011719, 0.07149014282226562, 0.07102489471435547, 0.07020953369140626, 0.06999858856201172, 0.07387289428710937, 0.07037593841552735, 0.07175782775878906, 0.07044445037841797, 0.07062179565429688, 0.07015187072753906, 0.07006034851074219, 0.07131056213378906, 0.06990914916992187, 0.06992438507080079, 0.07024515533447266, 0.07021510314941407, 0.07001126098632812, 0.0699527359008789, 0.06998239898681641, 0.07049072265625, 0.06999858856201172, 0.07002444458007813, 0.07005686187744141, 0.0703155517578125, 0.06986067199707031, 0.07038566589355469, 0.06988086700439453, 0.06998831939697266, 0.07000064086914062, 0.07002931213378906, 0.07008255767822266, 0.07005184173583984, 0.07009894561767578, 0.07050450897216796, 0.07072550201416015, 0.07021334075927735, 0.07006243133544922, 0.07047942352294922, 0.07046514892578125, 0.0705994873046875, 0.07037312316894531, 
0.06992486572265624, 0.0698265609741211, 0.06995308685302734, 0.07019152069091797, 0.07059430694580078, 0.07008678436279298, 0.07029293060302734, 0.06999433898925782, 0.06996451568603515, 0.07015030670166016, 0.07033036804199219, 0.0702525405883789, 0.07005593872070312, 0.07049420928955077, 0.07048191833496094, 0.07173939514160156, 0.07014118194580078, 0.07064009857177735, 0.07040214538574219, 0.0705285415649414, 0.07101404571533203, 0.06974566650390625, 0.0698974380493164, 0.07002191925048828, 0.07013785552978516, 0.07000691223144531, 0.06984015655517578, 0.07020134735107422, 0.07021590423583984, 0.06983650970458985, 0.07048047637939453, 0.06998841857910157, 0.06999648284912109, 0.07010643005371094, 0.07137152099609376, 0.07105945587158204, 0.07213484954833985, 0.07012521362304687, 0.06995574188232422, 0.0699677734375, 0.07045763397216796, 0.0699697265625, 0.07017215728759765, 0.0703267822265625, 0.07027721405029297, 0.06998416137695312, 0.07003475189208984, 0.07027129364013672, 0.07043724822998047, 0.07035494232177734, 0.06972115325927734, 0.07000908660888672, 0.06971577453613281, 0.06983990478515625, 0.06989318084716797, 0.07040278625488282, 0.06984425354003906, 0.0700770263671875, 0.0698226547241211, 0.06997196960449219, 0.06980812835693359]",tokens/s,14.214954230469012,,, 4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,889.417728,638.517248,0.0,260.046848,253.520896,s,1,7.95947119140625,7.95947119140625,0.0,7.95947119140625,7.95947119140625,7.95947119140625,7.95947119140625,[7.95947119140625],,kWh,2.1573645654151127e-05,2.372538863577728e-06,7.558894936005034e-06,3.150507945373389e-05,,MB,1197.993984,749.666304,0.0,341.835776,312.39168,s,13,0.16876528072357178,0.012981944671043983,0.0001479043615388075,0.012968255996704101,0.013094316673278808,0.013222700881958007,0.0133575772857666,"[0.01339129638671875, 0.012816160202026368, 0.01293564796447754, 0.012968255996704101, 0.01281935977935791, 0.012997568130493164, 0.013030367851257324, 0.013029151916503907, 0.01294924831390381, 0.013014816284179687, 0.01311030387878418, 0.012891039848327637, 0.012812064170837403]",tokens/s,19719.69581498863,kWh,3.806948366362395e-07,4.198411301527668e-08,2.527649421799144e-07,6.754438918314305e-07,tokens/kWh,379010015.6296765,MB,1209.102336,776.92928,0.0,369.098752,313.0496,s,13,9.86955401611328,0.7591964627779446,0.008135141971594635,0.7592740478515625,0.7656478393554688,0.7727370849609375,0.7804448291015625,"[0.7600068359375, 0.7629833374023437, 0.7575670166015624, 0.7509777221679688, 0.7530004272460937, 0.76631396484375, 0.7823717651367188, 0.7542471313476562, 0.76058740234375, 0.7592740478515625, 0.751735595703125, 0.7594671630859375, 
0.7510216064453125]",tokens/s,82.98247303402769,kWh,2.2066482475545642e-05,2.433513627452451e-06,8.384656497666155e-06,3.288465260066424e-05,tokens/kWh,1915787.305556862,,s,819,9.863349944114677,0.012043162324926355,0.00031496773920116716,0.011961824417114257,0.012284908485412598,0.012567424011230469,0.012816493759155272,"[0.011796287536621094, 0.011968704223632812, 0.011915200233459473, 0.011891776084899903, 0.011961440086364745, 0.011865344047546387, 0.011878496170043945, 0.011895359992980957, 0.011929599761962891, 0.011890687942504884, 0.011993087768554688, 0.012023807525634766, 0.01209059238433838, 0.012116864204406738, 0.012681119918823243, 0.01223475170135498, 0.012143936157226563, 0.012116543769836426, 0.012095616340637208, 0.012186816215515137, 0.012251551628112794, 0.012153216361999512, 0.01212009620666504, 0.012156928062438965, 0.012240927696228027, 0.012166943550109863, 0.012101823806762696, 0.01239641571044922, 0.012196991920471191, 0.01226854419708252, 0.012060064315795899, 0.01197935962677002, 0.012107775688171387, 0.012236127853393555, 0.012231231689453125, 0.012126303672790528, 0.011988991737365723, 0.01231065559387207, 0.011973919868469239, 0.011962976455688477, 0.0119716796875, 0.012004256248474121, 0.012066720008850097, 0.012164671897888184, 0.012074943542480468, 0.012098143577575684, 0.012099424362182616, 0.01206287956237793, 0.012087295532226563, 0.01205247974395752, 0.012009471893310546, 0.011985055923461914, 0.011939552307128906, 0.01194816017150879, 0.011915072441101075, 0.011905216217041015, 0.011914624214172363, 0.011859968185424804, 0.011929439544677735, 0.011903776168823241, 0.011952383995056153, 0.011964159965515137, 0.011898880004882812, 0.011673600196838378, 0.01203536033630371, 0.01191539192199707, 0.011936351776123047, 0.01189417552947998, 0.012104415893554687, 0.012018719673156738, 0.012157343864440917, 0.01210531234741211, 0.012163295745849609, 0.012044992446899414, 0.011990976333618165, 0.011879839897155763, 0.011856063842773438, 0.012040224075317383, 0.011984831809997558, 0.011981087684631348, 0.011946144104003907, 0.01189840030670166, 0.011926239967346191, 0.011904000282287597, 0.011936351776123047, 0.0118756160736084, 0.012030816078186036, 0.012051615715026855, 0.012051648139953613, 0.012008511543273925, 0.01205731201171875, 0.012085311889648438, 0.01203321647644043, 0.012001888275146484, 0.011900064468383788, 0.011935872077941894, 0.012069888114929199, 0.012303680419921874, 0.012181856155395509, 0.012059840202331543, 0.012696640014648437, 0.01202188777923584, 0.01209875202178955, 0.01203667163848877, 0.012064831733703613, 0.012107935905456543, 0.012174304008483888, 0.012284735679626464, 0.012240575790405274, 0.012396960258483887, 0.012373760223388671, 0.012519392013549804, 0.013121888160705567, 0.012318559646606445, 0.012291744232177734, 0.012163423538208007, 0.01221241569519043, 0.012189503669738769, 0.012237983703613281, 0.01216598415374756, 0.012154879570007325, 0.012143967628479004, 0.012050496101379395, 0.012118623733520508, 0.012170528411865235, 0.01211023998260498, 0.011717184066772461, 0.012006848335266114, 0.011885343551635742, 0.01196828842163086, 0.011910783767700195, 0.011936127662658691, 0.012124159812927245, 0.012184927940368653, 0.014021087646484375, 0.01225334358215332, 0.01212758445739746, 0.012059040069580078, 0.012007712364196777, 0.011972736358642579, 0.012008607864379883, 0.011986687660217285, 0.01210262393951416, 0.011972607612609864, 0.011913215637207031, 0.011999135971069335, 0.011972703933715821, 0.011902976036071777, 
0.011894783973693847, 0.01200492763519287, 0.012003840446472168, 0.012072575569152833, 0.012294560432434083, 0.012018752098083497, 0.012030400276184083, 0.011909536361694336, 0.011890399932861328, 0.011886879920959473, 0.0118985595703125, 0.012220895767211915, 0.012071904182434081, 0.012048768043518067, 0.012084768295288087, 0.011950112342834473, 0.01187116813659668, 0.011939423561096191, 0.011938207626342774, 0.011877568244934082, 0.011938624382019043, 0.01189788818359375, 0.011901439666748047, 0.011956319808959961, 0.011951647758483886, 0.011873023986816406, 0.011915360450744629, 0.011867615699768066, 0.011923999786376953, 0.011913215637207031, 0.012261440277099609, 0.01193065643310547, 0.011951007843017579, 0.012047871589660645, 0.011944543838500977, 0.011912991523742675, 0.011993215560913087, 0.011926912307739257, 0.012007871627807616, 0.011999423980712891, 0.012043840408325195, 0.011690079689025879, 0.012065664291381836, 0.01202995204925537, 0.012007488250732421, 0.011931584358215333, 0.012000639915466309, 0.011931808471679687, 0.011929439544677735, 0.012057439804077148, 0.011970335960388184, 0.012031999588012696, 0.012247039794921874, 0.011903103828430175, 0.011935456275939942, 0.011903136253356934, 0.011843584060668945, 0.0120481595993042, 0.011945343971252442, 0.011853952407836914, 0.011864800453186036, 0.011882495880126954, 0.011941984176635742, 0.011857695579528808, 0.011934176445007324, 0.011855520248413087, 0.011881888389587402, 0.011866047859191894, 0.011974656105041503, 0.011963104248046875, 0.011917471885681152, 0.011865887641906738, 0.011892352104187012, 0.011868800163269043, 0.011867487907409668, 0.011838080406188965, 0.011863295555114745, 0.011938528060913086, 0.011845151901245117, 0.011835519790649413, 0.011936127662658691, 0.011857695579528808, 0.0118471040725708, 0.011836992263793946, 0.011950528144836426, 0.011917887687683105, 0.012009471893310546, 0.011849727630615235, 0.011982848167419433, 0.01186406421661377, 0.011933504104614258, 0.011888895988464356, 0.011974623680114746, 0.011925472259521484, 0.011928768157958984, 0.01182812786102295, 0.011878303527832031, 0.011860223770141601, 0.011855615615844726, 0.01188003158569336, 0.011858336448669434, 0.011831295967102052, 0.011861503601074219, 0.011872223854064941, 0.011595552444458007, 0.011878911972045898, 0.011882559776306152, 0.011912256240844727, 0.011907327651977538, 0.011921279907226563, 0.011884448051452636, 0.01183836841583252, 0.012226559638977052, 0.01205452823638916, 0.011907072067260742, 0.011988415718078613, 0.011868032455444336, 0.011828927993774415, 0.01193391990661621, 0.011887424468994141, 0.011937760353088378, 0.011988991737365723, 0.011914976119995117, 0.011918911933898926, 0.012101375579833984, 0.011877344131469727, 0.012055744171142578, 0.01193231964111328, 0.011871583938598633, 0.011891167640686035, 0.011878591537475586, 0.01187446403503418, 0.011993087768554688, 0.01192572784423828, 0.01205020809173584, 0.012428447723388672, 0.011961312294006347, 0.01199295997619629, 0.011984479904174805, 0.011856287956237792, 0.011925439834594726, 0.01201683235168457, 0.011930496215820312, 0.01204428768157959, 0.011921407699584961, 0.011878399848937989, 0.011929599761962891, 0.011974656105041503, 0.011941184043884277, 0.011958080291748047, 0.01191164779663086, 0.012096991539001465, 0.011932607650756835, 0.011857024192810059, 0.01186905574798584, 0.011876352310180664, 0.011873791694641114, 0.011930272102355957, 0.011832448005676269, 0.011913344383239747, 0.011966624259948731, 0.011904800415039062, 
0.011954208374023438, 0.012016256332397461, 0.011935232162475586, 0.01195263957977295, 0.012209792137145997, 0.012643424034118653, 0.012405664443969726, 0.012048383712768555, 0.01202995204925537, 0.011958271980285644, 0.011884544372558594, 0.012037887573242187, 0.011903231620788574, 0.012236063957214355, 0.015813568115234374, 0.015257375717163086, 0.012349184036254884, 0.011911423683166503, 0.01226137638092041, 0.011943936347961426, 0.011907072067260742, 0.012075136184692383, 0.01188758373260498, 0.011994048118591309, 0.011930944442749024, 0.01191817569732666, 0.01186796760559082, 0.011938048362731933, 0.01195315170288086, 0.011954208374023438, 0.01206550407409668, 0.0120664644241333, 0.012370400428771973, 0.012207488059997558, 0.012003840446472168, 0.012050496101379395, 0.012001215934753418, 0.012102848052978515, 0.012135231971740722, 0.012031744003295899, 0.012077312469482421, 0.012263263702392578, 0.012036352157592774, 0.012062560081481934, 0.012180831909179688, 0.012092127799987792, 0.012392543792724609, 0.012042143821716308, 0.012011520385742188, 0.011927424430847168, 0.011945695877075195, 0.01189622402191162, 0.011872960090637206, 0.011923775672912598, 0.012156031608581542, 0.011987071990966797, 0.011911231994628907, 0.011958975791931153, 0.01191868782043457, 0.011885055541992188, 0.012044447898864746, 0.012005599975585937, 0.011896608352661132, 0.012722175598144531, 0.011873888015747071, 0.01189897632598877, 0.011823424339294434, 0.011895903587341309, 0.011773951530456543, 0.01196675205230713, 0.012023520469665528, 0.011955679893493653, 0.011954624176025391, 0.01195734405517578, 0.011969280242919923, 0.01195638370513916, 0.011962688446044922, 0.011916095733642579, 0.011948960304260254, 0.01206505584716797, 0.012142111778259277, 0.012351743698120117, 0.012275712013244629, 0.012285599708557129, 0.012326975822448731, 0.012243231773376466, 0.012250783920288087, 0.012288319587707519, 0.012472352027893067, 0.012429568290710449, 0.01258675193786621, 0.012627200126647949, 0.012683039665222167, 0.01261683177947998, 0.012819552421569824, 0.012567296028137206, 0.012645888328552245, 0.012640768051147461, 0.012564640045166015, 0.012610527992248535, 0.01258790397644043, 0.012578816413879394, 0.012632063865661621, 0.012557600021362305, 0.012640992164611816, 0.012568575859069824, 0.012626976013183593, 0.01259823989868164, 0.012595104217529298, 0.012771424293518066, 0.012698816299438477, 0.012534496307373046, 0.012490303993225097, 0.012632543563842774, 0.012637984275817872, 0.012572959899902343, 0.012560319900512696, 0.012621888160705566, 0.012640128135681153, 0.012676959991455078, 0.01262816047668457, 0.012609631538391113, 0.0125665283203125, 0.012475520133972169, 0.012473088264465332, 0.01231443214416504, 0.01257091236114502, 0.012365119934082031, 0.01224163246154785, 0.012280832290649414, 0.0122391357421875, 0.01170188808441162, 0.01203651237487793, 0.012059231758117676, 0.012019136428833007, 0.011962080001831054, 0.011969344139099121, 0.011931488037109375, 0.012035296440124512, 0.011969311714172363, 0.011905023574829102, 0.011999232292175293, 0.011906271934509277, 0.0120164155960083, 0.012120287895202636, 0.012250240325927735, 0.012197919845581055, 0.012138751983642579, 0.012009696006774902, 0.01195638370513916, 0.011898880004882812, 0.011984095573425294, 0.011922207832336426, 0.011831232070922851, 0.011870271682739257, 0.011886752128601075, 0.011933376312255859, 0.011950240135192872, 0.011943936347961426, 0.011961824417114257, 0.011911711692810058, 0.012017279624938966, 0.011888447761535645, 
0.011899135589599609, 0.011864383697509766, 0.012031807899475097, 0.012130496025085449, 0.012304384231567383, 0.012040032386779784, 0.012049792289733886, 0.011929599761962891, 0.011918111801147461, 0.011866080284118652, 0.011887840270996094, 0.011983776092529297, 0.012021439552307129, 0.011908991813659668, 0.011862400054931641, 0.011869600296020508, 0.011872832298278808, 0.011794431686401367, 0.01222383975982666, 0.012304544448852539, 0.011903488159179687, 0.011964415550231934, 0.011926591873168945, 0.011877311706542968, 0.011905152320861817, 0.011880319595336914, 0.011838815689086913, 0.01189136028289795, 0.011876352310180664, 0.011868160247802734, 0.011898880004882812, 0.011673312187194824, 0.011946751594543456, 0.011980223655700683, 0.011911808013916015, 0.011909119606018067, 0.011898880004882812, 0.011835519790649413, 0.011906175613403321, 0.01189740753173828, 0.01193183994293213, 0.011907135963439942, 0.012041952133178711, 0.01194825553894043, 0.011958271980285644, 0.01198095989227295, 0.011949888229370117, 0.011972352027893067, 0.011893024444580078, 0.011951199531555176, 0.011942496299743652, 0.011923744201660156, 0.01196835231781006, 0.011909503936767579, 0.011907072067260742, 0.011881279945373534, 0.011901951789855958, 0.01190112018585205, 0.011925312042236328, 0.011890015602111816, 0.011889311790466308, 0.01201257610321045, 0.011926495552062988, 0.012007295608520508, 0.011995264053344726, 0.011913215637207031, 0.011890687942504884, 0.011890687942504884, 0.011933024406433106, 0.012010144233703614, 0.011953760147094726, 0.011973024368286133, 0.01201142406463623, 0.011999327659606934, 0.012079008102416992, 0.012086848258972167, 0.012038687705993652, 0.012044159889221192, 0.01220348834991455, 0.012141056060791015, 0.012124544143676757, 0.01215056037902832, 0.012082464218139648, 0.012057120323181153, 0.012175552368164063, 0.01218563175201416, 0.012179295539855957, 0.01218511962890625, 0.012284416198730469, 0.012294015884399413, 0.012101280212402343, 0.012441856384277344, 0.01573100757598877, 0.01247436809539795, 0.011737407684326172, 0.011955615997314453, 0.011999551773071288, 0.011993663787841797, 0.01204428768157959, 0.012085247993469238, 0.012088895797729492, 0.012001440048217774, 0.011989279747009278, 0.011958271980285644, 0.011978752136230468, 0.01194604778289795, 0.011929535865783691, 0.011936863899230958, 0.012002207756042481, 0.01200879955291748, 0.01208131217956543, 0.012352000236511231, 0.012308480262756348, 0.011918432235717773, 0.011904959678649902, 0.01196720027923584, 0.011929951667785644, 0.011988896369934082, 0.01189254379272461, 0.011997376441955566, 0.011952128410339356, 0.011976192474365235, 0.011975168228149414, 0.01202790355682373, 0.012148063659667968, 0.012120736122131348, 0.012296192169189453, 0.012259327888488769, 0.012199808120727539, 0.012089056015014648, 0.012001376152038574, 0.012028223991394043, 0.012066816329956055, 0.011963839530944825, 0.012165696144104003, 0.012064448356628418, 0.012214591979980469, 0.012103679656982422, 0.012042400360107421, 0.012054368019104004, 0.01203711986541748, 0.012601856231689454, 0.012132672309875489, 0.012161215782165528, 0.012103615760803222, 0.012074080467224121, 0.012055520057678223, 0.012033663749694824, 0.012139967918395996, 0.012018624305725098, 0.012072959899902343, 0.01201801586151123, 0.011916288375854492, 0.01185244846343994, 0.011902015686035156, 0.011854111671447754, 0.012061344146728516, 0.011669504165649413, 0.012023072242736816, 0.012059359550476075, 0.012035327911376953, 0.012178175926208496, 
0.0120381441116333, 0.01194934368133545, 0.011917887687683105, 0.011980159759521485, 0.0120449275970459, 0.01226972770690918, 0.011993280410766602, 0.011974047660827637, 0.012023391723632813, 0.011928383827209472, 0.011915264129638671, 0.011881983757019043, 0.011950143814086914, 0.011898719787597656, 0.011842016220092774, 0.01192784023284912, 0.011874367713928222, 0.011918720245361327, 0.011880831718444824, 0.011874079704284668, 0.011860223770141601, 0.01186406421661377, 0.011841535568237305, 0.011925503730773926, 0.012042240142822265, 0.012006815910339355, 0.011985504150390625, 0.011866111755371094, 0.01187564754486084, 0.011848383903503418, 0.011824864387512207, 0.011868448257446288, 0.011992992401123047, 0.011879551887512206, 0.011831775665283203, 0.011915776252746582, 0.011939552307128906, 0.01188595199584961, 0.011850655555725098, 0.01181056022644043, 0.011869983673095703, 0.011866592407226562, 0.011806719779968262, 0.011823103904724122, 0.011915264129638671, 0.011938048362731933, 0.011978367805480956, 0.011982848167419433, 0.011883872032165527, 0.011957183837890625, 0.011882335662841796, 0.011937055587768555, 0.011866847991943359, 0.011966464042663574, 0.012013567924499511, 0.011846879959106445, 0.011884480476379395, 0.011881600379943848, 0.01163475227355957, 0.012802559852600098, 0.01192313575744629, 0.011937376022338866, 0.012113408088684082, 0.013355487823486329, 0.01422985553741455, 0.012040384292602539, 0.012047743797302246, 0.011976672172546386, 0.011903552055358887, 0.011902976036071777, 0.012000800132751465, 0.011916928291320801, 0.012075712203979492, 0.011957792282104493, 0.011977343559265137, 0.011864031791687012, 0.011888671875, 0.011991040229797363, 0.012156448364257812, 0.011907456398010254, 0.011886591911315919, 0.011956319808959961, 0.012056032180786132, 0.011946528434753419, 0.011976703643798828, 0.012105728149414062, 0.01242643165588379, 0.012192383766174316, 0.012044544219970704, 0.012025792121887207, 0.011939840316772461, 0.011904959678649902, 0.011911231994628907, 0.011898207664489745, 0.011936415672302246, 0.011995327949523927, 0.011955904006958009, 0.011962495803833007, 0.011935615539550781, 0.01193996810913086, 0.01194803237915039, 0.011952383995056153, 0.012101280212402343, 0.011931936264038085, 0.011913215637207031, 0.011883584022521973, 0.011879263877868652, 0.011839391708374024, 0.011886591911315919, 0.011970560073852539, 0.011876352310180664, 0.011917407989501954, 0.011861760139465332, 0.011882016181945801, 0.01194048023223877, 0.011950079917907714, 0.01185971164703369, 0.011864319801330566, 0.011874303817749024, 0.011874303817749024, 0.013195263862609862, 0.011663455963134766, 0.011964735984802246, 0.011970879554748536, 0.011921152114868164, 0.012010016441345216, 0.012117247581481933, 0.012348447799682617, 0.011914976119995117, 0.011877920150756836, 0.011937824249267578, 0.011895071983337403, 0.011890591621398926, 0.01189094352722168, 0.011842911720275878, 0.011985024452209473, 0.011895071983337403, 0.011905088424682617, 0.011905280113220215, 0.011857983589172363, 0.011873439788818359, 0.011854559898376464, 0.011886591911315919, 0.011874272346496581, 0.011886367797851563, 0.011839743614196777, 0.011968511581420899, 0.011878399848937989, 0.011890687942504884, 0.011857536315917969, 0.011872480392456055, 0.011886752128601075, 0.01187820816040039, 0.011870271682739257, 0.011880576133728027, 0.0118920316696167, 0.01185478401184082, 0.011855680465698243, 0.01186348819732666, 0.011888768196105957, 0.011813440322875977, 0.011849535942077636, 0.011845088005065918, 
0.011878432273864746, 0.011926176071166993, 0.011876192092895508, 0.011941887855529786, 0.011907008171081543, 0.011929311752319337, 0.01208080005645752, 0.012118528366088867, 0.01195638370513916, 0.011902015686035156, 0.011908160209655762, 0.011849632263183594, 0.011920672416687011, 0.012040063858032227, 0.012040672302246093, 0.011929216384887696, 0.01189782428741455, 0.011894559860229493, 0.011890560150146485, 0.011901056289672852, 0.011907072067260742]",tokens/s,83.0346692189184,,, 4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4169.797632,4725.866496,0.0,4347.396096,4328.833024,s,1,10.168271484375,10.168271484375,0.0,10.168271484375,10.168271484375,10.168271484375,10.168271484375,[10.168271484375],,kWh,9.025456107503941e-05,9.946536858022936e-06,2.9206134476017498e-05,0.00012940723240907983,,MB,1367.089152,5212.40576,0.0,4804.575232,4748.27776,s,10,3.915197418212891,0.3915197418212891,0.004421960725253466,0.3930104522705078,0.3947453002929687,0.39542315673828127,0.39596544189453126,"[0.3796112060546875, 0.3929827880859375, 0.39303811645507813, 0.38973516845703127, 0.39199102783203127, 0.39459466552734374, 0.3935941467285156, 0.38942996215820314, 0.39411932373046876, 0.39610101318359375]",tokens/s,653.862302853817,kWh,1.1309097754005902e-05,1.2471398294166202e-06,7.5085636991536815e-06,2.0064801282576203e-05,tokens/kWh,12758661.119774176,MB,1372.336128,5321.457664,0.0,4913.627136,4878.100992,s,10,19.812693237304686,1.9812693237304686,0.011225055036968759,1.9823427734374999,1.9938609130859375,1.9943759521484377,1.9947879833984377,"[1.991484619140625, 1.9722833251953125, 1.964276123046875, 1.9937464599609376, 1.9711112060546876, 1.9695313720703125, 1.974825439453125, 1.989860107421875, 1.99068359375, 1.9948909912109376]",tokens/s,31.797797121988097,kWh,5.8298273269744395e-05,6.430324215388951e-06,3.8719584394445744e-05,0.0001034481818795791,tokens/kWh,609000.5532754206,,s,630,19.80954297065734,0.03144371900104341,0.0005867593209498358,0.0313822717666626,0.031838938522338864,0.03218983402252197,0.03419297065734864,"[0.033762046813964844, 0.03153267288208008, 0.03125686454772949, 0.031538591384887696, 0.031449663162231446, 0.03143084716796875, 0.03164499282836914, 0.03148371124267578, 0.03145123291015625, 0.031691551208496094, 0.031807424545288086, 0.03168671989440918, 0.03228876876831055, 0.03387596893310547, 0.03170918464660644, 0.0314466552734375, 0.03171494483947754, 0.0313798713684082, 0.03173417663574219, 0.03166201591491699, 0.031355903625488284, 0.03144150352478027, 0.031711328506469724, 0.03152025604248047, 0.031591007232666016, 0.03161315155029297, 0.03167225646972656, 0.03169696044921875, 0.03174399948120117, 0.03182796859741211, 0.031707263946533205, 0.03164508819580078, 0.03175267219543457, 0.0313384952545166, 0.03167436790466309, 0.031765951156616214, 0.03162719917297363, 0.03149596786499023, 0.031416671752929684, 0.03132671928405762, 
0.031637535095214844, 0.03200960159301758, 0.031658592224121096, 0.0315098876953125, 0.031615167617797854, 0.03180108833312988, 0.031480192184448245, 0.03139769554138184, 0.031547679901123046, 0.03143843269348145, 0.0314783992767334, 0.03172294425964355, 0.03136774444580078, 0.03142643165588379, 0.03149017524719238, 0.03126681518554687, 0.031184223175048827, 0.031093408584594726, 0.031110240936279298, 0.030880512237548827, 0.030982303619384765, 0.031395263671875, 0.03121753692626953, 0.03423360061645508, 0.032199424743652345, 0.03136511993408203, 0.03141961669921875, 0.0315830078125, 0.03149619293212891, 0.031080448150634765, 0.031857696533203125, 0.031238880157470703, 0.031097087860107422, 0.03209830474853516, 0.031213567733764647, 0.031252479553222655, 0.030887136459350584, 0.03086809539794922, 0.03114409637451172, 0.03079987144470215, 0.03102720069885254, 0.030686656951904298, 0.030695968627929688, 0.031242271423339844, 0.03122790336608887, 0.031100927352905275, 0.03153919982910156, 0.0312805118560791, 0.03120102310180664, 0.03148643112182617, 0.031166879653930665, 0.031178272247314454, 0.03159491157531738, 0.03162656021118164, 0.031146751403808594, 0.031141599655151366, 0.03126908874511719, 0.03128121566772461, 0.032555007934570314, 0.031068159103393556, 0.03102720069885254, 0.030674623489379882, 0.030873472213745118, 0.03096620750427246, 0.031001951217651365, 0.031462047576904295, 0.031440895080566404, 0.0313384952545166, 0.03135487937927246, 0.03130086326599121, 0.031492864608764645, 0.03129887962341309, 0.03163542366027832, 0.03129955291748047, 0.031156160354614257, 0.03147014427185059, 0.031333919525146484, 0.031103744506835937, 0.03105561637878418, 0.030732511520385742, 0.03088345527648926, 0.03110540771484375, 0.031098880767822266, 0.030988288879394532, 0.03125391960144043, 0.03131843185424805, 0.03395647811889648, 0.03156598472595215, 0.03160396766662597, 0.03127577590942383, 0.03112940788269043, 0.03162335968017578, 0.031346176147460936, 0.03095964813232422, 0.031176288604736327, 0.03092473602294922, 0.0308023681640625, 0.03069593620300293, 0.033898494720458985, 0.03160470390319824, 0.031188928604125976, 0.030807647705078125, 0.031191551208496093, 0.03123344039916992, 0.03143503952026367, 0.03143430328369141, 0.031374080657958985, 0.0314202880859375, 0.03162118339538574, 0.031266048431396486, 0.03120207977294922, 0.03147145652770996, 0.03131411170959473, 0.031085887908935548, 0.03092323112487793, 0.030941408157348634, 0.031380607604980466, 0.03133529663085938, 0.031133312225341797, 0.0309169921875, 0.030830591201782227, 0.030744575500488282, 0.030658559799194338, 0.03105120086669922, 0.030701791763305664, 0.030746976852416993, 0.03095919990539551, 0.03132246398925781, 0.03105183982849121, 0.03080147171020508, 0.030652864456176758, 0.031959039688110355, 0.03069308853149414, 0.030578975677490235, 0.030580480575561522, 0.030607616424560547, 0.03068083190917969, 0.031090944290161134, 0.03092889595031738, 0.03119215965270996, 0.03136588859558105, 0.030842432022094725, 0.03073699188232422, 0.030701568603515625, 0.030693376541137695, 0.030830240249633788, 0.031175008773803713, 0.03136860847473145, 0.031228511810302735, 0.03370697784423828, 0.03141734313964844, 0.03135456085205078, 0.03136748886108399, 0.03124336051940918, 0.03082326316833496, 0.03085443115234375, 0.031116064071655274, 0.03114188766479492, 0.031117311477661135, 0.031065887451171875, 0.031004735946655274, 0.030936447143554688, 0.030752767562866212, 0.03070572853088379, 0.03071251106262207, 0.030824415206909178, 
0.030691295623779296, 0.03109622383117676, 0.031332128524780276, 0.031385951995849606, 0.03260678482055664, 0.03509862518310547, 0.03164723205566406, 0.03148441505432129, 0.03128319931030273, 0.031734783172607424, 0.03146009635925293, 0.031379680633544925, 0.03143888092041015, 0.031455232620239255, 0.031485151290893555, 0.031589151382446286, 0.03143270492553711, 0.03125862312316895, 0.033635425567626956, 0.03182480049133301, 0.031780672073364255, 0.03176057624816894, 0.03196108818054199, 0.0318525447845459, 0.031834112167358396, 0.033685504913330076, 0.03216707229614258, 0.03191484832763672, 0.032161792755126956, 0.03198566436767578, 0.03184435272216797, 0.032012161254882814, 0.03172681617736817, 0.031576992034912106, 0.031531007766723636, 0.031537023544311524, 0.031475231170654296, 0.03108268737792969, 0.031095199584960938, 0.031073856353759765, 0.031526912689208986, 0.03425750350952148, 0.03189743995666504, 0.0315863037109375, 0.03146751976013184, 0.031211328506469727, 0.034103294372558594, 0.03159654426574707, 0.03129948806762695, 0.031247711181640624, 0.0314270076751709, 0.031494464874267575, 0.03139536094665527, 0.031375839233398435, 0.031262144088745114, 0.03098876762390137, 0.03104316711425781, 0.030996288299560547, 0.0311213436126709, 0.031124223709106447, 0.031236095428466795, 0.03101286315917969, 0.03085312080383301, 0.030785375595092774, 0.03090652847290039, 0.03096575927734375, 0.030711807250976563, 0.03091632080078125, 0.031137855529785156, 0.03121379280090332, 0.031170560836791993, 0.030650367736816408, 0.030506464004516603, 0.03062633514404297, 0.03080396842956543, 0.030980096817016602, 0.031172607421875, 0.031049440383911133, 0.03095961570739746, 0.030976287841796873, 0.030875648498535156, 0.030939136505126953, 0.030926368713378907, 0.03093948745727539, 0.030834527969360353, 0.03068115234375, 0.03104364776611328, 0.030785696029663086, 0.030727712631225586, 0.030978527069091797, 0.03086479949951172, 0.03086409568786621, 0.03165961647033692, 0.03186307144165039, 0.03154710388183594, 0.031617311477661135, 0.03178886413574219, 0.03158582305908203, 0.031599264144897464, 0.03180496025085449, 0.032250335693359375, 0.031727231979370114, 0.03188931274414063, 0.03193900871276856, 0.03196854400634765, 0.0325557746887207, 0.03170918464660644, 0.031817728042602536, 0.031838207244873046, 0.03379836654663086, 0.031479711532592776, 0.031326303482055666, 0.031223743438720704, 0.031350847244262695, 0.03161292839050293, 0.031567264556884765, 0.031584447860717776, 0.03150886344909668, 0.03165750312805176, 0.0313492488861084, 0.03140812873840332, 0.03135676765441894, 0.031633119583129886, 0.03137580871582031, 0.031152128219604492, 0.03104732894897461, 0.031123039245605468, 0.03165056037902832, 0.031678335189819336, 0.031092863082885742, 0.030976095199584962, 0.03073219108581543, 0.030707712173461913, 0.030643648147583007, 0.03135750389099121, 0.030621696472167968, 0.03114726448059082, 0.030713760375976562, 0.03126153564453125, 0.031254528045654296, 0.03117670440673828, 0.03135487937927246, 0.0313093433380127, 0.03131030464172363, 0.031307775497436525, 0.031210943222045897, 0.03140665626525879, 0.031509855270385745, 0.031191295623779296, 0.031038944244384765, 0.03116713523864746, 0.0316275520324707, 0.03096575927734375, 0.030957311630249024, 0.03087299156188965, 0.030589792251586916, 0.03084671974182129, 0.03089638328552246, 0.030841983795166016, 0.030845632553100587, 0.031102783203125, 0.030741888046264647, 0.031175296783447267, 0.03141465568542481, 0.031145984649658204, 0.03135638427734375, 
0.031384063720703126, 0.031514656066894534, 0.03128934478759766, 0.0314040641784668, 0.03162953567504883, 0.03122150421142578, 0.03393308639526367, 0.031508575439453124, 0.0314619197845459, 0.031218912124633787, 0.031371648788452146, 0.031297504425048826, 0.03255353546142578, 0.031887359619140625, 0.03153302383422851, 0.031383583068847656, 0.03139705657958984, 0.03148064041137695, 0.031352352142333985, 0.031246816635131836, 0.03128319931030273, 0.03116166305541992, 0.03130179214477539, 0.03136073684692383, 0.031231903076171876, 0.031365472793579104, 0.03149676895141602, 0.03138150405883789, 0.03132566452026367, 0.031446720123291014, 0.03125766372680664, 0.03132374382019043, 0.031567840576171874, 0.031393983840942385, 0.031162399291992188, 0.032578655242919925, 0.03157488059997558, 0.03134016036987305, 0.031318464279174806, 0.031029247283935548, 0.03077731132507324, 0.030867488861083984, 0.03107244873046875, 0.030803775787353514, 0.031198911666870117, 0.031740224838256836, 0.031155744552612306, 0.0311441593170166, 0.031125759124755858, 0.03123606491088867, 0.031064096450805663, 0.031041536331176758, 0.031248607635498048, 0.03110028839111328, 0.031025568008422853, 0.03126860809326172, 0.03094553565979004, 0.030772960662841797, 0.03089779281616211, 0.030763423919677735, 0.03096601676940918, 0.030919872283935546, 0.03096793556213379, 0.030866111755371094, 0.0310435848236084, 0.03122585678100586, 0.03118489646911621, 0.03135897636413574, 0.03318368148803711, 0.03370880126953125, 0.03143270492553711, 0.031115264892578126, 0.03125043106079101, 0.030922752380371094, 0.030916608810424805, 0.03132806396484375, 0.031244319915771486, 0.031383712768554686, 0.03155353546142578, 0.03131711959838867, 0.03134144020080566, 0.031532608032226565, 0.03123040008544922, 0.031092735290527345, 0.031117311477661135, 0.03121971130371094, 0.03111689567565918, 0.03126438331604004, 0.03142511940002441, 0.032388801574707034, 0.03143935966491699, 0.031531007766723636, 0.031559680938720705, 0.03143475151062012, 0.031268863677978515, 0.031492095947265625, 0.03159859275817871, 0.0321781120300293, 0.03144915199279785, 0.03134464073181152, 0.03145465660095215, 0.03142025566101074, 0.03184902381896973, 0.0313753604888916, 0.03159670448303223, 0.031219743728637697, 0.0317869758605957, 0.03455740737915039, 0.03144758415222168, 0.03147081565856934, 0.03150297546386719, 0.031446880340576175, 0.03136953544616699, 0.03131740760803223, 0.031004831314086913, 0.03083513641357422, 0.03123200035095215, 0.03138768005371094, 0.03133616065979004, 0.031371519088745116, 0.03215359878540039, 0.03302809524536133, 0.03146873664855957, 0.03154003143310547, 0.03142051124572754, 0.031354784011840824, 0.03139104080200195, 0.03177542304992676, 0.03168870353698731, 0.03465798568725586, 0.03160505676269531, 0.031291391372680666, 0.03422959899902344, 0.03273731231689453, 0.0315645751953125, 0.03142451286315918, 0.0316231689453125, 0.03148595237731933, 0.03142646408081055, 0.03160073661804199, 0.03159654426574707, 0.03150233650207519, 0.03172761535644531, 0.031582143783569334, 0.03168671989440918, 0.03170918464660644, 0.03155763244628906, 0.031808639526367186, 0.03231356811523438, 0.03207030487060547, 0.031913631439208986, 0.03171977615356445, 0.031667583465576174, 0.03168115234375, 0.03149955177307129, 0.031439584732055666, 0.03141548728942871, 0.03139769554138184, 0.03134502410888672, 0.03132019233703613, 0.0313492488861084, 0.03141836738586426, 0.031664127349853514, 0.031477567672729495, 0.03129267120361328, 0.03142304039001465, 0.03130982398986817, 
0.031402368545532224, 0.031322111129760744, 0.03129958343505859, 0.031051136016845702, 0.03130633544921875, 0.031200576782226562, 0.031242143630981444, 0.031363136291503904, 0.031494911193847654, 0.031409280776977536, 0.03170582389831543, 0.03212249755859375, 0.031676959991455075, 0.03188662338256836, 0.031677152633666994, 0.03156377601623535, 0.03211468887329102, 0.03171439933776855, 0.03147276878356933, 0.031399711608886716, 0.03147747230529785, 0.031232288360595703, 0.031336448669433595, 0.031383039474487305, 0.031231935501098634, 0.031145824432373046, 0.03132080078125, 0.03178313636779785, 0.03428972625732422, 0.031711679458618164, 0.03147107124328613, 0.03144908714294434, 0.031643999099731444, 0.031697088241577145, 0.03154889678955078, 0.031453151702880856, 0.031647872924804685, 0.03142086410522461, 0.03134383964538574, 0.03145603179931641, 0.03141993522644043, 0.031441375732421874, 0.03152252769470215, 0.03130406379699707, 0.03132131195068359, 0.031183135986328124, 0.03161123275756836, 0.03152207946777344, 0.03135087966918945, 0.03143894386291504, 0.03145692825317383, 0.03145619201660156, 0.03163545608520508, 0.03151785659790039, 0.03150729560852051, 0.031776607513427736, 0.03159212875366211, 0.03170761680603027, 0.03212287902832031, 0.03167014312744141, 0.031838336944580076, 0.03182796859741211, 0.032481281280517575, 0.0319815673828125, 0.03202816009521484, 0.03167897605895996, 0.03164159965515137, 0.0317640323638916, 0.03174006462097168, 0.031448768615722655, 0.031506975173950194, 0.032005889892578125, 0.031450815200805664, 0.03152960014343262, 0.03179929542541504, 0.031645696640014646, 0.031553119659423826, 0.0318666877746582, 0.03148041534423828, 0.031550592422485355, 0.03173219108581543, 0.031647872924804685, 0.03158201599121094, 0.03161750411987305, 0.03157401657104492, 0.03165798377990723, 0.03160883140563965, 0.031643648147583005, 0.031647743225097655, 0.031510528564453126, 0.03181158447265625]",tokens/s,31.802853853477593,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", 
line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,2187.812864,2479.816704,0.0,2101.346304,1978.345472,s,1,8.974201171875,8.974201171875,0.0,8.974201171875,8.974201171875,8.974201171875,8.974201171875,[8.974201171875],,kWh,5.411849286667044e-05,5.962155132541093e-06,1.7299180505997436e-05,7.737982850520897e-05,,MB,2176.872448,2775.515136,0.0,2367.684608,2241.29024,s,10,1.5665681762695316,0.15665681762695313,0.0006752935553594327,0.1566033477783203,0.1574509567260742,0.15768791122436523,0.15787747482299805,"[0.15792486572265624, 0.1566112060546875, 0.15604762268066405, 0.15642784118652345, 0.15707344055175781, 0.15613334655761718, 0.15659548950195312, 0.15544761657714845, 0.15739830017089843, 0.156908447265625]",tokens/s,1634.1452857137235,kWh,4.813034964958767e-06,5.307966585377469e-07,3.2062001970166566e-06,8.55003182051317e-06,tokens/kWh,29941409.0349707,MB,2183.655424,2859.401216,0.0,2451.570688,2334.829056,s,10,19.94945349121094,1.9949453491210936,0.014159525420776585,1.9951657104492186,2.0117086303710936,2.0154676574707033,2.0184748791503906,"[2.010873291015625, 2.0029676513671877, 1.993408447265625, 2.0192266845703126, 1.988684326171875, 1.99708642578125, 1.9908304443359375, 1.9644140625, 1.9850391845703126, 1.9969229736328125]",tokens/s,31.579812463411944,kWh,5.700232462295954e-05,6.287186092617085e-06,3.092157710517864e-05,9.421108782075526e-05,tokens/kWh,668711.0982081317,,s,630,19.946422578811628,0.03166098822033595,0.0006018373266020355,0.03161563205718994,0.03217015266418457,0.03240320053100586,0.033214518318176275,"[0.032889984130859376, 0.03231804656982422, 0.032000286102294925, 0.03188083267211914, 0.03180768013000488, 0.03148726463317871, 0.031736288070678714, 0.03147599983215332, 0.03157417678833008, 0.031692384719848636, 0.031965599060058594, 0.03184611129760742, 0.03229520034790039, 0.032833534240722655, 0.03225190353393555, 0.03208294296264649, 0.032471519470214844, 0.0320599365234375, 0.03210649490356445, 0.03217168045043945, 0.03204265594482422, 0.03194256019592285, 0.032318241119384764, 0.03223756790161133, 0.03223875045776367, 0.03191689682006836, 0.031968704223632814, 0.03215008163452148, 0.032366592407226565, 0.031762304306030276, 0.0318670711517334, 0.0321860466003418, 0.03219232177734375, 0.03172831916809082, 0.03186662483215332, 0.03156959915161133, 0.03164396858215332, 0.03164883232116699, 0.03288345718383789, 0.033003711700439455, 0.03191398429870605, 0.031528959274291994, 0.03140630340576172, 0.031825599670410154, 0.03306505584716797, 0.03144864082336426, 0.031617599487304686, 0.03117430305480957, 0.03154483222961426, 0.03125888061523437, 0.03157449531555176, 0.031302911758422855, 0.032250625610351566, 0.03278643035888672, 0.03172319984436035, 0.03151228713989258, 0.03201273727416992, 0.031408287048339846, 0.03126681518554687, 0.03168012809753418, 0.03135894393920898, 0.03103785514831543, 0.03141190338134765, 0.0320516471862793, 0.03142223930358887, 0.031312639236450196, 0.03241164779663086, 0.031774688720703125, 0.0315794563293457, 0.03139673614501953, 0.03135385513305664, 0.031564640045166015, 0.031143936157226562, 0.03139910316467285, 0.031166559219360353, 0.03119900894165039, 0.031224096298217773, 0.031252960205078124, 0.03114112091064453, 0.03128211212158203, 0.03189555168151856, 0.032024574279785153, 0.031817728042602536, 0.03196649551391602, 0.03175907135009766, 0.032231422424316404, 0.032233470916748046, 0.031903743743896484, 0.03184409523010254, 0.03180159950256348, 0.03166524887084961, 0.031528863906860355, 0.03175472068786621, 
0.03184614372253418, 0.03161110305786133, 0.03158006477355957, 0.031179071426391602, 0.031271263122558596, 0.031145984649658204, 0.03160883140563965, 0.03139097595214844, 0.03183283233642578, 0.03181711959838867, 0.03220873641967773, 0.031924640655517575, 0.0318460807800293, 0.03189827156066895, 0.031948320388793944, 0.031877344131469726, 0.032013790130615234, 0.03183286476135254, 0.03205961608886719, 0.03216531372070312, 0.032037216186523436, 0.03172700881958008, 0.03183638381958008, 0.032077728271484376, 0.03259235382080078, 0.03216998291015625, 0.03221263885498047, 0.032050529479980466, 0.03239158248901367, 0.032449119567871096, 0.032185630798339845, 0.03230563354492187, 0.03240300750732422, 0.032710304260253904, 0.032088512420654296, 0.032403358459472655, 0.03194675254821777, 0.03172099113464356, 0.03144956779479981, 0.03147724723815918, 0.03111577606201172, 0.03134390449523926, 0.030922752380371094, 0.03104755210876465, 0.03083964729309082, 0.03139379119873047, 0.031079647064208984, 0.03140892791748047, 0.03102902412414551, 0.031275232315063475, 0.03133609580993652, 0.03153545570373535, 0.03114188766479492, 0.031251935958862304, 0.031160863876342773, 0.03173686408996582, 0.03124496078491211, 0.03134086418151855, 0.03130118370056152, 0.031734144210815426, 0.03164985656738281, 0.03160204887390137, 0.031506303787231446, 0.03137107276916504, 0.03164051246643067, 0.03151158332824707, 0.031689695358276364, 0.03194844818115234, 0.031983968734741214, 0.03188236808776856, 0.03173670387268066, 0.03195699119567871, 0.03188556861877442, 0.03212988662719726, 0.03174614334106445, 0.03205612945556641, 0.0316231689453125, 0.03198361587524414, 0.0319815673828125, 0.03210361480712891, 0.03160668754577637, 0.03170806312561035, 0.03172537612915039, 0.0322841911315918, 0.031632032394409176, 0.03168460845947266, 0.031541248321533204, 0.03170918464660644, 0.031415807723999024, 0.03193497657775879, 0.03174195289611816, 0.03186870384216309, 0.03175036811828613, 0.03184774398803711, 0.03177337646484375, 0.03182387161254883, 0.03248796844482422, 0.03185462379455566, 0.032111038208007814, 0.031784959793090824, 0.03181158447265625, 0.03153305625915527, 0.03158815956115723, 0.03166022491455078, 0.041218048095703126, 0.03204198455810547, 0.03200511932373047, 0.03180694389343262, 0.03221673583984375, 0.03204800033569336, 0.03209759902954101, 0.03538201522827149, 0.03205545425415039, 0.0320261116027832, 0.032129310607910154, 0.03189948844909668, 0.032084129333496095, 0.03196640014648437, 0.03245344161987305, 0.031673759460449216, 0.03173814392089844, 0.031669631958007816, 0.03206601715087891, 0.03177110481262207, 0.032072799682617184, 0.0323199691772461, 0.03193900871276856, 0.03174195289611816, 0.0318659839630127, 0.031637920379638675, 0.03195337677001953, 0.032137279510498044, 0.03220217514038086, 0.03194259262084961, 0.03203539276123047, 0.031889408111572266, 0.03210649490356445, 0.03163036727905273, 0.031515775680541994, 0.03142195129394531, 0.03302435302734375, 0.031244031906127928, 0.03127935981750488, 0.03134230422973633, 0.03149648094177246, 0.03164159965515137, 0.03170918464660644, 0.031641504287719724, 0.03166745567321777, 0.031555551528930664, 0.031656831741333005, 0.03160268783569336, 0.031473663330078124, 0.03130150413513184, 0.03137548828125, 0.03153670310974121, 0.03194207954406738, 0.03175699234008789, 0.03205955123901367, 0.03352163314819336, 0.03171737670898438, 0.031535327911376955, 0.03138742446899414, 0.031633535385131833, 0.0313809928894043, 0.031387807846069336, 0.03116054344177246, 
0.03162300872802734, 0.031510528564453126, 0.03174963188171387, 0.03134927940368652, 0.03131347274780273, 0.03139027214050293, 0.03205036926269531, 0.03182675170898437, 0.03157366371154785, 0.031236448287963868, 0.03125651168823242, 0.03104956817626953, 0.0310798397064209, 0.03093587112426758, 0.031126880645751955, 0.03157600021362305, 0.031248735427856444, 0.031054208755493164, 0.031205375671386718, 0.030864671707153322, 0.03111599922180176, 0.03098975944519043, 0.03127292823791504, 0.03108105659484863, 0.03218851089477539, 0.030863264083862304, 0.03160016059875488, 0.031308256149291995, 0.03185849571228027, 0.0318670711517334, 0.03250790405273438, 0.03242393493652344, 0.03208201599121094, 0.03176972770690918, 0.03194345664978027, 0.03186252784729004, 0.031770719528198245, 0.03154496002197266, 0.03159708786010742, 0.03141430473327637, 0.03170265579223633, 0.03161737632751465, 0.03163459205627441, 0.031344831466674807, 0.0313350715637207, 0.031285247802734374, 0.03157401657104492, 0.03162851142883301, 0.03161167907714844, 0.03160204887390137, 0.032131744384765626, 0.03208188629150391, 0.03188281631469726, 0.03161065673828125, 0.03155167961120606, 0.03256447982788086, 0.032025344848632814, 0.03206758499145508, 0.03193014335632324, 0.03192860794067383, 0.0318033275604248, 0.0317270393371582, 0.03138319969177246, 0.03149203109741211, 0.031928447723388674, 0.031505279541015625, 0.03126473617553711, 0.031331647872924806, 0.03186963272094727, 0.03286220932006836, 0.031472896575927736, 0.03151747131347656, 0.030986207962036133, 0.03147792053222656, 0.03167625617980957, 0.031890911102294923, 0.03151107215881348, 0.031696895599365234, 0.03155958366394043, 0.03179718399047852, 0.031819328308105466, 0.031826528549194336, 0.03327142333984375, 0.033505374908447266, 0.03192655944824219, 0.031780864715576174, 0.0315861759185791, 0.03195097541809082, 0.03209820938110351, 0.03337424087524414, 0.031807519912719726, 0.03151055908203125, 0.03127840042114258, 0.031344383239746094, 0.031214111328125, 0.03138368034362793, 0.03136745643615722, 0.031514623641967776, 0.03165593528747559, 0.03165388870239258, 0.03134998321533203, 0.0313526725769043, 0.031090784072875976, 0.031222623825073244, 0.031223392486572264, 0.031449504852294925, 0.03183843231201172, 0.03134169578552246, 0.03188969612121582, 0.031234432220458984, 0.031064064025878906, 0.03130316734313965, 0.03128780746459961, 0.03123200035095215, 0.03140812873840332, 0.03164892768859863, 0.031806400299072266, 0.03188435173034668, 0.03270057678222656, 0.031624063491821286, 0.031741920471191405, 0.031459327697753905, 0.03191110420227051, 0.03135980796813965, 0.03135487937927246, 0.031090688705444337, 0.031180192947387695, 0.031179359436035156, 0.031287296295166016, 0.0310515193939209, 0.031217920303344728, 0.03138355255126953, 0.03224371337890625, 0.03166982460021973, 0.03242438507080078, 0.031666112899780274, 0.031952959060668945, 0.031853567123413085, 0.032008224487304685, 0.03176752090454102, 0.03181977653503418, 0.03160444831848144, 0.031781152725219725, 0.03140108871459961, 0.031607679367065426, 0.03149619293212891, 0.03159568023681641, 0.032191326141357425, 0.032048351287841795, 0.03130652809143066, 0.031254240036010746, 0.03125833511352539, 0.03118342399597168, 0.031369216918945314, 0.031627264022827145, 0.03176860809326172, 0.0317620792388916, 0.03175446319580078, 0.03200009536743164, 0.03168608093261719, 0.03179782485961914, 0.031821088790893556, 0.03191456031799316, 0.03219827270507813, 0.031989664077758787, 0.03145750427246094, 0.03142051124572754, 
0.03125689506530762, 0.03151203155517578, 0.03145577621459961, 0.03125388717651367, 0.03108425521850586, 0.03117558479309082, 0.030971904754638672, 0.031270912170410156, 0.030935039520263673, 0.03140140724182129, 0.03165446472167969, 0.03205843353271484, 0.03181868743896484, 0.031460351943969726, 0.03174723243713379, 0.03139788818359375, 0.03138851165771484, 0.03146732711791992, 0.031549568176269534, 0.03133856010437012, 0.031479263305664064, 0.03116659164428711, 0.03124393653869629, 0.03169740867614746, 0.031146240234375, 0.031021055221557618, 0.031223487854003907, 0.031137952804565428, 0.031195295333862304, 0.031101951599121092, 0.0313246078491211, 0.031085248947143554, 0.03108233642578125, 0.031182880401611327, 0.03161497688293457, 0.03123788833618164, 0.03127097511291504, 0.03107244873046875, 0.031178144454956053, 0.03164143943786621, 0.03133097648620606, 0.031161792755126955, 0.03139667129516602, 0.031110591888427734, 0.031179168701171874, 0.030974079132080078, 0.03116428756713867, 0.03099238395690918, 0.03113926315307617, 0.03094175910949707, 0.030892032623291016, 0.030887935638427736, 0.030896127700805662, 0.030850496292114258, 0.030906944274902343, 0.03144636726379395, 0.031074975967407225, 0.031045631408691408, 0.031031295776367186, 0.030877151489257813, 0.031005216598510743, 0.030829599380493164, 0.031122400283813478, 0.03096575927734375, 0.03100054359436035, 0.030932064056396483, 0.03117900848388672, 0.03202732849121094, 0.031909280776977536, 0.030955135345458986, 0.03104867172241211, 0.030963712692260743, 0.031059648513793944, 0.030936607360839842, 0.031062816619873045, 0.030930944442749023, 0.03093708801269531, 0.0320662727355957, 0.031303680419921875, 0.03135078430175781, 0.031385408401489255, 0.0318383674621582, 0.03221059036254883, 0.03252467346191406, 0.031616287231445314, 0.031945215225219724, 0.031704288482666015, 0.03186108779907226, 0.03163759994506836, 0.03174822425842285, 0.03157036781311035, 0.03180127906799316, 0.03153696060180664, 0.03211430358886719, 0.03223820877075195, 0.03165184020996094, 0.03127033615112305, 0.03135487937927246, 0.031594112396240236, 0.0313578872680664, 0.031145984649658204, 0.03160883140563965, 0.03177471923828125, 0.03178662490844727, 0.03144921684265137, 0.0313940486907959, 0.031185056686401365, 0.031293279647827146, 0.031178752899169923, 0.03126067161560059, 0.03103539276123047, 0.03132620811462403, 0.031190303802490233, 0.03142495918273926, 0.031754079818725585, 0.03128713607788086, 0.031230560302734377, 0.03132825660705566, 0.0311778564453125, 0.031248672485351563, 0.03141894340515137, 0.03331689453125, 0.031940607070922854, 0.031715616226196286, 0.03112291145324707, 0.03163804817199707, 0.031210559844970703, 0.0312961597442627, 0.031116447448730468, 0.031277471542358395, 0.031098976135253906, 0.031155616760253906, 0.031037567138671875, 0.0313558406829834, 0.031100799560546875, 0.031264768600463864, 0.031071807861328123, 0.031144384384155274, 0.031129056930541993, 0.031562400817871095, 0.0324956169128418, 0.033075199127197266, 0.03180518341064453, 0.031428255081176755, 0.03145171165466309, 0.03133852767944336, 0.03139529609680176, 0.031187488555908204, 0.031514623641967776, 0.030951103210449218, 0.030934688568115234, 0.031108991622924805, 0.03125737571716308, 0.031024383544921874, 0.031035999298095703, 0.031266975402832034, 0.03188889694213867, 0.03237529754638672, 0.03182089614868164, 0.031609760284423825, 0.031866880416870115, 0.03184025573730469, 0.03196246337890625, 0.031812000274658206, 0.03192857551574707, 0.03195449638366699, 
0.03197587203979492, 0.03157740783691406, 0.031401792526245115, 0.03140083122253418, 0.031664127349853514, 0.03185971260070801, 0.031908607482910155, 0.03165996742248535, 0.03171360015869141, 0.03141222381591797, 0.03149004745483398, 0.03135078430175781, 0.03132620811462403, 0.031393152236938476, 0.031576704025268555, 0.031991519927978516, 0.03199551963806152, 0.03201103973388672, 0.03211251068115235, 0.03190390396118164, 0.03200579071044922, 0.03183760070800781, 0.032088863372802735, 0.03197120094299316, 0.03193628883361816, 0.03182150459289551, 0.03177481651306152, 0.03142918395996094, 0.03153408050537109, 0.03170816040039062, 0.03158016014099121, 0.03140812873840332, 0.031290752410888675, 0.032989822387695315, 0.03220585632324219, 0.03144598388671875, 0.031547391891479495]",tokens/s,31.584611100600355,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1303.523328,1083.113472,0.0,704.643072,675.01056,s,1,8.4019462890625,8.4019462890625,0.0,8.4019462890625,8.4019462890625,8.4019462890625,8.4019462890625,[8.4019462890625],,kWh,3.635637827502857e-05,4.003097544512459e-06,1.1330009063975632e-05,5.1689484883516665e-05,,MB,1330.29888,1403.977728,0.0,996.1472,942.608384,s,10,0.3297423706054688,0.03297423706054688,0.000629820502509376,0.032741056442260746,0.03361081008911133,0.03414358177185058,0.03456979911804199,"[0.033492416381835935, 0.032814849853515626, 0.032725887298583986, 0.032740161895751956, 0.0329180793762207, 0.03271689605712891, 0.034676353454589845, 0.032493854522705076, 0.032421920776367186, 
0.03274195098876953]",tokens/s,7763.636791047994,kWh,1.0881640219019337e-06,1.200033213602986e-07,7.203083003272397e-07,1.928475643589472e-06,tokens/kWh,132747333.80791223,MB,1348.374528,1412.366336,0.0,1004.535808,942.610944,s,10,18.513116699218752,1.8513116699218752,0.023257271581990157,1.8506577758789065,1.8760862670898437,1.888021649169922,1.8975699548339844,"[1.853917236328125, 1.8137567138671875, 1.85634521484375, 1.8734339599609375, 1.859286376953125, 1.89995703125, 1.8473983154296876, 1.8208701171875, 1.8445382080078125, 1.843613525390625]",tokens/s,34.029926469732985,kWh,5.3085660723922634e-05,5.8550354700993705e-06,2.1319292664671445e-05,8.025998885869346e-05,tokens/kWh,784949.0249857676,,s,630,18.510737812042247,0.029382123511178155,0.0012229306279341063,0.02936871910095215,0.02985663948059082,0.030057206535339356,0.03157573390960693,"[0.028767648696899413, 0.028725536346435546, 0.028883424758911133, 0.028831584930419922, 0.028948480606079102, 0.028816831588745116, 0.028691007614135743, 0.028680192947387696, 0.02928803253173828, 0.028837631225585938, 0.02898192024230957, 0.029050880432128907, 0.02945961570739746, 0.02886128044128418, 0.02919628715515137, 0.02906460762023926, 0.02924959945678711, 0.02922550392150879, 0.02917087936401367, 0.02905939292907715, 0.028994047164916992, 0.02905625534057617, 0.0289881591796875, 0.029278207778930664, 0.02925347137451172, 0.029429920196533205, 0.029216768264770508, 0.029077503204345705, 0.029198047637939453, 0.029296287536621092, 0.02955084800720215, 0.02940083122253418, 0.02930748748779297, 0.02929199981689453, 0.02954092788696289, 0.0293621768951416, 0.02929248046875, 0.029343807220458984, 0.029493152618408205, 0.02933564758300781, 0.03016841506958008, 0.02949964714050293, 0.029784479141235352, 0.029855104446411134, 0.029760128021240236, 0.0304616641998291, 0.03053919982910156, 0.02989695930480957, 0.030007871627807617, 0.029971744537353514, 0.03020262336730957, 0.02999407958984375, 0.029700096130371095, 0.029897632598876952, 0.029853696823120116, 0.029636608123779298, 0.029706047058105468, 0.030012639999389648, 0.02989529609680176, 0.029684032440185547, 0.029855743408203125, 0.029736991882324218, 0.02998476791381836, 0.0294932804107666, 0.02938409614562988, 0.02949305534362793, 0.029430559158325195, 0.02933456039428711, 0.029295936584472656, 0.028919456481933593, 0.028618175506591795, 0.028500543594360352, 0.02854911994934082, 0.02851430320739746, 0.028528383255004883, 0.02874150466918945, 0.02864780807495117, 0.028675264358520507, 0.028736480712890623, 0.028751712799072265, 0.028764320373535157, 0.028676095962524413, 0.028485471725463868, 0.02874777603149414, 0.028733440399169922, 0.028626943588256838, 0.028631040573120117, 0.028691808700561525, 0.028729312896728514, 0.028613311767578125, 0.028515871047973634, 0.028633567810058595, 0.028516351699829103, 0.02855731201171875, 0.02933318328857422, 0.028815807342529295, 0.02877017593383789, 0.028647232055664062, 0.02871062469482422, 0.028643808364868163, 0.028817087173461913, 0.028846399307250976, 0.02893840026855469, 0.028620031356811522, 0.02867465591430664, 0.028647104263305665, 0.028639551162719726, 0.02853068733215332, 0.028893184661865235, 0.028903423309326173, 0.028620800018310546, 0.028692480087280273, 0.028575935363769532, 0.029648448944091795, 0.028958976745605467, 0.028868608474731446, 0.02872319984436035, 0.028755008697509767, 0.028952735900878906, 0.028740095138549804, 0.02864156723022461, 0.028635135650634767, 0.028790111541748046, 0.028741600036621094, 0.028614944458007812, 
0.028635360717773437, 0.02870822334289551, 0.02881827163696289, 0.02895235252380371, 0.028876800537109375, 0.02876825523376465, 0.028794879913330077, 0.0287673282623291, 0.028591007232666017, 0.02879443168640137, 0.029009536743164064, 0.02947974395751953, 0.03056844711303711, 0.029394304275512696, 0.029907392501831054, 0.028948671340942384, 0.028737535476684572, 0.02886000061035156, 0.02877020835876465, 0.02928428840637207, 0.02937094306945801, 0.029122047424316407, 0.02913484764099121, 0.029334016799926758, 0.028948768615722657, 0.02905673599243164, 0.029222944259643554, 0.029455999374389648, 0.029151456832885742, 0.029460607528686525, 0.029566976547241212, 0.02950713539123535, 0.029747840881347656, 0.029542207717895508, 0.029491199493408202, 0.029587007522583007, 0.029663007736206056, 0.029576992034912108, 0.029620256423950195, 0.030034784317016602, 0.029689855575561523, 0.029612031936645508, 0.029621376037597656, 0.029500288009643556, 0.02953215980529785, 0.02953625679016113, 0.02993062400817871, 0.029444959640502928, 0.029476287841796876, 0.029286624908447266, 0.02931455993652344, 0.029468639373779297, 0.029610912322998048, 0.03031449508666992, 0.032233470916748046, 0.029894655227661132, 0.03143827247619629, 0.029747264862060547, 0.029739007949829102, 0.029495935440063476, 0.029877824783325194, 0.029491487503051757, 0.029552415847778322, 0.029671072006225586, 0.0295316162109375, 0.029436447143554687, 0.029471839904785156, 0.0295281925201416, 0.029670080184936522, 0.02957276725769043, 0.029469120025634767, 0.029782079696655275, 0.02949318313598633, 0.02953215980529785, 0.029423519134521483, 0.029779455184936524, 0.029502111434936522, 0.029550527572631834, 0.029566240310668946, 0.02948089599609375, 0.029600543975830076, 0.029515487670898437, 0.029607295989990234, 0.029658016204833985, 0.029624160766601564, 0.029802656173706053, 0.029566816329956055, 0.02952822494506836, 0.02955264091491699, 0.029706239700317383, 0.02972876739501953, 0.029515775680541992, 0.029586816787719728, 0.02978879928588867, 0.02951315116882324, 0.029571136474609374, 0.029573631286621094, 0.0295314884185791, 0.02958812713623047, 0.030703039169311525, 0.030691904067993166, 0.029748544692993165, 0.029891103744506837, 0.02964233589172363, 0.029957759857177736, 0.029565696716308595, 0.029704383850097656, 0.02980659294128418, 0.02989583969116211, 0.029924192428588868, 0.029490367889404297, 0.029613887786865235, 0.02984163284301758, 0.029651744842529297, 0.031116704940795898, 0.030810367584228514, 0.029875904083251952, 0.029304927825927734, 0.02931769561767578, 0.029249536514282228, 0.029031904220581054, 0.030568256378173828, 0.030089471817016603, 0.029699871063232422, 0.03127555274963379, 0.02993987274169922, 0.029437183380126953, 0.029253503799438477, 0.028942176818847656, 0.028889280319213867, 0.02884150314331055, 0.028747455596923828, 0.029227872848510743, 0.029655168533325196, 0.02932316780090332, 0.029196544647216795, 0.029333248138427734, 0.029509632110595704, 0.029558784484863283, 0.029396991729736328, 0.02934809684753418, 0.029265663146972657, 0.029198335647583007, 0.029289663314819334, 0.029224992752075196, 0.029117216110229494, 0.029392959594726563, 0.029624223709106445, 0.029722591400146485, 0.0296694393157959, 0.029619712829589844, 0.02954051208496094, 0.029628768920898437, 0.029711551666259765, 0.029548448562622072, 0.029526943206787108, 0.029616128921508788, 0.02953011131286621, 0.02975030326843262, 0.02929478454589844, 0.029422143936157226, 0.029458656311035156, 0.0294335994720459, 0.029391008377075194, 
0.02940732765197754, 0.02954787254333496, 0.029463167190551757, 0.029582592010498048, 0.02954524803161621, 0.029535327911376953, 0.02947727966308594, 0.02966988754272461, 0.02964614486694336, 0.029774688720703126, 0.029720415115356447, 0.029634048461914062, 0.029938175201416017, 0.029701919555664064, 0.029575391769409178, 0.029814720153808594, 0.029603519439697266, 0.029646623611450196, 0.029679647445678713, 0.029553216934204103, 0.02962611198425293, 0.029829376220703124, 0.029612031936645508, 0.030300384521484376, 0.030142240524291992, 0.02979430389404297, 0.029786239624023436, 0.02983795166015625, 0.03049033546447754, 0.02978937530517578, 0.029664127349853516, 0.029669376373291017, 0.02976883125305176, 0.02986470413208008, 0.029694080352783203, 0.02977177619934082, 0.029727935791015625, 0.02966192054748535, 0.02958345603942871, 0.029663232803344725, 0.029689855575561523, 0.029708288192749024, 0.056118751525878904, 0.029751840591430663, 0.0297574405670166, 0.029654720306396484, 0.029544767379760743, 0.029511680603027345, 0.02939641571044922, 0.02932793617248535, 0.030744575500488282, 0.029437376022338868, 0.029295167922973632, 0.02937651252746582, 0.029884544372558594, 0.02979827117919922, 0.029634559631347656, 0.02945142364501953, 0.02968047904968262, 0.02952396774291992, 0.029628000259399413, 0.029942176818847657, 0.029693056106567382, 0.029690336227416993, 0.029780384063720702, 0.029652992248535157, 0.029876224517822264, 0.029741056442260744, 0.029666656494140624, 0.030075551986694336, 0.030234624862670898, 0.029716480255126954, 0.02976972770690918, 0.029558784484863283, 0.029620223999023438, 0.02985321617126465, 0.02968806457519531, 0.02979043197631836, 0.02952396774291992, 0.0294783992767334, 0.029592063903808592, 0.029949951171875, 0.029633695602416993, 0.029616735458374024, 0.029671680450439452, 0.02992937660217285, 0.030313983917236328, 0.029833824157714843, 0.029910911560058595, 0.02978160095214844, 0.02958812713623047, 0.029869855880737303, 0.02975152015686035, 0.029549760818481444, 0.02966815948486328, 0.02957107162475586, 0.029525503158569336, 0.02952038383483887, 0.029398975372314454, 0.029613407135009765, 0.029682304382324217, 0.029595743179321288, 0.02941472053527832, 0.0291843204498291, 0.029131135940551757, 0.029345792770385744, 0.028943775177001953, 0.029055583953857423, 0.029128639221191407, 0.02897926330566406, 0.029212671279907225, 0.029474815368652343, 0.029464128494262696, 0.02966147232055664, 0.02951587104797363, 0.029544511795043946, 0.029577215194702147, 0.029454336166381836, 0.029480960845947264, 0.029560768127441406, 0.02967558479309082, 0.029591615676879884, 0.029703840255737305, 0.029513055801391602, 0.029467584609985352, 0.02975129508972168, 0.02970419120788574, 0.029799583435058594, 0.029756256103515625, 0.02960972785949707, 0.02972431945800781, 0.029567583084106445, 0.0295133113861084, 0.02945680046081543, 0.029241344451904298, 0.029264896392822266, 0.02998147201538086, 0.02936649513244629, 0.02922064018249512, 0.02894460868835449, 0.028651519775390624, 0.028846015930175783, 0.028540512084960938, 0.028616640090942384, 0.028563999176025392, 0.028605567932128907, 0.028563583374023437, 0.02855193519592285, 0.028590080261230468, 0.028446720123291015, 0.02856550407409668, 0.028556991577148437, 0.02855526351928711, 0.028661760330200195, 0.028829599380493166, 0.029269567489624025, 0.02881795120239258, 0.028657920837402345, 0.028601472854614257, 0.028951168060302734, 0.02876150321960449, 0.02906502342224121, 0.028575775146484374, 0.029484832763671875, 
0.02884444808959961, 0.02873353576660156, 0.02873289680480957, 0.02869964790344238, 0.028680192947387696, 0.028669952392578125, 0.028849920272827147, 0.028719423294067382, 0.028994943618774412, 0.028765920639038087, 0.028805984497070312, 0.028833791732788085, 0.029142368316650392, 0.028918432235717773, 0.028762048721313476, 0.028867712020874025, 0.03230160140991211, 0.029228799819946288, 0.0290199031829834, 0.028844959259033204, 0.028884288787841796, 0.028736064910888672, 0.028663936614990233, 0.03196086311340332, 0.029190208435058592, 0.029009151458740234, 0.028922624588012695, 0.02872470474243164, 0.028543615341186525, 0.028526527404785156, 0.02873334312438965, 0.02881558418273926, 0.02882262420654297, 0.028820383071899415, 0.028559104919433594, 0.028582143783569335, 0.02857369613647461, 0.02852454376220703, 0.029298688888549803, 0.028636703491210936, 0.028629472732543945, 0.02873910331726074, 0.028673824310302735, 0.02874361610412598, 0.02864204788208008, 0.028684288024902343, 0.028635135650634767, 0.028708864212036132, 0.02884003257751465, 0.02856857681274414, 0.028627552032470704, 0.028678144454956055, 0.028610559463500978, 0.02940835189819336, 0.028757984161376954, 0.029036735534667967, 0.028530912399291994, 0.02878220748901367, 0.029135103225708007, 0.031590656280517576, 0.030779808044433594, 0.028827648162841796, 0.028570911407470704, 0.028731359481811523, 0.028635904312133788, 0.02921062469482422, 0.028778495788574218, 0.02870195198059082, 0.028444896697998046, 0.0285283203125, 0.02848182487487793, 0.028691007614135743, 0.028572799682617188, 0.028676992416381837, 0.028554943084716795, 0.02855353546142578, 0.02896281623840332, 0.028825599670410155, 0.028962047576904296, 0.02915001678466797, 0.02904672050476074, 0.02896447944641113, 0.028825120925903322, 0.028754783630371095, 0.028811264038085937, 0.028677215576171877, 0.028666784286499023, 0.028645376205444335, 0.029060447692871094, 0.029295263290405275, 0.029495296478271486, 0.029284351348876952, 0.02927743911743164, 0.02935475158691406, 0.02941276741027832, 0.02903071975708008, 0.029052608489990233, 0.0288753604888916, 0.029146976470947265, 0.029575328826904296, 0.029904895782470704, 0.02969599914550781, 0.02955673599243164, 0.029597343444824217, 0.029276512145996095, 0.030193023681640625, 0.03394009780883789, 0.029671424865722655, 0.029439008712768555, 0.02939798355102539, 0.029756959915161134, 0.03422256088256836, 0.0295996150970459, 0.029621856689453125, 0.02929484748840332, 0.029556575775146483, 0.0290863037109375, 0.02901452827453613, 0.02889289665222168, 0.03153919982910156, 0.029198432922363283, 0.029204383850097656, 0.029108224868774416, 0.029732864379882814, 0.02956470489501953, 0.02956924819946289, 0.02998886489868164, 0.029845504760742186, 0.02941542434692383, 0.02939084815979004, 0.029468671798706055, 0.029347072601318358, 0.029451007843017577, 0.029298688888549803, 0.029403135299682616, 0.029337440490722656, 0.029405344009399415, 0.029337600708007814, 0.02931622314453125, 0.02918684768676758, 0.02903049659729004, 0.029120223999023438, 0.029248863220214843, 0.029399391174316405, 0.02929929542541504, 0.02931711959838867, 0.028997631072998048, 0.029310976028442383, 0.02926748847961426, 0.029182432174682617, 0.02920038414001465, 0.03021356773376465, 0.029272031784057618, 0.029350496292114257, 0.029351455688476562, 0.029309215545654296, 0.029218656539916992, 0.029290847778320313, 0.029265920639038087, 0.029263391494750976, 0.029177824020385743, 0.029099872589111328, 0.02893276786804199, 0.029087648391723633, 
0.028843456268310547, 0.028854944229125976, 0.02911027145385742, 0.028751615524291993, 0.028799232482910157, 0.02866511917114258, 0.028626720428466798, 0.028600671768188476, 0.028682144165039062, 0.028983552932739257, 0.029071487426757813, 0.029044416427612303, 0.02918412780761719]",tokens/s,34.03429978842609,,, 4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1844.768768,2718.892032,0.0,2340.421632,2284.9536,s,1,9.19907421875,9.19907421875,0.0,9.19907421875,9.19907421875,9.19907421875,9.19907421875,[9.19907421875],,kWh,6.098281516240907e-05,6.719490604809796e-06,2.0080571620006982e-05,8.778287738722584e-05,,MB,1678.987264,3067.019264,0.0,2659.188736,2578.241536,s,10,0.8648788452148437,0.08648788452148438,0.0008393809032997067,0.08629409790039062,0.08777180709838868,0.08789060707092285,0.08798564704895019,"[0.08774540710449219, 0.08606150054931641, 0.08800940704345703, 0.08627216339111328, 0.08684835052490235, 0.08557577514648437, 0.08529590606689454, 0.08686914825439453, 0.08631603240966797, 0.08588515472412109]",tokens/s,2959.952153025633,kWh,2.7864079055557844e-06,3.070940271263117e-07,1.8510438088761718e-06,4.944545741558267e-06,tokens/kWh,51774220.19749016,MB,1678.987264,3067.019264,0.0,2659.188736,2578.244096,s,10,19.281598876953126,1.9281598876953123,0.015830657136111852,1.9316155395507812,1.942700866699219,1.9435855407714844,1.944293280029297,"[1.931883544921875, 1.9333028564453125, 1.9425042724609376, 1.8866256103515624, 1.9407467041015625, 1.922415771484375, 1.9283448486328125, 1.94447021484375, 1.9313475341796875, 1.91995751953125]",tokens/s,32.67363894562838,kWh,5.6074434942366776e-05,6.1849856945941064e-06,2.7192284981325268e-05,8.945170561828615e-05,tokens/kWh,704290.651190459,,s,630,19.27919442558289,0.03060189591362363,0.0005375087792078687,0.03058670425415039,0.030940896224975586,0.031229864120483397,0.03296664829254151,"[0.03162393569946289, 0.030367712020874023, 0.03023865509033203, 0.030432256698608398, 0.030716800689697267, 0.030396543502807617, 0.030440959930419922, 0.030578304290771484, 0.030530431747436523, 0.030824256896972657, 0.03241164779663086, 0.030447071075439452, 0.030411487579345704, 0.030443519592285157, 0.03015782356262207, 0.03042780876159668, 0.030471904754638672, 0.030531455993652343, 0.030479328155517578, 0.030258975982666015, 0.030298175811767577, 0.031333536148071287, 0.03057539176940918, 0.030471551895141603, 0.030570335388183593, 0.030421791076660157, 0.0304323844909668, 0.030411680221557616, 0.030556127548217772, 0.03056447982788086, 0.03060223960876465, 0.030667423248291015, 0.03064271926879883, 0.030603967666625976, 0.030726144790649414, 0.03066889572143555, 0.03089664077758789, 0.03064681625366211, 0.030690176010131836, 0.03054080009460449, 0.030851264953613282, 0.030590400695800782, 0.030561792373657228, 0.030532480239868164, 0.030511104583740234, 0.030635135650634766, 0.030540191650390625, 0.030582847595214843, 
0.030654880523681642, 0.030564512252807617, 0.030965599060058593, 0.03058835220336914, 0.030640703201293945, 0.030715904235839843, 0.03080601692199707, 0.030869312286376953, 0.030895488739013672, 0.030747167587280272, 0.03092508888244629, 0.03101817512512207, 0.030965887069702148, 0.030780223846435546, 0.031192096710205078, 0.03163539123535156, 0.0309333438873291, 0.030966848373413087, 0.03063065528869629, 0.030729536056518555, 0.03073523139953613, 0.03076268768310547, 0.03066659164428711, 0.030513280868530272, 0.030716255187988283, 0.03059891128540039, 0.030762367248535157, 0.03060211181640625, 0.03060531234741211, 0.030728191375732423, 0.030611455917358397, 0.03052342414855957, 0.030445728302001953, 0.031063871383666994, 0.030330368041992187, 0.030378496170043946, 0.030439231872558595, 0.030519487380981446, 0.03060083198547363, 0.030787456512451173, 0.030685152053833008, 0.030718496322631836, 0.030768991470336914, 0.03066304016113281, 0.030856639862060546, 0.03069372749328613, 0.03060870361328125, 0.030638784408569337, 0.030744575500488282, 0.03076688003540039, 0.03080828857421875, 0.030800031661987304, 0.030653823852539064, 0.030704095840454103, 0.030817440032958984, 0.030892736434936525, 0.030779552459716798, 0.030707712173461913, 0.03168460845947266, 0.030651552200317383, 0.030595455169677734, 0.030623743057250977, 0.03058953666687012, 0.03049033546447754, 0.030574304580688477, 0.03062009620666504, 0.030537727355957032, 0.030500095367431642, 0.030624767303466797, 0.030404352188110353, 0.030693023681640626, 0.030563871383666993, 0.030489408493041992, 0.030529535293579102, 0.030470144271850585, 0.03058406448364258, 0.030689823150634767, 0.030555551528930663, 0.031258016586303713, 0.03082681655883789, 0.030720256805419923, 0.03059916877746582, 0.030604896545410157, 0.03077571105957031, 0.030674943923950194, 0.03082147216796875, 0.030631839752197267, 0.030450687408447266, 0.030487552642822265, 0.030483455657958985, 0.030702655792236327, 0.030747583389282226, 0.030594879150390625, 0.03046009635925293, 0.030619647979736327, 0.03304447937011719, 0.031723007202148434, 0.030788095474243164, 0.030766592025756836, 0.030718719482421875, 0.031735551834106444, 0.031083871841430664, 0.03096028709411621, 0.031151391983032226, 0.030845375061035157, 0.030824192047119142, 0.03089161682128906, 0.030827455520629883, 0.030724096298217773, 0.031659456253051756, 0.03237331390380859, 0.03091472053527832, 0.03070911979675293, 0.03092323112487793, 0.030801216125488282, 0.030869600296020507, 0.031363679885864255, 0.03129046440124512, 0.030651296615600586, 0.030504127502441407, 0.03030713653564453, 0.030259199142456054, 0.030038015365600586, 0.029995008468627928, 0.03003392028808594, 0.030117984771728515, 0.03017923164367676, 0.030354528427124022, 0.03034921646118164, 0.03035238456726074, 0.030517248153686522, 0.030418943405151368, 0.03043062400817871, 0.030419551849365234, 0.033509632110595704, 0.030666496276855467, 0.03078963279724121, 0.033481822967529294, 0.03021113586425781, 0.030112703323364257, 0.030148992538452147, 0.03061625671386719, 0.03014656066894531, 0.03027916717529297, 0.030331615447998048, 0.029944992065429686, 0.029802879333496093, 0.02980454444885254, 0.02985580825805664, 0.029597888946533202, 0.02961408042907715, 0.030299232482910155, 0.02972559928894043, 0.02967081642150879, 0.029565536499023437, 0.029518848419189454, 0.030112768173217775, 0.029701152801513673, 0.029557727813720704, 0.02961408042907715, 0.029732864379882814, 0.029563936233520507, 0.029585887908935547, 0.029708320617675782, 
0.029587200164794922, 0.029524703979492188, 0.029577215194702147, 0.029554975509643554, 0.029615840911865234, 0.029644800186157227, 0.029708288192749024, 0.02984761619567871, 0.029880287170410157, 0.029467615127563476, 0.02979327964782715, 0.029615488052368164, 0.02980928039550781, 0.029847455978393556, 0.02988435173034668, 0.029804704666137695, 0.03039232063293457, 0.029898975372314455, 0.030296895980834963, 0.02995689582824707, 0.029902591705322265, 0.029915584564208984, 0.029839487075805665, 0.030096511840820312, 0.0301841926574707, 0.030148256301879884, 0.030205856323242186, 0.03015110397338867, 0.030101631164550783, 0.030284896850585937, 0.03038643264770508, 0.030315040588378906, 0.030379615783691406, 0.030185567855834962, 0.030273855209350584, 0.030345216751098632, 0.030361728668212892, 0.03026521682739258, 0.03056230354309082, 0.0304388484954834, 0.031356927871704104, 0.030468095779418947, 0.03058652877807617, 0.031006080627441406, 0.03051152038574219, 0.030614080429077147, 0.030650367736816408, 0.030447391510009764, 0.030522720336914062, 0.030446399688720704, 0.0305982723236084, 0.03055619239807129, 0.030626720428466796, 0.030655807495117187, 0.030726591110229493, 0.03093222427368164, 0.03074764823913574, 0.03097964859008789, 0.030908031463623045, 0.031369855880737305, 0.031080671310424805, 0.030842912673950194, 0.030832607269287108, 0.030887903213500975, 0.030833984375, 0.030866111755371094, 0.030803840637207033, 0.030736127853393556, 0.031062623977661134, 0.030719776153564454, 0.03088934326171875, 0.030574943542480467, 0.030617887496948243, 0.030875648498535156, 0.030668800354003906, 0.030932352066040038, 0.03080665588378906, 0.030643583297729492, 0.031063840866088866, 0.03129430389404297, 0.03082659149169922, 0.030871519088745115, 0.030613439559936523, 0.03064374351501465, 0.03073686408996582, 0.030698591232299805, 0.03081679916381836, 0.030869728088378907, 0.030763391494750977, 0.030796768188476563, 0.030679872512817383, 0.030901376724243163, 0.031050624847412108, 0.030867008209228514, 0.030931392669677735, 0.030758399963378907, 0.0306177921295166, 0.030951072692871093, 0.03079132843017578, 0.030950399398803712, 0.030806175231933595, 0.0310864315032959, 0.030732288360595703, 0.031123519897460938, 0.030662176132202148, 0.030794431686401367, 0.030455520629882812, 0.030303808212280275, 0.03035500717163086, 0.03035171127319336, 0.030447168350219725, 0.03052774429321289, 0.03034409523010254, 0.0304965763092041, 0.030370912551879882, 0.030421920776367187, 0.030559520721435546, 0.030446239471435547, 0.030533695220947267, 0.030496768951416016, 0.03059027290344238, 0.030939840316772462, 0.03148185539245606, 0.030375680923461913, 0.03037980842590332, 0.03030473518371582, 0.030341119766235353, 0.030195711135864257, 0.030331903457641602, 0.03053260803222656, 0.031339967727661136, 0.03046236801147461, 0.030590944290161133, 0.030484575271606446, 0.03044361686706543, 0.030451967239379884, 0.03052864074707031, 0.030521984100341796, 0.030414848327636718, 0.030543872833251953, 0.03078054428100586, 0.031298431396484375, 0.030380096435546875, 0.03034720039367676, 0.030291967391967774, 0.030240768432617186, 0.03038928031921387, 0.03019593620300293, 0.030255168914794923, 0.03036150360107422, 0.030085760116577147, 0.03015305519104004, 0.029876031875610352, 0.0298590087890625, 0.0299835205078125, 0.029755327224731447, 0.029857887268066406, 0.029902687072753908, 0.030128288269042968, 0.03040460777282715, 0.03058892822265625, 0.030547775268554688, 0.030521472930908202, 0.030564416885375978, 
0.03269232177734375, 0.03277609634399414, 0.031184192657470702, 0.03063007926940918, 0.030554239273071288, 0.030486848831176756, 0.030451295852661132, 0.030519775390625, 0.030537023544311523, 0.030216896057128906, 0.030244863510131836, 0.03022585678100586, 0.03019219207763672, 0.03014656066894531, 0.030031040191650392, 0.03032966423034668, 0.03038768005371094, 0.030464544296264648, 0.030553279876708986, 0.030472671508789063, 0.030650527954101562, 0.03060140800476074, 0.030668800354003906, 0.03085433578491211, 0.030780223846435546, 0.03045782470703125, 0.030582815170288085, 0.030584159851074218, 0.030639936447143554, 0.031035327911376955, 0.030835424423217773, 0.03070518493652344, 0.030652223587036134, 0.030702112197875976, 0.03054732894897461, 0.030742496490478517, 0.030712352752685548, 0.030611263275146485, 0.030621631622314453, 0.030632640838623045, 0.030928735733032228, 0.03054761505126953, 0.030585344314575196, 0.03160678482055664, 0.03049852752685547, 0.030660160064697267, 0.030676767349243163, 0.030969823837280273, 0.030618560791015624, 0.030593055725097656, 0.03044668769836426, 0.03051817512512207, 0.030445632934570314, 0.030434272766113282, 0.030636287689208983, 0.030355360031127928, 0.03064313507080078, 0.030672767639160155, 0.030775104522705078, 0.030727487564086914, 0.030581567764282228, 0.030801376342773436, 0.030718559265136718, 0.03070332717895508, 0.030697216033935548, 0.03150819206237793, 0.030886367797851564, 0.030672191619873047, 0.03074105644226074, 0.030719392776489256, 0.03082499122619629, 0.030739519119262697, 0.03066975975036621, 0.030654464721679688, 0.03054719924926758, 0.030495487213134765, 0.03058687973022461, 0.030776927947998047, 0.03079542350769043, 0.030403327941894532, 0.031096832275390625, 0.03361715316772461, 0.03105254364013672, 0.03043440055847168, 0.03037481689453125, 0.030119935989379884, 0.030072959899902343, 0.03007798385620117, 0.030026752471923827, 0.030109535217285155, 0.029954048156738283, 0.03026848030090332, 0.030963712692260743, 0.030704160690307618, 0.030707647323608398, 0.03456169509887695, 0.030657344818115235, 0.03073859214782715, 0.03069443130493164, 0.03099945640563965, 0.030631711959838867, 0.030590463638305664, 0.03153126335144043, 0.030701728820800783, 0.03058095932006836, 0.030652095794677734, 0.03502889633178711, 0.030787359237670897, 0.030534271240234376, 0.030633983612060548, 0.030547840118408203, 0.03043084716796875, 0.030441408157348634, 0.030343744277954103, 0.030431232452392577, 0.030418943405151368, 0.030494720458984374, 0.03046723175048828, 0.03063484764099121, 0.030445119857788087, 0.030537343978881835, 0.030567232131958007, 0.03058278465270996, 0.03502870559692383, 0.030623647689819337, 0.03066713523864746, 0.03093212890625, 0.030712671279907226, 0.03119545555114746, 0.030795616149902345, 0.0307476806640625, 0.030598112106323242, 0.03040777587890625, 0.03067750358581543, 0.030562719345092772, 0.030641279220581054, 0.030601503372192383, 0.03063868713378906, 0.03058073616027832, 0.030406656265258788, 0.03059663963317871, 0.030582815170288085, 0.030540224075317382, 0.03057391929626465, 0.030496543884277343, 0.030622592926025392, 0.03058070373535156, 0.0305677433013916, 0.030711904525756836, 0.030603168487548828, 0.030640863418579103, 0.03057459259033203, 0.03069241523742676, 0.0306279354095459, 0.030767200469970703, 0.030696191787719727, 0.030780960083007812, 0.03075267219543457, 0.030714431762695314, 0.03055411148071289, 0.030470144271850585, 0.030674591064453124, 0.030677087783813478, 0.030664831161499023, 
0.03131609535217285, 0.030697471618652345, 0.030813440322875977, 0.030712575912475587, 0.030732288360595703, 0.030860639572143553, 0.030681184768676758, 0.030545503616333007, 0.030561248779296876, 0.030621696472167968, 0.030664703369140626, 0.03060326385498047, 0.030674112319946288, 0.03049951934814453, 0.030468223571777343, 0.030658559799194338, 0.031041536331176758, 0.03068070411682129, 0.030466432571411132, 0.030406656265258788, 0.03038559913635254, 0.030427743911743164, 0.0304434871673584, 0.031024864196777344, 0.030576383590698242, 0.030730207443237303, 0.03074924850463867, 0.03132809638977051, 0.030873920440673826, 0.03086067199707031, 0.030976320266723634, 0.03078963279724121, 0.030793727874755858, 0.030485952377319336, 0.030503488540649413, 0.030469568252563476, 0.03051558494567871, 0.030570688247680663, 0.030410751342773438, 0.03057823944091797, 0.030414751052856445, 0.030298112869262695, 0.030147104263305663, 0.030310400009155275, 0.030357503890991212, 0.030319936752319337, 0.030581439971923828, 0.03057663917541504, 0.030695423126220703, 0.0306213436126709, 0.030503263473510744, 0.030596895217895506, 0.03051884841918945, 0.03096847915649414, 0.03055820846557617, 0.030533184051513673, 0.03062009620666504, 0.030720064163208008, 0.030610879898071288, 0.0307391357421875, 0.030515008926391602, 0.030526464462280273, 0.030655263900756836, 0.03069593620300293, 0.030392032623291015, 0.030280799865722657, 0.03018844795227051, 0.030281728744506835, 0.03026857566833496, 0.03013104057312012, 0.02986160087585449, 0.02984783935546875, 0.029971744537353514, 0.030116575241088867, 0.02997657585144043, 0.03015884780883789, 0.03037772750854492, 0.030201536178588867, 0.0303374080657959, 0.030507200241088866, 0.030435327529907227, 0.030307424545288085, 0.030792608261108398, 0.030426719665527343, 0.030299999237060546, 0.030218656539916993, 0.030310272216796875, 0.030639583587646485, 0.030550848007202147, 0.03058073616027832]",tokens/s,32.67771391754884,,, 4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,808.18176,537.853952,0.0,159.383552,141.514752,s,1,7.6847021484375,7.6847021484375,0.0,7.6847021484375,7.6847021484375,7.6847021484375,7.6847021484375,[7.6847021484375],,kWh,1.780135854581507e-05,1.955537785813888e-06,5.623060053999707e-06,2.5379956385628663e-05,,MB,1145.286656,607.059968,0.0,199.22944,184.525824,s,24,0.17903702497482296,0.0074598760406176245,7.120415389335122e-05,0.00743944001197815,0.007569103956222534,0.007601238560676574,0.007614982557296753,"[0.00751251220703125, 0.007367648124694824, 0.007426559925079346, 0.00760364818572998, 0.0073504958152771, 0.00743887996673584, 0.007481056213378906, 0.007424928188323974, 0.007618368148803711, 0.007587584018707275, 0.007427648067474365, 0.0075259838104248045, 0.007387135982513428, 0.007512735843658447, 0.007440000057220459, 0.007423744201660156, 0.007369279861450195, 0.007523935794830322, 
0.007436384201049804, 0.007444096088409424, 0.007463903903961182, 0.0074160962104797366, 0.00744649600982666, 0.007407904148101807]",tokens/s,34316.924115914,kWh,2.2021106029556478e-07,2.428474853573679e-08,1.2590722860827754e-07,3.704030374395791e-07,tokens/kWh,691139040.7854289,MB,1156.395008,609.15712,0.0,201.326592,184.528384,s,24,10.067228973388671,0.4194678738911947,0.0022759284913262696,0.4189407043457031,0.42268928833007813,0.42317518920898434,0.42340203582763675,"[0.42137295532226565, 0.4157905578613281, 0.41947012329101563, 0.41589859008789065, 0.4232285461425781, 0.4189258422851562, 0.42165786743164063, 0.423453857421875, 0.4181371154785156, 0.41752493286132814, 0.4171626281738281, 0.4222610168457031, 0.4212044677734375, 0.42287283325195313, 0.41895556640625, 0.41828021240234375, 0.4200715026855469, 0.4179279479980469, 0.4219461364746094, 0.41966140747070313, 0.41865359497070315, 0.4182972106933594, 0.41855194091796877, 0.415922119140625]",tokens/s,150.19028612508595,kWh,1.2044924637447583e-05,1.3283434455706353e-06,4.416584831309737e-06,1.778985291432796e-05,tokens/kWh,3541344.6251295176,,s,1512,10.05633614206313,0.006651015966972977,0.00013737650396346672,0.006620895862579346,0.0067276641845703125,0.0068116720438003535,0.007398489761352548,"[0.006614304065704345, 0.0067135682106018064, 0.006707200050354004, 0.006791679859161377, 0.006664127826690674, 0.00666534423828125, 0.006680960178375244, 0.006682752132415771, 0.006635007858276368, 0.006681536197662354, 0.006719200134277343, 0.006696832180023193, 0.006676896095275879, 0.00708403205871582, 0.0066312642097473146, 0.0066845760345458985, 0.006676735877990722, 0.006629471778869629, 0.006638815879821777, 0.006664896011352539, 0.00664086389541626, 0.0066646718978881836, 0.0066624641418457035, 0.0066427521705627445, 0.0067265920639038084, 0.006782911777496338, 0.006838335990905762, 0.006679967880249023, 0.006670368194580078, 0.0067090878486633304, 0.006726655960083008, 0.006737631797790527, 0.006741727828979492, 0.006703328132629394, 0.006743775844573975, 0.006756703853607177, 0.006674655914306641, 0.00667625617980957, 0.0067051520347595215, 0.0066416640281677245, 0.006670335769653321, 0.006656000137329102, 0.006715199947357178, 0.00678876781463623, 0.006634016036987305, 0.006612991809844971, 0.006660096168518067, 0.006628896236419678, 0.006616991996765137, 0.006631840229034424, 0.006596159934997558, 0.006607295989990235, 0.006650015830993652, 0.0066112961769104, 0.006600351810455323, 0.0067108159065246585, 0.0067957439422607424, 0.006623167991638184, 0.0066592001914978025, 0.006662816047668457, 0.006553887844085693, 0.006590271949768066, 0.0065924801826477055, 0.006522719860076904, 0.006564000129699707, 0.006567935943603515, 0.006599967956542969, 0.006543935775756836, 0.006595744132995606, 0.006657023906707763, 0.006621471881866455, 0.00654691219329834, 0.006570335865020752, 0.0065469760894775395, 0.006513023853302002, 0.00654252815246582, 0.006560512065887451, 0.0065270400047302245, 0.006557568073272705, 0.006506432056427002, 0.006537407875061035, 0.00653926420211792, 0.0065413122177124024, 0.006574368000030518, 0.006569695949554443, 0.006624256134033203, 0.006556672096252441, 0.006533055782318115, 0.006533184051513672, 0.006551551818847656, 0.006553791999816895, 0.006554848194122315, 0.006531712055206299, 0.006518080234527588, 0.00659500789642334, 0.006510816097259522, 0.006547520160675049, 0.006627264022827148, 0.006546944141387939, 0.006510784149169922, 0.006689087867736816, 0.006649407863616943, 0.006562240123748779, 
0.00659660816192627, 0.006594560146331787, 0.006621183872222901, 0.0065842242240905765, 0.006576320171356201, 0.006584256172180176, 0.006590432167053223, 0.006593728065490722, 0.00658739185333252, 0.006640480041503906, 0.0065771198272705075, 0.006610720157623291, 0.00668665599822998, 0.006608831882476807, 0.006586719989776611, 0.006831488132476807, 0.006709440231323242, 0.006783103942871094, 0.006623456001281738, 0.006610367774963379, 0.006637216091156006, 0.006624256134033203, 0.006814943790435791, 0.0065598077774047855, 0.0066026878356933594, 0.006608672142028808, 0.006762720108032226, 0.006608640193939209, 0.006654399871826172, 0.00662713623046875, 0.007237055778503418, 0.0067794561386108395, 0.006615039825439453, 0.006597792148590088, 0.006591519832611084, 0.006575488090515137, 0.006595071792602539, 0.006575647830963135, 0.006574495792388916, 0.006586368083953857, 0.006586080074310303, 0.006774208068847656, 0.0066179518699646, 0.006536767959594726, 0.0066497278213500976, 0.006600448131561279, 0.0066117758750915525, 0.00659881591796875, 0.0066118078231811525, 0.006621632099151611, 0.006595424175262451, 0.0066343040466308595, 0.006648736000061035, 0.0066212801933288575, 0.006612736225128174, 0.006621344089508057, 0.006754303932189941, 0.006635519981384277, 0.006635072231292724, 0.0066273918151855465, 0.006602911949157715, 0.006643680095672608, 0.006648287773132324, 0.006637216091156006, 0.00662886381149292, 0.006621823787689209, 0.006615039825439453, 0.006651616096496582, 0.006680863857269287, 0.006643008232116699, 0.006615583896636963, 0.006744287967681885, 0.006665184020996094, 0.006791679859161377, 0.006666719913482666, 0.006673408031463623, 0.006632448196411133, 0.006635263919830322, 0.006629087924957275, 0.00667907190322876, 0.0067226881980895995, 0.0066689600944519044, 0.006693088054656983, 0.006743711948394775, 0.006660799980163574, 0.0066776638031005855, 0.006540639877319336, 0.007158751964569092, 0.006627103805541992, 0.006575967788696289, 0.006596735954284668, 0.006600895881652832, 0.006565855979919434, 0.006589759826660156, 0.006623968124389649, 0.006581823825836181, 0.006560416221618653, 0.0065963840484619144, 0.00659660816192627, 0.006573440074920655, 0.006900352001190185, 0.006567520141601562, 0.006594687938690186, 0.006619423866271973, 0.006608895778656006, 0.006582272052764892, 0.006619135856628418, 0.006594592094421386, 0.006586336135864258, 0.006610559940338135, 0.006609280109405518, 0.00658841609954834, 0.006600160121917725, 0.006597152233123779, 0.006604800224304199, 0.006567039966583252, 0.006549824237823487, 0.006557248115539551, 0.00657260799407959, 0.006574175834655762, 0.006622591972351074, 0.006584735870361328, 0.00658844804763794, 0.0066219520568847655, 0.006602528095245361, 0.00656006383895874, 0.0066186881065368654, 0.006568064212799072, 0.006600800037384033, 0.006593823909759522, 0.006583136081695556, 0.0065781760215759275, 0.00656771183013916, 0.006536255836486817, 0.006560704231262207, 0.006528319835662842, 0.0065502080917358395, 0.006582079887390136, 0.006545599937438965, 0.00653926420211792, 0.006551424026489258, 0.006549056053161621, 0.00657260799407959, 0.0065474557876586915, 0.006551551818847656, 0.006559072017669677, 0.006549759864807129, 0.006533696174621582, 0.00662332820892334, 0.006468512058258056, 0.006569983959197998, 0.0066007041931152345, 0.006575263977050781, 0.006603616237640381, 0.00660211181640625, 0.006588575839996338, 0.006650335788726806, 0.0069324798583984375, 0.006631423950195312, 0.006909952163696289, 0.006774879932403564, 
0.006651072025299072, 0.006802144050598144, 0.006714399814605713, 0.007000800132751465, 0.006676896095275879, 0.006831967830657959, 0.006677631855010986, 0.006697919845581055, 0.00675219202041626, 0.006668416023254395, 0.006627552032470703, 0.0066902079582214355, 0.006680831909179687, 0.006598495960235596, 0.006764480113983154, 0.006627647876739502, 0.006631328105926514, 0.006710559844970703, 0.00765718412399292, 0.006691359996795655, 0.006903327941894531, 0.00685971212387085, 0.006695968151092529, 0.006660831928253174, 0.006764800071716309, 0.006670400142669678, 0.00664569616317749, 0.006684000015258789, 0.0066258878707885745, 0.006780672073364258, 0.007104191780090332, 0.006660511970520019, 0.006642015933990478, 0.006622464179992676, 0.006645664215087891, 0.00663424015045166, 0.006627295970916748, 0.00674019193649292, 0.006684031963348389, 0.00669532823562622, 0.006703296184539795, 0.006645088195800781, 0.006656479835510254, 0.006897503852844238, 0.006700799942016601, 0.006646175861358642, 0.006684671878814697, 0.006632800102233887, 0.006611711978912353, 0.006588287830352783, 0.006610496044158936, 0.0066119680404663084, 0.006622208118438721, 0.006667744159698486, 0.006588543891906738, 0.006637440204620361, 0.006592895984649658, 0.006590047836303711, 0.006593088150024414, 0.006608799934387207, 0.006645535945892334, 0.006692768096923828, 0.006621600151062012, 0.006686560153961182, 0.006635744094848633, 0.006637792110443116, 0.006686431884765625, 0.006645760059356689, 0.006605088233947754, 0.006917856216430664, 0.006642911911010742, 0.006619232177734375, 0.006689472198486328, 0.006657375812530518, 0.006671072006225586, 0.006649792194366455, 0.006610208034515381, 0.006601439952850342, 0.006620863914489746, 0.006629183769226075, 0.006590976238250733, 0.006633471965789795, 0.006602399826049805, 0.006594687938690186, 0.006627552032470703, 0.006608895778656006, 0.006572288036346435, 0.006647552013397217, 0.006885087966918945, 0.0066276159286499025, 0.006625279903411865, 0.006660096168518067, 0.00658841609954834, 0.006643712043762207, 0.006708352088928222, 0.006582208156585693, 0.006611904144287109, 0.006589920043945313, 0.006605343818664551, 0.006623231887817383, 0.006610559940338135, 0.006605184078216553, 0.006611040115356445, 0.006627232074737549, 0.0067051520347595215, 0.006632832050323487, 0.006650496006011963, 0.006617311954498291, 0.006743840217590332, 0.006711264133453369, 0.006652991771697998, 0.006714560031890869, 0.00669052791595459, 0.006598144054412842, 0.006526976108551025, 0.006937632083892822, 0.006622176170349121, 0.006647071838378906, 0.006641407966613769, 0.006574687957763672, 0.006602784156799316, 0.006593952178955078, 0.006630112171173095, 0.006621407985687256, 0.006595935821533203, 0.006623104095458985, 0.006830880165100098, 0.0066007041931152345, 0.006660096168518067, 0.006619328022003173, 0.006595647811889648, 0.006652895927429199, 0.006641439914703369, 0.00661900806427002, 0.006659327983856201, 0.006628223896026611, 0.006645760059356689, 0.006665535926818848, 0.006634175777435303, 0.006629216194152832, 0.006684832096099854, 0.006692863941192627, 0.006680768013000488, 0.006692319869995117, 0.006651296138763428, 0.006697184085845948, 0.006695648193359375, 0.0066844158172607426, 0.006689023971557617, 0.006719359874725342, 0.006667744159698486, 0.006677152156829834, 0.006964863777160645, 0.006783616065979004, 0.006870783805847168, 0.006680768013000488, 0.006719295978546143, 0.0067235522270202635, 0.006684703826904297, 0.006696671962738037, 0.0066679039001464845, 
0.006656832218170166, 0.006708767890930176, 0.006676608085632324, 0.0066070399284362796, 0.006639616012573242, 0.007557439804077149, 0.006667967796325684, 0.006647808074951172, 0.006633823871612549, 0.00659116792678833, 0.006674784183502197, 0.0066648321151733395, 0.006666240215301514, 0.006723743915557861, 0.006690976142883301, 0.006669727802276612, 0.0065797438621521, 0.006646240234375, 0.006647808074951172, 0.0066538558006286625, 0.00661900806427002, 0.0066500802040100095, 0.006686272144317627, 0.006638016223907471, 0.006633471965789795, 0.006717440128326416, 0.0067338237762451176, 0.006601984024047851, 0.006680768013000488, 0.006611519813537597, 0.00658406400680542, 0.007115007877349854, 0.006666240215301514, 0.006653952121734619, 0.0066183037757873535, 0.006623583793640137, 0.006624735832214355, 0.0066447358131408694, 0.006694911956787109, 0.006621183872222901, 0.00671724796295166, 0.006589888095855713, 0.006607615947723388, 0.006623136043548584, 0.0066269440650939945, 0.007408095836639404, 0.008238752365112305, 0.006781375885009765, 0.006733727931976318, 0.0067276802062988285, 0.006672383785247803, 0.006684351921081543, 0.006647264003753662, 0.006636096000671387, 0.006666528224945068, 0.006895296096801758, 0.00666860818862915, 0.0067051520347595215, 0.006676671981811523, 0.006612800121307373, 0.006663616180419922, 0.006658720016479492, 0.006610847949981689, 0.006612736225128174, 0.006594719886779785, 0.006803199768066406, 0.006658400058746338, 0.006677728176116943, 0.006660672187805175, 0.006670239925384521, 0.006709568023681641, 0.00666374397277832, 0.006714816093444824, 0.00671011209487915, 0.00698905611038208, 0.006664576053619385, 0.00668726396560669, 0.0066509442329406735, 0.006707680225372314, 0.006628287792205811, 0.006668320178985596, 0.006647583961486817, 0.006663839817047119, 0.006631968021392823, 0.006618944168090821, 0.00663372802734375, 0.006666175842285156, 0.006598656177520752, 0.006590464115142822, 0.006729504108428955, 0.006735775947570801, 0.006723264217376709, 0.006646399974822998, 0.006623551845550537, 0.006600575923919678, 0.006600607872009277, 0.00667193603515625, 0.006599135875701904, 0.006571904182434082, 0.006626399993896484, 0.0066139202117919925, 0.00656390380859375, 0.006621183872222901, 0.006598239898681641, 0.006578527927398682, 0.0066295361518859865, 0.006592351913452149, 0.006606847763061524, 0.006596447944641114, 0.006574111938476563, 0.0065651841163635255, 0.006622015953063965, 0.0066244797706604, 0.0065586881637573246, 0.006614848136901856, 0.00667625617980957, 0.006609119892120361, 0.006600096225738526, 0.006599264144897461, 0.006592031955718994, 0.006592991828918457, 0.006575456142425537, 0.006611231803894043, 0.006578495979309082, 0.0065598077774047855, 0.006594560146331787, 0.006612991809844971, 0.006575424194335938, 0.006607135772705078, 0.006581920146942139, 0.006877952098846436, 0.0068055038452148435, 0.006742335796356201, 0.006583487987518311, 0.006608863830566406, 0.006695680141448975, 0.006616864204406738, 0.0065946559906005855, 0.0065801281929016115, 0.006668288230895996, 0.006881279945373535, 0.006635519981384277, 0.006500671863555909, 0.00663315200805664, 0.006600224018096924, 0.00660425615310669, 0.006657023906707763, 0.006690815925598144, 0.006604032039642334, 0.00664243221282959, 0.006625279903411865, 0.0065732159614562986, 0.007563168048858642, 0.006654911994934082, 0.006645952224731445, 0.006911808013916015, 0.006748159885406494, 0.006660096168518067, 0.006622943878173828, 0.0065865921974182125, 0.0065905280113220215, 
0.00658022403717041, 0.006602431774139404, 0.00657587194442749, 0.0066113600730896, 0.006590879917144775, 0.0065797119140625, 0.0065763840675354, 0.006596320152282715, 0.00658460807800293, 0.006586016178131104, 0.006597184181213379, 0.006563136100769043, 0.006602367877960205, 0.006590591907501221, 0.006568672180175782, 0.006563104152679443, 0.006568672180175782, 0.006612991809844971, 0.006596096038818359, 0.006574592113494873, 0.006563392162322998, 0.006605343818664551, 0.006539455890655518, 0.006555359840393066, 0.006536255836486817, 0.006560832023620605, 0.0066145601272583005, 0.006582623958587647, 0.006557248115539551, 0.006645247936248779, 0.006577055931091308, 0.006600575923919678, 0.006607135772705078, 0.006573567867279053, 0.006590400218963623, 0.006584832191467285, 0.006553120136260986, 0.00659497594833374, 0.006566080093383789, 0.006598688125610351, 0.006596672058105469, 0.006608607769012451, 0.006563136100769043, 0.006869696140289307, 0.006553599834442139, 0.0066109437942504885, 0.006612031936645508, 0.00711520004272461, 0.006574592113494873, 0.006612480163574219, 0.006578688144683838, 0.0065491518974304195, 0.006586719989776611, 0.00658841609954834, 0.006557568073272705, 0.006577600002288819, 0.0065560641288757325, 0.006591104030609131, 0.006571296215057373, 0.006582015991210937, 0.006707839965820313, 0.006612448215484619, 0.006619647979736328, 0.006620927810668946, 0.006592383861541748, 0.006580031871795654, 0.006632031917572022, 0.006636960029602051, 0.00661078405380249, 0.0066130561828613285, 0.006625984191894531, 0.006612256050109864, 0.006635392189025879, 0.006635712146759033, 0.006570655822753907, 0.0066375679969787596, 0.0066007041931152345, 0.006598015785217285, 0.00662553596496582, 0.006656383991241455, 0.006593696117401123, 0.006642271995544433, 0.006591775894165039, 0.006617760181427002, 0.006590591907501221, 0.006568128108978271, 0.006616896152496338, 0.00659884786605835, 0.006584320068359375, 0.0065781760215759275, 0.006574175834655762, 0.006575744152069091, 0.006631040096282959, 0.006613471984863281, 0.0065680317878723145, 0.006608672142028808, 0.006620800018310547, 0.0065790719985961916, 0.006601535797119141, 0.006587135791778565, 0.006634943962097168, 0.006587200164794922, 0.006598624229431152, 0.00663318395614624, 0.006723904132843018, 0.006658016204833984, 0.006695168018341064, 0.006567647933959961, 0.006608799934387207, 0.006635424137115479, 0.006624800205230713, 0.006593472003936768, 0.006665408134460449, 0.006660736083984375, 0.006604991912841797, 0.006643712043762207, 0.006620319843292236, 0.0065911998748779295, 0.006609024047851563, 0.0065987520217895505, 0.0066538558006286625, 0.006662144184112549, 0.006616352081298828, 0.007473887920379639, 0.0077844481468200685, 0.007501823902130127, 0.006674208164215088, 0.006666816234588623, 0.006903039932250977, 0.0068100161552429195, 0.006664415836334228, 0.006655168056488037, 0.006697504043579101, 0.006655519962310791, 0.006628896236419678, 0.006669312000274658, 0.006677504062652588, 0.006606912136077881, 0.00664467191696167, 0.007150847911834717, 0.006986368179321289, 0.006652031898498535, 0.0067153282165527345, 0.006674496173858643, 0.0066184959411621095, 0.006643807888031006, 0.0066260480880737304, 0.0065896639823913574, 0.006596447944641114, 0.006624127864837646, 0.006606080055236816, 0.006586495876312256, 0.006575839996337891, 0.006646527767181397, 0.006618175983428955, 0.006560704231262207, 0.00663750410079956, 0.006600031852722168, 0.006580959796905517, 0.006604800224304199, 0.006574368000030518, 
0.006588287830352783, 0.006620192050933838, 0.006623936176300049, 0.006619264125823974, 0.006655168056488037, 0.006656864166259766, 0.006642911911010742, 0.006650623798370362, 0.00662332820892334, 0.006541056156158447, 0.006664703845977784, 0.006573344230651855, 0.0066377601623535155, 0.006588031768798828, 0.006594719886779785, 0.006611711978912353, 0.006612991809844971, 0.006594560146331787, 0.006647808074951172, 0.006700128078460693, 0.006593023777008057, 0.006679999828338623, 0.006628032207489014, 0.0066072320938110355, 0.006618624210357666, 0.006611648082733154, 0.006594592094421386, 0.006620863914489746, 0.006680575847625733, 0.0067645440101623535, 0.006639616012573242, 0.006617311954498291, 0.006631199836730957, 0.006610144138336182, 0.007117824077606201, 0.006667295932769776, 0.006633952140808106, 0.00664192008972168, 0.006625311851501465, 0.006656000137329102, 0.0066416640281677245, 0.006606847763061524, 0.006660096168518067, 0.0066085438728332516, 0.006617440223693848, 0.006675551891326905, 0.0066730880737304685, 0.0066665921211242676, 0.0067564802169799805, 0.00679091215133667, 0.006783008098602295, 0.00682377576828003, 0.006762176036834717, 0.006699264049530029, 0.006740096092224121, 0.006727231979370117, 0.0068059201240539555, 0.006739840030670166, 0.006724095821380615, 0.0067066879272460935, 0.0067053442001342774, 0.006733439922332764, 0.006721951961517334, 0.006684256076812744, 0.006729472160339356, 0.006733856201171875, 0.0067242240905761716, 0.0067358717918396, 0.006668479919433594, 0.006690624237060547, 0.006715392112731934, 0.006686399936676026, 0.006573503971099854, 0.006692512035369873, 0.006663167953491211, 0.0066715202331542964, 0.0067226881980895995, 0.00664089584350586, 0.0066656961441040035, 0.006665215969085693, 0.006647039890289306, 0.006638336181640625, 0.006679967880249023, 0.006648416042327881, 0.006684288024902344, 0.006730112075805664, 0.006735167980194092, 0.006699647903442383, 0.006764832019805908, 0.00671721601486206, 0.006694911956787109, 0.006813695907592773, 0.006788991928100586, 0.006764416217803955, 0.006728288173675537, 0.006737567901611328, 0.006727712154388428, 0.006673696041107177, 0.0067058558464050295, 0.00666428804397583, 0.006635424137115479, 0.006670335769653321, 0.007223199844360351, 0.006723680019378662, 0.007021632194519043, 0.006750495910644532, 0.006701536178588867, 0.00670739221572876, 0.006809599876403808, 0.006658048152923584, 0.0066109437942504885, 0.006633471965789795, 0.006668288230895996, 0.006612703800201416, 0.006745376110076904, 0.006613279819488525, 0.006922976016998291, 0.006795263767242431, 0.006642943859100342, 0.006636223793029785, 0.006662208080291748, 0.006597856044769287, 0.006572256088256836, 0.0066483840942382815, 0.006693088054656983, 0.006544511795043945, 0.0075615358352661135, 0.006627488136291504, 0.006577824115753174, 0.006608479976654052, 0.006609856128692627, 0.006586368083953857, 0.006635615825653076, 0.006622303962707519, 0.006561952114105225, 0.0065548157691955565, 0.0066415038108825685, 0.00671830415725708, 0.006623231887817383, 0.006585472106933594, 0.006568831920623779, 0.006598656177520752, 0.006623424053192138, 0.00702784013748169, 0.006654272079467774, 0.006630080223083496, 0.006603616237640381, 0.006650720119476318, 0.006610367774963379, 0.006574656009674072, 0.006637216091156006, 0.006608895778656006, 0.006817952156066894, 0.00666864013671875, 0.0066022400856018066, 0.00662886381149292, 0.006613311767578125, 0.006593056201934814, 0.006592351913452149, 0.0066152639389038086, 0.006705088138580322, 
0.006645760059356689, 0.006633024215698242, 0.006609248161315918, 0.006655424118041992, 0.006627999782562256, 0.006601856231689453, 0.0067916159629821775, 0.006666079998016357, 0.006590112209320069, 0.006609856128692627, 0.0066007041931152345, 0.006561791896820069, 0.0066415038108825685, 0.006612927913665772, 0.006574560165405274, 0.006723328113555908, 0.006669792175292968, 0.006608416080474853, 0.006637792110443116, 0.006576831817626953, 0.006989920139312744, 0.006642975807189941, 0.0066546878814697265, 0.006587776184082031, 0.00658406400680542, 0.006581151962280273, 0.00657747220993042, 0.006617760181427002, 0.00659827184677124, 0.006592959880828858, 0.006614175796508789, 0.006594336032867432, 0.006562816143035889, 0.00658841609954834, 0.006942560195922851, 0.00677459192276001, 0.006623199939727783, 0.0065147199630737306, 0.006588384151458741, 0.006638815879821777, 0.006611711978912353, 0.0065924482345581055, 0.006608287811279297, 0.006623263835906982, 0.006611616134643555, 0.006776832103729248, 0.006998015880584717, 0.0073108158111572265, 0.007446144104003906, 0.00668559980392456, 0.006649600028991699, 0.006608511924743652, 0.006639359951019287, 0.006622335910797119, 0.00659660816192627, 0.006636735916137696, 0.006631872177124023, 0.006592832088470459, 0.00656163215637207, 0.0065576000213623045, 0.006585919857025146, 0.006570271968841553, 0.0065593280792236325, 0.006578815937042236, 0.0065651521682739255, 0.006554304122924804, 0.006577663898468018, 0.006613344192504883, 0.006582240104675293, 0.006584544181823731, 0.006619135856628418, 0.006590144157409668, 0.006625599861145019, 0.006591904163360596, 0.006597216129302978, 0.006575168132781983, 0.006550015926361084, 0.00662992000579834, 0.006584127902984619, 0.006578271865844727, 0.006625120162963867, 0.00659881591796875, 0.006649856090545654, 0.006606847763061524, 0.006595935821533203, 0.006592832088470459, 0.0066154561042785645, 0.006574016094207763, 0.006584320068359375, 0.006608895778656006, 0.0065775041580200195, 0.00659113597869873, 0.006608416080474853, 0.006562272071838379, 0.006653696060180664, 0.006630752086639404, 0.006594880104064942, 0.006611551761627197, 0.006582272052764892, 0.006557439804077149, 0.0064637441635131835, 0.006572896003723144, 0.0065504322052001955, 0.006788896083831787, 0.006713056087493897, 0.006718080043792724, 0.006646687984466553, 0.00661187219619751, 0.006676544189453125, 0.007654911994934082, 0.006598176002502442, 0.006611936092376709, 0.0068074560165405276, 0.0072145919799804685, 0.00663420820236206, 0.0066184959411621095, 0.006685056209564209, 0.006641791820526123, 0.0066102719306945805, 0.006682271957397461, 0.00667955207824707, 0.006637375831604004, 0.006662335872650147, 0.006639520168304444, 0.006624576091766358, 0.006673183917999268, 0.006619135856628418, 0.006627520084381103, 0.006714879989624023, 0.006639359951019287, 0.006652480125427246, 0.006657663822174072, 0.0066072320938110355, 0.006623487949371338, 0.00679091215133667, 0.006635359764099121, 0.006614496231079101, 0.00663212776184082, 0.006631423950195312, 0.0066375679969787596, 0.0066375679969787596, 0.006649856090545654, 0.006606880187988281, 0.006629024028778076, 0.006668223857879639, 0.006611328125, 0.006582176208496094, 0.00656924819946289, 0.00663804817199707, 0.006613344192504883, 0.006577856063842773, 0.006638207912445068, 0.006661824226379395, 0.006594336032867432, 0.006663839817047119, 0.006601632118225098, 0.006588064193725586, 0.006631775856018067, 0.0066202559471130375, 0.006560319900512695, 0.006635680198669434, 
0.006606688022613525, 0.006542623996734619, 0.006697055816650391, 0.006638815879821777, 0.006601535797119141, 0.006631328105926514, 0.006856607913970947, 0.006852255821228027, 0.006642208099365234, 0.006586368083953857, 0.006567967891693115, 0.006608863830566406, 0.006607935905456543, 0.006577280044555664, 0.00678278398513794, 0.006620543956756592, 0.006566495895385742, 0.006581600189208984, 0.006634175777435303, 0.006578464031219483, 0.006608607769012451, 0.006586368083953857, 0.006608128070831299, 0.006596896171569824, 0.006551136016845703, 0.006621535778045654, 0.006607135772705078, 0.006572159767150879, 0.006620287895202637, 0.0065710082054138185, 0.0065855679512023925, 0.0066546878814697265, 0.006588479995727539, 0.006573952198028564, 0.006610847949981689, 0.00659219217300415, 0.006664735794067383, 0.006598656177520752, 0.006602719783782959, 0.0066089282035827635, 0.006587456226348877, 0.006562751770019531, 0.0065821118354797365, 0.006565824031829834, 0.006587935924530029, 0.006600959777832031, 0.006587039947509765, 0.006550528049468994, 0.0065862398147583006, 0.00655452823638916, 0.0065532798767089845, 0.006560256004333496, 0.006592319965362549, 0.007219200134277344, 0.006729568004608154, 0.006602079868316651, 0.006546239852905273, 0.0066109437942504885, 0.006643712043762207, 0.006667295932769776, 0.006671328067779541, 0.006715072154998779, 0.006660128116607666, 0.006650144100189209, 0.006633471965789795, 0.0065426878929138186, 0.006706079959869385, 0.006658207893371582, 0.006727519989013672, 0.006676479816436768, 0.006659135818481445, 0.006663455963134766, 0.00667190408706665, 0.006637119770050049, 0.0066334400177001955, 0.0078096318244934085, 0.0067840638160705565, 0.006841343879699707, 0.006657375812530518, 0.00667849588394165, 0.006984320163726807, 0.006617087841033936, 0.006655007839202881, 0.006640448093414307, 0.006621344089508057, 0.006626848220825195, 0.006571936130523682, 0.006605375766754151, 0.006653600215911866, 0.006646111965179443, 0.006616096019744873, 0.006656767845153809, 0.0069749121665954586, 0.006791967868804932, 0.006685855865478515, 0.006687935829162598, 0.006633120059967041, 0.006697279930114746, 0.006661664009094239, 0.006645919799804688, 0.0075797438621521, 0.006694784164428711, 0.0066499199867248535, 0.006594528198242187, 0.006649087905883789, 0.0066926078796386715, 0.006639711856842041, 0.0066893439292907714, 0.006629727840423584, 0.00659660816192627, 0.006581344127655029, 0.00661516809463501, 0.006613215923309326, 0.006596896171569824, 0.00655347204208374, 0.006610847949981689, 0.006699423789978027, 0.006592576026916504, 0.006666272163391113, 0.006619135856628418, 0.006608895778656006, 0.0066416640281677245, 0.006593920230865478, 0.00659827184677124, 0.006611839771270752, 0.006620448112487793, 0.006613632202148437, 0.006622432231903076, 0.006513696193695068, 0.00659884786605835, 0.006631360054016113, 0.0066507840156555175, 0.006612927913665772, 0.006587808132171631, 0.0065625920295715335, 0.006608704090118408, 0.006580448150634766, 0.0065699520111083985, 0.0066249918937683104, 0.006596672058105469, 0.0065782079696655275, 0.006606880187988281, 0.006567903995513916, 0.006576128005981445, 0.006592512130737305, 0.006592319965362549, 0.0065742721557617186, 0.006639616012573242, 0.006651904106140137, 0.006739967823028564, 0.0066744318008422855, 0.00662937593460083, 0.006602752208709717, 0.006627327919006347, 0.006602848052978515, 0.007052351951599121, 0.006633952140808106, 0.00659935998916626, 0.006631103992462159, 0.006623424053192138, 0.0066005120277404785, 
0.00658784008026123, 0.006638144016265869, 0.0066085438728332516, 0.006584159851074219, 0.00662284803390503, 0.007574399948120117, 0.006637184143066406, 0.006988160133361817, 0.006635615825653076, 0.006664095878601074, 0.006707136154174805, 0.0067702078819274905, 0.006590432167053223, 0.00659935998916626, 0.006645631790161133, 0.006627327919006347, 0.006656320095062256, 0.006663584232330323, 0.0066583361625671385, 0.006694911956787109, 0.006660096168518067, 0.0066390719413757324, 0.0066418561935424805, 0.006651552200317383, 0.006619840145111084, 0.006666368007659912, 0.006731647968292237, 0.0066228160858154295, 0.006625311851501465, 0.0066596798896789555, 0.006598400115966797, 0.006672671794891357, 0.0066416640281677245, 0.006604512214660644, 0.006640096187591552, 0.006634655952453613, 0.006597280025482178, 0.006668447971343994, 0.006680416107177734, 0.006616256237030029, 0.006631872177124023, 0.00659222412109375, 0.006654592037200928, 0.006687967777252197, 0.0066427202224731445, 0.006907392024993897, 0.006701663970947265, 0.006667967796325684, 0.006617087841033936, 0.0066490240097045895, 0.006691648006439209, 0.006633471965789795, 0.00667251205444336, 0.00666815996170044, 0.006649280071258545, 0.006681248188018799, 0.0066622719764709475, 0.006628352165222168, 0.0066260800361633305, 0.006618144035339356, 0.006632415771484375, 0.006660223960876465, 0.006641536235809326, 0.006643136024475097, 0.006664256095886231, 0.006647424221038818, 0.006634367942810059, 0.006653952121734619, 0.0066304001808166506, 0.006615039825439453, 0.006588799953460693, 0.006611839771270752, 0.0065616960525512694, 0.00661897611618042, 0.006608575820922852, 0.006609216213226319, 0.006597792148590088, 0.006589280128479004, 0.006625279903411865, 0.006612991809844971, 0.0065814399719238284, 0.006634592056274414, 0.006629087924957275, 0.006606751918792724, 0.006621344089508057, 0.006608287811279297, 0.006650400161743164, 0.006658048152923584, 0.006610015869140625, 0.006631968021392823, 0.00662332820892334, 0.006576032161712647, 0.006691199779510498, 0.006516863822937011, 0.006606751918792724, 0.006596447944641114, 0.006588704109191894, 0.006573023796081543, 0.006597536087036133, 0.006620351791381836, 0.006576992034912109, 0.006604032039642334, 0.006597311973571778, 0.006553823947906494, 0.006619232177734375, 0.006583775997161865, 0.006598911762237549, 0.006610144138336182, 0.006564640045166016, 0.006595808029174805, 0.006596960067749023, 0.007074240207672119, 0.006799359798431396, 0.0076447358131408695, 0.006618847846984863, 0.006633823871612549, 0.007188864231109619, 0.006773952007293701, 0.006630080223083496, 0.0065762557983398435, 0.006620192050933838, 0.006611936092376709, 0.006559296131134033, 0.00660422420501709, 0.006572480201721191, 0.0065582718849182126, 0.006581535816192627, 0.006599071979522705, 0.006559264183044433, 0.006640416145324707, 0.006592031955718994, 0.006613247871398926, 0.006612576007843017, 0.006576896190643311, 0.006585343837738037, 0.006612063884735108, 0.0065922880172729495, 0.006692224025726319, 0.006633823871612549, 0.006599167823791504, 0.006643487930297852, 0.006565951824188233, 0.006574016094207763, 0.00659660816192627, 0.0065797119140625, 0.006566400051116943, 0.006602303981781006, 0.00656771183013916, 0.006574816226959228, 0.0065550079345703125, 0.006586944103240967, 0.00656163215637207, 0.006559904098510742, 0.0065413122177124024, 0.006651999950408935, 0.0065673599243164064, 0.006504767894744873, 0.006613376140594482, 0.00658022403717041, 0.006579872131347656, 0.006591104030609131, 
0.006586080074310303, 0.006576128005981445, 0.006572192192077637, 0.006565728187561035, 0.006563839912414551, 0.006566239833831787, 0.00656876802444458, 0.0065730881690979, 0.006563839912414551, 0.006559904098510742, 0.006590112209320069, 0.006555520057678223, 0.0065697598457336425, 0.006609248161315918, 0.0065474557876586915, 0.00659660816192627, 0.007102719783782959, 0.007218815803527832, 0.007493760108947754, 0.007320767879486084, 0.00659881591796875, 0.0066013760566711426, 0.006582143783569336, 0.006600831985473633, 0.006616511821746826, 0.006584256172180176, 0.0066312642097473146, 0.006613183975219727, 0.006600895881652832, 0.006582687854766846, 0.0065487041473388675, 0.006565887928009034, 0.006605631828308105, 0.006574048042297363, 0.006584320068359375, 0.0066436161994934085, 0.00661897611618042, 0.006577727794647217, 0.006634175777435303, 0.006559743881225586, 0.006614143848419189, 0.006564191818237305, 0.006550111770629883, 0.006571839809417725, 0.006586368083953857, 0.006570112228393554, 0.006586495876312256, 0.006586143970489502, 0.0065342397689819335, 0.006808640003204346, 0.0065493440628051755, 0.0068854718208312985, 0.00658022403717041, 0.00658841609954834, 0.0065413122177124024, 0.006717696189880371, 0.00659552001953125, 0.00658457612991333, 0.00657203197479248, 0.006559135913848877, 0.006609504222869873, 0.0067049598693847655, 0.006594751834869385, 0.006606527805328369, 0.006570303916931152, 0.006557695865631104, 0.006594560146331787, 0.0065615038871765135, 0.0065642881393432615, 0.006588255882263184, 0.006615039825439453, 0.006574079990386963, 0.006612512111663818, 0.006594528198242187, 0.006583040237426758, 0.0066128640174865725, 0.006607744216918945, 0.006583104133605957, 0.006568128108978271, 0.006576128005981445, 0.00661078405380249, 0.006581984043121338, 0.006559936046600342, 0.006584447860717773, 0.006625408172607422, 0.00657369613647461, 0.0066211199760437016, 0.006576831817626953, 0.006603775978088379, 0.006607583999633789, 0.006588511943817139, 0.006586175918579101, 0.006637695789337158, 0.006565887928009034, 0.006555712223052979, 0.006603007793426514, 0.006573152065277099, 0.0065829439163208004, 0.00659449577331543, 0.0065660161972045895, 0.00659443187713623, 0.006563744068145752, 0.006584415912628174, 0.006631423950195312, 0.006601024150848389, 0.006586048126220703, 0.006621183872222901, 0.006582208156585693, 0.006587808132171631, 0.006611616134643555, 0.006608895778656006, 0.006606272220611572, 0.006596543788909912, 0.006650176048278809, 0.006594880104064942, 0.006602399826049805, 0.006606207847595215, 0.006622176170349121, 0.006600831985473633, 0.006590176105499268, 0.006624576091766358]",tokens/s,150.3529693757632,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1846.84544,2466.11968,0.0,2080.374784,2078.348288,s,1,8.765169921875,8.765169921875,0.0,8.765169921875,8.765169921875,8.765169921875,8.765169921875,[8.765169921875],,kWh,4.1773937099954615e-05,4.600870021146371e-06,1.4480011583994923e-05,6.085481870509591e-05,,MB,1991.872512,2663.251968,0.0,2248.146944,2179.679232,s,10,1.240409423828125,0.12404094238281249,0.0006159039845250523,0.124,0.1250013671875,0.12503546600341797,0.12506274505615234,"[0.12506956481933593, 0.12425395202636719, 0.12385536193847656, 0.12323359680175781, 0.12376697540283203, 0.1233116455078125, 0.12499378967285156, 0.1234249267578125, 0.12414463806152344, 0.12435497283935547]",tokens/s,2063.8346910485275,kWh,3.851116668092322e-06,4.247089922841152e-07,2.5531526273155593e-06,6.828978287691996e-06,tokens/kWh,37487306.18479106,MB,1997.438976,2663.251968,0.0,2248.146944,2179.681792,s,10,18.4619130859375,1.84619130859375,0.007694998476195262,1.8421884765624998,1.856949365234375,1.8573169677734374,1.8576110498046874,"[1.8576845703125, 1.839062255859375, 1.842160888671875, 1.838141845703125, 1.8511507568359375, 1.855139404296875, 1.842216064453125, 1.8420477294921875, 1.83744189453125, 1.85686767578125]",tokens/s,34.12430754426382,kWh,5.37268679773256e-05,5.9254607308307e-06,2.884476137988737e-05,8.849709008804366e-05,tokens/kWh,711887.8139080368,,s,630,18.46001127243043,0.029301605194333995,0.0005097920750915514,0.02919083213806152,0.02964666633605957,0.029831091117858886,0.031281340675354014,"[0.03045359992980957, 0.029776287078857423, 0.02959587287902832, 0.029772863388061524, 0.02936716842651367, 0.029517887115478515, 0.02941257667541504, 0.02919094467163086, 0.029626367568969726, 0.03164387130737305, 0.02953727912902832, 0.029364864349365236, 0.029288320541381835, 0.029408639907836914, 0.029286687850952148, 0.02923993682861328, 0.029707359313964843, 0.029557600021362304, 0.029323328018188478, 0.0292227840423584, 0.02930496025085449, 0.029566976547241212, 0.029500543594360353, 0.029901248931884766, 0.032403648376464846, 0.029403232574462892, 0.029693119049072264, 0.029506528854370117, 0.029383840560913085, 0.029648767471313477, 0.029237600326538087, 0.029205120086669922, 0.029310752868652343, 0.029292320251464842, 0.029235647201538085, 0.029599103927612305, 0.0296280632019043, 0.02926652717590332, 0.02983087921142578, 0.029354272842407228, 0.029285888671875, 0.02918668746948242, 0.029108640670776367, 0.02920022392272949, 0.029204383850097656, 0.029867807388305665, 0.029216447830200196, 0.029219104766845704, 0.029489503860473634, 0.029374176025390625, 0.02944233512878418, 0.029245439529418944, 0.02911395263671875, 0.029076896667480468, 0.029065568923950194, 0.02912323188781738, 0.02923091125488281, 0.02961756706237793, 0.02920457649230957, 0.0291329288482666, 0.029303359985351562, 0.02915328025817871, 0.02906447982788086, 0.029874176025390626, 0.02932111930847168, 0.029446239471435546, 0.029274112701416017, 0.02925881576538086, 0.02916204833984375, 0.029241279602050783, 0.02920044708251953, 0.0292043514251709, 0.02926233673095703, 0.029732864379882814, 0.029203935623168944, 0.02940777587890625, 0.02917580795288086, 0.0291778564453125, 0.029337600708007814, 0.02944819259643555, 0.029372224807739256, 0.029186239242553712, 0.02911824035644531, 0.029116384506225584, 0.029016319274902343, 0.029157375335693358, 0.029608095169067383, 0.029212415695190428, 0.029108352661132812, 0.02905084800720215, 0.02909388732910156, 0.029568960189819336, 0.028908704757690428, 
0.029464576721191408, 0.029326240539550782, 0.029032447814941405, 0.02895587158203125, 0.029137344360351564, 0.028995935440063476, 0.028968639373779297, 0.028907520294189453, 0.029149503707885743, 0.02912227249145508, 0.02902176094055176, 0.02917158317565918, 0.029009952545166015, 0.02918396759033203, 0.02899849510192871, 0.028884672164916993, 0.029276256561279298, 0.029096160888671875, 0.029152992248535157, 0.02897279930114746, 0.02901251220703125, 0.029089344024658202, 0.028948543548583984, 0.028952224731445313, 0.02895129585266113, 0.029071552276611328, 0.029084831237792968, 0.030234880447387695, 0.029423999786376952, 0.029034496307373047, 0.028989280700683594, 0.028999839782714844, 0.02900377655029297, 0.029798112869262695, 0.029188383102416993, 0.029246688842773438, 0.029117216110229494, 0.0291549129486084, 0.0290164794921875, 0.028940288543701172, 0.028895231246948243, 0.029138944625854493, 0.02896076774597168, 0.02879052734375, 0.029406560897827148, 0.03062175941467285, 0.02928316879272461, 0.029151199340820312, 0.029255008697509764, 0.029200063705444337, 0.029004159927368163, 0.02888297653198242, 0.028971616744995116, 0.029149023056030274, 0.029003936767578124, 0.02896895980834961, 0.028874752044677734, 0.028968320846557618, 0.028987552642822264, 0.029234848022460937, 0.031370048522949216, 0.029638687133789064, 0.029406784057617187, 0.029086111068725586, 0.02905504035949707, 0.029636543273925783, 0.029128704071044922, 0.029075456619262696, 0.02984489631652832, 0.02933932876586914, 0.029451168060302735, 0.029179616928100584, 0.029210304260253905, 0.028996095657348633, 0.029315168380737305, 0.029114208221435546, 0.029308479309082033, 0.029111007690429687, 0.029193151473999025, 0.02932012748718262, 0.029378559112548826, 0.029093856811523437, 0.029077215194702147, 0.029264192581176757, 0.029120096206665037, 0.028939712524414064, 0.029239456176757814, 0.02906342315673828, 0.029108800888061524, 0.02904457664489746, 0.029523712158203125, 0.02899580764770508, 0.029265663146972657, 0.02917830467224121, 0.02951350402832031, 0.029177215576171874, 0.029693952560424806, 0.0292509765625, 0.02928188705444336, 0.029268991470336913, 0.029163007736206056, 0.029085567474365234, 0.029161376953125, 0.02938275146484375, 0.029513536453247072, 0.02926665687561035, 0.029232864379882813, 0.028995616912841797, 0.029130624771118163, 0.029051359176635743, 0.028985343933105468, 0.02957926368713379, 0.02996633529663086, 0.029722272872924806, 0.029340000152587892, 0.029300640106201172, 0.029579359054565428, 0.029054975509643553, 0.028999679565429686, 0.028807167053222657, 0.02902835273742676, 0.029068479537963866, 0.028895551681518555, 0.02904729652404785, 0.029094079971313476, 0.029091264724731444, 0.029020544052124023, 0.02902016067504883, 0.028940288543701172, 0.0295731201171875, 0.029263872146606446, 0.029057024002075195, 0.02901215934753418, 0.028929376602172853, 0.02896329689025879, 0.0290119686126709, 0.028989599227905272, 0.028994495391845704, 0.02900265693664551, 0.029231008529663087, 0.029355552673339842, 0.029108800888061524, 0.02912451171875, 0.029226432800292967, 0.029020832061767577, 0.029444095611572265, 0.028991487503051756, 0.02913702392578125, 0.028972000122070313, 0.029090719223022463, 0.029452032089233398, 0.029275999069213868, 0.029197952270507813, 0.029025056838989257, 0.028956575393676756, 0.02901139259338379, 0.029252256393432617, 0.02925056076049805, 0.02901299285888672, 0.030295616149902345, 0.029435455322265627, 0.029143135070800782, 0.02934864044189453, 0.029157375335693358, 
0.029206527709960937, 0.029243392944335936, 0.0292672004699707, 0.029907680511474608, 0.029050912857055664, 0.029171712875366212, 0.029252735137939453, 0.029721471786499025, 0.029278207778930664, 0.029088031768798827, 0.030605024337768554, 0.03573052978515625, 0.02910873603820801, 0.02908527946472168, 0.029653247833251954, 0.029192480087280273, 0.02921881675720215, 0.029018400192260742, 0.029134239196777344, 0.029549152374267577, 0.02930073547363281, 0.02924460792541504, 0.029026943206787108, 0.02905107116699219, 0.029111967086791993, 0.02919043159484863, 0.029081279754638673, 0.02913657569885254, 0.029053632736206054, 0.029185152053833006, 0.029204927444458007, 0.029427936553955078, 0.02961020851135254, 0.029228736877441406, 0.029093631744384764, 0.029063455581665038, 0.028860576629638673, 0.02913475227355957, 0.028969343185424806, 0.029095775604248048, 0.029030399322509767, 0.029130752563476563, 0.029493247985839844, 0.02913689613342285, 0.029191232681274413, 0.029567039489746094, 0.029054975509643553, 0.029117311477661133, 0.029470495223999024, 0.02918627166748047, 0.030080671310424804, 0.02928656005859375, 0.029117792129516602, 0.029170528411865234, 0.029124607086181642, 0.029233152389526368, 0.029380607604980468, 0.029253376007080077, 0.029727392196655274, 0.02930179214477539, 0.029211296081542968, 0.029342016220092772, 0.029646432876586915, 0.029513887405395508, 0.029445695877075195, 0.029204320907592774, 0.029268863677978516, 0.029343360900878905, 0.02934409523010254, 0.029249055862426758, 0.0293809928894043, 0.029362272262573243, 0.029287807464599608, 0.03406911849975586, 0.02948908805847168, 0.029153631210327147, 0.029118175506591796, 0.029114528656005858, 0.029336544036865236, 0.030089759826660158, 0.029710399627685548, 0.02926416015625, 0.02918560028076172, 0.029247936248779298, 0.029285440444946288, 0.029223871231079102, 0.029396543502807616, 0.03047897529602051, 0.029723712921142578, 0.029597503662109375, 0.02927712059020996, 0.02968726348876953, 0.030097951889038087, 0.029742399215698243, 0.029594303131103516, 0.029816831588745117, 0.029689855575561523, 0.02944540786743164, 0.02968227195739746, 0.029239423751831056, 0.029185695648193358, 0.02927359962463379, 0.02925449562072754, 0.029247007369995116, 0.02921478462219238, 0.0292327995300293, 0.029160192489624023, 0.029347007751464843, 0.02965996742248535, 0.02936627197265625, 0.029056703567504883, 0.02902252769470215, 0.029097984313964844, 0.02897305679321289, 0.029405439376831054, 0.028951648712158204, 0.028952735900878906, 0.028946624755859376, 0.029053247451782227, 0.028925952911376954, 0.029233152389526368, 0.02958745574951172, 0.02916966438293457, 0.029040639877319335, 0.02911846351623535, 0.029083648681640626, 0.029231103897094726, 0.029148704528808595, 0.029119104385375977, 0.029073280334472658, 0.029040063858032227, 0.0290696964263916, 0.029180063247680663, 0.02920000076293945, 0.029182079315185547, 0.0290296630859375, 0.02927859115600586, 0.029096767425537108, 0.02908073616027832, 0.02903923225402832, 0.029018112182617187, 0.029150943756103515, 0.029147424697875977, 0.029099327087402344, 0.029206335067749025, 0.029087743759155273, 0.029105024337768556, 0.029026111602783202, 0.028968320846557618, 0.029041696548461914, 0.02901318359375, 0.03418329620361328, 0.02931110382080078, 0.029296960830688477, 0.02915238380432129, 0.029096864700317384, 0.029026496887207032, 0.02903209686279297, 0.029010080337524415, 0.029054880142211914, 0.029409151077270507, 0.029276384353637695, 0.029256704330444337, 0.029188383102416993, 
0.029016799926757812, 0.02896281623840332, 0.02902016067504883, 0.029243392944335936, 0.029196096420288087, 0.029151424407958985, 0.029150880813598633, 0.02915772819519043, 0.028917760848999025, 0.029644575119018555, 0.029262048721313477, 0.029026304244995117, 0.029181951522827147, 0.02911027145385742, 0.029363391876220703, 0.0291397762298584, 0.029205631256103516, 0.02905926322937012, 0.028950847625732423, 0.030344703674316405, 0.02940438461303711, 0.02921708869934082, 0.028968576431274415, 0.028937055587768556, 0.029195808410644532, 0.029118816375732423, 0.0295316162109375, 0.029317792892456056, 0.02920444869995117, 0.02910006332397461, 0.029148960113525392, 0.029006048202514647, 0.028997631072998048, 0.029093759536743164, 0.029089696884155275, 0.029122783660888673, 0.029181407928466796, 0.031064159393310548, 0.029554847717285157, 0.02983126449584961, 0.02928044891357422, 0.02902835273742676, 0.029116191864013673, 0.029076831817626953, 0.029027200698852538, 0.02902016067504883, 0.029132799148559572, 0.029073408126831055, 0.029029567718505858, 0.02892883110046387, 0.029106176376342774, 0.029149183273315428, 0.03098646354675293, 0.02984227180480957, 0.02929964828491211, 0.029276159286499022, 0.029093088150024413, 0.028951263427734374, 0.029039840698242187, 0.029135007858276368, 0.029493247985839844, 0.02916012763977051, 0.02896611213684082, 0.02891574478149414, 0.029188735961914063, 0.029082880020141602, 0.02921513557434082, 0.029415903091430665, 0.029351936340332032, 0.029121919631958006, 0.02904710388183594, 0.028938560485839843, 0.02895257568359375, 0.030105600357055663, 0.028970975875854493, 0.029040672302246093, 0.029083744049072265, 0.029155231475830077, 0.02899558448791504, 0.02916966438293457, 0.02932262420654297, 0.029149824142456055, 0.02934169578552246, 0.02969424057006836, 0.02920902442932129, 0.029083200454711914, 0.028932479858398436, 0.02901180839538574, 0.02906502342224121, 0.029249343872070312, 0.02927881622314453, 0.029087743759155273, 0.029198335647583007, 0.02920172882080078, 0.02908195114135742, 0.029085344314575195, 0.029473663330078125, 0.029459936141967773, 0.029327327728271485, 0.029153663635253905, 0.029140640258789062, 0.029124000549316405, 0.02923411178588867, 0.029119840621948244, 0.028988063812255858, 0.029050880432128907, 0.02897715187072754, 0.028901376724243165, 0.02904377555847168, 0.029035455703735353, 0.029087167739868164, 0.029173728942871093, 0.02907811164855957, 0.029028287887573244, 0.029104192733764647, 0.02900377655029297, 0.02893561553955078, 0.02901465606689453, 0.029092832565307616, 0.029178848266601564, 0.02899715232849121, 0.029028831481933595, 0.02905292892456055, 0.029076896667480468, 0.02952169609069824, 0.029409503936767577, 0.02925788879394531, 0.029262271881103516, 0.0291943359375, 0.0291297607421875, 0.029190719604492186, 0.029487424850463868, 0.02952979278564453, 0.029429407119750978, 0.029166240692138672, 0.02927984046936035, 0.029292032241821288, 0.02932009506225586, 0.029138975143432617, 0.029161439895629884, 0.02902137565612793, 0.029101119995117188, 0.029086496353149412, 0.029100191116333007, 0.02898192024230957, 0.029126752853393556, 0.03137740707397461, 0.02969599914550781, 0.029576416015625, 0.029487903594970704, 0.029380319595336914, 0.029366527557373047, 0.029303071975708007, 0.02943084716796875, 0.029632575988769533, 0.029302560806274414, 0.029170528411865234, 0.029231103897094726, 0.02914240074157715, 0.0290883846282959, 0.02920377540588379, 0.02923811149597168, 0.0298023681640625, 0.029767648696899414, 
0.030076095581054688, 0.029645631790161133, 0.029497440338134766, 0.029392799377441405, 0.02917580795288086, 0.029149183273315428, 0.029144096374511718, 0.02918294334411621, 0.029154848098754883, 0.02918448066711426, 0.029136543273925782, 0.029163583755493164, 0.02918025588989258, 0.029246496200561523, 0.029575807571411133, 0.029408767700195314, 0.029791007995605467, 0.029394943237304686, 0.029671424865722655, 0.029999103546142578, 0.02972982406616211, 0.029527008056640627, 0.029275871276855468, 0.02929692840576172, 0.029859840393066408, 0.029470144271850587, 0.029276735305786134, 0.02951372718811035, 0.030138368606567382, 0.029685407638549804, 0.029709791183471678, 0.02955254364013672, 0.02963145637512207, 0.029465791702270507, 0.029489984512329103, 0.02929427146911621, 0.029107744216918946, 0.02954934310913086, 0.02918400001525879, 0.029286399841308593, 0.029296640396118165, 0.029163007736206056, 0.029419136047363282, 0.02982387161254883, 0.029535903930664062]",tokens/s,34.12782314715538,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4757.950464,6022.889472,0.0,5637.144576,5630.431232,s,1,10.583240234375,10.583240234375,0.0,10.583240234375,10.583240234375,10.583240234375,10.583240234375,[10.583240234375],,kWh,9.658068382083607e-05,1.0646348787908809e-05,3.2403081477977436e-05,0.00013963011408672232,,MB,1761.308672,6402.473984,0.0,5987.36896,5889.96608,s,10,5.229000213623047,0.5229000213623047,0.006321765420834981,0.5252388916015625,0.5266760864257812,0.5272976379394532,0.5277948791503906,"[0.5049750061035156, 0.5245005493164062, 0.527919189453125, 0.5262244873046875, 0.5237803955078125, 0.526288818359375, 0.52157421875, 0.5265379638671875, 0.5212223510742188, 0.5259772338867188]",tokens/s,489.57733704627987,kWh,1.4983126049792191e-05,1.6523572255539983e-06,9.975966314100526e-06,2.6611449589446716e-05,tokens/kWh,9619919.393700438,MB,1769.877504,6402.473984,0.0,5987.36896,5889.96864,s,10,28.445169189453125,2.8445169189453123,0.011335098624156928,2.8401705322265625,2.8635853515625,2.8645308349609375,2.8652872216796874,"[2.847841064453125, 2.865476318359375, 2.849661376953125, 2.863375244140625, 2.83591162109375, 2.83580419921875, 2.831446044921875, 2.835312255859375, 2.8380400390625, 2.842301025390625]",tokens/s,22.147873187324578,kWh,8.263928389062282e-05,9.115370797305593e-06,5.465002983109785e-05,0.00014640468451902625,tokens/kWh,430314.09962713823,,s,630,28.442635684967044,0.045147040769788954,0.0004339751458462169,0.04506723213195801,0.04561559104919434,0.04596099967956543,0.04671958602905273,"[0.04636739349365234, 0.04536265563964844, 0.045126174926757814, 0.044918785095214846, 0.04512921524047851, 0.04483327865600586, 0.044786975860595706, 0.044907230377197266, 0.04494131088256836, 0.04481833648681641, 0.04530505752563477, 0.04462886428833008, 0.04543699264526367, 0.04490835189819336, 0.04517078399658203, 0.04551887893676758, 0.04463411331176758, 0.045087966918945316, 0.0448683853149414, 0.0449699821472168, 0.04472124862670898, 0.04478847885131836, 0.044687198638916015, 0.044784225463867185, 0.04450387191772461, 0.0447210578918457, 0.045129726409912106, 0.04561052703857422, 0.044929344177246096, 0.044980384826660155, 0.04496796798706055, 0.04521161651611328, 0.045281280517578126, 0.04514102554321289, 0.04515532684326172, 0.04544870376586914, 0.04506876754760742, 0.0453939208984375, 0.04540550231933594, 0.045564414978027344, 0.04619488143920898, 0.045122974395751955, 0.04530239868164063, 0.04507849502563477, 0.045313953399658206, 0.04559676742553711, 0.04525791931152344, 0.04519414520263672, 0.045210720062255856, 0.04510393524169922, 0.0448587532043457, 0.04747068786621094, 0.046270431518554686, 0.045072254180908204, 0.04550931167602539, 0.045041374206542965, 0.04508905410766602, 0.045211071014404296, 0.045160160064697266, 0.04564873504638672, 0.045072383880615234, 0.04540620803833008, 0.04517622375488281, 0.04710076904296875, 0.04581740951538086, 
0.04562579345703125, 0.04537724685668945, 0.0455863037109375, 0.04504393768310547, 0.04522118377685547, 0.04549311828613281, 0.04579676818847656, 0.045391998291015624, 0.047427486419677735, 0.045537857055664065, 0.045833633422851565, 0.04528915023803711, 0.04521052932739258, 0.04528249740600586, 0.04550534439086914, 0.04547283172607422, 0.04535958480834961, 0.045098529815673825, 0.04509414291381836, 0.0455615348815918, 0.04535500717163086, 0.04548825454711914, 0.04604300689697265, 0.04551027297973633, 0.04588483047485352, 0.045203678131103514, 0.04511411285400391, 0.045620574951171874, 0.045572769165039065, 0.04626559829711914, 0.04569343948364258, 0.045405601501464846, 0.04552719879150391, 0.045559680938720704, 0.04562412643432617, 0.045254528045654295, 0.04508480072021484, 0.04518735885620117, 0.045397727966308594, 0.04510924911499024, 0.04532332611083984, 0.04510815811157227, 0.04541356658935547, 0.0452077751159668, 0.0451976318359375, 0.04526092910766601, 0.045937984466552735, 0.045172958374023436, 0.0450382080078125, 0.04524841690063477, 0.045420639038085936, 0.045230079650878906, 0.04528742218017578, 0.04519094467163086, 0.04541891098022461, 0.04551216125488281, 0.04531027221679688, 0.04531203079223633, 0.04546582412719727, 0.04575315093994141, 0.04537449645996094, 0.0469381103515625, 0.04581289672851562, 0.04561996841430664, 0.045481311798095704, 0.04578927993774414, 0.04580419158935547, 0.04508185577392578, 0.045624065399169925, 0.0454854736328125, 0.04543139266967773, 0.045131072998046876, 0.045963966369628906, 0.04561510467529297, 0.045395488739013674, 0.045152992248535154, 0.045268638610839844, 0.045246238708496096, 0.045143775939941407, 0.04514799880981445, 0.04506291198730469, 0.04549631881713867, 0.045388961791992186, 0.04518960189819336, 0.04515264129638672, 0.04518835067749023, 0.044990848541259766, 0.04487936019897461, 0.04471897506713867, 0.04512566375732422, 0.0446459846496582, 0.04485567855834961, 0.0449183349609375, 0.04487932968139648, 0.04482137680053711, 0.044897502899169925, 0.04502236938476562, 0.04498803329467774, 0.04513942337036133, 0.04495955276489258, 0.04512851333618164, 0.045107200622558595, 0.04511129760742188, 0.04518454360961914, 0.04512736129760742, 0.04537628936767578, 0.04535043334960937, 0.04493961715698242, 0.04566438293457031, 0.045081886291503906, 0.04520742416381836, 0.04509689712524414, 0.04567708969116211, 0.04485772705078125, 0.04493107223510742, 0.04528537750244141, 0.044980224609375, 0.045332767486572265, 0.04509891128540039, 0.04515350341796875, 0.04494601440429687, 0.04508879852294922, 0.044953567504882816, 0.04526694488525391, 0.046368545532226565, 0.04547945785522461, 0.045353534698486325, 0.04511129760742188, 0.045350910186767575, 0.0452751350402832, 0.04524771118164062, 0.0452636489868164, 0.04526079940795898, 0.045215103149414064, 0.0455665283203125, 0.04516175842285156, 0.04528822326660156, 0.0451517448425293, 0.04535865783691406, 0.045297950744628904, 0.04496598434448242, 0.04531052780151367, 0.04557619094848633, 0.045571678161621096, 0.045469566345214846, 0.04548252868652344, 0.0453240966796875, 0.04535315322875977, 0.045592575073242186, 0.045397632598876955, 0.046481792449951174, 0.045696609497070315, 0.04544144058227539, 0.0455327033996582, 0.045739585876464844, 0.04587203216552734, 0.04563763046264648, 0.0458158073425293, 0.045590526580810545, 0.04528547286987305, 0.04541225433349609, 0.04521984100341797, 0.0452276496887207, 0.045986175537109375, 0.045666305541992185, 0.04596121597290039, 0.04554751968383789, 
0.045314048767089846, 0.045352127075195314, 0.04561388778686523, 0.04531814575195312, 0.04513759994506836, 0.045195072174072266, 0.04524911880493164, 0.045252159118652345, 0.045236385345458985, 0.045209793090820315, 0.045329822540283206, 0.04531193542480469, 0.045504703521728515, 0.04546403121948242, 0.045369342803955076, 0.04553113555908203, 0.04529059219360351, 0.0463737907409668, 0.04524236679077148, 0.04493660736083984, 0.04614131164550781, 0.045297183990478516, 0.04529945755004883, 0.04515059280395508, 0.0449224967956543, 0.04481520080566406, 0.04465663909912109, 0.046614528656005856, 0.04489625549316406, 0.0448798713684082, 0.04474265670776367, 0.04487168121337891, 0.04482003021240234, 0.04455904006958008, 0.044715934753417966, 0.044967777252197264, 0.0447487678527832, 0.044746238708496096, 0.04483654403686523, 0.044610401153564454, 0.04501708984375, 0.0448276481628418, 0.04483375930786133, 0.0451748161315918, 0.045016609191894534, 0.045297664642333986, 0.04547222518920899, 0.04506851196289063, 0.04586627197265625, 0.0453309440612793, 0.04510310363769531, 0.04533785629272461, 0.04507519912719726, 0.04504576110839844, 0.045008640289306644, 0.04535116958618164, 0.04506947326660156, 0.045228897094726564, 0.04530195236206055, 0.045008033752441404, 0.04473523330688477, 0.04468316650390625, 0.04506009674072266, 0.044988704681396485, 0.04497091293334961, 0.04483750534057617, 0.04494911956787109, 0.0449889907836914, 0.044990463256835936, 0.04506595230102539, 0.04482668685913086, 0.04485756683349609, 0.04466201782226562, 0.045121311187744144, 0.044921825408935544, 0.044854400634765625, 0.0446165771484375, 0.0445882568359375, 0.04467987060546875, 0.04497212982177735, 0.04469084930419922, 0.04489174270629883, 0.04497919845581055, 0.045789310455322266, 0.04501590347290039, 0.04455948638916016, 0.04464934539794922, 0.04471708679199219, 0.04462432098388672, 0.04449744033813476, 0.04489011383056641, 0.04453580856323242, 0.048072704315185545, 0.045186752319335936, 0.04484128189086914, 0.045000705718994144, 0.04494540786743164, 0.04495337677001953, 0.045036800384521486, 0.0447149772644043, 0.044875774383544925, 0.044934974670410154, 0.04482672119140625, 0.04489033508300781, 0.04473820877075195, 0.04472800064086914, 0.0450032958984375, 0.044979423522949216, 0.04482332611083984, 0.04487529754638672, 0.04505369567871094, 0.044735198974609376, 0.04493107223510742, 0.04515430450439453, 0.04497612762451172, 0.044730369567871096, 0.04465459060668946, 0.044627967834472655, 0.04457660675048828, 0.04457459259033203, 0.044531681060791015, 0.044669185638427734, 0.044778560638427736, 0.044805118560791016, 0.04589926528930664, 0.04506592178344727, 0.04463494491577148, 0.04491468811035156, 0.044806209564208985, 0.04490611267089844, 0.04480764770507813, 0.04516540908813477, 0.0448351058959961, 0.044762977600097655, 0.04500672149658203, 0.04527718353271484, 0.045172737121582034, 0.045577598571777345, 0.04502387237548828, 0.045080577850341794, 0.04496780776977539, 0.04555788803100586, 0.04520550537109375, 0.04621257781982422, 0.04596073532104492, 0.045203872680664066, 0.045999393463134766, 0.04537004852294922, 0.04515151977539063, 0.045130462646484376, 0.045375423431396486, 0.045135936737060546, 0.044933120727539064, 0.04489625549316406, 0.04504703903198242, 0.04490111923217773, 0.04493926239013672, 0.04496316909790039, 0.044987041473388674, 0.04464988708496094, 0.044773983001708983, 0.044846721649169925, 0.045978206634521485, 0.04509468841552734, 0.04500592041015625, 0.044835742950439454, 0.0449128646850586, 
0.04498742294311524, 0.04509772872924805, 0.045088768005371094, 0.045213695526123046, 0.045028831481933596, 0.044724769592285156, 0.04490777587890625, 0.044741344451904294, 0.044861408233642576, 0.04510249710083008, 0.04496633529663086, 0.04504393768310547, 0.044832767486572264, 0.04480364990234375, 0.04498681640625, 0.044623199462890624, 0.04465321731567383, 0.04480543899536133, 0.04476758575439453, 0.04468156814575195, 0.04483891296386719, 0.044660736083984375, 0.044768577575683595, 0.04475155258178711, 0.044695552825927735, 0.044676734924316404, 0.045817310333251954, 0.04478044891357422, 0.04545235061645508, 0.04471612930297852, 0.044816513061523434, 0.04469120025634766, 0.04456752014160156, 0.04445183944702148, 0.04459942245483398, 0.04493913650512695, 0.04471603012084961, 0.0444147834777832, 0.04543097686767578, 0.04483686447143555, 0.0449249267578125, 0.04480944061279297, 0.04636262512207031, 0.04523260879516602, 0.045111232757568356, 0.045011009216308594, 0.04494249725341797, 0.0464040641784668, 0.04506355285644531, 0.04492800140380859, 0.04492902374267578, 0.044901535034179686, 0.04469027328491211, 0.04480223846435547, 0.0450943374633789, 0.04541705703735351, 0.04522780990600586, 0.044832767486572264, 0.044980224609375, 0.04480969619750977, 0.045077022552490235, 0.044903774261474606, 0.04495222473144531, 0.045104705810546875, 0.045521343231201175, 0.04494099044799805, 0.04450131225585938, 0.045107425689697264, 0.04596121597290039, 0.04536707305908203, 0.04493926239013672, 0.04502937698364258, 0.044891422271728515, 0.04468355178833008, 0.044850814819335935, 0.04481014251708984, 0.04476406478881836, 0.04494540786743164, 0.0450780143737793, 0.045312702178955076, 0.04510009765625, 0.04500787353515625, 0.045057792663574216, 0.045297664642333986, 0.044750846862792966, 0.04468326568603516, 0.04480819320678711, 0.04480006408691406, 0.044725440979003904, 0.044883712768554684, 0.0448276481628418, 0.04470374298095703, 0.0446484489440918, 0.04460240173339844, 0.04481942367553711, 0.044762718200683595, 0.04475126266479492, 0.045018272399902345, 0.04489116668701172, 0.04458419036865234, 0.04475699234008789, 0.04489888000488281, 0.04517603302001953, 0.045069087982177736, 0.04495881652832031, 0.04613872146606445, 0.045085601806640625, 0.04498944091796875, 0.044917087554931644, 0.044789310455322265, 0.04504822540283203, 0.045106849670410155, 0.04501119995117187, 0.04514665603637695, 0.04492902374267578, 0.04469945526123047, 0.04496329498291016, 0.04490518569946289, 0.04514815902709961, 0.045088768005371094, 0.044943199157714844, 0.044799999237060545, 0.04517881774902344, 0.04606719970703125, 0.044899040222167966, 0.04473651123046875, 0.044990463256835936, 0.045172737121582034, 0.04501094436645508, 0.04518092727661133, 0.04494694519042969, 0.04488652801513672, 0.0450167350769043, 0.04498688125610351, 0.044891456604003906, 0.04518265533447265, 0.04505587387084961, 0.04491670227050781, 0.044878849029541014, 0.044843006134033206, 0.04513315200805664, 0.045886112213134767, 0.04506012725830078, 0.0446852798461914, 0.04459724807739258, 0.04496323013305664, 0.04481817626953125, 0.04501136016845703, 0.04500249481201172, 0.044931774139404294, 0.04485513687133789, 0.04491820907592774, 0.045017311096191406, 0.04512409591674805, 0.044972030639648435, 0.045211647033691404, 0.04524579238891602, 0.045015552520751956, 0.04507254409790039, 0.044923999786376956, 0.04502735900878906, 0.04502412796020508, 0.044980224609375, 0.045027168273925784, 0.04559404754638672, 0.04508303833007812, 0.045031742095947264, 
0.04502732849121094, 0.04606166458129883, 0.04501820755004883, 0.046048160552978515, 0.044843006134033206, 0.044967937469482425, 0.04477951812744141, 0.04494668960571289, 0.04507878494262695, 0.044783615112304685, 0.04479622268676758, 0.045375072479248046, 0.04486963272094727, 0.044757598876953124, 0.04502937698364258, 0.04472217559814453, 0.04501606369018555, 0.0447696647644043, 0.04497881698608398, 0.04485635375976563, 0.044958305358886716, 0.04521955108642578, 0.044970657348632814, 0.04503142547607422, 0.04508691024780274, 0.04487353515625, 0.04504576110839844, 0.045036865234375, 0.045083328247070314, 0.044556289672851565, 0.04498236846923828, 0.04481740951538086, 0.04497296142578125, 0.04475196838378906, 0.0459785270690918, 0.04477132797241211, 0.04474700927734375, 0.04546326446533203, 0.04516048049926758, 0.04513008117675781, 0.04477916717529297, 0.0449695053100586, 0.04486947250366211, 0.04495014572143555, 0.04462723159790039, 0.044948192596435545, 0.04758432006835937, 0.04481119918823242, 0.044875518798828125, 0.0446940803527832, 0.04462969589233398, 0.04464230346679687, 0.04548438262939453, 0.04512636947631836, 0.04523040008544922, 0.04565875244140625, 0.045228031158447264, 0.045231616973876954, 0.04666624069213867, 0.04510246276855469, 0.045039520263671876, 0.045017822265625, 0.0447795524597168, 0.04674137496948242]",tokens/s,22.14984599099505,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights 
self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights 
self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 3 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 805, in _apply param_applied = fn(param) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.55 GiB is free. Process 223243 has 13.19 GiB memory in use. Of the allocated memory 13.09 GiB is allocated by PyTorch, and 880.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,7394.59072,7808.679936,0.0,7430.209536,7414.23104,s,1,11.9593759765625,11.9593759765625,0.0,11.9593759765625,11.9593759765625,11.9593759765625,11.9593759765625,[11.9593759765625],,kWh,0.00013767597920414496,1.517940819217984e-05,4.538892519999338e-05,0.00019824431259631817,,MB,1693.106176,8574.140416,0.0,8166.309888,8044.111872,s,10,8.038221557617188,0.8038221557617188,0.005012772321141111,0.8047204284667968,0.8073851684570312,0.8079228393554687,0.8083529760742187,"[0.789686279296875, 0.8072656860351562, 0.8084605102539062, 0.806500732421875, 0.805220947265625, 0.8042199096679687, 0.8065791625976563, 0.8036324462890625, 0.8027498168945313, 0.8039060668945313]",tokens/s,318.4784074002153,kWh,2.3507717869551923e-05,2.5917034602081007e-06,1.5627469766923974e-05,4.1726891096684e-05,tokens/kWh,6135132.3636077,MB,1696.731136,8741.912576,0.0,8334.082048,8265.758208,s,10,33.337338378906246,3.3337338378906254,0.005705343794965879,3.3318209228515627,3.3361259033203123,3.3430854125976563,3.348653020019531,"[3.350044921875, 3.329379150390625, 3.331510498046875, 3.331429931640625, 3.33213134765625, 3.32924072265625, 3.331159912109375, 3.333744873046875, 3.334579345703125, 3.33411767578125]",tokens/s,18.89772941197441,kWh,9.74628008175307e-05,1.0751531806857375e-05,6.470653894467688e-05,0.00017292087156906493,tokens/kWh,364328.48983668047,,s,630,33.33370851516723,0.052910648436773386,0.0008905385000460291,0.05287142372131348,0.05350011253356934,0.05368690528869629,0.05574180805206299,"[0.05570675277709961, 0.05300624084472656, 0.05237977600097656, 0.05226287841796875, 0.05208563232421875, 0.052071582794189455, 0.052112064361572265, 0.052238494873046874, 0.05226816177368164, 0.052148769378662106, 0.05244483184814453, 0.05241468811035156, 0.05248764801025391, 0.05235331344604492, 0.05242134475708008, 0.0526429443359375, 0.05261321640014648, 0.052783905029296876, 0.05306777572631836, 0.05314870452880859, 0.053048065185546875, 0.052811870574951174, 0.06940009307861328, 0.051090015411376956, 0.052620704650878904, 0.05255014419555664, 0.05237155151367188, 0.052563968658447265, 0.052725215911865235, 0.05257664108276367, 0.05271075057983399, 0.05247673416137695, 0.052622657775878906, 0.052935489654541014, 0.05291737747192383, 0.053076320648193356, 0.05301683044433594, 0.053219486236572265, 0.05346713638305664, 0.05408358383178711, 0.05385420989990235, 0.05309417724609375, 0.05304054260253906, 0.052920639038085936, 0.05280752182006836, 0.0528267822265625, 0.052838401794433595, 0.0529323501586914, 0.052867328643798825, 0.05304489517211914, 0.052897216796875, 0.05303388977050781, 0.053034496307373044, 0.053230079650878906, 0.05322547149658203, 0.05331308746337891, 0.0533520622253418, 0.053750591278076174, 0.054042625427246097, 0.05389904022216797, 0.05370806503295898, 0.053513153076171875, 0.05337241744995117, 0.05586240005493164, 0.052767616271972656, 0.0519659538269043, 0.0521662712097168, 0.05222643280029297, 0.05204991912841797, 0.052055904388427734, 0.05190598297119141, 0.05198076629638672, 0.0521710090637207, 0.05203523254394531, 0.0522193603515625, 0.05218598556518555, 0.05230742263793945, 0.05228188705444336, 0.05264543914794922, 0.05249683380126953, 0.05298995208740234, 0.053110752105712894, 0.05305510330200195, 0.05286953735351563, 0.05267583847045899, 0.0530536003112793, 0.052284000396728515, 0.05268070220947266, 0.052886783599853514, 0.05301103973388672, 0.052374687194824215, 0.05219126510620117, 0.05222067260742187, 0.05233843231201172, 
0.0524169921875, 0.052625343322753905, 0.05276652908325195, 0.052857086181640624, 0.05330099105834961, 0.05318988800048828, 0.05343948745727539, 0.05321932983398438, 0.053190654754638675, 0.05326383972167969, 0.05290399932861328, 0.05293686294555664, 0.052963649749755856, 0.05283020782470703, 0.05283225631713867, 0.05288140869140625, 0.05292828750610352, 0.052607200622558595, 0.05268889617919922, 0.052821151733398436, 0.05313827133178711, 0.05312464141845703, 0.05342051315307617, 0.05349526214599609, 0.053391902923583985, 0.053714942932128903, 0.0538419189453125, 0.053319263458251956, 0.05348956680297851, 0.053314048767089846, 0.05347942352294922, 0.053510143280029294, 0.05607964706420898, 0.052924575805664065, 0.05240275192260742, 0.05188531112670899, 0.052144447326660154, 0.05229350280761719, 0.052294208526611326, 0.05233391952514648, 0.052230335235595705, 0.05211795043945312, 0.05215439987182617, 0.05206220626831055, 0.0522342414855957, 0.05260854339599609, 0.052512351989746096, 0.05256691360473633, 0.05248614501953125, 0.052891647338867184, 0.0532213134765625, 0.05313119888305664, 0.05277094268798828, 0.05260678482055664, 0.05267500686645508, 0.05251456069946289, 0.05242819213867188, 0.052367969512939455, 0.05264540863037109, 0.052679134368896485, 0.05262460708618164, 0.052441886901855465, 0.05243904113769531, 0.05237724685668945, 0.052722015380859376, 0.052983806610107424, 0.05277891159057617, 0.052897857666015624, 0.05301046371459961, 0.05318041610717773, 0.053148735046386716, 0.05282051086425781, 0.05285859298706055, 0.05302880096435547, 0.053056255340576175, 0.0530247688293457, 0.05287936019897461, 0.05284864044189453, 0.053043201446533204, 0.053055038452148436, 0.0530579833984375, 0.05314748764038086, 0.0530351676940918, 0.05332787322998047, 0.053460990905761716, 0.053564510345458984, 0.053577953338623044, 0.053686977386474606, 0.053947841644287106, 0.05349353790283203, 0.053250846862792967, 0.053198848724365234, 0.053349632263183594, 0.0532795524597168, 0.05324755096435547, 0.055804641723632815, 0.0528240966796875, 0.05206016159057617, 0.052321502685546875, 0.05195446395874023, 0.05211699295043945, 0.052216320037841796, 0.05217279815673828, 0.05192652893066406, 0.05198499298095703, 0.052184993743896485, 0.052094974517822266, 0.052307968139648435, 0.05255372619628906, 0.05236940765380859, 0.052600414276123046, 0.05237187194824219, 0.052887168884277344, 0.0530431022644043, 0.053108257293701173, 0.052835262298583985, 0.052580352783203124, 0.052751678466796875, 0.05282476806640625, 0.05287116622924805, 0.052514816284179686, 0.05253683090209961, 0.05251327896118164, 0.05243084716796875, 0.05258006286621094, 0.05259843063354492, 0.052759166717529296, 0.052819393157958985, 0.052795967102050784, 0.05298720169067383, 0.05314012908935547, 0.0531126708984375, 0.053319873809814455, 0.05315283203125, 0.05319776153564453, 0.05315584182739258, 0.05301862335205078, 0.05288755035400391, 0.05288742446899414, 0.05283852767944336, 0.05268201446533203, 0.05263183975219726, 0.0531214714050293, 0.05318041610717773, 0.05320499038696289, 0.05279743957519531, 0.05287526321411133, 0.05338851165771484, 0.0534617919921875, 0.05349990463256836, 0.054063102722167966, 0.053593727111816404, 0.05357401657104492, 0.053612510681152345, 0.05346646499633789, 0.05330960083007812, 0.05354140853881836, 0.05309235382080078, 0.05615280151367188, 0.0526802864074707, 0.051922782897949216, 0.0520648307800293, 0.05206399917602539, 0.05209523010253906, 0.05183283233642578, 0.051888126373291016, 0.052077888488769535, 
0.052107105255126955, 0.05201391983032227, 0.05225807952880859, 0.052642017364501956, 0.05287168121337891, 0.052563968658447265, 0.0526737289428711, 0.052679489135742184, 0.05301776123046875, 0.05361484909057617, 0.053140064239501954, 0.052664321899414064, 0.05267852783203125, 0.05264950561523438, 0.05247036743164062, 0.05247580718994141, 0.052590686798095705, 0.05272073745727539, 0.05265631866455078, 0.05255241775512695, 0.05258963012695313, 0.05273491287231445, 0.0524389762878418, 0.05276467132568359, 0.05294496154785156, 0.053116928100585936, 0.053268383026123044, 0.05334640121459961, 0.05309439849853516, 0.05339340972900391, 0.053542465209960935, 0.05313328170776367, 0.05301910400390625, 0.05299523162841797, 0.052936542510986326, 0.05303398513793945, 0.0529202880859375, 0.05273977661132812, 0.05288995361328125, 0.052772575378417966, 0.0528361930847168, 0.053019073486328124, 0.052977664947509766, 0.053319679260253904, 0.05357056045532226, 0.05336975860595703, 0.05342604827880859, 0.05370083236694336, 0.05387059020996094, 0.05354515075683594, 0.05321020889282226, 0.05300707244873047, 0.05319855880737305, 0.053274017333984375, 0.05554691314697266, 0.052928703308105465, 0.05213673782348633, 0.05229363250732422, 0.0518551025390625, 0.052289695739746095, 0.052119647979736325, 0.052197662353515625, 0.05229334259033203, 0.05223747253417969, 0.05240665435791016, 0.05190089416503906, 0.052332542419433595, 0.0524799690246582, 0.052346046447753904, 0.052685665130615233, 0.05266841506958008, 0.05293875122070312, 0.053249534606933595, 0.053254657745361325, 0.052738048553466796, 0.052570014953613284, 0.05261116790771484, 0.0525926399230957, 0.052752384185791014, 0.052451072692871095, 0.05238604736328125, 0.052631553649902345, 0.05257625579833984, 0.052205535888671876, 0.05254873657226562, 0.05235148620605469, 0.05272003173828125, 0.052935745239257814, 0.05292294311523438, 0.05327449417114258, 0.05349017715454102, 0.0531517448425293, 0.05309235382080078, 0.05312067031860351, 0.05279983901977539, 0.05280515289306641, 0.05282249450683594, 0.05285395050048828, 0.052757022857666015, 0.05300809478759766, 0.0528922233581543, 0.0530145263671875, 0.05300428771972656, 0.05286707305908203, 0.05282310485839844, 0.053111713409423826, 0.05306774520874023, 0.05339913558959961, 0.05344073486328125, 0.05360460662841797, 0.05355724716186523, 0.05335785675048828, 0.05335833740234375, 0.05339440155029297, 0.05335209655761719, 0.053063007354736326, 0.053302272796630856, 0.055842720031738284, 0.05286297607421875, 0.052152320861816405, 0.05200032043457031, 0.05201119995117188, 0.05198451232910156, 0.05191692733764648, 0.05190262222290039, 0.051902305603027346, 0.052226047515869144, 0.05212355041503906, 0.052043201446533204, 0.05224310302734375, 0.052714656829833985, 0.052684959411621095, 0.052626174926757814, 0.05258848190307617, 0.052886688232421875, 0.05314031982421875, 0.0530081901550293, 0.05279353713989258, 0.05261068725585937, 0.052547969818115235, 0.052491905212402344, 0.05258601760864258, 0.05242726516723633, 0.05222550582885742, 0.05232326507568359, 0.05238700866699219, 0.05235292816162109, 0.05251567840576172, 0.05264547348022461, 0.05311894226074219, 0.05307846450805664, 0.05310211181640625, 0.05332118225097656, 0.05352124786376953, 0.053276832580566404, 0.053411838531494144, 0.05325823974609375, 0.05309030532836914, 0.05323571014404297, 0.05307187271118164, 0.05300953674316406, 0.05308047866821289, 0.05309711837768555, 0.053020606994628905, 0.05284441757202148, 0.05281587219238281, 0.053065727233886716, 
0.05302220916748047, 0.052972030639648435, 0.05313523101806641, 0.053284927368164064, 0.053292160034179685, 0.05327727890014648, 0.0534101448059082, 0.05354860687255859, 0.05346140670776367, 0.05395417785644531, 0.05346550369262695, 0.05345894241333008, 0.053372928619384766, 0.055756126403808594, 0.052789344787597656, 0.05200953674316406, 0.05200476837158203, 0.052856544494628906, 0.05212969589233398, 0.05211180877685547, 0.05196393585205078, 0.05208268737792969, 0.052170753479003906, 0.05224038314819336, 0.052111358642578126, 0.05206835174560547, 0.05242012786865234, 0.05262793731689453, 0.0526376953125, 0.05298995208740234, 0.05326438522338867, 0.05356243133544922, 0.05299296188354492, 0.052585662841796874, 0.05260787200927734, 0.05270719909667969, 0.052453441619873045, 0.05243699264526367, 0.05237145614624023, 0.05257612609863281, 0.05251289749145508, 0.05221907043457031, 0.05208147048950195, 0.05228915023803711, 0.05252124786376953, 0.05253539276123047, 0.05280115127563476, 0.05292070388793945, 0.0531104621887207, 0.05355929565429687, 0.05357526397705078, 0.053290752410888674, 0.05294179153442383, 0.05298332977294922, 0.053156318664550783, 0.053020126342773435, 0.05283484649658203, 0.052973281860351565, 0.05288729476928711, 0.05296796798706055, 0.05292828750610352, 0.05297948837280273, 0.053084606170654296, 0.05320908737182617, 0.053579776763916016, 0.05349158477783203, 0.05383536148071289, 0.05368681716918945, 0.053972991943359375, 0.054005184173583985, 0.05376671981811523, 0.05370787048339844, 0.05342044830322266, 0.053262847900390625, 0.053440513610839846, 0.05333401489257812, 0.05561801528930664, 0.0530470085144043, 0.05219971084594727, 0.05208883285522461, 0.05218624114990234, 0.05224668884277344, 0.052050113677978516, 0.05215900802612305, 0.052170753479003906, 0.05204377746582031, 0.05210726547241211, 0.05237324905395508, 0.052311294555664065, 0.052652416229248045, 0.05262195205688477, 0.05273977661132812, 0.052455551147460935, 0.052905471801757815, 0.053246654510498044, 0.052910079956054686, 0.052778305053710936, 0.05270150375366211, 0.05254182434082031, 0.0526146240234375, 0.0524881591796875, 0.052756671905517576, 0.05260121536254883, 0.052596736907958984, 0.05245132827758789, 0.052381214141845704, 0.05259516906738281, 0.052719615936279295, 0.05281299209594727, 0.05297235107421875, 0.052908031463623044, 0.05351119995117187, 0.05359622573852539, 0.053392288208007815, 0.05329235076904297, 0.05305158233642578, 0.053146110534667966, 0.05299750518798828, 0.053211776733398435, 0.052940032958984376, 0.05299481582641601, 0.05294879913330078, 0.05301631927490234, 0.053238014221191406, 0.052846782684326174, 0.0529112319946289, 0.053185279846191404, 0.05325836944580078, 0.053440513610839846, 0.053694465637207034, 0.05359775924682617, 0.05366620635986328, 0.053501983642578126, 0.05355059051513672, 0.05332223892211914, 0.05350572967529297, 0.053358913421630856, 0.05339516830444336, 0.05350409698486328, 0.055047870635986325, 0.052676929473876956, 0.05187353515625, 0.05205632019042969, 0.0520577278137207, 0.05216470336914063, 0.052181278228759766, 0.052278270721435545, 0.052429824829101565, 0.052242431640625, 0.05228543853759766, 0.05232572937011719, 0.05269750213623047, 0.052752544403076175, 0.052592094421386716, 0.052757118225097654, 0.052711071014404295, 0.052918624877929685, 0.05329305648803711, 0.05309030532836914, 0.05295513534545898, 0.05279948806762695, 0.05280153656005859, 0.05258444976806641, 0.05261270523071289, 0.05252751922607422, 0.05264384078979492, 0.052759872436523435, 
0.05255033493041992, 0.052649120330810546, 0.052644001007080075, 0.052834945678710936, 0.052799552917480466, 0.05294633483886719, 0.05324451065063476, 0.05346051025390625, 0.05340127944946289, 0.05345974349975586, 0.05357139205932617, 0.05328844833374023, 0.05292921447753906, 0.05298175811767578, 0.0527968635559082, 0.05281433486938476, 0.052936225891113284, 0.05289833450317383, 0.052975616455078124, 0.0529692497253418, 0.053039169311523436, 0.053045406341552734, 0.053108478546142576, 0.05326399993896484, 0.05324643325805664, 0.05342838287353516, 0.05369241714477539, 0.05357328033447266, 0.05345315170288086, 0.05346294403076172, 0.05343148803710938, 0.053230175018310545, 0.05317631912231445, 0.05321897506713867, 0.053178878784179685]",tokens/s,18.899787274294503,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1007.427584,867.106816,0.0,488.636416,482.553856,s,1,8.09123828125,8.09123828125,0.0,8.09123828125,8.09123828125,8.09123828125,8.09123828125,[8.09123828125],,kWh,2.8708490387528704e-05,3.1514326365997525e-06,9.503063158000646e-06,4.1362986182129104e-05,,MB,1227.771904,1024.393216,0.0,616.562688,582.974464,s,10,0.27607552146911624,0.027607552146911622,0.0003845068961008729,0.027467807769775393,0.027722643280029297,0.028233929824829102,0.028642959060668948,"[0.028745216369628908, 0.02743939208984375, 0.027506624221801758, 0.027577375411987303, 0.02748700714111328, 0.027428768157958985, 0.02740598487854004, 0.027609024047851562, 0.027427520751953125, 0.0274486083984375]",tokens/s,9272.82500953776,kWh,9.256555171676362e-07,1.0208324456250316e-07,6.121418399240817e-07,1.639880601654221e-06,tokens/kWh,156108926.30948946,MB,1238.60992,1036.976128,0.0,629.1456,597.192192,s,10,14.34401013183594,1.4344010131835936,0.006175984855072775,1.4349803466796875,1.4417718017578125,1.4442687744140625,1.4462663525390627,"[1.4467657470703126, 1.4368922119140626, 1.4412169189453126, 1.42808984375, 1.4272528076171875, 1.4339974365234376, 1.4267244873046876, 1.4302384033203126, 1.4359632568359375, 1.4368690185546875]",tokens/s,43.92077209996813,kWh,4.1994459412828755e-05,4.631168553798432e-06,1.6676621991075785e-05,6.330224995770296e-05,tokens/kWh,995225.288865642,,s,630,14.339031734466571,0.022760367832486594,0.00038567347761655764,0.02265856075286865,0.02312019500732422,0.02335652503967285,0.024450917987823496,"[0.022715776443481446, 0.023112159729003906, 0.023201248168945313, 0.023357791900634764, 0.02347452735900879, 0.02612224006652832, 0.023566335678100587, 0.024295103073120116, 0.023484607696533204, 0.023119775772094727, 0.02305638313293457, 0.02300441551208496, 0.022880672454833984, 0.022670976638793944, 0.022641599655151366, 0.02269593620300293, 0.02285158348083496, 0.02285955238342285, 0.02268592071533203, 0.022556640625, 0.022546464920043946, 0.02249318313598633, 0.02254572868347168, 0.022439680099487304, 0.022442943572998048, 0.022618112564086915, 0.022562816619873048, 0.022451200485229493, 0.022602752685546876, 0.023031455993652344, 0.0229881591796875, 0.02298569679260254, 0.023617536544799804, 0.0226693115234375, 0.022666751861572267, 0.02257766342163086, 0.02289459228515625, 0.02451456069946289, 0.025574464797973633, 0.022708927154541016, 0.02253171157836914, 0.02252992057800293, 0.02262835121154785, 0.02354047966003418, 0.02261347198486328, 0.02253468894958496, 0.022486272811889647, 0.02249798393249512, 0.022713888168334962, 0.02264694404602051, 0.022564767837524414, 0.022679807662963868, 0.022587615966796874, 0.022747135162353514, 0.02319673538208008, 0.02265715217590332, 0.022672191619873047, 
0.022564863204956053, 0.02296623992919922, 0.022745119094848634, 0.022891872406005858, 0.023302656173706054, 0.022687871932983397, 0.022186784744262694, 0.022656991958618165, 0.022682655334472657, 0.02268057632446289, 0.022566688537597655, 0.022718687057495118, 0.022597503662109376, 0.022585023880004884, 0.02248748779296875, 0.02292889595031738, 0.022671871185302735, 0.023203840255737306, 0.02313862419128418, 0.022859264373779296, 0.02263868713378906, 0.02262819290161133, 0.02263065528869629, 0.022681600570678712, 0.02273689651489258, 0.022609920501708985, 0.022647872924804687, 0.022739423751831054, 0.022563295364379884, 0.022587392807006838, 0.022595359802246095, 0.02279347229003906, 0.02253718376159668, 0.022945024490356444, 0.0227458553314209, 0.022798336029052735, 0.022796287536621093, 0.022747135162353514, 0.022971904754638672, 0.023081151962280274, 0.023021888732910157, 0.023173120498657225, 0.022910560607910156, 0.022757375717163086, 0.022775583267211914, 0.02274982452392578, 0.022691743850708008, 0.02265407943725586, 0.022703071594238282, 0.022693504333496095, 0.022673791885375976, 0.02279337692260742, 0.0226680965423584, 0.022643999099731447, 0.022769632339477538, 0.022760032653808594, 0.02264816093444824, 0.022608736038208007, 0.02267091178894043, 0.022730880737304688, 0.022752864837646485, 0.023999200820922852, 0.02401679992675781, 0.02295408058166504, 0.02285740852355957, 0.0230830078125, 0.022657344818115235, 0.022800384521484376, 0.023675104141235352, 0.02270083236694336, 0.023161983489990233, 0.023257984161376952, 0.022996383666992186, 0.0228002872467041, 0.02261062431335449, 0.022812671661376953, 0.022730752944946288, 0.022552127838134765, 0.022411712646484373, 0.024016416549682618, 0.0236712646484375, 0.0228351993560791, 0.022676959991455078, 0.02260799980163574, 0.022657440185546874, 0.02265907287597656, 0.022443071365356445, 0.022537151336669923, 0.022683488845825196, 0.02338204765319824, 0.022952064514160157, 0.022958080291748048, 0.022990848541259764, 0.0229171199798584, 0.022726655960083008, 0.022671104431152344, 0.02299929618835449, 0.023198944091796875, 0.02324518394470215, 0.023062944412231445, 0.02301139259338379, 0.0230032958984375, 0.02328553581237793, 0.0232857608795166, 0.02313644790649414, 0.02306390380859375, 0.02316454315185547, 0.023103872299194337, 0.02290060806274414, 0.022923936843872072, 0.0228123836517334, 0.02290505599975586, 0.02305638313293457, 0.022863872528076173, 0.022927616119384767, 0.02277507209777832, 0.02261244773864746, 0.022660192489624024, 0.022585407257080078, 0.022672224044799804, 0.022487039566040038, 0.02250873565673828, 0.022542783737182617, 0.022679935455322264, 0.022633888244628905, 0.02251807975769043, 0.022659263610839843, 0.02290287971496582, 0.023052288055419923, 0.022841344833374022, 0.022690912246704102, 0.02255264091491699, 0.022247135162353517, 0.02254841613769531, 0.023210943222045897, 0.02265497589111328, 0.02250752067565918, 0.02244918441772461, 0.0226760311126709, 0.022454687118530273, 0.022693248748779298, 0.02238528060913086, 0.022478847503662108, 0.022378496170043945, 0.02313804817199707, 0.022506879806518554, 0.02250636863708496, 0.02230633544921875, 0.022454368591308595, 0.02241779136657715, 0.02249728012084961, 0.022452192306518556, 0.022509599685668947, 0.022519807815551757, 0.022540191650390624, 0.022525152206420897, 0.022610815048217773, 0.02263859176635742, 0.022541568756103515, 0.02256358337402344, 0.022673408508300782, 0.022460416793823244, 0.02253004837036133, 0.022634496688842775, 0.02267136001586914, 
0.022558719635009765, 0.023029727935791014, 0.023095327377319334, 0.02256671905517578, 0.02260323143005371, 0.022698720932006835, 0.022611488342285158, 0.022585376739501953, 0.022538463592529298, 0.022820159912109374, 0.022811168670654296, 0.023069055557250976, 0.022800384521484376, 0.022576255798339842, 0.02272092819213867, 0.02308348846435547, 0.023076543807983397, 0.023341375350952147, 0.022842399597167967, 0.023016000747680666, 0.023354976654052735, 0.02305449676513672, 0.022786720275878906, 0.022550304412841796, 0.022523391723632814, 0.022525951385498046, 0.022500064849853514, 0.022411264419555665, 0.022458368301391602, 0.022619136810302733, 0.022205184936523438, 0.02251568031311035, 0.022622335433959962, 0.022674560546875, 0.02252275276184082, 0.02252992057800293, 0.022497247695922852, 0.02255196762084961, 0.02251228713989258, 0.02253398323059082, 0.022561023712158203, 0.022384639739990234, 0.022525888442993164, 0.02247999954223633, 0.022490047454833986, 0.022579200744628908, 0.022511615753173828, 0.022518880844116212, 0.022495424270629883, 0.022481632232666016, 0.022627967834472656, 0.02257481575012207, 0.02259424018859863, 0.02277987289428711, 0.0227061767578125, 0.02247590446472168, 0.022614912033081056, 0.02280243110656738, 0.022556671142578123, 0.022576223373413085, 0.022547359466552733, 0.022640640258789063, 0.022833152770996092, 0.02265088081359863, 0.02264816093444824, 0.022575136184692382, 0.02283075141906738, 0.02285977554321289, 0.02270102310180664, 0.022634496688842775, 0.02279631996154785, 0.022681568145751955, 0.022548479080200197, 0.022867040634155275, 0.022604703903198242, 0.02274508857727051, 0.02273689651489258, 0.022681280136108397, 0.02275564765930176, 0.022720256805419923, 0.022733055114746093, 0.02271171188354492, 0.022772544860839843, 0.022764991760253907, 0.02291337585449219, 0.022601728439331056, 0.022720352172851562, 0.022722719192504882, 0.022951360702514648, 0.02277452850341797, 0.022869632720947265, 0.02269977569580078, 0.022966720581054687, 0.022420320510864258, 0.022601247787475586, 0.022651359558105467, 0.023166559219360353, 0.02293391990661621, 0.022816768646240236, 0.02271027183532715, 0.022534143447875975, 0.02265088081359863, 0.022511615753173828, 0.02267955207824707, 0.023102527618408204, 0.022862144470214844, 0.022765504837036134, 0.022673120498657228, 0.022658016204833983, 0.023590911865234376, 0.022726655960083008, 0.022544384002685547, 0.022589216232299803, 0.02269388771057129, 0.022714591979980468, 0.022597408294677733, 0.02259312057495117, 0.022571264266967775, 0.02267900848388672, 0.02264566421508789, 0.02266444778442383, 0.022624895095825194, 0.022583423614501955, 0.02263859176635742, 0.022592735290527344, 0.022708703994750976, 0.022874431610107424, 0.02263859176635742, 0.02262406349182129, 0.022536384582519532, 0.022613407135009766, 0.022768224716186523, 0.022724607467651366, 0.02257891273498535, 0.022655263900756836, 0.02259702491760254, 0.02272220802307129, 0.022696895599365233, 0.02273276710510254, 0.022601247787475586, 0.022603328704833985, 0.02274604797363281, 0.022654815673828124, 0.023660703659057616, 0.025016319274902343, 0.022773759841918945, 0.02270537567138672, 0.022602527618408204, 0.022808576583862306, 0.02279609680175781, 0.022894784927368163, 0.022680767059326173, 0.022702688217163085, 0.022847360610961914, 0.02253561592102051, 0.02264361572265625, 0.02231167984008789, 0.022517759323120116, 0.022624128341674803, 0.022509855270385744, 0.022627199172973633, 0.02256355285644531, 0.022693439483642577, 0.022663135528564454, 
0.02260259246826172, 0.022480768203735353, 0.022665216445922853, 0.022527776718139648, 0.022604000091552733, 0.022552576065063477, 0.022788095474243163, 0.022509567260742186, 0.022744640350341797, 0.02253664016723633, 0.022632192611694336, 0.02247091293334961, 0.022665216445922853, 0.022652032852172852, 0.022573856353759764, 0.022587488174438477, 0.02254643249511719, 0.022529439926147463, 0.02256752014160156, 0.023143583297729493, 0.022706464767456056, 0.022573631286621095, 0.022455455780029297, 0.022658048629760744, 0.022519647598266603, 0.022550207138061523, 0.02265497589111328, 0.02252217674255371, 0.022648832321166993, 0.022580255508422853, 0.022686687469482422, 0.022563871383666993, 0.022875104904174805, 0.022519168853759767, 0.02256732749938965, 0.02263372802734375, 0.0225515193939209, 0.022507360458374023, 0.022468767166137694, 0.02244918441772461, 0.022559711456298828, 0.02240057563781738, 0.022591936111450196, 0.022510623931884764, 0.022598047256469727, 0.022556447982788087, 0.02247555160522461, 0.02248294448852539, 0.022582880020141603, 0.02257315254211426, 0.0234616641998291, 0.02460316848754883, 0.02282284736633301, 0.022814783096313476, 0.022632448196411133, 0.022362720489501952, 0.022566240310668947, 0.022702432632446288, 0.02259779167175293, 0.02267145538330078, 0.022560127258300783, 0.022575807571411134, 0.022517183303833007, 0.02251628875732422, 0.02251571273803711, 0.022552576065063477, 0.022503423690795898, 0.022681600570678712, 0.022561920166015624, 0.022489248275756837, 0.022577728271484375, 0.022519968032836915, 0.022697984695434572, 0.0226376953125, 0.022613920211791993, 0.022827999114990234, 0.023748607635498048, 0.022558176040649414, 0.022574623107910155, 0.022635679244995117, 0.02258927917480469, 0.022638240814208985, 0.022595935821533204, 0.022531232833862304, 0.025098495483398438, 0.02370368003845215, 0.02265555191040039, 0.022599584579467775, 0.023855104446411132, 0.02251468849182129, 0.022518783569335937, 0.02249728012084961, 0.022834400177001953, 0.022519647598266603, 0.022885311126708986, 0.022566879272460937, 0.022468511581420898, 0.022540416717529297, 0.02256208038330078, 0.02256764793395996, 0.022646751403808594, 0.02292937660217285, 0.022545536041259764, 0.022805440902709962, 0.02251571273803711, 0.022519712448120118, 0.023029855728149414, 0.02270412826538086, 0.02246246337890625, 0.022432992935180664, 0.022457120895385742, 0.022574880599975585, 0.022614240646362305, 0.02253824043273926, 0.022591360092163087, 0.02248716735839844, 0.022689279556274415, 0.022703903198242188, 0.022299264907836912, 0.022494911193847656, 0.0225532169342041, 0.02258243179321289, 0.02250428771972656, 0.022544448852539062, 0.022534080505371094, 0.022564863204956053, 0.022627487182617187, 0.022737728118896485, 0.02265910339355469, 0.022674943923950194, 0.022577600479125978, 0.02281667137145996, 0.022612127304077148, 0.02271753692626953, 0.022607967376708983, 0.022966495513916017, 0.022790016174316405, 0.02256355285644531, 0.022580671310424804, 0.02261459159851074, 0.02264473533630371, 0.0228306884765625, 0.022599456787109375, 0.022725248336791994, 0.022959423065185548, 0.02285212707519531, 0.02281484794616699, 0.022793855667114258, 0.023158367156982423, 0.023151039123535156, 0.022847808837890626, 0.022698047637939454, 0.022533920288085936, 0.022860000610351563, 0.022712160110473632, 0.022759136199951173, 0.022655424118041993, 0.022617088317871094, 0.02270096015930176, 0.024053855895996092, 0.022687744140625, 0.0231646728515625, 0.02378691291809082, 0.02313916778564453, 
0.023580671310424805, 0.02327756881713867, 0.023011327743530274, 0.023003135681152344, 0.02267900848388672, 0.022501632690429686, 0.02258358383178711, 0.022763519287109374, 0.022541919708251954, 0.022651296615600586, 0.022665216445922853, 0.022906879425048828, 0.022565887451171874, 0.022780927658081054, 0.0226712646484375, 0.022857824325561524, 0.023045631408691408, 0.023267328262329103, 0.025502784729003906, 0.023565248489379884, 0.023369888305664062, 0.02298784065246582, 0.022897439956665037, 0.022775392532348632, 0.022647199630737306, 0.02268569564819336, 0.022588735580444337, 0.023145151138305665, 0.022662336349487305, 0.022797119140625, 0.022619680404663087, 0.02268822479248047, 0.022749183654785156, 0.022666240692138673, 0.022598560333251954, 0.02258070373535156, 0.022796384811401366, 0.023132608413696288, 0.02312396812438965, 0.022953184127807617, 0.022756223678588868, 0.023138080596923828, 0.022739360809326172, 0.022638399124145506, 0.022509056091308592, 0.02270787239074707, 0.022408031463623048, 0.022406496047973633, 0.022540735244750976, 0.022442207336425782, 0.022571008682250978, 0.023860416412353515, 0.022936384201049806, 0.023011327743530274, 0.022929311752319336, 0.022605920791625978, 0.022517759323120116, 0.02255411148071289, 0.022850048065185546, 0.022381792068481444, 0.02248137664794922, 0.022339487075805665, 0.02253593635559082, 0.02250579261779785, 0.022415712356567384, 0.022404191970825195, 0.022564895629882814, 0.022582271575927734, 0.022523391723632814, 0.022540672302246094, 0.022493152618408202, 0.022538272857666016, 0.022552255630493165, 0.022464479446411133, 0.022776159286499023, 0.022968320846557616, 0.02319919967651367, 0.02296272087097168, 0.02310758399963379, 0.023037343978881835]",tokens/s,43.93602104148196,,, 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking 
backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 201361 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 196603 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3573.94432,5177.737216,0.0,4775.215104,4427.072512,s,1,11.119751953125,11.119751953125,0.0,11.119751953125,11.119751953125,11.119751953125,11.119751953125,[11.119751953125],,kWh,0.0001128763957124723,1.2443728999051034e-05,4.926892830399432e-05,0.00017458905301551765,,MB,1574.4,5211.291648,0.0,4794.089472,4101.022208,s,10,26.467812255859375,2.6467812255859373,0.0060310607252414905,2.6477728271484375,2.6529197998046876,2.6530576782226563,2.653167980957031,"[2.631557373046875, 2.642576416015625, 2.6442041015625, 2.647812744140625, 2.646906982421875, 2.64773291015625, 2.65036572265625, 2.6505712890625, 2.653195556640625, 2.65288916015625]",tokens/s,96.72125430137407,kWh,7.728714108500526e-05,8.523710005646098e-06,5.137517998899943e-05,0.00013718603107965079,tokens/kWh,1866079.2063542192,MB,1586.487296,5219.680256,0.0,4802.47808,4101.024768,s,10,20.173151733398438,2.017315173339844,0.005070198683335497,2.0176444091796872,2.0230359375,2.02380849609375,2.02442654296875,"[2.0245810546875, 2.0178944091796875, 2.010282958984375, 2.013750732421875, 2.0207354736328127, 2.0173944091796874, 2.0227044677734374, 2.0228642578125, 2.0124609375, 2.0104830322265625]",tokens/s,31.22962679931561,kWh,5.908047322040676e-05,6.517965401473329e-06,3.9209364700799884e-05,0.00010480780332267997,tokens/kWh,601100.280730405,,s,630,20.170489122390755,0.03201664940062023,0.0005471500743864998,0.03193035221099853,0.03238204803466797,0.03266893825531006,0.03456196727752686,"[0.0332011833190918, 0.03219561767578125, 0.03212489700317383, 0.031943775177001955, 0.032779167175292966, 0.03240963363647461, 0.03213820648193359, 0.03196931266784668, 0.0324351692199707, 0.0322537612915039, 0.03212857437133789, 0.0321781120300293, 0.03181347274780273, 0.03177353668212891, 0.03167612838745117, 0.03208425521850586, 0.032122943878173826, 0.03371120071411133, 0.03184896087646484, 0.03199216079711914, 0.03179520034790039, 0.03215353775024414, 0.031899711608886716, 0.03186483192443847, 0.031899904251098635, 0.032075424194335934, 0.03229705429077148, 0.03221913528442383, 0.032054561614990235, 
0.03186147117614746, 0.03197337532043457, 0.032061439514160156, 0.03194172859191895, 0.03217692947387695, 0.031998079299926756, 0.03215151977539062, 0.03228041458129883, 0.032098590850830076, 0.03211254501342774, 0.03213324737548828, 0.03205424118041992, 0.0318832950592041, 0.03193280029296875, 0.03231795120239258, 0.032091838836669925, 0.031977792739868165, 0.03222323226928711, 0.03231948852539063, 0.03234764862060547, 0.032309120178222656, 0.03212761688232422, 0.032159744262695314, 0.032373985290527346, 0.03205567932128906, 0.03199017524719238, 0.03211030578613281, 0.031838495254516604, 0.03229695892333984, 0.03200204849243164, 0.032239551544189456, 0.03206681442260742, 0.03187795257568359, 0.031887359619140625, 0.032811969757080076, 0.03193036842346191, 0.031692384719848636, 0.03162908744812012, 0.032167583465576174, 0.03171439933776855, 0.03154854393005371, 0.031601408004760745, 0.032036865234375, 0.03166748809814453, 0.03168943977355957, 0.031473663330078124, 0.03169513511657715, 0.031618560791015625, 0.0317869758605957, 0.03211004638671875, 0.032018688201904295, 0.03183014488220215, 0.03186531257629394, 0.031999935150146486, 0.03153251266479492, 0.031748447418212894, 0.03150252723693848, 0.03165388870239258, 0.031680511474609374, 0.031221759796142577, 0.036329471588134765, 0.031909887313842776, 0.03173110389709473, 0.03174671936035156, 0.03162719917297363, 0.031821952819824216, 0.03156300735473633, 0.03166636848449707, 0.03200454330444336, 0.03267132949829102, 0.03187548828125, 0.03172515106201172, 0.03228508758544922, 0.03167212867736816, 0.031631551742553714, 0.03162931251525879, 0.031735807418823245, 0.03172960090637207, 0.03163667106628418, 0.03185526466369629, 0.03256662368774414, 0.03213151931762695, 0.03187142372131348, 0.031663200378417966, 0.032013118743896486, 0.03174195289611816, 0.03196143913269043, 0.031761184692382816, 0.03181404876708984, 0.03180601692199707, 0.03168400001525879, 0.03168083190917969, 0.03441385650634766, 0.03462246322631836, 0.03529715347290039, 0.031825023651123045, 0.031707551956176756, 0.03278092956542969, 0.03207731246948242, 0.03189401626586914, 0.03162675285339355, 0.03152537536621094, 0.031760704040527346, 0.03186457633972168, 0.032132095336914065, 0.031836288452148434, 0.03323372650146485, 0.034113536834716796, 0.031834463119506835, 0.031568927764892576, 0.031574655532836914, 0.031534912109375, 0.03191164779663086, 0.03163497543334961, 0.03171766471862793, 0.0315644474029541, 0.031700063705444335, 0.03152780723571778, 0.03170262336730957, 0.031604320526123046, 0.03160390472412109, 0.03164944076538086, 0.03206982421875, 0.03223532867431641, 0.03236249542236328, 0.03176038360595703, 0.03213651275634766, 0.031677120208740236, 0.03162508773803711, 0.03152809524536133, 0.031578943252563475, 0.0314881591796875, 0.03160425567626953, 0.031633344650268555, 0.03177731132507324, 0.03162112045288086, 0.03220412826538086, 0.03191427230834961, 0.03190979194641113, 0.036536800384521485, 0.03193814468383789, 0.03155551910400391, 0.03193494415283203, 0.03223865509033203, 0.03157292747497559, 0.03138559913635254, 0.03148547172546387, 0.03155606460571289, 0.031638559341430665, 0.03159753608703613, 0.03138559913635254, 0.031373119354248045, 0.03187139129638672, 0.03171900749206543, 0.032145599365234374, 0.03196928024291992, 0.03177497673034668, 0.03187071990966797, 0.031637279510498044, 0.031723743438720704, 0.032666015625, 0.03172975921630859, 0.03167571258544922, 0.031703744888305664, 0.032151294708251954, 0.03251839828491211, 0.03280806350708008, 
0.031775487899780276, 0.03199603271484375, 0.031673759460449216, 0.03150908851623535, 0.03162112045288086, 0.031485631942749025, 0.03183379173278809, 0.031674816131591794, 0.031510623931884765, 0.03167625617980957, 0.03150601577758789, 0.031652799606323244, 0.03201814270019531, 0.032769344329833985, 0.03263343811035156, 0.031950143814086916, 0.03179097557067871, 0.03175843238830566, 0.032053470611572266, 0.036847999572753906, 0.0319202880859375, 0.03194272041320801, 0.03160639953613281, 0.031763999938964844, 0.031804256439208985, 0.03165388870239258, 0.03172352027893066, 0.031700000762939454, 0.031869312286376954, 0.031570528030395506, 0.0316231689453125, 0.03179670333862305, 0.031766080856323244, 0.03161801528930664, 0.03123961639404297, 0.031531583786010745, 0.031327808380126956, 0.0316044807434082, 0.03179999923706055, 0.03166745567321777, 0.03173247909545898, 0.031866880416870115, 0.03176675224304199, 0.03168643188476562, 0.032307201385498044, 0.03192451286315918, 0.031737344741821286, 0.03172784042358399, 0.031650911331176756, 0.03170969581604004, 0.03241820907592773, 0.03596035385131836, 0.03197999954223633, 0.03161020851135254, 0.0315378246307373, 0.031365343093872074, 0.033132545471191405, 0.03196928024291992, 0.03185868835449219, 0.03147539138793945, 0.03154361534118652, 0.03232972717285156, 0.03180886459350586, 0.03152144050598144, 0.03167231941223145, 0.03524143981933594, 0.032096927642822265, 0.03218803024291992, 0.031930335998535155, 0.032061439514160156, 0.032094112396240236, 0.03209664154052734, 0.032092159271240234, 0.03208585739135742, 0.033286304473876954, 0.031950527191162106, 0.03186310386657715, 0.031936511993408204, 0.031909887313842776, 0.03187264060974121, 0.03174028778076172, 0.03173948860168457, 0.03202908706665039, 0.03249055862426758, 0.032166622161865235, 0.032174240112304686, 0.03198345565795899, 0.03220207977294922, 0.032648094177246095, 0.03227849578857422, 0.03210649490356445, 0.032088127136230465, 0.031678272247314454, 0.031872224807739255, 0.03180019187927246, 0.03170051193237305, 0.03259648132324219, 0.03171452713012695, 0.032031520843505856, 0.0319520320892334, 0.03190255928039551, 0.03213516616821289, 0.03266355133056641, 0.032499713897705076, 0.03198566436767578, 0.03191142463684082, 0.031998464584350586, 0.031946144104003905, 0.031897472381591796, 0.03163619232177734, 0.0318156795501709, 0.03215296173095703, 0.03174787139892578, 0.03172793579101563, 0.0318756160736084, 0.032046142578125, 0.03189651107788086, 0.031768575668334964, 0.03185062408447266, 0.033049758911132814, 0.032269153594970706, 0.0321984977722168, 0.03201593780517578, 0.03203311920166016, 0.032038433074951175, 0.03208182525634766, 0.03221321487426758, 0.032234081268310545, 0.03198953628540039, 0.03193673515319824, 0.031850496292114255, 0.03245011138916016, 0.03202883148193359, 0.03208822250366211, 0.0320175666809082, 0.03221942520141602, 0.03251884841918945, 0.032333824157714845, 0.03224371337890625, 0.032247806549072264, 0.03239731216430664, 0.032380352020263674, 0.03208659362792969, 0.03219660949707031, 0.03262681579589844, 0.03230297470092774, 0.03203481674194336, 0.03189555168151856, 0.031759967803955076, 0.031826335906982424, 0.03173702430725098, 0.03179926490783692, 0.03172025680541992, 0.031702976226806644, 0.03193804740905762, 0.03182243156433105, 0.03183206367492676, 0.031977407455444334, 0.03178054428100586, 0.032179969787597656, 0.03310377502441406, 0.03225263977050781, 0.03203241729736328, 0.03237289428710938, 0.031953088760375975, 0.0320184326171875, 0.03207120132446289, 
0.03186531257629394, 0.031659072875976565, 0.031365631103515625, 0.0317198715209961, 0.0317071361541748, 0.031710527420043946, 0.031691455841064455, 0.03177471923828125, 0.03178291130065918, 0.031665824890136716, 0.03180726432800293, 0.03147423934936523, 0.03152393531799316, 0.03176540756225586, 0.03177836799621582, 0.033098464965820314, 0.03210233688354492, 0.031889055252075194, 0.03209849548339844, 0.03212515258789062, 0.032053249359130856, 0.0321638412475586, 0.0316866569519043, 0.03220383834838867, 0.03197583961486816, 0.03185446357727051, 0.031760543823242185, 0.03164352035522461, 0.03170982360839844, 0.0315610237121582, 0.031629663467407226, 0.031940031051635745, 0.032088993072509765, 0.03200204849243164, 0.03195404815673828, 0.03181657600402832, 0.032043006896972655, 0.03199699211120605, 0.032034912109375, 0.03213747024536133, 0.032588382720947266, 0.032196319580078125, 0.032152862548828126, 0.03204403305053711, 0.0321003532409668, 0.03205446243286133, 0.032158206939697266, 0.032055614471435546, 0.0320552978515625, 0.03228022384643554, 0.03257913589477539, 0.03234444808959961, 0.032507518768310546, 0.03257219314575195, 0.03309363174438477, 0.03261439895629883, 0.032157951354980466, 0.0324749755859375, 0.03236249542236328, 0.03209616088867188, 0.03203302383422851, 0.032284416198730466, 0.03222323226928711, 0.03192783927917481, 0.0320557746887207, 0.031954944610595705, 0.03182796859741211, 0.03181324768066406, 0.031820159912109375, 0.03189299201965332, 0.03205376052856445, 0.032249950408935545, 0.032051105499267575, 0.03197443199157715, 0.03208086395263672, 0.03200924682617187, 0.03210134506225586, 0.03203456115722656, 0.03317900848388672, 0.03208659362792969, 0.03199590492248535, 0.03213926315307617, 0.03204915237426758, 0.03201001739501953, 0.032319713592529296, 0.03217542266845703, 0.03195747184753418, 0.03179542350769043, 0.0318791675567627, 0.03196479988098144, 0.03190822410583496, 0.0320871696472168, 0.032066432952880856, 0.031889408111572266, 0.03212083053588867, 0.03214950561523437, 0.031915712356567386, 0.03200236892700195, 0.03223936080932617, 0.03214566421508789, 0.032008190155029294, 0.03235609436035156, 0.03235811233520508, 0.03196368026733398, 0.03189555168151856, 0.031974592208862306, 0.03214214324951172, 0.03179929542541504, 0.03183206367492676, 0.031850496292114255, 0.03200553512573242, 0.03245116806030274, 0.03194790458679199, 0.03195379257202149, 0.032010238647460935, 0.03201638412475586, 0.032150558471679684, 0.03214806365966797, 0.03190543937683105, 0.031693536758422854, 0.03198361587524414, 0.031821823120117186, 0.03206969451904297, 0.032237503051757814, 0.03250995254516602, 0.03254816055297852, 0.03288134384155274, 0.032040767669677735, 0.03199113655090332, 0.03197148895263672, 0.03198860740661621, 0.03208486557006836, 0.031959135055541994, 0.03189356803894043, 0.031894304275512694, 0.03189577674865723, 0.03195673561096191, 0.032065567016601564, 0.03179276847839355, 0.033040767669677736, 0.03342272186279297, 0.03313644790649414, 0.03195116806030274, 0.03208736038208008, 0.031916736602783206, 0.03195680046081543, 0.03197686386108398, 0.03200694274902344, 0.0324136962890625, 0.031954944610595705, 0.03206329727172851, 0.03172371292114258, 0.03154489517211914, 0.03151638412475586, 0.031453920364379884, 0.03152076721191406, 0.03180544090270996, 0.031938560485839845, 0.03203071975708008, 0.032053249359130856, 0.031958688735961915, 0.03187923240661621, 0.03182851219177246, 0.031850112915039065, 0.032066913604736326, 0.03187936019897461, 0.03244121551513672, 
0.03211215972900391, 0.03187475204467773, 0.03234048080444336, 0.03194179153442383, 0.031738815307617185, 0.03181353569030762, 0.03169059181213379, 0.03244844818115234, 0.03189897537231445, 0.031918975830078126, 0.032003231048583984, 0.03185951995849609, 0.03191939163208008, 0.032025344848632814, 0.03211654281616211, 0.03164969635009766, 0.03179155158996582, 0.031543136596679684, 0.03175014305114746, 0.031679840087890626, 0.03169552040100097, 0.03180339241027832, 0.0318515510559082, 0.03189039993286133, 0.031815296173095704, 0.03194099235534668, 0.03178700828552246, 0.031678464889526366, 0.03165184020996094, 0.03231084823608398, 0.03187676811218262, 0.031933216094970705, 0.032059391021728514, 0.03339059066772461, 0.03181363105773926, 0.031721504211425784, 0.031897567749023435, 0.032763904571533206, 0.031833568572998044, 0.03174454307556152, 0.031682559967041016, 0.03226009750366211, 0.03197337532043457, 0.03155942344665527, 0.031449344635009764, 0.031716928482055665, 0.03160905647277832, 0.03152918434143066, 0.03158835220336914, 0.03148185539245606, 0.03177267265319824, 0.03181977653503418, 0.031453184127807614, 0.03169075202941894, 0.03250380706787109, 0.03342335891723633, 0.03197542381286621, 0.031999040603637695, 0.031808000564575195, 0.03165030479431152, 0.03189958381652832, 0.031542623519897464, 0.03152070426940918, 0.0317359676361084, 0.03191836738586426, 0.031826080322265624, 0.031690879821777346, 0.0319257926940918, 0.03188374328613281, 0.0317255687713623, 0.03190784072875977, 0.03250902557373047, 0.032203678131103516, 0.0318787841796875, 0.031857023239135744, 0.03185663986206055, 0.032020481109619144, 0.0318603515625, 0.03176246452331543, 0.031862112045288084, 0.031898624420166014, 0.03203203201293945, 0.03214761734008789, 0.03210636901855469, 0.032225982666015625, 0.03213235092163086, 0.032577953338623046, 0.032051551818847654, 0.031833568572998044, 0.03189609527587891, 0.03176038360595703, 0.031866880416870115, 0.031959327697753906, 0.031921728134155274, 0.03184246444702148, 0.0316496639251709, 0.03198582458496094, 0.03172905540466309, 0.03192652893066406, 0.031955232620239256]",tokens/s,31.233749274857843,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = 
worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 199269 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 
2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 197126 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3574.972416,4498.259968,0.0,4112.515072,3976.487424,s,1,9.820244140625,9.820244140625,0.0,9.820244140625,9.820244140625,9.820244140625,9.820244140625,[9.820244140625],,kWh,7.56064165208348e-05,8.332437672053822e-06,2.4561408538001372e-05,0.00010850026273088999,,MB,1528.471552,4697.489408,0.0,4282.384384,4102.201856,s,10,3.118543518066406,0.3118543518066406,0.0027885164091623607,0.31227796936035157,0.3147422180175781,0.3151812591552734,0.31553249206542966,"[0.305838623046875, 0.3138003845214844, 0.31090267944335936, 0.3104888916015625, 0.30905331420898435, 0.3146446533203125, 0.31318768310546874, 0.31363873291015626, 0.3113682556152344, 0.3156203002929687]",tokens/s,820.8960321282544,kWh,8.994278929293329e-06,9.91911185920321e-07,5.9880771810302065e-06,1.5974267296243858e-05,tokens/kWh,16025774.156176483,MB,1553.145856,4707.975168,0.0,4290.772992,4102.204416,s,10,20.930068359375003,2.0930068359375005,0.004535972269459129,2.0920119628906253,2.0974590576171876,2.1000917114257813,2.102197834472656,"[2.091965576171875, 2.085289794921875, 2.09565673828125, 2.102724365234375, 2.092058349609375, 2.090868408203125, 2.090119140625, 2.094879638671875, 2.0968740234375, 2.08963232421875]",tokens/s,30.100236137920216,kWh,6.144587963862142e-05,6.7774379014006015e-06,4.061351565577056e-05,0.00010883683319579258,tokens/kWh,578848.1541599601,,s,630,20.92746191406251,0.03321819351438492,0.0005044312832219247,0.03313449478149414,0.03351120300292969,0.03375649185180664,0.03442503734588623,"[0.033941761016845706, 0.03307161712646484, 0.03310563278198242, 0.03300137710571289, 0.03315955352783203, 0.03289408111572266, 0.03280316925048828, 0.0332374382019043, 0.03331830215454101, 0.03303456115722656, 0.03351123046875, 0.03304710388183594, 0.03287039947509766, 0.03294617462158203, 0.03317692947387695, 0.033530529022216794, 0.03356675338745117, 0.0332360954284668, 0.03303062438964844, 0.03407500839233398, 0.03316515350341797, 0.03301801681518555, 0.03323247909545898, 0.03309199905395508, 0.03304227066040039, 0.03285532760620117, 0.03287539291381836, 0.03324518585205078, 
0.032994560241699215, 0.03292566299438476, 0.03301043319702148, 0.03301993560791015, 0.03298099136352539, 0.033081344604492184, 0.03318918228149414, 0.03317756652832031, 0.033048606872558596, 0.03335443115234375, 0.033809600830078126, 0.033331390380859374, 0.033141216278076174, 0.033151134490966794, 0.0331038703918457, 0.033089473724365236, 0.03324524688720703, 0.03333232116699219, 0.03333212661743164, 0.03341696166992188, 0.03406691360473633, 0.033511199951171876, 0.0332490234375, 0.033318943023681644, 0.03345635223388672, 0.033130271911621094, 0.03323855972290039, 0.033044193267822264, 0.0332072639465332, 0.033452030181884765, 0.03303833770751953, 0.03309936141967774, 0.03320220947265625, 0.03299161529541016, 0.032901119232177735, 0.03395593643188476, 0.03303945541381836, 0.032945056915283204, 0.033333473205566407, 0.033459999084472655, 0.03402751922607422, 0.03314688110351562, 0.033314815521240236, 0.033279998779296875, 0.03313612747192383, 0.033047039031982424, 0.033060737609863285, 0.03303366470336914, 0.03289158248901367, 0.03300339126586914, 0.03287052917480469, 0.03285939025878906, 0.032905982971191405, 0.032995681762695316, 0.03292329788208008, 0.032919551849365236, 0.03283148956298828, 0.03283788681030273, 0.032853759765625, 0.03283520126342773, 0.032755905151367185, 0.03312985610961914, 0.03319481658935547, 0.03296169662475586, 0.032929855346679686, 0.033053600311279296, 0.03305039978027344, 0.03313411331176758, 0.03336249542236328, 0.033470462799072266, 0.03342486572265625, 0.03322943878173828, 0.033293502807617184, 0.03326025772094727, 0.03312844848632813, 0.033073150634765625, 0.0329257926940918, 0.03303721618652344, 0.03319481658935547, 0.032978782653808596, 0.0332127685546875, 0.03288063812255859, 0.03303174209594727, 0.03296454238891602, 0.032850433349609375, 0.03306809616088867, 0.03308844757080078, 0.03298918533325195, 0.03289907073974609, 0.03290268707275391, 0.033331680297851565, 0.033078304290771486, 0.03355542373657226, 0.03304447937011719, 0.033142784118652346, 0.0330093765258789, 0.0328889274597168, 0.03299347305297851, 0.03401318359375, 0.033181697845458984, 0.033363967895507815, 0.033124351501464845, 0.03335782241821289, 0.03376537704467773, 0.03299123382568359, 0.03265644836425781, 0.03282425689697266, 0.03304758453369141, 0.03299407958984375, 0.033054912567138675, 0.032952320098876955, 0.03293695831298828, 0.03302297592163086, 0.03362323379516602, 0.03327059173583984, 0.033095134735107425, 0.03293369674682617, 0.03315337753295899, 0.03312063980102539, 0.03313388824462891, 0.033151233673095706, 0.033166976928710935, 0.0329920654296875, 0.0333678092956543, 0.0331962890625, 0.03301987075805664, 0.03318991851806641, 0.033099777221679685, 0.03316326522827148, 0.03308240127563476, 0.03320041656494141, 0.03360838317871094, 0.033310302734375, 0.033278369903564455, 0.033163425445556644, 0.03305865478515625, 0.033115646362304685, 0.033319423675537106, 0.03344169616699219, 0.03344188690185547, 0.03335708618164063, 0.033340129852294925, 0.0332573127746582, 0.033466529846191403, 0.03351279830932617, 0.03326566314697266, 0.03324550247192383, 0.0330939826965332, 0.033238945007324217, 0.033529953002929686, 0.03366428756713867, 0.03374563217163086, 0.033157119750976564, 0.03348608016967773, 0.033366783142089844, 0.03354380798339844, 0.03398704147338867, 0.033195934295654296, 0.03313459014892578, 0.0333037109375, 0.03348342514038086, 0.03389404678344726, 0.03313056182861328, 0.03318735885620117, 0.03328230285644531, 0.033615806579589846, 0.03335430526733398, 0.0328089599609375, 
0.03294617462158203, 0.03290428924560547, 0.03309660720825195, 0.03309471893310547, 0.033198688507080076, 0.03305712127685547, 0.03315222549438476, 0.03318044662475586, 0.0331343994140625, 0.033417407989501956, 0.033199264526367185, 0.033006431579589844, 0.033091583251953126, 0.03332505416870117, 0.03298713684082031, 0.033062686920166014, 0.03331679916381836, 0.03320041656494141, 0.033642494201660156, 0.033501182556152344, 0.03321414566040039, 0.03336019134521485, 0.033050624847412106, 0.033021728515625, 0.03306835174560547, 0.03309616088867187, 0.03307574462890625, 0.033694976806640624, 0.033527935028076175, 0.03326006317138672, 0.033067008972167966, 0.03319948959350586, 0.03343628692626953, 0.034315582275390624, 0.033305278778076174, 0.03333324813842774, 0.03339388656616211, 0.033269824981689455, 0.0332723503112793, 0.033126590728759765, 0.03329244613647461, 0.033244510650634766, 0.033122112274169925, 0.03331961441040039, 0.03310969543457031, 0.033126720428466795, 0.033888256072998044, 0.04056883239746094, 0.03336102294921875, 0.033391265869140624, 0.03323516845703125, 0.03313363265991211, 0.03323385620117188, 0.03312639999389649, 0.033073150634765625, 0.03336601638793945, 0.03402799987792969, 0.03337420654296875, 0.03336806488037109, 0.03316326522827148, 0.03314688110351562, 0.033277950286865234, 0.03321036911010742, 0.0330313606262207, 0.033061664581298826, 0.033049793243408204, 0.032906078338623045, 0.03276825714111328, 0.0328087043762207, 0.03310182571411133, 0.03316121673583984, 0.03295868682861328, 0.033113441467285155, 0.033034687042236326, 0.03307628631591797, 0.033229759216308594, 0.03299737548828125, 0.03298099136352539, 0.033181697845458984, 0.03317279815673828, 0.03319059371948242, 0.03326310348510742, 0.0332149772644043, 0.03363612747192383, 0.0344024658203125, 0.033392704010009766, 0.033314815521240236, 0.0335810546875, 0.03330252838134766, 0.0332677116394043, 0.03330636978149414, 0.03311846542358399, 0.03302809524536133, 0.03316121673583984, 0.0332861442565918, 0.03319977569580078, 0.033167713165283205, 0.033080894470214846, 0.03312035369873047, 0.03284003067016601, 0.033259231567382815, 0.03292598342895508, 0.03290243148803711, 0.032942817687988284, 0.03319807815551758, 0.03298502349853515, 0.03297219085693359, 0.03290313720703125, 0.03296278381347656, 0.03352556610107422, 0.03300614547729492, 0.0332760009765625, 0.03338444900512695, 0.033148448944091795, 0.0331657600402832, 0.0341258544921875, 0.033337249755859374, 0.03348886489868164, 0.03320633697509766, 0.03378400039672851, 0.03305356979370117, 0.03323817443847656, 0.03313436889648438, 0.032858081817626957, 0.032915489196777344, 0.03281100845336914, 0.032817150115966795, 0.033083393096923826, 0.03292979049682617, 0.033013439178466795, 0.03312262344360352, 0.03283500671386719, 0.032954944610595706, 0.032985088348388675, 0.033235233306884764, 0.03305411148071289, 0.03305916976928711, 0.03326668930053711, 0.03333014297485352, 0.03304447937011719, 0.03291340637207031, 0.03306028747558594, 0.03308745574951172, 0.03316569519042969, 0.03320444869995117, 0.0332628173828125, 0.03314271926879883, 0.033125057220458984, 0.033318943023681644, 0.033310527801513674, 0.03329670333862305, 0.03316940689086914, 0.03313836669921875, 0.03356860733032226, 0.03332137680053711, 0.03302201461791992, 0.03298918533325195, 0.033125759124755856, 0.03315571212768555, 0.033173503875732424, 0.03306496047973633, 0.033197246551513675, 0.03313884735107422, 0.033168033599853514, 0.03325040054321289, 0.033489822387695316, 0.03325132751464844, 
0.0332239990234375, 0.03334624099731445, 0.03314688110351562, 0.03298713684082031, 0.03303200149536133, 0.03316924667358399, 0.0330951042175293, 0.03311619186401367, 0.03323801422119141, 0.03304230499267578, 0.03315302276611328, 0.033159168243408206, 0.03297075271606445, 0.03612182235717774, 0.03316329574584961, 0.03394319915771484, 0.033256031036376955, 0.03362998580932617, 0.03335811233520508, 0.033085502624511716, 0.03302620697021484, 0.03309958267211914, 0.03303385543823242, 0.03304723358154297, 0.03311820983886719, 0.03292364883422851, 0.033033279418945315, 0.03298812866210937, 0.033011680603027345, 0.03302195358276367, 0.03441049575805664, 0.03408832168579102, 0.033015712738037106, 0.03293619155883789, 0.032917984008789064, 0.033044097900390625, 0.033030529022216794, 0.03290521621704102, 0.033130622863769534, 0.033046337127685545, 0.033151039123535155, 0.03295590209960937, 0.03305244827270508, 0.03310870361328125, 0.03320217514038086, 0.03319193649291992, 0.03358323287963867, 0.03335359954833984, 0.033589054107666015, 0.03332227325439453, 0.033520095825195315, 0.033058944702148436, 0.0333623046875, 0.033062110900878905, 0.03320291137695312, 0.03314688110351562, 0.03303628921508789, 0.03312566375732422, 0.03299606323242187, 0.03306447982788086, 0.032936416625976565, 0.03311983871459961, 0.03299983978271484, 0.03311222457885742, 0.03315398406982422, 0.033091903686523434, 0.032819808959960936, 0.03289875030517578, 0.03293990325927734, 0.032840129852294925, 0.03308246231079102, 0.033102752685546875, 0.03295151901245117, 0.033095745086669924, 0.03316598510742187, 0.03312841415405274, 0.03349020767211914, 0.03373958587646484, 0.03402316665649414, 0.033270271301269534, 0.0332224006652832, 0.03297689437866211, 0.032970558166503905, 0.034595008850097655, 0.034549758911132815, 0.03320012664794922, 0.03330867385864258, 0.03314688110351562, 0.0330805778503418, 0.03315993499755859, 0.033040382385253905, 0.03341641616821289, 0.03358390426635742, 0.03319625473022461, 0.033172542572021485, 0.03311075210571289, 0.03307212829589844, 0.0330863037109375, 0.03304816055297852, 0.0330184326171875, 0.03322060775756836, 0.03324131011962891, 0.0333001594543457, 0.03348275375366211, 0.033314910888671875, 0.033484798431396484, 0.03323494338989258, 0.03323289489746094, 0.03321990585327148, 0.033323711395263675, 0.033264671325683594, 0.03316835021972656, 0.033384254455566406, 0.03311635208129883, 0.033087295532226564, 0.03304057693481445, 0.03317251205444336, 0.03348502349853515, 0.03309030532836914, 0.03298646545410156, 0.03321881484985351, 0.033281791687011716, 0.03295913696289063, 0.03318694305419922, 0.032897918701171876, 0.03301564788818359, 0.03288079833984375, 0.033050624847412106, 0.03378787231445313, 0.03352169418334961, 0.033492992401123044, 0.03328118515014648, 0.033081375122070315, 0.03330361557006836, 0.033092510223388674, 0.03321308898925781, 0.03303228759765625, 0.033121376037597655, 0.033090145111083984, 0.032979358673095704, 0.03304243087768555, 0.03409737777709961, 0.033476608276367184, 0.03322841644287109, 0.034828670501708986, 0.04015718460083008, 0.03320832061767578, 0.03310176086425781, 0.03299948883056641, 0.03311142349243164, 0.03310041427612305, 0.033162944793701174, 0.03297260665893555, 0.033261695861816404, 0.033100479125976565, 0.033105823516845705, 0.03305859375, 0.0331960334777832, 0.03303615951538086, 0.03301798248291016, 0.033374176025390626, 0.03319587326049805, 0.033036350250244144, 0.032908863067626956, 0.03278291320800781, 0.032661502838134765, 0.0327720947265625, 
0.03283148956298828, 0.033064735412597655, 0.03304467010498047, 0.03313257598876953, 0.033159103393554684, 0.03327555084228516, 0.033253631591796874, 0.03323100662231445, 0.03339616012573242, 0.03335222244262695, 0.03318991851806641, 0.033300449371337894, 0.03359747314453125, 0.03443097686767578, 0.033159168243408206, 0.033230846405029296, 0.0335728645324707, 0.03317510223388672, 0.03314438247680664, 0.03305766296386719, 0.033010944366455075, 0.032957183837890626, 0.03323052978515625, 0.033102142333984376, 0.03311737442016602, 0.03311062240600586, 0.03299932861328125, 0.03293001556396485, 0.032976993560791014, 0.032817150115966795, 0.03280857467651367, 0.03290464019775391, 0.032892894744873044, 0.033045726776123045, 0.03306060791015625, 0.03299264144897461, 0.03311203384399414, 0.03431536102294922, 0.03331369781494141, 0.03344179153442383, 0.03304963302612305, 0.03301900863647461, 0.033193824768066406, 0.03301513671875, 0.03322284698486328, 0.03300400161743164, 0.033089534759521484, 0.03304857635498047, 0.032927745819091796, 0.032915393829345704, 0.032968318939208985, 0.03302441787719727, 0.03298307037353516, 0.03293731307983398, 0.03303900909423828, 0.03303833770751953, 0.0332492790222168, 0.032935935974121096, 0.032992801666259765, 0.032850399017333984, 0.03321446228027344, 0.033164833068847654, 0.03315305709838867, 0.03336236953735352, 0.034326366424560543, 0.0330610237121582, 0.03288051223754883, 0.033915008544921875, 0.03325132751464844, 0.03351907348632813, 0.033677921295166016, 0.03318368148803711, 0.03303353500366211, 0.03327862548828125, 0.033285247802734376, 0.03328911972045898, 0.03332710266113281, 0.03325747299194336, 0.033124351501464845, 0.033255424499511715, 0.032925952911376954, 0.032967521667480466, 0.03322713470458984, 0.03311056137084961, 0.03294124984741211, 0.033033023834228514, 0.032778240203857424, 0.03295654296875, 0.03304947280883789, 0.03304550552368164, 0.0330068473815918, 0.032970848083496096, 0.03334352111816406, 0.0333625602722168, 0.033593601226806644, 0.03318963241577148, 0.0328724479675293, 0.03287846374511719, 0.03292172622680664, 0.03304857635498047]",tokens/s,30.103985021550216,,, 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = 
worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 200296 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3575.058432,4498.259968,0.0,4112.515072,3976.487424,s,1,9.745052734375,9.745052734375,0.0,9.745052734375,9.745052734375,9.745052734375,9.745052734375,[9.745052734375],,kWh,7.586290512500302e-05,8.361036758299814e-06,2.557585379400762e-05,0.00010979979567731045,,MB,1550.41792,4697.489408,0.0,4282.384384,4102.201856,s,10,2.9930591430664064,0.2993059143066406,0.0013898284941426219,0.29978979492187496,0.30086632080078124,0.3008952423095703,0.3009183795166016,"[0.29825421142578123, 0.3009241638183594, 0.29758514404296876, 0.3008598937988281, 0.29713327026367187, 0.30024285888671876, 0.30023895263671874, 0.29787310791015625, 0.3006069030761719, 0.29934063720703125]",tokens/s,855.3121998709539,kWh,8.775139946691764e-06,9.677319727831516e-07,5.846499775235613e-06,1.558937169471053e-05,tokens/kWh,16421444.366924727,MB,1564.495872,4705.878016,0.0,4290.772992,4102.204416,s,10,18.120540893554686,1.8120540893554686,0.003692762876198821,1.8126404418945312,1.8159767333984376,1.8171733642578125,1.8181306689453125,"[1.8183699951171874, 1.8071279296875, 1.81429638671875, 1.8104384765625, 1.8056849365234375, 1.8097779541015624, 1.813513671875, 1.813853515625, 1.8157108154296875, 1.8117672119140624]",tokens/s,34.767174098212784,kWh,5.287697208997384e-05,5.832202051825127e-06,3.4827977208762786e-05,9.353715135056173e-05,tokens/kWh,673529.1709268165,,s,630,18.117982656478876,0.028758702629331556,0.00044623339979754103,0.028659632682800293,0.029115676689147947,0.029361843013763426,0.030644435138702394,"[0.029583520889282226, 0.028624927520751953, 0.028341983795166014, 0.02886684799194336, 0.02883718490600586, 0.032062145233154295, 0.028902559280395507, 0.02898748779296875, 0.028924671173095703, 0.028796031951904298, 0.029268415451049804, 0.029086143493652343, 0.02853068733215332, 0.028653696060180665, 0.028480415344238282, 0.028734432220458985, 0.0286529598236084, 0.028598495483398437, 0.028461536407470702, 0.028452768325805664, 0.028846111297607422, 0.028698656082153322, 0.02863711929321289, 0.028757183074951172, 0.029165567398071288, 0.02867078399658203, 0.028618751525878908, 0.028717056274414062, 0.02858598327636719, 0.02851411247253418, 0.028549503326416016, 0.028705759048461912, 0.02876076889038086, 0.02906096076965332, 0.028972511291503907, 0.029327808380126955, 0.028530271530151367, 0.028714975357055663, 0.028610624313354493, 0.02860214424133301, 0.028489856719970702, 0.028443519592285155, 0.02860985565185547, 0.028783296585083006, 0.029388799667358398, 0.029270015716552734, 0.02923638343811035, 0.029053184509277345, 0.028901216506958007, 0.02854313659667969, 0.028856576919555663, 0.02856380844116211, 0.028620607376098634, 0.028477983474731447, 0.028337823867797853, 0.02861497688293457, 0.02857542419433594, 0.028796703338623046, 
0.03062575912475586, 0.03034956741333008, 0.028633087158203126, 0.028595647811889648, 0.028469823837280275, 0.029511680603027345, 0.028694528579711914, 0.028645376205444335, 0.029337600708007814, 0.028594175338745118, 0.028613887786865234, 0.028717824935913086, 0.02893619155883789, 0.028432384490966797, 0.02877235221862793, 0.02861622428894043, 0.028696895599365235, 0.028397056579589845, 0.028619359970092774, 0.028782655715942383, 0.028456064224243165, 0.02860531234741211, 0.028313087463378905, 0.028361215591430664, 0.028477216720581056, 0.0286844482421875, 0.028227519989013673, 0.02850624084472656, 0.02846335983276367, 0.028482719421386717, 0.02848419189453125, 0.028471296310424804, 0.028552608489990236, 0.028805728912353515, 0.028987680435180664, 0.029257440567016603, 0.02893814468383789, 0.02870457649230957, 0.028573984146118163, 0.02854297637939453, 0.028592287063598634, 0.028491615295410156, 0.02865683174133301, 0.028817440032958986, 0.02878544044494629, 0.02913484764099121, 0.028999231338500978, 0.02895280075073242, 0.02873776054382324, 0.02862233543395996, 0.028624927520751953, 0.0286494083404541, 0.028684736251831055, 0.02881955146789551, 0.02857801628112793, 0.028567327499389648, 0.028622848510742187, 0.02848723220825195, 0.028463327407836914, 0.02839097595214844, 0.028582592010498047, 0.028483552932739256, 0.029374399185180665, 0.029804607391357422, 0.02869766426086426, 0.028343231201171874, 0.028246240615844728, 0.028406623840332032, 0.029418176651000976, 0.029071264266967774, 0.02888876724243164, 0.029046112060546875, 0.028904415130615233, 0.028659616470336914, 0.02858777618408203, 0.02846348762512207, 0.02864124870300293, 0.028637184143066406, 0.028503328323364257, 0.028354751586914063, 0.029036991119384764, 0.02835446357727051, 0.02829689598083496, 0.02846566390991211, 0.02856275177001953, 0.028998079299926757, 0.028955039978027345, 0.028731231689453126, 0.02852454376220703, 0.028366847991943358, 0.02838479995727539, 0.02865814399719238, 0.028766016006469726, 0.028620704650878907, 0.028495616912841797, 0.028864639282226563, 0.0290710391998291, 0.028732128143310547, 0.029032447814941405, 0.028882944107055664, 0.028646495819091795, 0.02845136070251465, 0.028567935943603514, 0.02836070442199707, 0.02911052894592285, 0.030113023757934572, 0.02903049659729004, 0.02878483200073242, 0.028825824737548827, 0.02880512046813965, 0.028782175064086913, 0.02858025550842285, 0.028700672149658202, 0.02873360061645508, 0.028584064483642577, 0.02861427116394043, 0.028630271911621093, 0.02857651138305664, 0.028811264038085937, 0.028622751235961915, 0.02862099266052246, 0.028550559997558594, 0.02863555145263672, 0.03234201431274414, 0.02924972724914551, 0.028479616165161134, 0.028493120193481446, 0.028616672515869142, 0.02854153633117676, 0.028736671447753905, 0.02946339225769043, 0.02953625679016113, 0.028639232635498047, 0.028692480087280273, 0.028620384216308595, 0.028602783203125, 0.028553216934204102, 0.028478656768798828, 0.028562240600585938, 0.028480575561523436, 0.028340864181518554, 0.028950847625732423, 0.028872415542602538, 0.028473440170288085, 0.02900979232788086, 0.02920479965209961, 0.028649471282958985, 0.02896054458618164, 0.02881350326538086, 0.028766239166259765, 0.02836479949951172, 0.02856547164916992, 0.028407392501831056, 0.02861510467529297, 0.02875801658630371, 0.02876192092895508, 0.028584127426147462, 0.028506111145019532, 0.028501440048217773, 0.028614656448364258, 0.028631359100341796, 0.02859004783630371, 0.028771968841552736, 0.029084320068359374, 
0.02930860710144043, 0.02914336013793945, 0.028734975814819336, 0.028658016204833985, 0.02858812713623047, 0.028774463653564453, 0.03289868927001953, 0.028817792892456055, 0.02895052719116211, 0.02852249526977539, 0.02853887939453125, 0.02842748832702637, 0.028469472885131835, 0.02838380813598633, 0.028434431076049805, 0.028260351181030274, 0.02817638397216797, 0.02847942352294922, 0.02905446434020996, 0.02892643165588379, 0.028639328002929686, 0.02858950424194336, 0.028711488723754883, 0.028628448486328124, 0.028518335342407226, 0.02866441535949707, 0.0285565128326416, 0.02841865539550781, 0.028356096267700196, 0.028590784072875977, 0.029831520080566408, 0.02888697624206543, 0.02873686408996582, 0.029074079513549806, 0.028665855407714845, 0.028420095443725587, 0.02875916862487793, 0.02838319969177246, 0.028613056182861328, 0.028565984725952148, 0.02870681571960449, 0.02857164764404297, 0.028653568267822265, 0.028546367645263672, 0.02861350440979004, 0.02881475257873535, 0.028502431869506836, 0.028489152908325197, 0.028778976440429687, 0.028643072128295897, 0.02872764778137207, 0.02865974426269531, 0.028556480407714843, 0.0285949764251709, 0.028473344802856446, 0.02833612823486328, 0.028208992004394532, 0.028165536880493162, 0.028609407424926757, 0.0291408634185791, 0.028819679260253906, 0.02988377571105957, 0.028412319183349608, 0.028250112533569335, 0.029402912139892576, 0.02888947105407715, 0.028708160400390623, 0.028768159866333007, 0.028634815216064452, 0.028392383575439453, 0.02865964889526367, 0.028983360290527345, 0.028684288024902343, 0.028540096282958984, 0.028318527221679688, 0.028753280639648438, 0.028723039627075196, 0.02887731170654297, 0.028645631790161132, 0.028399648666381835, 0.028672000885009766, 0.02876006317138672, 0.028737535476684572, 0.02860851287841797, 0.028471296310424804, 0.0285614070892334, 0.028590080261230468, 0.02835481643676758, 0.028376287460327148, 0.028351007461547853, 0.02833020782470703, 0.02796931266784668, 0.02916774368286133, 0.02910867118835449, 0.028562816619873047, 0.028397823333740236, 0.02860006332397461, 0.02845350456237793, 0.028493824005126952, 0.028495712280273436, 0.028397535324096678, 0.028665632247924806, 0.028721567153930663, 0.028624767303466796, 0.02852012825012207, 0.028438976287841797, 0.028846368789672852, 0.02880892753601074, 0.028611679077148438, 0.028790975570678713, 0.028815391540527344, 0.02882371139526367, 0.02856368064880371, 0.0288787841796875, 0.028546688079833984, 0.02864614486694336, 0.028589216232299805, 0.028769119262695313, 0.029040159225463866, 0.02984979248046875, 0.028962400436401366, 0.028651264190673827, 0.02872640037536621, 0.028716863632202147, 0.02859324836730957, 0.028467872619628905, 0.02847974395751953, 0.028469247817993162, 0.02864899253845215, 0.028883392333984376, 0.028868608474731446, 0.02881974411010742, 0.03024051284790039, 0.029637823104858397, 0.02912745666503906, 0.028782623291015625, 0.028553407669067384, 0.028247072219848634, 0.028447519302368163, 0.028312639236450197, 0.028635744094848634, 0.028755584716796876, 0.02867296028137207, 0.02873721694946289, 0.02861680030822754, 0.02856755256652832, 0.02882784080505371, 0.02890345573425293, 0.028717952728271483, 0.028543647766113282, 0.02841747283935547, 0.02890559959411621, 0.029180608749389648, 0.028358655929565428, 0.028278783798217775, 0.028698015213012695, 0.029327360153198243, 0.028939903259277342, 0.028643135070800782, 0.028561216354370117, 0.02875014305114746, 0.028776607513427734, 0.028549407958984373, 0.028460479736328124, 
0.028408384323120116, 0.02853273582458496, 0.02855244827270508, 0.028508928298950194, 0.028765600204467775, 0.02871356773376465, 0.02862214469909668, 0.028684480667114258, 0.028843936920166017, 0.02884668731689453, 0.028536479949951173, 0.028765920639038087, 0.02885081672668457, 0.029197568893432616, 0.029143808364868164, 0.02917731285095215, 0.02890108871459961, 0.028957311630249023, 0.028850624084472656, 0.028806463241577148, 0.028883392333984376, 0.028794815063476562, 0.02908361625671387, 0.029067359924316406, 0.028809215545654295, 0.029014015197753908, 0.028987552642822264, 0.02895017623901367, 0.02877663993835449, 0.02851430320739746, 0.030652063369750977, 0.030237152099609376, 0.028794496536254884, 0.028649503707885743, 0.028577760696411134, 0.028329280853271483, 0.028375999450683594, 0.028434560775756835, 0.0285483512878418, 0.028524415969848633, 0.028885759353637696, 0.028620800018310546, 0.02882467269897461, 0.028636064529418945, 0.028497919082641602, 0.028439872741699217, 0.0285533447265625, 0.02875040054321289, 0.028497280120849608, 0.028742271423339842, 0.02873923110961914, 0.028737472534179687, 0.02861097526550293, 0.02842582321166992, 0.02862031936645508, 0.029519071578979494, 0.0289466552734375, 0.02875347137451172, 0.02880121612548828, 0.0288919677734375, 0.02889027214050293, 0.02871561622619629, 0.028806720733642578, 0.028641984939575194, 0.028983295440673826, 0.0290119686126709, 0.028884992599487305, 0.028799999237060548, 0.0285665283203125, 0.028556800842285155, 0.028314111709594726, 0.028342271804809572, 0.028810623168945313, 0.029221504211425782, 0.02879283142089844, 0.028999679565429686, 0.028532032012939454, 0.028731744766235353, 0.028571552276611328, 0.02880352020263672, 0.028653343200683593, 0.028709087371826172, 0.02876416015625, 0.02871500778198242, 0.028755104064941406, 0.028695392608642577, 0.028716928482055665, 0.028598400115966798, 0.028440576553344726, 0.028614656448364258, 0.028718687057495116, 0.028872447967529295, 0.02863804817199707, 0.02881926345825195, 0.029220863342285155, 0.028607616424560545, 0.02867238426208496, 0.028340736389160157, 0.028254207611083985, 0.02819071960449219, 0.028676095962524413, 0.029914527893066405, 0.030682720184326173, 0.028818431854248046, 0.028651519775390624, 0.028948480606079102, 0.028945695877075194, 0.02901420783996582, 0.02877289581298828, 0.029163520812988283, 0.028809215545654295, 0.028643327713012694, 0.028454912185668944, 0.028529823303222655, 0.028817920684814452, 0.028581823348999023, 0.0284553279876709, 0.028830911636352537, 0.02934649658203125, 0.028491775512695314, 0.02838118362426758, 0.028644704818725587, 0.02854710388183594, 0.02847782325744629, 0.02863155174255371, 0.028373023986816407, 0.028909151077270507, 0.02927129554748535, 0.028958656311035155, 0.029115327835083006, 0.028944704055786134, 0.029191104888916016, 0.029057344436645507, 0.028502464294433594, 0.028652544021606444, 0.029984800338745118, 0.028691007614135743, 0.028637311935424806, 0.028731679916381835, 0.03190169525146484, 0.029417472839355467, 0.028657663345336915, 0.028655616760253907, 0.028370943069458008, 0.02858755111694336, 0.028203487396240234, 0.02814361572265625, 0.027963392257690428, 0.028672000885009766, 0.02892799949645996, 0.028659008026123048, 0.02869113540649414, 0.028747039794921873, 0.028822240829467775, 0.02858393669128418, 0.028684288024902343, 0.028594112396240233, 0.028634624481201174, 0.02858041572570801, 0.028482719421386717, 0.028506975173950195, 0.02859187126159668, 0.028489984512329102, 0.028854272842407228, 
0.029149183273315428, 0.028835840225219726, 0.0286693115234375, 0.028744319915771484, 0.02907472038269043, 0.02891366386413574, 0.028732128143310547, 0.029950111389160157, 0.028907039642333984, 0.028876768112182618, 0.028702144622802735, 0.028707040786743163, 0.028833663940429688, 0.0291474552154541, 0.028928512573242186, 0.028770303726196288, 0.028536096572875976, 0.029679616928100585, 0.028678144454956055, 0.028636224746704103, 0.028684640884399416, 0.028715871810913087, 0.028866304397583007, 0.028728992462158202, 0.028982656478881836, 0.029328351974487306, 0.029265920639038087, 0.029095935821533202, 0.0286691837310791, 0.02899344062805176, 0.02856438446044922, 0.029517280578613282, 0.02852729606628418, 0.02851817512512207, 0.028495296478271485, 0.02866227149963379, 0.028473407745361327, 0.02849932861328125, 0.028310144424438476, 0.028876800537109375, 0.0286167049407959, 0.028637088775634766, 0.028550592422485352, 0.0288057918548584, 0.028649152755737303, 0.02842268753051758, 0.0285383358001709, 0.0287521915435791, 0.028588031768798827, 0.028413536071777344, 0.028608768463134766, 0.028733055114746095, 0.028547935485839844, 0.028669631958007813, 0.028579839706420897, 0.0285196475982666, 0.028582687377929687, 0.028575328826904296, 0.028559904098510742, 0.028523935317993163, 0.028668384552001953, 0.02879897689819336, 0.02909388732910156, 0.028809215545654295, 0.028903423309326173, 0.028778495788574218, 0.028620800018310546, 0.028612415313720704, 0.028678335189819337, 0.02859212875366211, 0.028593183517456055, 0.02863203239440918, 0.0285347843170166, 0.028535936355590822, 0.02875071907043457, 0.02866921615600586, 0.029118816375732423, 0.030863744735717773, 0.02872047996520996, 0.028877536773681642]",tokens/s,34.77208318083448,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 197661 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 199770 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3573.858304,4498.259968,0.0,4112.515072,3975.832064,s,1,9.9098369140625,9.9098369140625,0.0,9.9098369140625,9.9098369140625,9.9098369140625,9.9098369140625,[9.9098369140625],,kWh,7.71244912666892e-05,8.4931572988024e-06,2.4564464096002436e-05,0.00011018211266149404,,MB,1475.559424,4695.392256,0.0,4280.287232,4101.546496,s,10,3.21341879272461,0.321341879272461,0.0009606308905420439,0.32158241271972654,0.322099154663086,0.3225342269897461,0.32288228485107423,"[0.31968353271484373, 0.321568359375, 0.32009951782226564, 0.32149874877929685, 0.3219676513671875, 0.32027291870117186, 0.32200247192382814, 0.32175982666015623, 0.3229692993164063, 0.32159646606445313]",tokens/s,796.6593105747708,kWh,9.728334770972602e-06,1.0728555819278639e-06,6.464597764266796e-06,1.7265788117167263e-05,tokens/kWh,14827009.242946798,MB,1528.05376,4705.878016,0.0,4288.67584,4101.549056,s,10,27.032142089843752,2.703214208984375,0.006222692345312746,2.7035152587890625,2.7099307373046875,2.7117899536132812,2.7132773266601564,"[2.709180908203125, 2.692736572265625, 2.7006904296875, 2.709517578125, 2.70197607421875, 2.698481201171875, 2.705054443359375, 2.695737548828125, 2.7051181640625, 2.713649169921875]",tokens/s,23.305589246539856,kWh,7.847960744360887e-05,8.656207248310847e-06,4.894389100693296e-05,0.00013607970569885268,tokens/kWh,462963.96421829687,,s,630,27.02953358840942,0.04290402156890384,0.0004449023108310315,0.042823087692260745,0.04330169067382812,0.04359654121398926,0.04477046756744384,"[0.04358883285522461, 0.04274169540405273, 0.04336489486694336, 0.042750431060791017, 0.043140960693359376, 0.044244705200195314, 0.043333854675292965, 0.04382726287841797, 0.04302169418334961, 0.0431209602355957, 0.04268636703491211, 0.042858367919921876, 0.04277199935913086, 0.04415094375610352, 0.0432525749206543, 0.04312268829345703, 0.04322304153442383, 0.04294655990600586, 0.043392192840576174, 0.04272854232788086, 0.042970912933349606, 0.04287276840209961, 0.04252454376220703, 0.04270844650268555, 0.04266460800170899, 0.0429035530090332, 0.04299913787841797, 0.04289014434814453, 0.043065185546875, 0.04310416030883789, 0.04300502395629883, 0.04360284805297852, 0.044262622833251955, 0.04301289749145508, 0.0433623046875, 0.042777633666992186, 0.04298441696166992, 0.043235328674316405, 0.04291993713378906, 0.04286054229736328, 0.04271308898925781, 0.042571361541748044, 0.0427872314453125, 0.04259616088867187, 0.042788928985595706, 0.042395774841308596, 0.04246323013305664, 0.042447006225585934, 0.042788928985595706, 0.04275791931152344, 0.04262902450561523, 0.04269680023193359, 0.04313849639892578, 0.043633216857910155, 0.042618881225585936, 0.04317366409301758, 0.04382332611083985, 0.04271491241455078, 0.042594528198242186, 0.042696990966796876, 
0.04266774368286133, 0.04258611297607422, 0.042665184020996096, 0.043171550750732424, 0.04271401596069336, 0.042848255157470705, 0.042484798431396485, 0.0424252815246582, 0.04276224136352539, 0.04260432052612305, 0.04259657669067383, 0.04322739028930664, 0.04354611206054688, 0.04252288055419922, 0.04255846405029297, 0.04243523025512695, 0.04241007995605469, 0.042508544921875, 0.04283967971801758, 0.042584449768066406, 0.042592254638671875, 0.042409984588623044, 0.04262092971801758, 0.04244070434570312, 0.04277411270141602, 0.042541473388671876, 0.04257177734375, 0.04280319976806641, 0.042757537841796874, 0.04301884841918945, 0.042784160614013675, 0.042822238922119144, 0.042933727264404295, 0.042739742279052736, 0.04286515045166016, 0.04343807983398437, 0.043138206481933596, 0.04290031814575195, 0.04301551818847656, 0.04297574234008789, 0.04299177551269531, 0.04280281448364258, 0.042936702728271485, 0.04272742462158203, 0.0437125129699707, 0.04264550399780274, 0.042640960693359375, 0.04256415939331055, 0.04256137466430664, 0.04251619338989258, 0.04243280029296875, 0.04258310317993164, 0.04259120178222656, 0.0425615348815918, 0.042665985107421874, 0.042821632385253904, 0.042638721466064455, 0.042753761291503906, 0.04289993667602539, 0.042474014282226566, 0.04257987213134766, 0.04249958419799805, 0.04274393463134766, 0.04248521423339844, 0.042709918975830076, 0.042543102264404296, 0.04340895843505859, 0.042619327545166015, 0.042881023406982424, 0.042485759735107424, 0.04242768096923828, 0.04281856155395508, 0.04270460891723633, 0.042625022888183595, 0.04257177734375, 0.042499168395996094, 0.0424725456237793, 0.04256748962402344, 0.04271686553955078, 0.043245887756347655, 0.04317782211303711, 0.04269004821777344, 0.04268447875976562, 0.04295683288574219, 0.04309980773925781, 0.04294543838500976, 0.04264905548095703, 0.04275228881835937, 0.042189056396484376, 0.042600448608398435, 0.04266739273071289, 0.04271987152099609, 0.04253257751464844, 0.04251881790161133, 0.04239769744873047, 0.04274995040893555, 0.04270064163208008, 0.04330912017822266, 0.0431940803527832, 0.04297356796264649, 0.04310220718383789, 0.043431934356689454, 0.042831871032714845, 0.04329808044433594, 0.043210784912109376, 0.0430118408203125, 0.042988479614257814, 0.04297548675537109, 0.045219711303710934, 0.04428278350830078, 0.043009185791015626, 0.042677726745605465, 0.04259174346923828, 0.04265865707397461, 0.04269465637207031, 0.04313907241821289, 0.04251219177246094, 0.042531009674072265, 0.042872447967529294, 0.04259468841552734, 0.04250361633300781, 0.042840641021728514, 0.04259849548339844, 0.04286979293823242, 0.042828670501708986, 0.042925918579101566, 0.042577056884765624, 0.04324784088134766, 0.04284710311889649, 0.04354076766967773, 0.04278534317016602, 0.04291926574707031, 0.04284073638916015, 0.04298118209838867, 0.04300547027587891, 0.04286860656738281, 0.04330985641479492, 0.042839935302734375, 0.0445456657409668, 0.04305561447143555, 0.043138080596923825, 0.04312368011474609, 0.04308582305908203, 0.04353638458251953, 0.04330086517333984, 0.04308582305908203, 0.04298499298095703, 0.04289961624145508, 0.04321926498413086, 0.042979328155517575, 0.04287692642211914, 0.04277231979370117, 0.042918113708496096, 0.04283955383300781, 0.04296953582763672, 0.04526620864868164, 0.04336713409423828, 0.04328857421875, 0.04286054229736328, 0.044453407287597654, 0.04301667022705078, 0.04304076766967774, 0.04291932678222656, 0.04291366577148437, 0.04288585662841797, 0.04285235214233398, 0.043361473083496097, 
0.042965824127197266, 0.042775806427001954, 0.04276294326782227, 0.042753280639648436, 0.042692703247070314, 0.042874881744384766, 0.04270931243896484, 0.042512702941894534, 0.04281148910522461, 0.042444801330566405, 0.04258736038208008, 0.04273587036132812, 0.042760734558105466, 0.04301785659790039, 0.04267046356201172, 0.043200801849365235, 0.04312854385375976, 0.04257539367675781, 0.0428180160522461, 0.04251443099975586, 0.04258816146850586, 0.04297011184692383, 0.042580032348632814, 0.04261347198486328, 0.04255286407470703, 0.04333158493041992, 0.042676223754882815, 0.042964126586914064, 0.042722145080566404, 0.043225025177001955, 0.042726879119873044, 0.04297926330566406, 0.04400799942016602, 0.042918174743652344, 0.0429299201965332, 0.04267833709716797, 0.04272332763671875, 0.04263641738891601, 0.042675262451171876, 0.04272841644287109, 0.042869598388671874, 0.04281955337524414, 0.04270393753051758, 0.04272246551513672, 0.042934078216552735, 0.04286873626708984, 0.043030529022216796, 0.04277155303955078, 0.04315052795410156, 0.04297903823852539, 0.04316159820556641, 0.042850303649902347, 0.04259635162353516, 0.04285235214233398, 0.04316774368286133, 0.04298342514038086, 0.04277196884155274, 0.043248126983642575, 0.042854209899902344, 0.04290579223632812, 0.042893310546875, 0.04273766326904297, 0.04300774383544922, 0.04281983947753906, 0.04292956924438476, 0.04298806381225586, 0.04280441665649414, 0.042662784576416014, 0.04260454559326172, 0.04296275329589844, 0.043098175048828125, 0.0429486083984375, 0.04340310287475586, 0.042740192413330075, 0.04289516830444336, 0.04287196731567383, 0.04302320098876953, 0.04294041442871094, 0.04346060943603516, 0.04304864120483398, 0.04261913681030274, 0.0424571533203125, 0.04245443344116211, 0.0424672966003418, 0.042608638763427735, 0.04279974365234375, 0.042703006744384764, 0.04260028839111328, 0.043418689727783205, 0.042726337432861326, 0.04282313537597656, 0.042508319854736326, 0.042598911285400394, 0.04282908630371094, 0.043433982849121096, 0.04276092910766602, 0.04265795135498047, 0.04236054229736328, 0.042442176818847654, 0.04255775833129883, 0.04254553604125977, 0.042657215118408205, 0.043229759216308596, 0.045378944396972654, 0.04428278350830078, 0.04290508651733398, 0.042638687133789065, 0.04273446273803711, 0.04282479858398437, 0.042766910552978515, 0.04288547134399414, 0.042798847198486326, 0.0428851203918457, 0.04264166259765625, 0.04277862548828125, 0.04285779190063477, 0.043186878204345705, 0.04303462219238281, 0.0429486083984375, 0.042856449127197264, 0.04339532852172852, 0.04292172622680664, 0.04289900970458985, 0.042742206573486326, 0.04290150451660156, 0.04268841552734375, 0.04268041610717774, 0.04273971176147461, 0.042618881225585936, 0.042805248260498044, 0.04269184112548828, 0.042367744445800784, 0.04224332809448242, 0.042395614624023435, 0.04286134338378906, 0.042881023406982424, 0.04258201599121094, 0.04272035217285156, 0.04321782302856445, 0.04269865417480469, 0.04288726425170898, 0.04316140747070313, 0.04268406295776367, 0.042563262939453124, 0.04249481582641602, 0.04241923141479492, 0.042460128784179686, 0.04262259292602539, 0.042404224395751956, 0.04256358337402344, 0.04294451141357422, 0.044144447326660154, 0.04287667083740234, 0.04285520172119141, 0.04294473648071289, 0.0425533447265625, 0.042606239318847654, 0.04271286392211914, 0.04265836715698242, 0.04263471984863281, 0.04269424057006836, 0.04265280151367187, 0.04282553482055664, 0.042627071380615236, 0.04267955017089844, 0.042600799560546875, 0.04282755279541016, 
0.042893760681152346, 0.042746047973632816, 0.04259587097167969, 0.0426512336730957, 0.042869632720947265, 0.042807296752929686, 0.043417598724365236, 0.04401871871948242, 0.042716129302978516, 0.04311449432373047, 0.042872833251953124, 0.04301798248291016, 0.04326559829711914, 0.04341215896606445, 0.0429567985534668, 0.04485734558105469, 0.0438476791381836, 0.04305744171142578, 0.04300156784057617, 0.042921184539794925, 0.04271299362182617, 0.04277088165283203, 0.04267871856689453, 0.04289446258544922, 0.042869632720947265, 0.042829822540283204, 0.04281894302368164, 0.042662464141845706, 0.042620193481445315, 0.042576671600341794, 0.04256547164916992, 0.04237871932983398, 0.04477942276000976, 0.043211105346679685, 0.0430720329284668, 0.04261785507202148, 0.043468734741210935, 0.042691486358642575, 0.0427367057800293, 0.04275014495849609, 0.04260534286499024, 0.042872737884521485, 0.042886367797851564, 0.042783679962158205, 0.04274172973632812, 0.04260588836669922, 0.04265955352783203, 0.043728225708007815, 0.04298364639282227, 0.042834976196289065, 0.042855873107910156, 0.042780960083007816, 0.04304281616210937, 0.042708992004394535, 0.04272915267944336, 0.04277401733398437, 0.04249225616455078, 0.0423408317565918, 0.042499870300292966, 0.04237865447998047, 0.04257654571533203, 0.042520286560058594, 0.04248543930053711, 0.04223667144775391, 0.042256385803222656, 0.04204748916625976, 0.042299488067626956, 0.042710399627685545, 0.04261648178100586, 0.042689056396484376, 0.042479969024658205, 0.04252671813964844, 0.04252809524536133, 0.04291446304321289, 0.042850143432617185, 0.04267843246459961, 0.0449837760925293, 0.043014400482177736, 0.04277686309814453, 0.042951904296875, 0.043123489379882814, 0.0428111686706543, 0.042657535552978514, 0.04301871871948242, 0.04261273574829102, 0.0426618881225586, 0.04253868865966797, 0.04265395355224609, 0.042686527252197265, 0.04264508819580078, 0.04255171203613281, 0.04256358337402344, 0.0426962890625, 0.04288963317871094, 0.04289247894287109, 0.0427815055847168, 0.042610080718994144, 0.04342844772338867, 0.04326377487182617, 0.043019584655761715, 0.04284630584716797, 0.043049793243408206, 0.04293657684326172, 0.042899200439453125, 0.04281958389282227, 0.04277657699584961, 0.042989662170410156, 0.04270828628540039, 0.04312124633789063, 0.04292777633666992, 0.04345625686645508, 0.04474854278564453, 0.043213729858398435, 0.04301561737060547, 0.04259196853637695, 0.043176799774169924, 0.04269670486450195, 0.04296419143676758, 0.04277651214599609, 0.04286140823364258, 0.042864639282226565, 0.04300595092773438, 0.04291104125976562, 0.042889919281005856, 0.04297641754150391, 0.04299798583984375, 0.043065536499023435, 0.042848129272460934, 0.04265132904052735, 0.042699230194091796, 0.04245276641845703, 0.042717056274414064, 0.04468156814575195, 0.04242985534667969, 0.04242752075195313, 0.04244876861572266, 0.042643455505371096, 0.04311199951171875, 0.044239105224609374, 0.04304038238525391, 0.04271673583984375, 0.042744831085205076, 0.04281766510009766, 0.04282156753540039, 0.042958782196044924, 0.04341145706176758, 0.04271491241455078, 0.04393366241455078, 0.04372825622558594, 0.042897537231445314, 0.04306201553344727, 0.042860385894775394, 0.042748287200927736, 0.04297055816650391, 0.04274620819091797, 0.042780670166015625, 0.04281695938110352, 0.042676097869873045, 0.04251504135131836, 0.04293027114868164, 0.042866622924804684, 0.042670143127441405, 0.042770591735839844, 0.043001697540283206, 0.042815486907958986, 0.04259635162353516, 0.0425984001159668, 
0.042716926574707034, 0.042624542236328125, 0.042609375, 0.04264448165893555, 0.042787841796875, 0.04268857574462891, 0.0434175033569336, 0.04285923385620117, 0.043151264190673826, 0.04348543930053711, 0.04332339096069336, 0.042952255249023436, 0.043122303009033205, 0.042965824127197266, 0.042674175262451174, 0.04265065765380859, 0.042574817657470704, 0.04241513442993164, 0.042541824340820315, 0.04260476684570313, 0.042661598205566406, 0.044483070373535154, 0.043179424285888675, 0.043002239227294924, 0.04281753540039063, 0.04278857421875, 0.042780353546142576, 0.04270550537109375, 0.04282304000854492, 0.04286937713623047, 0.042831424713134766, 0.043106014251708985, 0.04326268768310547, 0.043128833770751954, 0.04307763290405273, 0.04297049713134766, 0.043133663177490233, 0.04304659271240235, 0.04324169540405273, 0.04316188812255859, 0.04310192108154297, 0.043096065521240234, 0.043019454956054685, 0.04296550369262695, 0.04296031951904297, 0.043033470153808595, 0.04306739044189453, 0.04328857421875, 0.04283766555786133, 0.04303907012939453, 0.04302204895019531, 0.042852638244628906, 0.043014144897460936, 0.04292166519165039, 0.04290924835205078, 0.04301286315917969, 0.04371244812011719, 0.042805313110351566, 0.04246323013305664, 0.04333363342285156, 0.044176448822021486, 0.042893280029296876, 0.042948703765869144, 0.04272195053100586, 0.04294496154785156, 0.04300563049316406, 0.04265363311767578, 0.04287299346923828, 0.046895103454589845]",tokens/s,23.30783836648041,,, 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return 
self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3576.36096,5272.109056,0.0,4869.586944,4520.068608,s,1,11.1489619140625,11.1489619140625,0.0,11.1489619140625,11.1489619140625,11.1489619140625,11.1489619140625,[11.1489619140625],,kWh,0.00011711298768750566,1.2911070473423039e-05,5.1393374448011064e-05,0.00018141743260893976,,MB,1577.467904,5305.663488,0.0,4888.461312,4194.018304,s,10,1.876614776611328,0.18766147766113284,0.00016942042087940068,0.18765492248535157,0.1878684036254883,0.18788994369506837,0.18790717575073243,"[0.18786361694335937, 0.18736915588378905, 0.18791148376464845, 0.1875861053466797, 0.1877426300048828, 0.18755743408203124, 0.18772373962402344, 0.18781747436523438, 0.18745925903320312, 0.18758387756347655]",tokens/s,1364.1585006714513,kWh,5.553524844418153e-06,6.122385786944581e-07,3.7084935328299733e-06,9.874256955942583e-06,tokens/kWh,25926001.43405551,MB,1612.468224,5314.052096,0.0,4896.84992,4194.020864,s,10,20.258277709960936,2.0258277709960937,0.004889194119075558,2.0246864013671875,2.032097399902344,2.034289666748047,2.0360434802246092,"[2.0252442626953124, 2.018380859375, 2.0316102294921876, 2.0241285400390625, 2.0235960693359374, 2.021800537109375, 2.026752197265625, 2.022859619140625, 2.0274234619140623, 2.03648193359375]",tokens/s,31.098398838230498,kWh,5.876527554933486e-05,6.481748531885916e-06,3.533873214937112e-05,0.00010058575623059187,tokens/kWh,626331.2258206133,,s,630,20.25564916419983,0.03215182407015846,0.0004346808450077047,0.03205027198791504,0.032459890747070314,0.032832878494262696,0.03461348052978516,"[0.0333006706237793, 0.03242758560180664, 0.032279678344726566, 0.03203414535522461, 0.03233622360229492, 0.03253561782836914, 0.03251705551147461, 0.03247014236450195, 0.03222822570800781, 0.03205868911743164, 0.03322745513916016, 0.0328583984375, 0.03249942398071289, 0.031842239379882814, 0.031881183624267576, 0.03191116714477539, 0.03183907127380371, 0.03210649490356445, 0.03209756851196289, 0.03189599990844726, 0.032077472686767576, 0.03200678253173828, 0.032378078460693356, 0.03199260711669922, 0.032032352447509765, 0.031842720031738284, 0.03177267265319824, 0.03183126449584961, 0.03181443214416504, 0.03181766319274902, 0.03188659286499024, 0.031871999740600586, 0.03198342323303223, 0.03207555389404297, 0.03199542427062988, 0.03195155143737793, 0.03183616065979004, 0.031705215454101564, 0.031797119140625, 0.03196313667297363, 0.032215038299560544, 0.0323185920715332, 0.032318145751953124, 0.032268512725830076, 0.03220681762695313, 0.03260825729370117, 0.03288585662841797, 0.0320357437133789, 0.03211590576171875, 0.03233465576171875, 0.03222547149658203, 0.03232096099853515, 0.03221478271484375, 0.032002689361572266, 0.03195680046081543, 0.03209360122680664, 0.03183241653442383, 0.03175638389587403, 0.03189961624145508, 0.03244595336914063, 0.031935359954833986, 0.03201984024047851, 0.03198630332946777, 
0.033058815002441407, 0.03252812957763672, 0.03239347076416016, 0.03243727874755859, 0.032553951263427736, 0.03269011306762695, 0.032311359405517576, 0.032048511505126956, 0.031881568908691406, 0.03188531112670898, 0.03191772842407226, 0.03212543869018555, 0.03204927825927734, 0.03196313667297363, 0.031930496215820316, 0.03208179092407227, 0.03203891372680664, 0.032023937225341796, 0.031874975204467776, 0.03193254470825195, 0.03200214385986328, 0.0320015983581543, 0.03192723274230957, 0.03208099365234375, 0.032056095123291016, 0.03204108810424805, 0.031903743743896484, 0.03203891372680664, 0.03177471923828125, 0.03181164741516113, 0.0317291202545166, 0.031895904541015624, 0.03181075286865234, 0.031769119262695315, 0.032077728271484376, 0.03242214584350586, 0.03204492950439453, 0.032050846099853515, 0.03186556816101074, 0.03205302429199219, 0.03185004806518555, 0.03186780738830566, 0.03200998306274414, 0.032159744262695314, 0.031938560485839845, 0.03196284866333008, 0.03181596755981445, 0.03171657562255859, 0.03166425514221191, 0.034478752136230466, 0.03191158485412598, 0.03185433578491211, 0.03185503959655762, 0.0319040641784668, 0.031807424545288086, 0.03182553672790527, 0.03179916763305664, 0.03184796714782715, 0.031863679885864256, 0.03186220741271973, 0.031768672943115236, 0.03173014450073242, 0.03155884742736816, 0.03333529663085937, 0.032548095703125, 0.03221398544311523, 0.03200953674316406, 0.032076065063476565, 0.03238729476928711, 0.03197241592407227, 0.0321971206665039, 0.032471553802490234, 0.03233910369873047, 0.032166656494140626, 0.0331690559387207, 0.035132991790771485, 0.03234598541259766, 0.03243920135498047, 0.03235193634033203, 0.031999391555786134, 0.032295841217041016, 0.032069023132324216, 0.032334144592285154, 0.032270622253417966, 0.03230515289306641, 0.032004096984863284, 0.03212492752075195, 0.03222937774658203, 0.03223785781860351, 0.03210224151611328, 0.03335356903076172, 0.03230108642578125, 0.032362785339355465, 0.03247484970092773, 0.03228585433959961, 0.03229782485961914, 0.03201545715332031, 0.03202896118164063, 0.03212761688232422, 0.03201433563232422, 0.03206905746459961, 0.032217918395996095, 0.03207769775390625, 0.03196096038818359, 0.032020481109619144, 0.03225766372680664, 0.03198534393310547, 0.031898303985595705, 0.03206684875488281, 0.03250044631958008, 0.03186483192443847, 0.031899648666381834, 0.03187215995788574, 0.03236851119995117, 0.03199430465698242, 0.032137313842773435, 0.03193660736083984, 0.031865184783935546, 0.03192156791687012, 0.03168316841125488, 0.03171254348754883, 0.0315317440032959, 0.03178291130065918, 0.03292121505737305, 0.03234595108032227, 0.03202854537963867, 0.032834014892578124, 0.032304126739501955, 0.03205820846557617, 0.03181132888793945, 0.03168291282653809, 0.031911231994628905, 0.03159526443481445, 0.03308710479736328, 0.03208870315551758, 0.032010272979736326, 0.031850208282470704, 0.03188531112670898, 0.03203071975708008, 0.031808544158935546, 0.031934623718261716, 0.031798080444335936, 0.03180544090270996, 0.031763744354248044, 0.031813503265380856, 0.03173052787780762, 0.031719423294067385, 0.03192604827880859, 0.03178620719909668, 0.031923200607299806, 0.03183616065979004, 0.03199772834777832, 0.03199203109741211, 0.031942655563354495, 0.03192767906188965, 0.03196377563476562, 0.03203241729736328, 0.03202492904663086, 0.03213926315307617, 0.03199542427062988, 0.0320140151977539, 0.03213555145263672, 0.031952800750732424, 0.03214748764038086, 0.03219504165649414, 0.03252633666992188, 0.03213078308105469, 
0.03216003036499023, 0.03262486267089844, 0.03255472183227539, 0.03215753555297852, 0.03240982437133789, 0.032395263671875, 0.03200969696044922, 0.03194633674621582, 0.03230006408691406, 0.03200400161743164, 0.032008190155029294, 0.03228643035888672, 0.032618656158447265, 0.032950401306152344, 0.035160064697265625, 0.032163806915283207, 0.03207302474975586, 0.032043872833251955, 0.032018558502197265, 0.03191334342956543, 0.031942943572998046, 0.032034912109375, 0.03299123382568359, 0.03258483123779297, 0.032382110595703124, 0.032073440551757815, 0.03187711906433106, 0.032159614562988284, 0.03192639923095703, 0.031971328735351565, 0.03191193580627441, 0.03191193580627441, 0.03236214447021484, 0.03484035110473633, 0.03217830276489258, 0.032086463928222654, 0.03217641448974609, 0.03196284866333008, 0.03202835083007813, 0.032235393524169924, 0.03189795112609863, 0.03181167984008789, 0.03199795150756836, 0.03210390472412109, 0.03209884643554688, 0.03215958404541015, 0.03202787017822266, 0.03237804794311523, 0.03215513610839844, 0.03199411201477051, 0.031865087509155274, 0.03185433578491211, 0.03199577522277832, 0.035102848052978516, 0.03204307174682617, 0.031848064422607424, 0.03188668823242188, 0.032048095703125, 0.031846368789672855, 0.03191622352600098, 0.03195408058166504, 0.0318286075592041, 0.03208095932006836, 0.03178598403930664, 0.032266273498535156, 0.0319979190826416, 0.032065025329589845, 0.03190764808654785, 0.03189340782165527, 0.03228342437744141, 0.03210444641113281, 0.03175337600708008, 0.03187593650817871, 0.03180758476257324, 0.031838111877441407, 0.03203238296508789, 0.031992191314697264, 0.031905792236328126, 0.03204108810424805, 0.032032352447509765, 0.03182953643798828, 0.031822591781616213, 0.03197952079772949, 0.03176582336425781, 0.031799455642700196, 0.033051422119140625, 0.03260575866699219, 0.03227481460571289, 0.031667327880859374, 0.03149635124206543, 0.03184275245666504, 0.031607135772705075, 0.031893503189086916, 0.03200614547729492, 0.032107872009277345, 0.03207235336303711, 0.03214745712280274, 0.032103584289550784, 0.033194847106933593, 0.03189145660400391, 0.03203481674194336, 0.03220479965209961, 0.03227817535400391, 0.032061790466308596, 0.032116767883300784, 0.032058849334716796, 0.032046592712402344, 0.03199283218383789, 0.032004096984863284, 0.032069278717041017, 0.03201446533203125, 0.03199407958984375, 0.03194246482849121, 0.03198316764831543, 0.03211737442016602, 0.03205734252929687, 0.03198384094238281, 0.032173023223876954, 0.03200921630859375, 0.03206739044189453, 0.032415744781494144, 0.03206553649902344, 0.03194854354858399, 0.031932672500610354, 0.031922176361083986, 0.03221033477783203, 0.03235491180419922, 0.03219660949707031, 0.032110240936279295, 0.03220105743408203, 0.03208774566650391, 0.031916351318359376, 0.0320184326171875, 0.03201804733276367, 0.03219699096679687, 0.03203481674194336, 0.03199699211120605, 0.03205830383300781, 0.03202227020263672, 0.032217342376708986, 0.03224166488647461, 0.03207123184204102, 0.032088512420654296, 0.0320634880065918, 0.03196444892883301, 0.03218729782104492, 0.03200188827514648, 0.03183171272277832, 0.03287039947509766, 0.03251721572875976, 0.03229337692260742, 0.0319266242980957, 0.03207084655761719, 0.032271232604980465, 0.03188531112670898, 0.03177471923828125, 0.031916032791137694, 0.031868480682373045, 0.03185062408447266, 0.031912256240844726, 0.03180131149291992, 0.031843551635742186, 0.03186566352844238, 0.031942655563354495, 0.03206553649902344, 0.03204070281982422, 0.03192448043823242, 
0.032118785858154295, 0.032132320404052735, 0.03201091384887695, 0.03197913551330566, 0.032104671478271486, 0.03226563262939453, 0.032027103424072265, 0.03195136070251465, 0.03195487976074219, 0.03190592002868652, 0.03194054412841797, 0.03229481506347656, 0.03190959930419922, 0.03188096046447754, 0.03227088165283203, 0.03280060958862305, 0.032282142639160155, 0.032049697875976564, 0.032094303131103515, 0.03192393684387207, 0.031897567749023435, 0.03189894485473633, 0.031957408905029294, 0.032016639709472654, 0.03194620704650879, 0.032172447204589845, 0.032299488067626954, 0.03253452682495117, 0.03328729629516602, 0.032895870208740234, 0.032284671783447266, 0.03240345764160156, 0.03222528076171875, 0.03266355133056641, 0.03215955352783203, 0.031992000579833986, 0.03269222259521484, 0.032228832244873044, 0.032395809173583985, 0.03235763168334961, 0.03268431854248047, 0.032271839141845705, 0.032428928375244144, 0.03225408172607422, 0.03279702377319336, 0.032315742492675784, 0.032137439727783206, 0.031897375106811524, 0.03187711906433106, 0.03215488052368164, 0.031903839111328124, 0.03206329727172851, 0.03222521591186524, 0.03211971282958984, 0.03207372665405273, 0.031961023330688475, 0.03183622360229492, 0.031835775375366214, 0.03196556854248047, 0.03201020812988281, 0.03208550262451172, 0.032102272033691405, 0.03213993453979492, 0.031942655563354495, 0.031729087829589844, 0.031690975189208985, 0.03165228843688965, 0.03180944061279297, 0.03198566436767578, 0.03233587265014649, 0.03209830474853516, 0.03236454391479492, 0.032124832153320314, 0.03297071838378906, 0.03209843063354492, 0.03200307083129883, 0.03196137619018555, 0.032025089263916014, 0.0319715518951416, 0.03207987213134766, 0.03189555168151856, 0.03192326354980469, 0.03188627243041992, 0.03239116668701172, 0.03225193786621094, 0.032000160217285155, 0.03229203033447266, 0.03466851043701172, 0.03226451110839844, 0.03222972869873047, 0.03204224014282227, 0.031856672286987305, 0.031997983932495117, 0.03197817611694336, 0.031946144104003905, 0.0319654712677002, 0.03223379135131836, 0.03216566467285156, 0.03201660919189453, 0.03197894477844238, 0.031834688186645504, 0.0319807357788086, 0.03212287902832031, 0.03221996688842774, 0.03210255813598633, 0.03207766342163086, 0.03192422485351563, 0.03283148956298828, 0.032555007934570314, 0.03231068801879883, 0.0321497917175293, 0.03211078262329101, 0.03220060729980469, 0.03200636672973633, 0.03183616065979004, 0.03212646484375, 0.032068096160888675, 0.03192588806152344, 0.03206387329101563, 0.031981151580810545, 0.031967647552490236, 0.03198566436767578, 0.03186185646057129, 0.0318917121887207, 0.03202025604248047, 0.03216432189941406, 0.03199788856506348, 0.03226425552368164, 0.03233232116699219, 0.03220009613037109, 0.032018943786621096, 0.031983583450317384, 0.03187302398681641, 0.03200380706787109, 0.03186470413208008, 0.031934560775756834, 0.03204064178466797, 0.03194931221008301, 0.03206172943115235, 0.032065216064453124, 0.03211824035644531, 0.03192000007629395, 0.03196793556213379, 0.03220630264282227, 0.0324134407043457, 0.03490876770019531, 0.03216966247558594, 0.032148094177246095, 0.03206553649902344, 0.03251200103759765, 0.032107742309570315, 0.031984256744384765, 0.03193052864074707, 0.0324587516784668, 0.03518259048461914, 0.03230704116821289, 0.032286689758300784, 0.032045246124267575, 0.031993215560913085, 0.03205388641357422, 0.03197302436828613, 0.032045406341552736, 0.03196108818054199, 0.03192598342895508, 0.03187740707397461, 0.031809312820434574, 0.03201660919189453, 
0.03189910316467285, 0.03192271995544434, 0.03229446411132812, 0.03316054534912109, 0.032436897277832034, 0.03232767868041992, 0.032062591552734374, 0.03242086410522461, 0.032315521240234374, 0.03205318450927734, 0.032065345764160154, 0.032222431182861326, 0.032436256408691404, 0.03230156707763672, 0.032344032287597656, 0.03221676635742188, 0.03226889419555664, 0.03321446228027344, 0.03346636962890625, 0.032349822998046875, 0.03270083236694336, 0.03235631942749023, 0.032176128387451174, 0.03222323226928711, 0.03212287902832031, 0.03213107299804688, 0.032166015625, 0.032134719848632816, 0.03224403381347656, 0.03235635375976562, 0.03240950393676758, 0.032178272247314454, 0.03306835174560547, 0.03232329559326172, 0.032158687591552736, 0.03196220779418945, 0.03190876770019531, 0.03203481674194336, 0.03242374420166016, 0.03227417755126953, 0.03201456069946289, 0.032022144317626955, 0.032075584411621096, 0.03203776168823242, 0.031829055786132814, 0.03190169525146484, 0.03193094444274902, 0.03268998336791992, 0.032299488067626954, 0.03233497619628906, 0.03228147125244141, 0.03424431991577148, 0.03229750442504883, 0.032320289611816405, 0.03225904083251953, 0.03218841552734375, 0.032215038299560544, 0.03226323318481445, 0.03218118286132812, 0.032252094268798825, 0.03259782409667969, 0.03224576187133789, 0.032158817291259766, 0.032144287109375, 0.03216287994384766, 0.032250431060791014]",tokens/s,31.102434431648454,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3573.936128,5177.737216,0.0,4775.215104,4427.072512,s,1,10.9890322265625,10.9890322265625,0.0,10.9890322265625,10.9890322265625,10.9890322265625,10.9890322265625,[10.9890322265625],,kWh,0.0001114617925458333,1.2287399225157718e-05,4.8054205109990256e-05,0.00017180339688098128,,MB,1570.992128,5209.194496,0.0,4794.089472,4101.022208,s,10,26.3546416015625,2.63546416015625,0.0064673453587069975,2.6359176025390623,2.642948120117188,2.6435891723632814,2.6441020141601563,"[2.62072314453125, 2.632052978515625, 2.6309130859375, 2.633322998046875, 2.6349365234375, 2.638989990234375, 2.639768310546875, 2.636898681640625, 2.6428056640625, 2.644230224609375]",tokens/s,97.13658939866684,kWh,7.692297122708093e-05,8.484332649231672e-06,5.1016318590800525e-05,0.00013642362246711314,tokens/kWh,1876507.8611053042,MB,1577.713664,5219.680256,0.0,4802.47808,4101.024768,s,10,16.92435400390625,1.6924354003906248,0.0040457712272885474,1.6921831665039062,1.6970105712890626,1.698607763671875,1.699885517578125,"[1.6966556396484376, 1.7002049560546875, 1.6875208740234375, 1.6881685791015626, 1.6933681640625, 1.691078125, 1.6932882080078124, 1.688147216796875, 1.68973876953125, 
1.6961834716796875]",tokens/s,37.22446362529359,kWh,5.0168630085003444e-05,5.532521330044883e-06,3.3310359981600374e-05,8.901151139664864e-05,tokens/kWh,707773.6240120961,,s,630,16.921788980484006,0.026859982508704777,0.000494893922522437,0.026761247634887696,0.027163757133483887,0.027396520137786866,0.029010423126220704,"[0.027745920181274412, 0.02694767951965332, 0.026929439544677733, 0.026803424835205078, 0.0268767032623291, 0.02678374481201172, 0.026976255416870116, 0.02689638328552246, 0.026855424880981447, 0.026836992263793946, 0.026770944595336913, 0.026586719512939453, 0.026542943954467775, 0.026559776306152343, 0.026583295822143554, 0.028217887878417967, 0.02856550407409668, 0.02717081642150879, 0.02674483108520508, 0.02686566352844238, 0.027039743423461913, 0.027000160217285157, 0.027005599975585937, 0.026908672332763672, 0.0268155517578125, 0.026737600326538085, 0.026727615356445314, 0.026659648895263673, 0.02729884719848633, 0.026794975280761718, 0.026877952575683595, 0.026808319091796876, 0.027226112365722657, 0.026828128814697264, 0.02713052749633789, 0.027242496490478517, 0.027000768661499024, 0.02683894348144531, 0.02693062400817871, 0.02663088035583496, 0.026580991744995116, 0.02691276741027832, 0.026889856338500977, 0.026737152099609376, 0.02659110450744629, 0.02657593536376953, 0.026605535507202148, 0.026754016876220702, 0.026580255508422853, 0.02685001564025879, 0.02661702346801758, 0.027330848693847658, 0.027011615753173828, 0.027299232482910156, 0.027019872665405273, 0.02672230339050293, 0.026945247650146484, 0.02692300796508789, 0.0272644157409668, 0.02696691131591797, 0.02676121520996094, 0.026723648071289064, 0.026990623474121095, 0.02772172737121582, 0.02696937561035156, 0.02684284782409668, 0.026808671951293946, 0.026698400497436523, 0.026732736587524415, 0.026776639938354493, 0.026857568740844728, 0.026929824829101563, 0.02692095947265625, 0.027158304214477538, 0.026775615692138672, 0.026566816329956056, 0.02701683235168457, 0.026495647430419923, 0.026431039810180666, 0.026498815536499024, 0.026800159454345704, 0.026968128204345704, 0.026767072677612306, 0.02706902313232422, 0.02669526481628418, 0.0266715202331543, 0.026756383895874022, 0.026625791549682618, 0.027040735244750976, 0.026686656951904298, 0.031406911849975586, 0.026986495971679687, 0.026799808502197264, 0.02665116882324219, 0.026627872467041017, 0.026752864837646485, 0.02663382339477539, 0.026738271713256836, 0.026884384155273437, 0.026852031707763672, 0.02674278450012207, 0.02676108741760254, 0.026732511520385742, 0.02689039993286133, 0.026853376388549805, 0.026818111419677736, 0.026874303817749023, 0.02671820831298828, 0.026791391372680665, 0.027887680053710937, 0.02737609672546387, 0.026851327896118164, 0.027010271072387695, 0.02674358367919922, 0.0265482234954834, 0.026556415557861326, 0.02698806381225586, 0.02677548789978027, 0.027162368774414063, 0.026989343643188477, 0.02739414405822754, 0.026756128311157226, 0.02678668785095215, 0.027123712539672853, 0.02815385627746582, 0.02902150344848633, 0.028903039932250976, 0.026900863647460936, 0.026857471466064452, 0.026886144638061524, 0.02694758415222168, 0.026628095626831053, 0.02668083190917969, 0.02725734329223633, 0.027371007919311522, 0.0266810245513916, 0.026554655075073243, 0.026513792037963866, 0.02670198440551758, 0.026826496124267577, 0.02668057632446289, 0.02671718406677246, 0.026455808639526367, 0.02662835121154785, 0.026461215972900392, 0.026674144744873046, 0.026869760513305665, 0.026601119995117186, 0.026521663665771484, 
0.026956064224243164, 0.026802175521850585, 0.02711756706237793, 0.02674073600769043, 0.026814464569091798, 0.02670537567138672, 0.026656768798828126, 0.0266081600189209, 0.026539167404174804, 0.026585952758789062, 0.026578880310058593, 0.027471935272216797, 0.027490304946899413, 0.026957088470458985, 0.026701663970947264, 0.026825599670410157, 0.026695680618286134, 0.0266014404296875, 0.026543615341186523, 0.026573343276977537, 0.026488832473754883, 0.02684435272216797, 0.027088863372802734, 0.02683171272277832, 0.026626047134399415, 0.026599424362182617, 0.027182720184326173, 0.02647462463378906, 0.02646451187133789, 0.026488832473754883, 0.02666851234436035, 0.026724063873291015, 0.026576799392700197, 0.026579872131347656, 0.02658406448364258, 0.02653696060180664, 0.026658655166625977, 0.02720684814453125, 0.02664137649536133, 0.02672640037536621, 0.027621376037597657, 0.02691481590270996, 0.02676940727233887, 0.02670796775817871, 0.026623327255249022, 0.026614431381225587, 0.02655961608886719, 0.0265532169342041, 0.02646816062927246, 0.026482271194458007, 0.026540639877319337, 0.026470016479492188, 0.026564512252807617, 0.026589439392089843, 0.026638208389282228, 0.02645846366882324, 0.02650726318359375, 0.026550271987915038, 0.02649068832397461, 0.027359424591064455, 0.026527551651000975, 0.026476543426513673, 0.026910816192626953, 0.027207679748535156, 0.02680841636657715, 0.026828800201416016, 0.026908319473266603, 0.027434879302978516, 0.02672892761230469, 0.02676940727233887, 0.026867136001586914, 0.02706489562988281, 0.02688377571105957, 0.026796031951904296, 0.02714041519165039, 0.027059455871582032, 0.027120384216308593, 0.026849279403686522, 0.02692265510559082, 0.026739231109619142, 0.026842720031738283, 0.026546335220336913, 0.026878015518188476, 0.027084320068359376, 0.0266628475189209, 0.026604000091552733, 0.026937152862548826, 0.026597375869750976, 0.026724607467651366, 0.026764480590820313, 0.026659488677978516, 0.026733760833740235, 0.027026048660278322, 0.027033279418945313, 0.026849695205688476, 0.02673040008544922, 0.026847360610961914, 0.026951904296875, 0.026672992706298828, 0.026704032897949218, 0.02707046318054199, 0.026774879455566405, 0.026682016372680663, 0.027879392623901367, 0.027123712539672853, 0.02692255973815918, 0.026971839904785157, 0.027079423904418944, 0.026959871292114256, 0.02698419189453125, 0.026955583572387695, 0.026999231338500976, 0.027150335311889647, 0.026990367889404298, 0.027439136505126954, 0.027060415267944334, 0.02686566352844238, 0.026705919265747072, 0.026695072174072267, 0.026683551788330078, 0.026734304428100587, 0.02649567985534668, 0.02657619285583496, 0.026538015365600586, 0.026480960845947265, 0.026724735260009767, 0.026770975112915037, 0.027019744873046876, 0.02700492858886719, 0.026643903732299804, 0.026872383117675782, 0.027069887161254882, 0.030648895263671875, 0.027418624877929686, 0.026801216125488282, 0.02666182327270508, 0.026599424362182617, 0.026918912887573244, 0.026681631088256837, 0.02664214324951172, 0.026498495101928712, 0.026550848007202147, 0.02652569580078125, 0.026425439834594725, 0.026412128448486328, 0.02642188835144043, 0.026474687576293947, 0.026642208099365235, 0.02674505615234375, 0.026926368713378907, 0.026964704513549806, 0.027062271118164064, 0.026933248519897462, 0.026879936218261718, 0.02701932716369629, 0.027000415802001954, 0.026916671752929687, 0.02681507110595703, 0.026677248001098632, 0.02657481575012207, 0.02666703987121582, 0.02652364730834961, 0.02673174476623535, 0.026622751235961913, 
0.026673152923583986, 0.026661951065063475, 0.02799955177307129, 0.02707935905456543, 0.026883743286132813, 0.02684553527832031, 0.02667043113708496, 0.026644287109375, 0.026723167419433595, 0.026666719436645506, 0.02684956741333008, 0.0267509765625, 0.026800031661987304, 0.026631839752197267, 0.026785951614379883, 0.026689247131347658, 0.02684998321533203, 0.02666796875, 0.02712611198425293, 0.02691337585449219, 0.026888191223144533, 0.0268404483795166, 0.026780288696289064, 0.026873727798461915, 0.02725273513793945, 0.03009916877746582, 0.026712480545043944, 0.026515008926391602, 0.026661312103271485, 0.026582208633422852, 0.026549055099487306, 0.026570751190185548, 0.026621952056884765, 0.026693632125854492, 0.02663852882385254, 0.026767168045043945, 0.026886144638061524, 0.02736332893371582, 0.02692655944824219, 0.027437599182128906, 0.02671001625061035, 0.026529632568359374, 0.027138208389282225, 0.026930400848388672, 0.02668345642089844, 0.026706655502319335, 0.026566368103027344, 0.026751264572143555, 0.02669388771057129, 0.026998079299926758, 0.026628543853759765, 0.02679193687438965, 0.02671820831298828, 0.026630048751831056, 0.026605535507202148, 0.026814592361450194, 0.026517087936401368, 0.026691072463989256, 0.026974624633789062, 0.02660745620727539, 0.026526016235351564, 0.026543840408325196, 0.02658793640136719, 0.02661564826965332, 0.026609344482421873, 0.030491104125976564, 0.02854297637939453, 0.02716876792907715, 0.027019264221191407, 0.026789888381958008, 0.026574304580688476, 0.02673308753967285, 0.026644479751586913, 0.026781087875366212, 0.026727008819580077, 0.02684035110473633, 0.026740831375122072, 0.026540672302246094, 0.026703264236450194, 0.02655878448486328, 0.026470687866210936, 0.026449951171875, 0.026728063583374023, 0.026610015869140625, 0.02666486358642578, 0.027116832733154298, 0.02668832015991211, 0.026682880401611327, 0.02666486358642578, 0.026778207778930665, 0.02673459243774414, 0.026613759994506835, 0.02659708786010742, 0.0265743350982666, 0.02646444892883301, 0.026529727935791017, 0.02644028854370117, 0.02656671905517578, 0.026416160583496093, 0.026690528869628905, 0.027198783874511717, 0.027236608505249022, 0.026917312622070314, 0.026841087341308592, 0.026791744232177735, 0.026808191299438476, 0.02717846488952637, 0.026761632919311523, 0.02687151908874512, 0.026821344375610352, 0.026863616943359377, 0.026780832290649415, 0.026585952758789062, 0.02712326431274414, 0.026826623916625977, 0.026662784576416014, 0.026995391845703126, 0.02670297622680664, 0.026644767761230467, 0.02669219207763672, 0.026750240325927734, 0.026784479141235353, 0.026836767196655273, 0.026894559860229494, 0.027559839248657226, 0.02727497673034668, 0.02710976028442383, 0.027154399871826173, 0.027869184494018553, 0.02692086410522461, 0.027002304077148438, 0.026888864517211914, 0.028983295440673826, 0.026856447219848634, 0.026778047561645507, 0.02662201690673828, 0.027322879791259767, 0.02675052833557129, 0.026736736297607422, 0.02739846420288086, 0.026854591369628908, 0.026712352752685548, 0.026739295959472657, 0.027312095642089845, 0.026658815383911134, 0.02647350311279297, 0.02656559944152832, 0.026566656112670898, 0.026533504486083985, 0.026546815872192382, 0.02646950340270996, 0.026890527725219725, 0.026626399993896484, 0.02679193687438965, 0.026849023818969725, 0.026730752944946288, 0.026669055938720702, 0.026646528244018555, 0.02668345642089844, 0.0268287353515625, 0.026577056884765624, 0.026566495895385744, 0.026488832473754883, 0.026779199600219728, 0.02639302444458008, 
0.026537151336669923, 0.02660639953613281, 0.026681343078613282, 0.02657792091369629, 0.02734182357788086, 0.027142143249511717, 0.026804224014282226, 0.02679193687438965, 0.02677555274963379, 0.02680361557006836, 0.027207456588745117, 0.027021823883056642, 0.026642751693725587, 0.026650623321533205, 0.026580799102783204, 0.026679168701171874, 0.026493247985839845, 0.026517152786254883, 0.026493087768554687, 0.02667942428588867, 0.02656483268737793, 0.02658086395263672, 0.026563808441162108, 0.026683712005615236, 0.026687936782836916, 0.026709375381469728, 0.027806528091430666, 0.02680012893676758, 0.026888191223144533, 0.026692895889282225, 0.026802528381347657, 0.026659135818481446, 0.026697088241577148, 0.02659542465209961, 0.026731103897094727, 0.02675712013244629, 0.026737760543823243, 0.02707734489440918, 0.02673683166503906, 0.026632192611694337, 0.026854816436767577, 0.02661199951171875, 0.02938003158569336, 0.027113407135009766, 0.026891199111938477, 0.026859519958496093, 0.026761280059814454, 0.026643552780151368, 0.026591424942016603, 0.026622207641601562, 0.02656889533996582, 0.026656991958618165, 0.02656671905517578, 0.026566591262817383, 0.02658406448364258, 0.026588159561157225, 0.026639520645141603, 0.026522464752197265, 0.026589183807373046, 0.026662336349487305, 0.027144704818725586, 0.027203264236450194, 0.026764928817749025, 0.027211967468261718, 0.026622751235961913, 0.026636064529418944, 0.026619903564453123, 0.026635648727416993, 0.026559104919433595, 0.026484735488891603, 0.026367712020874023, 0.026515743255615235, 0.02651545524597168, 0.026650239944458008, 0.026731903076171876, 0.026848255157470705, 0.02732342338562012, 0.027136991500854492, 0.027356191635131834, 0.026844127655029297, 0.026843135833740234, 0.026826751708984374, 0.027561407089233398, 0.026659391403198243, 0.026808319091796876, 0.026733600616455078, 0.026729631423950195, 0.026631776809692382, 0.026638303756713867, 0.027687744140625, 0.02674892807006836, 0.0269434871673584, 0.026896032333374023, 0.02674086380004883, 0.026846752166748047, 0.026854080200195314, 0.026728448867797853, 0.026834400177001953, 0.027100736618041993, 0.02679622459411621, 0.026657215118408205, 0.02653219223022461, 0.026572799682617186, 0.02700649642944336, 0.026657247543334962, 0.026664831161499022, 0.026598655700683593, 0.026828800201416016, 0.026860416412353514, 0.026625055313110352, 0.026956768035888673, 0.027502592086791993, 0.027058176040649414, 0.026927104949951174, 0.026810335159301757, 0.026802207946777342, 0.02682784080505371, 0.02660051155090332, 0.026693504333496095, 0.026607616424560547, 0.026630144119262695, 0.026619903564453123, 0.026924160003662108, 0.02665484809875488, 0.026645248413085937, 0.026996543884277344, 0.026875648498535156, 0.02690297508239746, 0.02677555274963379, 0.02676121520996094, 0.026762496948242186, 0.026854143142700196, 0.026839040756225587, 0.026844320297241212, 0.026902368545532226, 0.02697270393371582, 0.026910751342773438, 0.026844831466674806, 0.02668729591369629, 0.026745824813842772, 0.02667728042602539, 0.02679542350769043, 0.02716320037841797, 0.03195084762573242, 0.02718720054626465, 0.02709884834289551, 0.026892127990722655, 0.02677395248413086, 0.027023359298706053, 0.026765024185180664, 0.026779232025146486, 0.02670867156982422]",tokens/s,37.23010615051295,,, 
4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 201837 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 200835 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3573.485568,4498.259968,0.0,4112.515072,3975.832064,s,1,9.7968662109375,9.7968662109375,0.0,9.7968662109375,9.7968662109375,9.7968662109375,9.7968662109375,[9.7968662109375],,kWh,7.801614000831402e-05,8.5985777241218e-06,2.5396131428004343e-05,0.00011201084916044016,,MB,1544.84736,4695.392256,0.0,4280.287232,4101.546496,s,10,3.242847106933594,0.3242847106933594,0.004208931677795974,0.32315966796875,0.32871683654785155,0.33096028594970706,0.3327550454711914,"[0.32159698486328125, 0.32461395263671877, 0.32170538330078124, 0.3212510986328125, 0.319335693359375, 0.3196559448242188, 0.32563458251953126, 0.3332037353515625, 0.32821829223632815, 0.32763143920898435]",tokens/s,789.429755885319,kWh,9.649329722175826e-06,1.06414404553702e-06,6.422523059161051e-06,1.7135996826873897e-05,tokens/kWh,14939311.823314676,MB,1556.62336,4703.780864,0.0,4288.67584,4101.549056,s,10,24.0122275390625,2.40122275390625,0.010539667321248914,2.4013861083984374,2.409760595703125,2.4170054199218747,2.4228012792968747,"[2.40196923828125, 2.400802978515625, 2.38269775390625, 2.393234130859375, 2.424250244140625, 2.408150634765625, 2.405986572265625, 2.394309326171875, 2.40556396484375, 
2.3952626953125]",tokens/s,26.236632939411034,kWh,7.02983374303242e-05,7.753943810171252e-06,4.586507432643914e-05,0.0001239173555669346,tokens/kWh,508403.36054436077,,s,630,24.009618343353274,0.03811050530690995,0.0006068792739485952,0.03798510360717773,0.03853266410827637,0.03892181015014649,0.04132714172363284,"[0.03893862533569336, 0.03813347244262695, 0.03805417633056641, 0.03840409469604492, 0.038215679168701173, 0.038250495910644534, 0.03811942291259766, 0.03832771301269531, 0.03962326431274414, 0.038108959197998046, 0.038217952728271484, 0.037787647247314454, 0.037966976165771486, 0.0379947509765625, 0.03796015930175781, 0.037988510131835934, 0.037967872619628903, 0.03836928176879883, 0.038010879516601564, 0.03775606536865234, 0.03964166259765625, 0.03875164794921875, 0.03799292755126953, 0.038006977081298826, 0.03795990371704101, 0.03808051300048828, 0.037776607513427735, 0.03810927963256836, 0.037685535430908204, 0.038105503082275394, 0.03784838485717774, 0.03808121490478516, 0.03820748901367187, 0.038201343536376955, 0.0381789436340332, 0.038072479248046874, 0.03844054412841797, 0.037983776092529294, 0.03784150314331055, 0.03802521514892578, 0.03808448028564453, 0.03789846420288086, 0.03809475326538086, 0.03834019088745117, 0.03810755157470703, 0.0380313606262207, 0.03789583969116211, 0.03791484832763672, 0.037814014434814455, 0.03800086212158203, 0.038197406768798826, 0.0378081283569336, 0.03764019012451172, 0.0379266242980957, 0.03755651092529297, 0.0376292495727539, 0.03789651107788086, 0.037663105010986325, 0.03786131286621094, 0.038568000793457034, 0.03936460876464844, 0.038234111785888675, 0.03799388885498047, 0.038653953552246094, 0.037863296508789064, 0.0384664306640625, 0.03870105743408203, 0.037787647247314454, 0.03779174423217774, 0.03777536010742188, 0.037502334594726565, 0.037474945068359376, 0.03751500701904297, 0.03997865676879883, 0.03883478546142578, 0.03786931228637695, 0.03775904083251953, 0.038082752227783206, 0.037959678649902344, 0.03769734573364258, 0.03759088134765625, 0.03789654541015625, 0.03763727951049805, 0.03769420623779297, 0.037809375762939454, 0.037766014099121094, 0.03764633560180664, 0.03823616027832031, 0.038235553741455076, 0.038134368896484375, 0.03791462326049805, 0.037926910400390625, 0.03784089660644531, 0.03772415924072266, 0.03779331207275391, 0.038051616668701174, 0.03822025680541992, 0.038284767150878904, 0.0380560302734375, 0.040523551940917966, 0.038773536682128906, 0.03806617736816406, 0.03831612777709961, 0.03956755065917969, 0.03955475234985351, 0.03853433609008789, 0.038357952117919925, 0.038477760314941406, 0.03785324859619141, 0.03801599884033203, 0.03789926528930664, 0.037946399688720704, 0.0380752944946289, 0.037980224609375, 0.038035457611083984, 0.038027263641357424, 0.03773440170288086, 0.037977279663085936, 0.037854015350341795, 0.03772639846801758, 0.038000225067138675, 0.03794112014770508, 0.03776956939697266, 0.037789791107177735, 0.037769119262695314, 0.03782012939453125, 0.03868409729003906, 0.03805001449584961, 0.03777811050415039, 0.03775481414794922, 0.03782025527954101, 0.03784931182861328, 0.03779174423217774, 0.03783193588256836, 0.03777801513671875, 0.038074527740478516, 0.03784703826904297, 0.03761971282958984, 0.03764223861694336, 0.03761260986328125, 0.03798259353637695, 0.03774316787719727, 0.03775283050537109, 0.0375327033996582, 0.037901279449462894, 0.03769094467163086, 0.03765731048583985, 0.037527263641357424, 0.038354942321777344, 0.03848601531982422, 0.0378606071472168, 0.037781951904296875, 
0.03810540771484375, 0.03777536010742188, 0.03755132675170898, 0.03759088134765625, 0.03770463943481445, 0.03776208114624023, 0.03773481750488281, 0.03762847900390625, 0.037705726623535156, 0.037730304718017575, 0.03761151885986328, 0.037572608947753904, 0.03755952072143555, 0.037765823364257815, 0.037722206115722655, 0.03751935958862305, 0.03752544021606445, 0.0376300163269043, 0.037533920288085935, 0.03780995178222656, 0.037740287780761717, 0.03783059310913086, 0.03815660858154297, 0.03840121459960937, 0.038216510772705076, 0.0380530891418457, 0.03833244705200195, 0.03788793563842773, 0.03768608093261719, 0.03773440170288086, 0.03779993438720703, 0.037705726623535156, 0.03778678512573242, 0.03760009765625, 0.03796080017089844, 0.03776768112182617, 0.037849502563476564, 0.038502334594726566, 0.03803311920166016, 0.03804611206054687, 0.0381357421875, 0.03796582412719727, 0.03807436752319336, 0.03790591812133789, 0.03796214294433594, 0.038141918182373044, 0.038174240112304685, 0.03793161773681641, 0.038145057678222655, 0.03813679885864258, 0.038306880950927734, 0.03879008102416992, 0.03891404724121094, 0.03808857727050781, 0.03774639892578125, 0.03795804977416992, 0.03793033599853515, 0.03785728073120117, 0.037925537109375, 0.037805728912353516, 0.03770608139038086, 0.03787916946411133, 0.03784483337402344, 0.037779808044433594, 0.037849056243896485, 0.03804412841796875, 0.03794739151000977, 0.037873279571533205, 0.037867774963378904, 0.037748832702636716, 0.03799647903442383, 0.03837868881225586, 0.038220703125, 0.037820415496826174, 0.03783270263671875, 0.03786678314208984, 0.03809516906738281, 0.038097312927246094, 0.03791024017333984, 0.03770761489868164, 0.03790073776245117, 0.037840736389160155, 0.03758063888549805, 0.03770399856567383, 0.037588512420654294, 0.03757036972045898, 0.037751457214355466, 0.037904384613037106, 0.037967872619628903, 0.03778547286987305, 0.03768262481689453, 0.03779971313476563, 0.037939777374267576, 0.038283615112304686, 0.03827478408813476, 0.038122848510742186, 0.03788281631469727, 0.03846521759033203, 0.03789836883544922, 0.038080703735351565, 0.03944617462158203, 0.038337982177734375, 0.03818092727661133, 0.038304256439208983, 0.03801532745361328, 0.037935104370117184, 0.037994110107421875, 0.03831846237182617, 0.037797470092773434, 0.03821535873413086, 0.0381806411743164, 0.03797683334350586, 0.03796192169189453, 0.037892032623291015, 0.03821574401855469, 0.038328254699707034, 0.03851411056518555, 0.041949825286865236, 0.03884812927246094, 0.03842291259765625, 0.038574176788330077, 0.03815414428710937, 0.038144001007080076, 0.03811734390258789, 0.03896732711791992, 0.038457439422607424, 0.03843267059326172, 0.03818611145019531, 0.038017822265625, 0.03810927963256836, 0.03811942291259766, 0.03796319961547852, 0.03802537536621094, 0.038346336364746096, 0.03819980621337891, 0.03817046356201172, 0.03809532928466797, 0.03852492904663086, 0.03828035354614258, 0.03832099151611328, 0.03812515258789063, 0.03846700668334961, 0.03840918350219726, 0.03851468658447266, 0.04303462219238281, 0.03874816131591797, 0.03880550384521484, 0.03836620712280273, 0.03824947357177735, 0.03810678482055664, 0.03879145431518555, 0.03852703857421875, 0.03814924621582031, 0.03810307312011719, 0.03817558288574219, 0.038184959411621096, 0.03790643310546875, 0.037928958892822266, 0.03875376129150391, 0.041646785736083984, 0.038547294616699215, 0.037994495391845705, 0.038387264251708984, 0.04276374435424805, 0.039784320831298826, 0.03803334426879883, 0.037706432342529295, 
0.03752140808105469, 0.03782451248168945, 0.03785318374633789, 0.0376278076171875, 0.03751036834716797, 0.037950336456298826, 0.03800064086914062, 0.03800227355957031, 0.037864864349365236, 0.04092211151123047, 0.03938304138183594, 0.038193153381347655, 0.03863142395019531, 0.03828726577758789, 0.03819228744506836, 0.038069183349609376, 0.037779457092285154, 0.037943294525146484, 0.03827097702026367, 0.03786511993408203, 0.03859900665283203, 0.03786956787109375, 0.0376995849609375, 0.03834790420532227, 0.038066078186035156, 0.03770636749267578, 0.03787756729125977, 0.03776156616210938, 0.03943833541870117, 0.03831398391723633, 0.03800092697143555, 0.038376705169677734, 0.03794403076171875, 0.037871231079101564, 0.03767276763916016, 0.03773676681518555, 0.03855507278442383, 0.037814849853515624, 0.03804774475097656, 0.03828940963745117, 0.038311038970947266, 0.03827974319458008, 0.03806854248046875, 0.03930828857421875, 0.0381921272277832, 0.038032737731933594, 0.03783747100830078, 0.03778342437744141, 0.03769356918334961, 0.03805388641357422, 0.038680320739746095, 0.03791692733764648, 0.03781820678710938, 0.037851295471191405, 0.03811248016357422, 0.038292160034179686, 0.03791471862792969, 0.037804031372070314, 0.037953536987304685, 0.038795265197753906, 0.03804140853881836, 0.038149345397949216, 0.03799558258056641, 0.038078369140625, 0.03778947067260742, 0.037918750762939456, 0.03769158554077148, 0.037797889709472655, 0.03785113525390625, 0.03797721481323242, 0.03777420806884765, 0.037615135192871095, 0.03816291046142578, 0.038116542816162106, 0.03770006561279297, 0.03802719879150391, 0.03803792190551758, 0.03801702499389648, 0.038035457611083984, 0.03779072189331055, 0.03808358383178711, 0.03803734588623047, 0.03804790496826172, 0.03823206329345703, 0.038566913604736325, 0.03829683303833008, 0.03848780822753906, 0.037844673156738284, 0.03773881530761719, 0.038166526794433595, 0.03818905639648437, 0.03786876678466797, 0.03775363159179688, 0.03801881790161133, 0.038252639770507815, 0.04180972671508789, 0.03842902374267578, 0.03829350280761719, 0.03828876876831055, 0.03858684921264648, 0.038151935577392576, 0.0380338249206543, 0.03845264053344727, 0.03829616165161133, 0.03797964859008789, 0.037978431701660154, 0.038088897705078124, 0.03812761688232422, 0.03787161636352539, 0.038317920684814454, 0.03849641418457031, 0.037905792236328124, 0.03816636657714844, 0.03792319869995117, 0.03822428894042969, 0.0379714241027832, 0.03820995330810547, 0.038091968536376954, 0.03790703964233398, 0.038042240142822266, 0.03895471954345703, 0.040172607421875, 0.03896883010864258, 0.038246910095214845, 0.03815628814697265, 0.038031425476074215, 0.03800979232788086, 0.03801737594604492, 0.03801299285888672, 0.037853790283203126, 0.03788729476928711, 0.037911231994628904, 0.037914016723632815, 0.03786608123779297, 0.03784272003173828, 0.037754783630371096, 0.03779974365234375, 0.03804585647583008, 0.03783510589599609, 0.03767193603515625, 0.03770790481567383, 0.037983104705810546, 0.038043647766113284, 0.038012928009033206, 0.037750049591064455, 0.037593246459960934, 0.037894718170166014, 0.037674880981445315, 0.03779801559448242, 0.03784294509887695, 0.03811916732788086, 0.03797983932495117, 0.0379090576171875, 0.038745281219482425, 0.03825651168823242, 0.03805894470214844, 0.03804159927368164, 0.0376868782043457, 0.03789814376831055, 0.03785779190063476, 0.03766201782226562, 0.03769414520263672, 0.03757849502563477, 0.03814630508422852, 0.037806079864501956, 0.03768729782104492, 0.03818086242675781, 
0.04057088088989258, 0.038012191772460936, 0.03880828857421875, 0.038117374420166016, 0.03804076766967773, 0.03791750335693359, 0.03776265716552735, 0.03772153472900391, 0.03778035354614258, 0.03807580947875976, 0.03824710464477539, 0.03809894561767578, 0.03811328125, 0.037773311614990236, 0.0375, 0.03781929779052735, 0.03809657669067383, 0.03815865707397461, 0.03892816162109375, 0.03825913619995117, 0.038532478332519535, 0.038056575775146484, 0.03805699157714844, 0.038470623016357425, 0.0386082878112793, 0.03789686584472656, 0.037857215881347654, 0.03915321731567383, 0.03800928115844727, 0.037959678649902344, 0.038897247314453126, 0.038719264984130856, 0.0380516471862793, 0.037872447967529296, 0.03790233612060547, 0.03798198318481445, 0.03811555099487305, 0.03791462326049805, 0.037973342895507814, 0.037765567779541015, 0.037883838653564456, 0.03800912094116211, 0.03775459289550781, 0.037736736297607425, 0.03796377563476563, 0.037838848114013675, 0.03780198287963867, 0.03780963134765625, 0.03811382293701172, 0.03820748901367187, 0.037748703002929686, 0.037833984375, 0.043225471496582034, 0.039002559661865235, 0.03862515258789063, 0.03820729446411133, 0.038164768218994144, 0.03786547088623047, 0.03786956787109375, 0.03770518493652344, 0.03757878494262695, 0.03796598434448242, 0.037899871826171876, 0.03777942276000976, 0.03916265487670898, 0.037913982391357424, 0.03798284912109375, 0.03755532836914063, 0.0375203857421875, 0.03746188735961914, 0.03739852905273437, 0.03756438446044922, 0.03784070587158203, 0.03785955047607422, 0.03769343948364258, 0.038085662841796875, 0.037968257904052734, 0.03821014404296875, 0.037986431121826175, 0.038035232543945315, 0.041492576599121096, 0.03894262313842774, 0.0382033920288086, 0.03759308624267578, 0.03791382217407226, 0.037980960845947265, 0.03815423965454102, 0.03787571334838867, 0.03783049774169922, 0.0377446403503418, 0.03790659332275391, 0.0377935676574707, 0.03783292770385742, 0.0381317138671875, 0.03784499359130859, 0.03772963333129883, 0.03763225555419922, 0.038316287994384766, 0.03779190444946289, 0.037969440460205076, 0.03817110443115234, 0.03795264053344727, 0.037870079040527346, 0.03788351821899414, 0.039062271118164064, 0.03785318374633789, 0.03805587387084961, 0.03781024169921875, 0.037866718292236326, 0.038152992248535154, 0.038217727661132815, 0.03793830490112305, 0.03856473541259765, 0.038109184265136715, 0.03804569625854492, 0.03810508728027344, 0.03797196960449219, 0.03789004898071289, 0.03809894561767578, 0.038421920776367184, 0.03814460754394531, 0.03804300689697265, 0.03785548782348633, 0.03804179382324219, 0.03813804626464844, 0.03797942352294922, 0.03810172653198242, 0.03825823974609375, 0.038209983825683594, 0.03780198287963867, 0.03851264190673828, 0.03789209747314453, 0.03812966537475586, 0.03804159927368164, 0.037875518798828126, 0.03789580917358398, 0.0380684814453125, 0.037816638946533206, 0.03803910446166992, 0.03797993469238281, 0.03769184112548828, 0.03762198257446289, 0.037771263122558595, 0.037865440368652345]",tokens/s,26.239484151334157,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 
8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 196093 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 198700 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 
1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4034, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2905, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply 
self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 198197 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3575.926784,5272.109056,0.0,4869.586944,4520.068608,s,1,11.9082158203125,11.9082158203125,0.0,11.9082158203125,11.9082158203125,11.9082158203125,11.9082158203125,[11.9082158203125],,kWh,0.00011748109869997732,1.2951751967291584e-05,5.073587392200041e-05,0.0001811687245892693,,MB,1572.0448,5305.663488,0.0,4888.461312,4194.018304,s,10,1.7972826843261718,0.1797282684326172,0.0001738198442844156,0.1796596145629883,0.17999109497070312,0.18004680938720702,0.18009138092041016,"[0.17963618469238282, 0.17959027099609376, 0.17979440307617187, 0.17958934020996092, 0.17953240966796874, 0.1796703338623047, 0.17973960876464845, 0.18010252380371095, 0.17964889526367187, 0.1799787139892578]",tokens/s,1424.3724831521326,kWh,5.31986542818176e-06,5.866864578697227e-07,3.5244119104362667e-06,9.430963796487749e-06,tokens/kWh,27144627.58253178,MB,1580.617728,5314.052096,0.0,4896.84992,4194.020864,s,10,17.081702392578126,1.7081702392578126,0.0056161762317267555,1.7110252685546876,1.7125834228515626,1.7134856201171875,1.7142073779296874,"[1.7121220703125, 1.6977357177734376, 1.7143878173828124, 1.7123829345703125, 1.708708740234375, 1.7110892333984375, 1.702832275390625, 1.699511474609375, 1.7109613037109375, 1.7119708251953125]",tokens/s,36.881569852998396,kWh,5.008176658306997e-05,5.523793393050986e-06,3.2168533310563484e-05,8.777409328668445e-05,tokens/kWh,717751.6467670223,,s,630,17.079105426788324,0.027109691153632272,0.0003871795010973703,0.027033616065979003,0.02741643466949463,0.02765972652435303,0.028699747276306156,"[0.02764591979980469, 0.026936767578125, 0.027089504241943358, 0.026810239791870118, 0.02691904067993164, 0.02692915153503418, 0.027225568771362306, 0.027033632278442382, 0.027195903778076173, 0.02698854446411133, 0.027123775482177735, 0.02688323211669922, 0.026767744064331054, 0.02683283233642578, 0.026644832611083986, 0.026685407638549805, 0.0265665283203125, 0.02719977569580078, 0.02693110466003418, 0.026812511444091795, 0.026861568450927735, 0.027158527374267577, 0.030638336181640625, 0.028607807159423827, 0.02733919906616211, 0.028241920471191406, 0.026908191680908203, 0.027139968872070312, 0.02687446403503418, 0.026822944641113282, 
0.026930912017822266, 0.02690025520324707, 0.026872032165527342, 0.026824703216552736, 0.027172864913940428, 0.027158527374267577, 0.026933183670043947, 0.026931232452392578, 0.027045791625976562, 0.02728767967224121, 0.026887359619140624, 0.026889024734497072, 0.027285343170166017, 0.026939552307128908, 0.02695583915710449, 0.027501855850219727, 0.02780022430419922, 0.02729190444946289, 0.027063648223876954, 0.02724857521057129, 0.027147872924804688, 0.02704252815246582, 0.026992223739624024, 0.027340511322021484, 0.02716540718078613, 0.027795583724975585, 0.027655872344970703, 0.027328319549560547, 0.027093727111816405, 0.02705526351928711, 0.027486848831176757, 0.02707865524291992, 0.02694963264465332, 0.027532672882080077, 0.02706265640258789, 0.027143583297729493, 0.027011327743530274, 0.02690518379211426, 0.02694758415222168, 0.02692095947265625, 0.026840927124023438, 0.026720319747924805, 0.027064384460449217, 0.02690255928039551, 0.026866943359375, 0.026942207336425782, 0.026955904006958006, 0.0271092472076416, 0.027406335830688477, 0.02702332878112793, 0.02686342430114746, 0.026869983673095704, 0.027034751892089842, 0.026947616577148437, 0.02705289649963379, 0.026738687515258788, 0.02697420883178711, 0.026910720825195314, 0.026832895278930666, 0.026713279724121092, 0.026749759674072265, 0.026583040237426758, 0.026755071640014647, 0.02669158363342285, 0.026894336700439454, 0.026785791397094725, 0.02677555274963379, 0.026845184326171875, 0.02689023971557617, 0.02665990447998047, 0.026706527709960938, 0.027386079788208006, 0.027362655639648438, 0.02685158348083496, 0.026664735794067383, 0.026722784042358397, 0.026706207275390626, 0.02753331184387207, 0.026654495239257812, 0.026696063995361327, 0.026654207229614257, 0.026673503875732422, 0.02667910385131836, 0.026924352645874023, 0.02698534393310547, 0.026808319091796876, 0.026951679229736326, 0.027033599853515625, 0.027000192642211915, 0.02700320053100586, 0.027016576766967774, 0.02704697608947754, 0.027825376510620118, 0.027462303161621095, 0.027178655624389647, 0.027019615173339843, 0.027639808654785155, 0.02705939292907715, 0.026942144393920897, 0.026969919204711913, 0.026888511657714845, 0.02873139190673828, 0.02840166473388672, 0.027424448013305663, 0.02721414375305176, 0.02712166404724121, 0.027117151260375977, 0.027496864318847656, 0.02716166305541992, 0.027079456329345702, 0.027000991821289063, 0.02711142349243164, 0.027148096084594727, 0.02720992088317871, 0.027150335311889647, 0.027108448028564453, 0.027007904052734375, 0.027205631256103514, 0.027268415451049806, 0.02723686408996582, 0.02713382339477539, 0.027320640563964844, 0.02773196792602539, 0.027410688400268553, 0.027122623443603517, 0.027181888580322267, 0.027043903350830078, 0.027129791259765626, 0.027125759124755858, 0.02731827163696289, 0.027187231063842774, 0.027355104446411132, 0.026978303909301758, 0.02710323143005371, 0.02721513557434082, 0.027122335433959963, 0.02725279998779297, 0.027133951187133788, 0.027031551361083983, 0.027197343826293945, 0.02709820747375488, 0.027214847564697265, 0.02703171157836914, 0.027120607376098633, 0.027375520706176756, 0.027315168380737304, 0.02713804817199707, 0.02694963264465332, 0.02716262435913086, 0.027073631286621092, 0.02693212890625, 0.027084800720214845, 0.026990463256835937, 0.027039871215820313, 0.02703139114379883, 0.027147680282592773, 0.027146560668945312, 0.027252607345581055, 0.027224639892578124, 0.02781862449645996, 0.027123712539672853, 0.027035648345947266, 0.02711961555480957, 0.0271011848449707, 
0.027025568008422853, 0.026801536560058594, 0.02757596778869629, 0.026921791076660158, 0.027022464752197266, 0.02692799949645996, 0.026978303909301758, 0.02862227249145508, 0.02807046318054199, 0.0270930233001709, 0.027504192352294923, 0.026919328689575195, 0.026932384490966795, 0.02696278381347656, 0.026912416458129883, 0.026853824615478517, 0.0268625602722168, 0.02747488021850586, 0.02698854446411133, 0.0270250244140625, 0.02698681640625, 0.02697225570678711, 0.026922975540161133, 0.02711961555480957, 0.02719651222229004, 0.027216800689697264, 0.027850431442260744, 0.027399871826171877, 0.027331199645996094, 0.027328512191772462, 0.02756937599182129, 0.027355936050415038, 0.027387903213500975, 0.027183103561401366, 0.027201536178588868, 0.02709440040588379, 0.027086816787719726, 0.027094879150390626, 0.02712454414367676, 0.027201440811157225, 0.027074655532836913, 0.027119647979736327, 0.027018783569335937, 0.027083072662353515, 0.02718694305419922, 0.026928735733032227, 0.02711631965637207, 0.027299840927124022, 0.027148288726806642, 0.027340799331665038, 0.02711756706237793, 0.027239423751831054, 0.027189823150634767, 0.027099552154541014, 0.027002912521362304, 0.026967424392700196, 0.02695577621459961, 0.02694003105163574, 0.028007295608520506, 0.027148288726806642, 0.02713702392578125, 0.027036672592163087, 0.02707593536376953, 0.02714076805114746, 0.027003904342651368, 0.026952703475952147, 0.026889471054077147, 0.026931968688964844, 0.026918304443359374, 0.026971904754638672, 0.026921920776367188, 0.026810272216796875, 0.026927072525024413, 0.02691689682006836, 0.026894336700439454, 0.027084159851074218, 0.03003455924987793, 0.027606111526489258, 0.027089727401733397, 0.027092416763305663, 0.027005599975585937, 0.027029504776000978, 0.02703683280944824, 0.027093856811523438, 0.02720358467102051, 0.027123712539672853, 0.02774790382385254, 0.02843622398376465, 0.027371679306030273, 0.027306528091430665, 0.027026432037353516, 0.026973472595214844, 0.02693507194519043, 0.027150272369384765, 0.027309951782226564, 0.026779424667358397, 0.026936864852905272, 0.02666783905029297, 0.02674278450012207, 0.02679964828491211, 0.027199264526367187, 0.02724246406555176, 0.027085535049438475, 0.02706537628173828, 0.02710806465148926, 0.027089120864868164, 0.02711759948730469, 0.02692131233215332, 0.02714793586730957, 0.027025407791137695, 0.027006975173950197, 0.02693939208984375, 0.02691481590270996, 0.026953727722167968, 0.026946592330932616, 0.027016000747680666, 0.026896320343017577, 0.026927263259887695, 0.026871871948242188, 0.026847232818603517, 0.026870880126953125, 0.02769264030456543, 0.02723062324523926, 0.02742095947265625, 0.027596351623535156, 0.027162784576416014, 0.027074623107910156, 0.02707449531555176, 0.026938400268554687, 0.02689084815979004, 0.026824703216552736, 0.02677564811706543, 0.02692460823059082, 0.02706496047973633, 0.027002176284790038, 0.02695452880859375, 0.0268984317779541, 0.026963712692260743, 0.027004831314086913, 0.0269105281829834, 0.026915359497070312, 0.027054304122924804, 0.02708252716064453, 0.027056127548217773, 0.027278400421142577, 0.026997343063354492, 0.027062240600585936, 0.02717123222351074, 0.027138015747070313, 0.027006975173950197, 0.02696396827697754, 0.02694758415222168, 0.027040031433105467, 0.027043552398681642, 0.02694144058227539, 0.02701024055480957, 0.027177791595458984, 0.026968063354492186, 0.027094207763671874, 0.027190080642700197, 0.027123872756958007, 0.027371360778808595, 0.027224063873291016, 0.02746905517578125, 
0.027361696243286132, 0.03070755195617676, 0.028845695495605467, 0.02753420829772949, 0.02705526351928711, 0.02696668815612793, 0.02705622482299805, 0.027102975845336913, 0.027046239852905274, 0.026857471466064452, 0.027017215728759765, 0.027052032470703126, 0.02697216033935547, 0.026906591415405273, 0.027043935775756835, 0.026986431121826172, 0.026884096145629883, 0.027031551361083983, 0.026714111328125, 0.026955360412597655, 0.02742348861694336, 0.02694508743286133, 0.026835391998291016, 0.026705919265747072, 0.027096736907958986, 0.0269069766998291, 0.027309568405151367, 0.027283935546875, 0.027750431060791017, 0.027082752227783204, 0.027174911499023437, 0.026973663330078126, 0.026905120849609374, 0.026925056457519532, 0.02687385559082031, 0.02684880065917969, 0.02688252830505371, 0.026808319091796876, 0.026820608139038086, 0.026802175521850585, 0.026883743286132813, 0.026895904541015626, 0.026864448547363282, 0.026955007553100586, 0.026938112258911132, 0.026793983459472655, 0.02692300796508789, 0.027019296646118164, 0.02697929573059082, 0.027026432037353516, 0.026851327896118164, 0.02696566390991211, 0.026927040100097655, 0.02680182456970215, 0.02692140769958496, 0.027111391067504882, 0.026808160781860352, 0.026739200592041015, 0.02713599967956543, 0.026787839889526367, 0.027021312713623048, 0.02684880065917969, 0.027034080505371094, 0.027142143249511717, 0.027467775344848632, 0.02716057586669922, 0.02715238380432129, 0.027396095275878905, 0.027336191177368165, 0.027312639236450196, 0.02735228729248047, 0.02708745574951172, 0.02699849510192871, 0.02712214469909668, 0.027047296524047852, 0.026949344635009767, 0.02697417640686035, 0.0270447998046875, 0.026992256164550782, 0.02722649574279785, 0.027182336807250976, 0.027091712951660157, 0.02695577621459961, 0.027691007614135742, 0.027244543075561522, 0.02697420883178711, 0.026902528762817384, 0.026936960220336915, 0.02688198471069336, 0.026904064178466795, 0.026940351486206056, 0.0268984317779541, 0.026971359252929688, 0.027371583938598634, 0.027224800109863282, 0.027054079055786134, 0.027023040771484375, 0.027152479171752928, 0.02697648048400879, 0.02702662467956543, 0.027134784698486326, 0.027043264389038087, 0.02702601623535156, 0.027129695892333983, 0.02712188720703125, 0.026845087051391603, 0.026785791397094725, 0.02685040092468262, 0.02710416030883789, 0.02858367919921875, 0.02742092704772949, 0.026851327896118164, 0.026662912368774414, 0.026619903564453123, 0.02676902389526367, 0.026967487335205077, 0.0268623046875, 0.027015552520751954, 0.026938976287841795, 0.026712160110473632, 0.026674591064453124, 0.026745279312133788, 0.026837312698364257, 0.026879999160766603, 0.026719999313354493, 0.026943744659423827, 0.0269803524017334, 0.026858976364135742, 0.027011615753173828, 0.027146240234375, 0.026879999160766603, 0.02686566352844238, 0.02691859245300293, 0.026863935470581055, 0.02692691230773926, 0.026921152114868164, 0.02682032012939453, 0.026882335662841796, 0.026939327239990235, 0.026982751846313477, 0.026803936004638672, 0.026822048187255858, 0.026819168090820314, 0.02675916862487793, 0.026867168426513672, 0.026777887344360353, 0.027664384841918944, 0.026975936889648437, 0.026812736511230468, 0.02686534309387207, 0.026838720321655272, 0.02686796760559082, 0.02759881591796875, 0.02680022430419922, 0.02671648025512695, 0.02689023971557617, 0.026971647262573242, 0.026993024826049806, 0.026968191146850586, 0.026959871292114256, 0.027480064392089845, 0.027303680419921875, 0.027744512557983398, 0.027389408111572266, 
0.02734748840332031, 0.02727337646484375, 0.027289440155029297, 0.027678655624389648, 0.027340864181518553, 0.02741593551635742, 0.027662879943847658, 0.027459360122680663, 0.02735686492919922, 0.027255424499511717, 0.02745260810852051, 0.02721670341491699, 0.0270743350982666, 0.027074783325195313, 0.026965728759765627, 0.027826528549194336, 0.02801043128967285, 0.027480064392089845, 0.02726911926269531, 0.027062271118164064, 0.027053184509277343, 0.0269686393737793, 0.02707046318054199, 0.026971744537353515, 0.02699951934814453, 0.0274736328125, 0.02705558395385742, 0.027007808685302736, 0.026996511459350586, 0.027058336257934572, 0.02704902458190918, 0.027382783889770508, 0.027143680572509765, 0.027412992477416992, 0.026976255416870116, 0.02702047920227051, 0.026763456344604492, 0.02670185661315918, 0.02674502372741699, 0.026888608932495117, 0.026830848693847657, 0.026813535690307616, 0.026780319213867188, 0.0268720645904541, 0.027292896270751953, 0.027602399826049805, 0.027097631454467773, 0.0269803524017334, 0.026940544128417968, 0.027168928146362306, 0.027357120513916016, 0.02733660888671875, 0.027019615173339843, 0.026906656265258788, 0.027804159164428712, 0.02903798484802246, 0.027134239196777345, 0.027158847808837892, 0.027151872634887695, 0.027076480865478515, 0.027037824630737305, 0.027080671310424805, 0.02719603157043457, 0.02749750328063965, 0.027648992538452148, 0.02711747169494629, 0.027000511169433594, 0.02699452781677246, 0.027082624435424803, 0.02710793685913086, 0.027107295989990236, 0.027058176040649414, 0.027069887161254882, 0.02698860740661621, 0.026919456481933595, 0.027094079971313478, 0.02728044891357422, 0.02734809684753418, 0.027080543518066408, 0.027024063110351562, 0.02715056037902832, 0.02697420883178711, 0.0268984317779541, 0.02696396827697754, 0.02693452835083008, 0.026953535079956056, 0.02707551956176758, 0.027090591430664064, 0.02701955223083496, 0.02694883155822754, 0.02685833549499512, 0.02691276741027832, 0.026873247146606445, 0.02685398483276367, 0.02738163185119629, 0.029198463439941407, 0.027064319610595702, 0.026941280364990234, 0.027010944366455077, 0.027148576736450197, 0.027209375381469728, 0.0272159366607666, 0.02727555274963379, 0.027143936157226562, 0.027002368927001953, 0.02696063995361328, 0.02714543914794922, 0.026972959518432617]",tokens/s,36.88717788531559,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track 
self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,2177.503232,2492.399616,0.0,2097.152,1986.693632,s,1,9.5335927734375,9.5335927734375,0.0,9.5335927734375,9.5335927734375,9.5335927734375,9.5335927734375,[9.5335927734375],,kWh,7.331160806666807e-05,8.078223592068064e-06,2.9448634669998475e-05,0.0001108384663287346,,MB,2245.156864,2504.982528,0.0,2097.152,1862.377472,s,10,11.278011718750001,1.127801171875,0.0012554719244792937,1.1274984130859376,1.1294764282226564,1.129490106201172,1.1295010485839845,"[1.125908203125, 1.1264990234375, 1.12655517578125, 1.1295037841796876, 1.1271705322265626, 1.127464111328125, 1.12753271484375, 1.1289794921875, 1.12892529296875, 1.129473388671875]",tokens/s,226.99036530915555,kWh,3.2999977701250025e-05,3.6394173935105946e-06,2.1911711973799514e-05,5.855110706856014e-05,tokens/kWh,4372248.669871229,MB,2249.78944,2504.982528,0.0,2097.152,1946.952704,s,10,14.862877807617187,1.4862877807617187,0.02083933928562863,1.4797899169921873,1.5144125610351562,1.5164294860839844,1.5180430261230469,"[1.5184464111328124, 1.510836669921875, 1.495533447265625, 1.51396435546875, 1.487755615234375, 1.4692376708984376, 1.4657362060546875, 1.463328369140625, 1.46621484375, 1.47182421875]",tokens/s,42.387484318624125,kWh,4.332284441624911e-05,4.778796280974686e-06,2.577252061800039e-05,7.387416131522419e-05,tokens/kWh,852801.5598197632,,s,630,14.860007503509511,0.02358731349763416,0.0006191026026769244,0.023459424018859865,0.024075221633911133,0.024343612957000733,0.0255697407913208,"[0.024386816024780274, 0.024013568878173828, 0.0239803524017334, 0.024073759078979493, 0.024008159637451173, 0.02391315269470215, 0.02386124801635742, 0.02432614326477051, 0.02386288070678711, 0.023849376678466795, 0.024115007400512697, 0.023937215805053712, 0.024070144653320313, 0.024086528778076172, 0.02414729690551758, 0.024102912902832032, 0.02407491111755371, 0.024292543411254884, 0.024138463973999023, 0.0240166072845459, 0.02407865524291992, 0.024024320602416993, 0.024973760604858397, 0.024062335968017576, 0.024197120666503907, 0.024004608154296874, 0.02392678451538086, 0.023817888259887697, 0.023935327529907225, 0.024020992279052734, 0.023883615493774414, 0.02383683204650879, 0.02411315155029297, 0.024023040771484375, 0.02410028839111328, 0.024136255264282227, 0.02492416000366211, 0.024465408325195313, 0.02424790382385254, 0.024061439514160156, 0.024017824172973632, 0.023860416412353515, 0.02385145568847656, 0.023857215881347656, 0.02380406379699707, 0.023987775802612306, 0.024031839370727538, 0.023982080459594726, 0.024025087356567384, 0.02391859245300293, 0.02387459182739258, 0.023978975296020506, 
0.023957664489746094, 0.024026975631713868, 0.024766464233398438, 0.026572799682617186, 0.024110143661499023, 0.02420172882080078, 0.023980480194091797, 0.02391859245300293, 0.02387923240661621, 0.02374496078491211, 0.02371993637084961, 0.024375360488891603, 0.02394153594970703, 0.023875648498535157, 0.02389580726623535, 0.023852672576904297, 0.023990272521972656, 0.024038015365600587, 0.023974912643432617, 0.024792064666748048, 0.02406393623352051, 0.024342592239379884, 0.024006656646728516, 0.02408038330078125, 0.023963647842407225, 0.024030368804931642, 0.023941984176635744, 0.02399635124206543, 0.02386950492858887, 0.024022176742553712, 0.023996448516845702, 0.02434444808959961, 0.023880128860473634, 0.02391267204284668, 0.024037664413452148, 0.02392268753051758, 0.023668159484863283, 0.02371027183532715, 0.023908351898193358, 0.023957408905029298, 0.023990367889404295, 0.023867391586303712, 0.02388899230957031, 0.024031936645507814, 0.02388422393798828, 0.023926559448242186, 0.023848512649536132, 0.024569440841674804, 0.02477552032470703, 0.024094720840454102, 0.023969791412353517, 0.024047903060913086, 0.023999584197998046, 0.02407084846496582, 0.02388547134399414, 0.024225215911865234, 0.02389017677307129, 0.02401273536682129, 0.023963935852050783, 0.023916927337646485, 0.023814144134521483, 0.024029184341430664, 0.023816192626953125, 0.023783071517944336, 0.023708000183105468, 0.023758848190307616, 0.0237076473236084, 0.02367897605895996, 0.023666688919067383, 0.023856416702270507, 0.023720672607421875, 0.02373222351074219, 0.02390425682067871, 0.024061759948730468, 0.02438969612121582, 0.023949216842651368, 0.024035327911376952, 0.024030431747436524, 0.023966495513916015, 0.02384486389160156, 0.023782527923583985, 0.02398092842102051, 0.023744512557983398, 0.023666496276855468, 0.023811872482299806, 0.024060319900512696, 0.02378700828552246, 0.023671295166015623, 0.02370742416381836, 0.023660768508911134, 0.02382156753540039, 0.023814432144165038, 0.023796064376831055, 0.023661855697631837, 0.023642976760864257, 0.023596864700317383, 0.023941183090209962, 0.02367500877380371, 0.023856544494628908, 0.023771743774414062, 0.0236810245513916, 0.023607295989990236, 0.0237293758392334, 0.023599519729614257, 0.02366092872619629, 0.023629823684692384, 0.02368076705932617, 0.023664064407348633, 0.023628671646118163, 0.02368880081176758, 0.02364246368408203, 0.023723007202148438, 0.023722879409790038, 0.023619712829589842, 0.023681280136108398, 0.023626720428466797, 0.02354240036010742, 0.02353705596923828, 0.023688159942626952, 0.023754528045654297, 0.023760000228881837, 0.02369171142578125, 0.023696832656860352, 0.02378031921386719, 0.02378329658508301, 0.023732383728027343, 0.023733631134033203, 0.02361334419250488, 0.023589920043945313, 0.02351465606689453, 0.02356150436401367, 0.02356038475036621, 0.023472831726074218, 0.02344960021972656, 0.02369932746887207, 0.023823936462402343, 0.023941120147705077, 0.025169919967651368, 0.02459379196166992, 0.024125343322753907, 0.023968320846557617, 0.023946720123291014, 0.023962015151977538, 0.023998783111572265, 0.023928800582885743, 0.023956735610961913, 0.024692800521850584, 0.023818944931030272, 0.02385055923461914, 0.023796159744262694, 0.023613439559936524, 0.0246560001373291, 0.024514015197753907, 0.02400534439086914, 0.02407801628112793, 0.02408448028564453, 0.023971168518066407, 0.023962272644042968, 0.02384841537475586, 0.02475267219543457, 0.024758272171020508, 0.02397011184692383, 0.023795391082763673, 0.026979743957519533, 
0.03143945693969727, 0.023740415573120118, 0.02368819236755371, 0.023839744567871093, 0.02371583938598633, 0.023690784454345703, 0.02429305648803711, 0.02388047981262207, 0.023750656127929686, 0.02395955276489258, 0.023928768157958986, 0.0237825927734375, 0.024092735290527342, 0.024687423706054687, 0.0238734073638916, 0.023984256744384765, 0.024137727737426756, 0.023855392456054687, 0.02356809616088867, 0.023615488052368162, 0.023373823165893554, 0.023318527221679687, 0.023322303771972655, 0.023298368453979493, 0.02333695983886719, 0.023172224044799804, 0.023417055130004884, 0.023368192672729493, 0.023290016174316405, 0.02334457588195801, 0.02380009651184082, 0.023344415664672852, 0.02325196838378906, 0.023166976928710937, 0.023195648193359376, 0.023359487533569336, 0.0242774715423584, 0.023863391876220705, 0.023791263580322266, 0.023628480911254884, 0.023474496841430666, 0.023314111709594725, 0.023330816268920897, 0.023234560012817384, 0.023185407638549805, 0.02317673683166504, 0.023113664627075196, 0.023101984024047853, 0.023145824432373046, 0.023359264373779297, 0.027855104446411132, 0.027777536392211914, 0.02375289535522461, 0.023691200256347657, 0.023174911499023437, 0.023146751403808594, 0.023879327774047852, 0.025571680068969725, 0.023247968673706054, 0.02322105598449707, 0.023238752365112306, 0.023212032318115236, 0.023123327255249022, 0.02346044731140137, 0.023118175506591798, 0.02315817642211914, 0.023249183654785156, 0.02327510452270508, 0.024131999969482423, 0.024788991928100586, 0.023293760299682616, 0.02319379234313965, 0.02328384017944336, 0.023313312530517577, 0.02333590316772461, 0.023245824813842773, 0.023177696228027345, 0.023513631820678713, 0.0237260799407959, 0.024396799087524415, 0.02384588813781738, 0.023562240600585937, 0.02352332878112793, 0.023646207809448243, 0.023604671478271486, 0.02352707290649414, 0.02355846405029297, 0.02362224006652832, 0.023557279586791994, 0.023345632553100584, 0.02315507125854492, 0.023197599411010742, 0.02343049621582031, 0.023259904861450194, 0.023302143096923827, 0.02328780746459961, 0.02323276710510254, 0.023137216567993165, 0.023147167205810545, 0.023985151290893555, 0.023528192520141603, 0.023388416290283202, 0.023128063201904296, 0.023179264068603517, 0.02308710479736328, 0.023127424240112306, 0.023128704071044923, 0.02313216018676758, 0.023142400741577147, 0.023224319458007812, 0.023223424911499025, 0.023486751556396485, 0.02345840072631836, 0.023524608612060547, 0.023593536376953127, 0.02413382339477539, 0.023625312805175783, 0.023628192901611327, 0.023560192108154295, 0.023521280288696288, 0.023480319976806642, 0.023382015228271484, 0.023516544342041014, 0.023226591110229493, 0.023316896438598633, 0.023334911346435547, 0.023345151901245118, 0.02327347183227539, 0.02314975929260254, 0.023461023330688478, 0.023268640518188475, 0.023200128555297853, 0.023112768173217772, 0.02314950370788574, 0.023222272872924804, 0.023214080810546874, 0.023179264068603517, 0.023142400741577147, 0.023158784866333007, 0.02427827262878418, 0.02398080062866211, 0.023617536544799804, 0.02330419158935547, 0.023096384048461913, 0.02327142333984375, 0.023114688873291017, 0.02305843162536621, 0.02319923210144043, 0.023089664459228516, 0.023220224380493162, 0.023111679077148437, 0.023077983856201172, 0.023204416275024415, 0.023125728607177733, 0.023138816833496095, 0.023179391860961913, 0.02309939193725586, 0.023175008773803712, 0.02309744071960449, 0.023185216903686523, 0.023262752532958984, 0.023157472610473632, 0.023965696334838867, 
0.023488512039184572, 0.02319094467163086, 0.023077375411987306, 0.0230994873046875, 0.02324684715270996, 0.023150592803955077, 0.02314854431152344, 0.023353343963623048, 0.02323865509033203, 0.023136255264282226, 0.02324265670776367, 0.02314249610900879, 0.023130111694335938, 0.023128063201904296, 0.02329542350769043, 0.023172895431518556, 0.02310633659362793, 0.023084575653076173, 0.02310537528991699, 0.023061183929443358, 0.023052223205566408, 0.023162879943847657, 0.0230830078125, 0.023068672180175782, 0.023044095993041993, 0.023053472518920898, 0.023025983810424804, 0.02312656021118164, 0.02332057571411133, 0.02309939193725586, 0.02310553550720215, 0.023189184188842773, 0.02309766387939453, 0.02309280014038086, 0.02313465690612793, 0.023138208389282225, 0.023267423629760742, 0.025564992904663086, 0.02481990432739258, 0.02325017547607422, 0.023149023056030272, 0.02320591926574707, 0.023228256225585938, 0.023263647079467775, 0.023302143096923827, 0.023287328720092773, 0.023376352310180665, 0.02337286376953125, 0.02308105659484863, 0.023104352951049803, 0.02324684715270996, 0.023289440155029296, 0.023464351654052733, 0.02326835250854492, 0.02319580841064453, 0.023163232803344726, 0.02316339111328125, 0.023181343078613283, 0.02310550308227539, 0.02348236846923828, 0.023136255264282226, 0.023127967834472657, 0.024162464141845703, 0.02358665657043457, 0.023254655838012697, 0.02311363220214844, 0.0232391357421875, 0.02329190444946289, 0.023212032318115236, 0.023212032318115236, 0.023214080810546874, 0.023343103408813477, 0.02557542419433594, 0.02324265670776367, 0.023199840545654295, 0.023162591934204103, 0.023107872009277344, 0.023146495819091797, 0.02317430305480957, 0.023124000549316407, 0.023171007156372072, 0.023128799438476563, 0.023197856903076172, 0.02318880081176758, 0.023190208435058594, 0.023172447204589844, 0.023130783081054686, 0.02325503921508789, 0.02330624008178711, 0.02318671989440918, 0.02308892822265625, 0.02314124870300293, 0.0231278076171875, 0.02317251205444336, 0.023133087158203124, 0.023072416305541993, 0.023080543518066408, 0.02314726448059082, 0.023187456130981447, 0.023144447326660156, 0.02311302375793457, 0.023196352005004882, 0.02309724807739258, 0.023134559631347657, 0.02309836769104004, 0.02313430404663086, 0.023141023635864257, 0.023173120498657225, 0.02312406349182129, 0.02306412887573242, 0.023045503616333007, 0.02312041664123535, 0.023173568725585937, 0.02334534454345703, 0.023305471420288087, 0.023229312896728516, 0.02309087944030762, 0.023154687881469727, 0.023134208679199218, 0.02310553550720215, 0.023064863204956054, 0.023100255966186523, 0.02308799934387207, 0.02311689567565918, 0.023135135650634766, 0.024225791931152343, 0.023690975189208985, 0.02348089599609375, 0.023215808868408204, 0.02322230339050293, 0.023115488052368165, 0.023237152099609373, 0.02319657516479492, 0.023161727905273436, 0.023123935699462892, 0.02325503921508789, 0.023183135986328124, 0.023722175598144532, 0.02315881538391113, 0.023285184860229492, 0.02383929634094238, 0.025065376281738282, 0.02342508888244629, 0.023316511154174803, 0.02333286476135254, 0.023262432098388672, 0.023378719329833986, 0.023201791763305665, 0.02311974334716797, 0.023117952346801758, 0.023736320495605468, 0.02326323127746582, 0.02310758399963379, 0.023195648193359376, 0.023152639389038086, 0.023123296737670898, 0.02314672088623047, 0.023192319869995117, 0.02304787254333496, 0.023145952224731446, 0.023088703155517577, 0.02312495994567871, 0.023171072006225587, 0.023117824554443358, 0.023119871139526366, 
0.023431167602539063, 0.02309529685974121, 0.023646207809448243, 0.02314396858215332, 0.02309984016418457, 0.0231844482421875, 0.02302012825012207, 0.023128032684326172, 0.02307523155212402, 0.023133535385131836, 0.023165599822998047, 0.023349248886108398, 0.023197311401367188, 0.02315507125854492, 0.023177183151245118, 0.023189279556274416, 0.023131935119628907, 0.023038431167602538, 0.023117536544799804, 0.02311196708679199, 0.02305356788635254, 0.023065343856811523, 0.02317686462402344, 0.024754175186157225, 0.023817279815673827, 0.023759807586669922, 0.023633119583129882, 0.02368489646911621, 0.023472768783569336, 0.02336742401123047, 0.023264991760253907, 0.0233276481628418, 0.023369119644165038, 0.023368288040161132, 0.023369728088378908, 0.02326937675476074, 0.02321023941040039, 0.023132959365844728, 0.023110687255859377, 0.02309084892272949, 0.023107872009277344, 0.023203039169311525, 0.023137056350708007, 0.02314431953430176, 0.023237855911254882, 0.02345471954345703, 0.023465536117553712, 0.023658815383911132, 0.023395679473876954, 0.023290559768676757, 0.02304979133605957, 0.023091648101806642, 0.024012928009033204, 0.023089023590087892, 0.02302726364135742, 0.02311008071899414, 0.02308710479736328, 0.02310291290283203, 0.023084672927856445, 0.02304684829711914, 0.023058687210083008, 0.023125120162963867, 0.02310028839111328, 0.023045536041259765, 0.023265888214111328, 0.023146495819091797, 0.023150047302246093, 0.023167327880859376, 0.023138496398925783, 0.023170944213867187, 0.023156768798828126, 0.023141727447509766, 0.023173887252807616, 0.023166976928710937, 0.02333919906616211, 0.023471071243286134, 0.02369993591308594, 0.023678335189819336, 0.02387455940246582, 0.023688224792480467, 0.023624000549316407, 0.023938816070556642, 0.023579103469848633, 0.02362335968017578, 0.023612159729003906, 0.023584768295288085]",tokens/s,42.39567172837641,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,3142.098944,4602.134528,0.0,4206.886912,4087.771648,s,1,11.3848994140625,11.3848994140625,0.0,11.3848994140625,11.3848994140625,11.3848994140625,11.3848994140625,[11.3848994140625],,kWh,0.00011912927681670831,1.3133468904521388e-05,4.522864729397669e-05,0.0001774913930152064,,MB,3186.556928,4774.100992,0.0,4366.270464,4273.048576,s,10,14.616045898437498,1.46160458984375,0.0024391264378625013,1.4610399780273438,1.4643836791992189,1.465648260498047,1.4666599255371096,"[1.460572998046875, 1.458865234375, 1.4580650634765624, 1.4618348388671876, 1.4611273193359375, 1.4605428466796875, 1.46095263671875, 1.4630694580078125, 1.4641026611328125, 1.466912841796875]",tokens/s,175.14996995690004,kWh,4.293747330542221e-05,4.73315819915845e-06,2.8513161699400546e-05,7.618379320398122e-05,tokens/kWh,3360294.7455577985,MB,3186.556928,4778.295296,0.0,4368.367616,4273.051136,s,10,24.59880004882813,2.4598800048828124,0.0055870374012830905,2.457652099609375,2.4651350830078127,2.4693376831054685,2.4726997631835936,"[2.464201171875, 2.457966064453125, 
2.4545078125, 2.457338134765625, 2.457046142578125, 2.4617607421875, 2.457312744140625, 2.453147705078125, 2.473540283203125, 2.461979248046875]",tokens/s,25.611005364060954,kWh,7.094289176040852e-05,7.827931004083778e-06,4.0493560172599746e-05,0.000119264382937092,tokens/kWh,528238.1751241727,,s,630,24.59587634277343,0.03904107355995784,0.0005100896243114708,0.03896319961547851,0.03939310531616211,0.03969704113006592,0.04119656272888184,"[0.03987046432495117, 0.03919257736206055, 0.03948339080810547, 0.03919801712036133, 0.03940217590332031, 0.03913523101806641, 0.03915161514282227, 0.03880303955078125, 0.038876575469970705, 0.03863241577148437, 0.0387410888671875, 0.038720447540283205, 0.039024639129638675, 0.03879446411132813, 0.038781726837158206, 0.038767807006835936, 0.03908041763305664, 0.03907769775390625, 0.03886748886108399, 0.03883785629272461, 0.03877519989013672, 0.03880672073364258, 0.03867116928100586, 0.03895238494873047, 0.03868691253662109, 0.03888572692871094, 0.03888131332397461, 0.038986881256103514, 0.03907468795776367, 0.03908505630493164, 0.039088512420654295, 0.04006358337402344, 0.04022480010986328, 0.03917193603515625, 0.03943859100341797, 0.03906460952758789, 0.04046118545532226, 0.038984737396240234, 0.0392303352355957, 0.03887113571166992, 0.038932479858398435, 0.038913761138916016, 0.03897577667236328, 0.0390041618347168, 0.03904716873168945, 0.03895500946044922, 0.03904307174682617, 0.03989299011230469, 0.03912499237060547, 0.03930112075805664, 0.03906118392944336, 0.03907206344604492, 0.03911679840087891, 0.03912499237060547, 0.03938304138183594, 0.0392305908203125, 0.03923174285888672, 0.039139583587646486, 0.039077472686767575, 0.03930534362792969, 0.03909043121337891, 0.038989471435546874, 0.03904358291625976, 0.039609886169433596, 0.03925040054321289, 0.03909008026123047, 0.039034976959228515, 0.039293216705322265, 0.03900592041015625, 0.038934528350830076, 0.0388218879699707, 0.03885465621948242, 0.03890790557861328, 0.038718528747558593, 0.038960254669189454, 0.038774593353271485, 0.038629375457763675, 0.03863654327392578, 0.03944502258300781, 0.03942351913452148, 0.038847232818603514, 0.038931774139404296, 0.038740863800048826, 0.03905542373657227, 0.03874105453491211, 0.03896790313720703, 0.03858598327636719, 0.03893929672241211, 0.03890585708618164, 0.039049121856689455, 0.039008350372314454, 0.03904886245727539, 0.038916446685791015, 0.039174144744873046, 0.03881369781494141, 0.0390530891418457, 0.039526622772216795, 0.038950912475585936, 0.03874630355834961, 0.038597663879394534, 0.039027488708496094, 0.03863929748535156, 0.03877654266357422, 0.038525600433349606, 0.03868384170532226, 0.03884489440917969, 0.03889385604858398, 0.03906086349487305, 0.0390431022644043, 0.039137535095214844, 0.03931785583496094, 0.038778881072998046, 0.03885158538818359, 0.03881062316894531, 0.03898076629638672, 0.03868716812133789, 0.03871376037597656, 0.0424601936340332, 0.03903078460693359, 0.03934102249145508, 0.03912908935546875, 0.039094432830810544, 0.039065471649169924, 0.03894185638427734, 0.03886368179321289, 0.038983295440673825, 0.03948505783081055, 0.03890796661376953, 0.038615360260009765, 0.03853302383422851, 0.038306049346923825, 0.038618335723876955, 0.03881760025024414, 0.03958777618408203, 0.03929792022705078, 0.03897958374023437, 0.03885260772705078, 0.03918195343017578, 0.03890214538574219, 0.038784801483154295, 0.03896112060546875, 0.0387869758605957, 0.039022014617919924, 0.03898886489868164, 0.03823980712890625, 0.038363040924072264, 
0.03816198348999023, 0.03840252685546875, 0.03819472122192383, 0.03837535858154297, 0.0384983024597168, 0.038923137664794924, 0.038557441711425784, 0.038921951293945316, 0.03909686279296875, 0.03901411056518555, 0.038881568908691405, 0.039679550170898435, 0.03978899383544922, 0.03885260772705078, 0.03894879913330078, 0.0397938232421875, 0.03896207809448242, 0.03904716873168945, 0.038637569427490234, 0.03878297424316406, 0.03878860855102539, 0.03881625747680664, 0.03896223831176758, 0.038978111267089846, 0.03862531280517578, 0.03897174453735352, 0.03891404724121094, 0.03884646224975586, 0.03851459121704102, 0.0384186897277832, 0.03850425720214844, 0.03824233627319336, 0.03847372817993164, 0.04012851333618164, 0.040656993865966794, 0.03879212951660156, 0.038701473236083986, 0.038924671173095705, 0.03953478240966797, 0.04266556930541992, 0.03905177688598633, 0.03912003326416016, 0.038828479766845704, 0.03952844619750977, 0.039000064849853515, 0.03937279891967774, 0.03913113784790039, 0.03891712188720703, 0.03867750549316406, 0.03851449584960937, 0.03858041763305664, 0.03865190505981445, 0.03910246276855469, 0.03870217514038086, 0.038556385040283206, 0.03874403381347656, 0.038629600524902344, 0.038787071228027346, 0.03851264190673828, 0.038333633422851565, 0.038886207580566406, 0.03878297424316406, 0.03870105743408203, 0.04410572814941406, 0.042813438415527344, 0.038633663177490236, 0.03858179092407227, 0.03883375930786133, 0.03878572845458984, 0.038572032928466796, 0.03845513534545898, 0.038674591064453125, 0.038809600830078124, 0.03886278533935547, 0.039811134338378906, 0.04007321548461914, 0.03876192092895508, 0.038523456573486325, 0.039051136016845706, 0.03880563354492188, 0.040630271911621094, 0.03916595077514649, 0.038862335205078126, 0.03898601531982422, 0.03890332794189453, 0.038949344635009764, 0.03892364883422852, 0.03893091201782226, 0.03892671966552735, 0.03871881484985352, 0.0386952018737793, 0.038879104614257816, 0.03891804885864258, 0.03884502410888672, 0.038819839477539066, 0.0389857292175293, 0.03905305480957031, 0.038912063598632814, 0.03903302383422851, 0.03886489486694336, 0.0388587532043457, 0.03871539306640625, 0.038422527313232424, 0.0384266242980957, 0.03831193542480469, 0.03808256149291992, 0.039667713165283204, 0.039144447326660156, 0.03926496124267578, 0.0388256950378418, 0.03868662261962891, 0.038507198333740236, 0.03860847854614258, 0.039042911529541015, 0.03883065414428711, 0.03842444610595703, 0.03989311981201172, 0.038940673828125, 0.03875423812866211, 0.038809249877929684, 0.03882844924926758, 0.03877478408813476, 0.0390041618347168, 0.0387454719543457, 0.03896793746948242, 0.03910041427612305, 0.03896284866333008, 0.0391703987121582, 0.03969023895263672, 0.03902668762207031, 0.0391978874206543, 0.0389415054321289, 0.03912879943847656, 0.03921948623657227, 0.03896031951904297, 0.039172927856445314, 0.038819103240966796, 0.03887177658081055, 0.0398803825378418, 0.03888569641113281, 0.0388218879699707, 0.03864985656738281, 0.038834175109863284, 0.03915155029296875, 0.03880352020263672, 0.03975955200195312, 0.039231231689453125, 0.03879302215576172, 0.03885670471191406, 0.03873049545288086, 0.03940284729003906, 0.039010974884033205, 0.038954463958740235, 0.039031295776367186, 0.03897721481323242, 0.039046592712402343, 0.03920374298095703, 0.038927520751953125, 0.039088993072509765, 0.03887868881225586, 0.03864412689208984, 0.03913945770263672, 0.03902054214477539, 0.038980926513671875, 0.038752609252929685, 0.038695262908935546, 0.03890300750732422, 
0.038789920806884766, 0.03888332748413086, 0.039701663970947265, 0.039228256225585935, 0.03905027389526367, 0.0391011848449707, 0.03913328170776367, 0.03911203384399414, 0.03893648147583008, 0.03882400131225586, 0.040325439453125, 0.039596542358398434, 0.039032318115234374, 0.03907459259033203, 0.03899772644042969, 0.0393175048828125, 0.03922534561157227, 0.038948863983154294, 0.03919404983520508, 0.038814273834228516, 0.038866462707519533, 0.038795745849609375, 0.03889152145385742, 0.038959102630615236, 0.03907174301147461, 0.03889129638671875, 0.039096126556396486, 0.03897574234008789, 0.03930444717407226, 0.039217121124267576, 0.03910329437255859, 0.039128768920898435, 0.038969791412353516, 0.03889766311645508, 0.039000064849853515, 0.03903692626953125, 0.03903241729736328, 0.0385, 0.03842940902709961, 0.03870841598510742, 0.03853398513793945, 0.03906345748901367, 0.03896124649047852, 0.040187904357910156, 0.03906355285644531, 0.03890585708618164, 0.03873382568359375, 0.03885027313232422, 0.039911487579345706, 0.03908134460449219, 0.03872854232788086, 0.038835777282714846, 0.03894454574584961, 0.03908233642578125, 0.0390843505859375, 0.03896319961547851, 0.03896115112304688, 0.039001823425292965, 0.03896758270263672, 0.03892140960693359, 0.039619392395019534, 0.039159263610839844, 0.03927664184570313, 0.03900806427001953, 0.03914543914794922, 0.03969139099121094, 0.03901529693603516, 0.03888092803955078, 0.03920316696166992, 0.03903641510009766, 0.038879425048828124, 0.03885702514648438, 0.03908607864379883, 0.03891417694091797, 0.03875980758666992, 0.03898624038696289, 0.03873174285888672, 0.03901955032348633, 0.038895713806152345, 0.03891292953491211, 0.038768638610839845, 0.03907993698120117, 0.038787071228027346, 0.0390098876953125, 0.038776798248291014, 0.03864963150024414, 0.038722206115722656, 0.03873382568359375, 0.03869283294677734, 0.038725215911865236, 0.03894316864013672, 0.03869279861450195, 0.03871955108642578, 0.038893089294433594, 0.039286945343017576, 0.0387259521484375, 0.03892633438110352, 0.03876249694824219, 0.03876192092895508, 0.03876287841796875, 0.039044704437255856, 0.039126720428466794, 0.03909084701538086, 0.03893065643310547, 0.038940704345703125, 0.03916185760498047, 0.03930316925048828, 0.03890585708618164, 0.04023651123046875, 0.039141918182373045, 0.039067649841308595, 0.03896319961547851, 0.03918000030517578, 0.03889932632446289, 0.0391761589050293, 0.038940574645996096, 0.03902684783935547, 0.03897407913208008, 0.03911030578613281, 0.03920483016967773, 0.03947148895263672, 0.03906268692016602, 0.03920323181152344, 0.03911315155029297, 0.03917350387573242, 0.039016414642333984, 0.03931625747680664, 0.03895199966430664, 0.039574462890625, 0.03878815841674805, 0.038832801818847656, 0.038750015258789065, 0.03868691253662109, 0.03871683120727539, 0.03875683212280273, 0.03871347045898438, 0.03884646224975586, 0.03885055923461914, 0.038739776611328124, 0.03879459381103516, 0.03909513473510742, 0.03890790557861328, 0.03893040084838867, 0.03879939270019531, 0.04004249572753906, 0.03907788848876953, 0.0397209587097168, 0.03895238494873047, 0.03885523223876953, 0.038834175109863284, 0.03874220657348633, 0.03882579040527344, 0.03907939147949219, 0.03887772750854492, 0.03889766311645508, 0.03886399841308594, 0.03880988693237305, 0.03866182327270508, 0.03875727844238281, 0.039241344451904296, 0.03909260940551758, 0.03893407821655273, 0.03852947235107422, 0.038540287017822264, 0.03842764663696289, 0.03885657501220703, 0.03861305618286133, 0.039230751037597655, 
0.0389005126953125, 0.038959102630615236, 0.03914137649536133, 0.038967201232910156, 0.03912303924560547, 0.03965449523925781, 0.039721950531005856, 0.03889350509643555, 0.03911811065673828, 0.03899875259399414, 0.03871728134155274, 0.0387457275390625, 0.03892063903808594, 0.03887318420410156, 0.03876454544067383, 0.03869081497192383, 0.03890995025634766, 0.03882915115356445, 0.03874031829833984, 0.03882352066040039, 0.03919708633422852, 0.039110366821289065, 0.0388164176940918, 0.039627361297607425, 0.03956684875488281, 0.039147201538085936, 0.03890476989746094, 0.03886812973022461, 0.03895904159545899, 0.038984416961669925, 0.03904915237426758, 0.03920300674438477, 0.03969030380249024, 0.039058464050292965, 0.03923452758789062, 0.039202400207519535, 0.039167583465576174, 0.03921798324584961, 0.03909791946411133, 0.038984127044677734, 0.03895024108886719, 0.03899663925170899, 0.039180286407470705, 0.03935644912719727, 0.03893363189697266, 0.039057441711425785, 0.03947743988037109, 0.03936336135864258, 0.03901036834716797, 0.03896847915649414, 0.0393201904296875, 0.03906569671630859, 0.039185409545898435, 0.04003728103637695, 0.04297024154663086, 0.039443328857421876, 0.0390978889465332, 0.03906777572631836, 0.0390285758972168, 0.038877536773681644, 0.0391824951171875, 0.038942718505859376, 0.03900422286987305, 0.039091487884521485, 0.03959820938110352, 0.041269790649414065, 0.039182334899902346, 0.03887308883666992, 0.03916508865356445, 0.03939209747314453, 0.039624702453613284, 0.04179302215576172, 0.03908454513549805, 0.039139328002929685, 0.03892019271850586, 0.03915724945068359, 0.038852161407470706, 0.03858963012695313, 0.03893222427368164, 0.03885670471191406, 0.03870924758911133, 0.03889110565185547, 0.03897590255737305, 0.038860801696777345, 0.038800609588623046, 0.03901520156860352, 0.0395181770324707, 0.03904716873168945, 0.039188480377197264, 0.038830078125, 0.038978721618652346, 0.038875999450683596, 0.03873382568359375, 0.03867766571044922, 0.03865686416625977, 0.03920876693725586, 0.03901663970947265, 0.039239681243896485, 0.03895296096801758, 0.039121982574462894, 0.03938729476928711, 0.03922000122070313, 0.039070945739746094, 0.03907657623291016, 0.038979648590087894, 0.038778881072998046, 0.03894892883300781, 0.04101728057861328, 0.03911884689331055, 0.03867577743530273, 0.03891865539550781, 0.038739646911621094, 0.03896985626220703, 0.038948863983154294, 0.03899801635742187, 0.038878623962402346, 0.039000545501708984, 0.03889324951171875, 0.03895248031616211, 0.038876319885253904, 0.0389826545715332, 0.039146240234375, 0.0392171516418457, 0.039272449493408204, 0.03892559814453125, 0.039118560791015625, 0.039174686431884764, 0.039098846435546876, 0.0389911994934082, 0.03917023849487305, 0.038885025024414065, 0.039036769866943356, 0.03900902557373047, 0.03909961700439453, 0.03910131072998047, 0.03968819046020508, 0.03901043319702149, 0.03900620651245117, 0.03895280075073242, 0.038988990783691405, 0.0388966064453125, 0.03922313690185547, 0.03909836959838867, 0.03907600021362305, 0.03913657760620117, 0.03915750503540039, 0.03927257537841797, 0.03899679946899414, 0.03942371368408203]",tokens/s,25.61404973826442,,, 
4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4356.030464,5355.012096,0.0,4959.76448,4769.731072,s,1,11.3029736328125,11.3029736328125,0.0,11.3029736328125,11.3029736328125,11.3029736328125,11.3029736328125,[11.3029736328125],,kWh,0.00013010201382500003,1.4343644621801782e-05,5.7639212778000035e-05,0.00020208487122480185,,MB,1615.03232,5373.886464,0.0,4966.055936,4251.027456,s,10,30.630434814453125,3.0630434814453125,0.004086782872654609,3.0632532958984378,3.067527099609375,3.06789404296875,3.06818759765625,"[3.054669921875, 3.05810400390625, 3.06162646484375, 
3.0614765625, 3.064130615234375, 3.0623759765625, 3.066512939453125, 3.065831787109375, 3.068260986328125, 3.067445556640625]",tokens/s,83.577004881173,kWh,8.94033612470832e-05,9.861140676874984e-06,5.9460658679599995e-05,0.00015872516060355817,tokens/kWh,1612850.7857642148,MB,1622.60992,5388.566528,0.0,4980.736,4251.030016,s,10,17.748735351562498,1.77487353515625,0.00824765049756315,1.772857360839844,1.7831903564453124,1.7875395141601562,1.7910188403320313,"[1.7669478759765624, 1.7713026123046876, 1.7784786376953126, 1.774412109375, 1.791888671875, 1.78211083984375, 1.765817626953125, 1.7652471923828126, 1.770305908203125, 1.782223876953125]",tokens/s,35.49548672179274,kWh,5.2776072571249924e-05,5.820655008400578e-06,3.4842416762799994e-05,9.343914434245052e-05,tokens/kWh,674235.6262286357,,s,630,17.744511219024666,0.028165890823848665,0.0004181261219157756,0.028107151985168458,0.028600368690490725,0.02875118350982666,0.02936630439758301,"[0.028757663726806642, 0.028042911529541015, 0.027849695205688477, 0.027927391052246092, 0.02764227294921875, 0.027685312271118163, 0.028643360137939455, 0.02774630355834961, 0.027622623443603514, 0.02810655975341797, 0.027959775924682618, 0.027746559143066406, 0.027622911453247072, 0.02776476860046387, 0.027816352844238282, 0.027843072891235353, 0.027650943756103517, 0.027914623260498046, 0.027714111328125, 0.027873279571533204, 0.0277193603515625, 0.027958784103393555, 0.027814048767089844, 0.028125888824462892, 0.028208768844604493, 0.028195104598999023, 0.028170303344726564, 0.028414976119995116, 0.028423168182373046, 0.02835456085205078, 0.028413951873779295, 0.02857574462890625, 0.028298271179199218, 0.028285919189453126, 0.028167423248291017, 0.028465791702270506, 0.02810483169555664, 0.02815385627746582, 0.028141855239868164, 0.028302719116210937, 0.02793712043762207, 0.02799731254577637, 0.02800124740600586, 0.028077503204345704, 0.02807651138305664, 0.02811494445800781, 0.028052831649780275, 0.027823104858398437, 0.02793747138977051, 0.028513023376464844, 0.0282193603515625, 0.027963584899902343, 0.027841663360595702, 0.02784320068359375, 0.02777529525756836, 0.027811840057373048, 0.02789116859436035, 0.027910495758056642, 0.02814112091064453, 0.028721792221069336, 0.028042720794677733, 0.027892576217651368, 0.027846303939819336, 0.028726207733154298, 0.028196863174438477, 0.028391424179077147, 0.028104703903198244, 0.028073984146118162, 0.02852979278564453, 0.028013439178466798, 0.028069887161254883, 0.02802387237548828, 0.028310367584228516, 0.028136640548706054, 0.028136671066284178, 0.02809823989868164, 0.02812326431274414, 0.02778883171081543, 0.02781737518310547, 0.02778822326660156, 0.028088319778442384, 0.028161535263061522, 0.02805401611328125, 0.027753536224365234, 0.0277142391204834, 0.02769536018371582, 0.02783875274658203, 0.027708511352539062, 0.028186304092407227, 0.027849279403686523, 0.028342655181884766, 0.028146944046020507, 0.028299711227416993, 0.0287172794342041, 0.028082271575927735, 0.028743263244628905, 0.028154272079467774, 0.02817228889465332, 0.028303071975708007, 0.02814828872680664, 0.02831279945373535, 0.028422624588012695, 0.02833523178100586, 0.028218271255493164, 0.028192768096923827, 0.02815180778503418, 0.02788150405883789, 0.02781916809082031, 0.028167232513427735, 0.027930368423461915, 0.02808137512207031, 0.02785321617126465, 0.02791472053527832, 0.027873216629028322, 0.027656160354614257, 0.02751251220703125, 0.027787839889526367, 0.027743423461914062, 0.02931155204772949, 0.029345855712890626, 
0.028250207901000978, 0.02843164825439453, 0.0280216007232666, 0.027966367721557618, 0.02769987106323242, 0.02765126419067383, 0.029288448333740235, 0.028362464904785157, 0.0313863353729248, 0.02802409553527832, 0.02834636878967285, 0.02799001693725586, 0.028013280868530274, 0.02817843246459961, 0.028278175354003905, 0.028366464614868164, 0.028150335311889648, 0.027939231872558593, 0.02805868721008301, 0.028332096099853515, 0.02781683158874512, 0.02791219139099121, 0.027914239883422853, 0.028039167404174805, 0.02793471908569336, 0.027860191345214842, 0.027816736221313476, 0.02796303939819336, 0.027801952362060546, 0.027751487731933595, 0.02780361557006836, 0.028097375869750977, 0.028039583206176756, 0.028716096878051756, 0.028463647842407225, 0.02832758331298828, 0.028600799560546876, 0.028571552276611328, 0.028223520278930665, 0.0284467830657959, 0.028264448165893553, 0.028387327194213868, 0.02837708854675293, 0.028380447387695313, 0.02841270446777344, 0.02828486442565918, 0.028094207763671875, 0.02793087959289551, 0.027973728179931642, 0.028026784896850586, 0.028339487075805664, 0.02810870361328125, 0.027978559494018555, 0.027910144805908203, 0.028030048370361327, 0.027921375274658204, 0.02791961669921875, 0.027914623260498046, 0.027859264373779297, 0.02787059211730957, 0.028156383514404297, 0.02846531105041504, 0.02838319969177246, 0.028353567123413085, 0.028313600540161132, 0.028427167892456053, 0.02856559944152832, 0.028372543334960938, 0.028305856704711915, 0.028676383972167967, 0.028227584838867188, 0.02850201606750488, 0.027879199981689452, 0.02774038314819336, 0.02782147216796875, 0.02774403190612793, 0.02765702438354492, 0.027828224182128908, 0.027922431945800782, 0.027889503479003906, 0.027860319137573242, 0.02803158378601074, 0.028133567810058595, 0.028208223342895508, 0.02843289566040039, 0.028154592514038086, 0.028501728057861327, 0.0280710391998291, 0.02815679931640625, 0.028028095245361328, 0.028213184356689455, 0.02804185676574707, 0.02809395217895508, 0.02786790466308594, 0.02794495964050293, 0.028236896514892577, 0.028304288864135742, 0.028317855834960937, 0.028255199432373045, 0.02816924858093262, 0.029072351455688476, 0.028105600357055664, 0.02813132858276367, 0.02809040069580078, 0.028274656295776367, 0.028254463195800782, 0.028608383178710936, 0.028386560440063477, 0.028298944473266602, 0.02807865524291992, 0.028367231369018555, 0.028180479049682617, 0.029063167572021483, 0.02828816032409668, 0.029012832641601562, 0.02813260841369629, 0.028101375579833984, 0.028112255096435546, 0.028094463348388672, 0.028246335983276367, 0.027887392044067382, 0.027886112213134764, 0.027950239181518555, 0.028141984939575194, 0.027832767486572266, 0.028014591217041016, 0.027910144805908203, 0.0279583683013916, 0.028037887573242187, 0.028121248245239257, 0.02792857551574707, 0.028073984146118162, 0.029064287185668947, 0.02865577507019043, 0.028299840927124023, 0.028461055755615236, 0.030914527893066406, 0.028531904220581054, 0.028445280075073243, 0.028430591583251952, 0.028526880264282226, 0.02817398452758789, 0.027817695617675782, 0.02807423973083496, 0.028272735595703126, 0.027799264907836914, 0.027758975982666016, 0.028020639419555664, 0.028249759674072266, 0.028346719741821288, 0.028065183639526366, 0.028190624237060546, 0.028265151977539062, 0.028001440048217772, 0.028139936447143556, 0.028329824447631834, 0.028209760665893556, 0.028247648239135743, 0.02813705635070801, 0.028067935943603517, 0.027987968444824218, 0.028182912826538085, 0.028186975479125978, 0.028128959655761718, 
0.02886649513244629, 0.02822563171386719, 0.028250015258789063, 0.0284532470703125, 0.028692480087280273, 0.028602367401123048, 0.028464351654052734, 0.028405920028686523, 0.028813087463378906, 0.028509023666381837, 0.02873103904724121, 0.028575519561767578, 0.028722944259643553, 0.028709760665893556, 0.028553152084350587, 0.0285347843170166, 0.028481536865234375, 0.028577375411987304, 0.028885311126708984, 0.028895328521728516, 0.028544063568115233, 0.028484832763671874, 0.028502847671508787, 0.02854560089111328, 0.02876860809326172, 0.02853398323059082, 0.028613407135009764, 0.028440576553344726, 0.028336160659790038, 0.02836182403564453, 0.028473791122436524, 0.02879324722290039, 0.02814156723022461, 0.028204864501953125, 0.028127424240112303, 0.028114208221435545, 0.02853766441345215, 0.028365856170654298, 0.02840665626525879, 0.02816579246520996, 0.02821334457397461, 0.028152128219604493, 0.028130752563476562, 0.028332544326782227, 0.028118623733520507, 0.027972000122070313, 0.027875232696533202, 0.027940639495849608, 0.028174047470092774, 0.028826208114624025, 0.031318016052246093, 0.028112895965576173, 0.027936767578125, 0.028016639709472657, 0.028028928756713867, 0.027990304946899414, 0.028087263107299806, 0.028183296203613283, 0.028114015579223633, 0.028405855178833008, 0.02830204772949219, 0.028319839477539063, 0.028151391983032226, 0.028377504348754884, 0.029198112487792968, 0.028508384704589843, 0.028462368011474608, 0.02841449546813965, 0.028287168502807616, 0.028331935882568358, 0.028305503845214845, 0.02813273620605469, 0.027995840072631836, 0.02807244873046875, 0.028151744842529296, 0.028099071502685546, 0.028069887161254883, 0.02807923126220703, 0.02801286315917969, 0.0286231689453125, 0.02816640090942383, 0.02815184020996094, 0.028000032424926758, 0.02823520088195801, 0.02791865539550781, 0.029374656677246095, 0.0283155517578125, 0.028194976806640626, 0.02832841682434082, 0.028094175338745118, 0.02811075210571289, 0.028024160385131835, 0.028334848403930663, 0.027883520126342775, 0.02926585578918457, 0.02825484848022461, 0.028057151794433594, 0.027826656341552736, 0.028018655776977538, 0.028101951599121093, 0.028070592880249025, 0.027969375610351562, 0.027812000274658202, 0.028016639709472657, 0.027934591293334962, 0.027875680923461914, 0.028093984603881836, 0.0280762882232666, 0.02897305679321289, 0.029832672119140625, 0.0281810245513916, 0.028286144256591796, 0.02885055923461914, 0.028228031158447266, 0.027926528930664062, 0.027834367752075196, 0.02863315200805664, 0.027845951080322267, 0.0276691837310791, 0.027721376419067384, 0.027735519409179687, 0.027771135330200196, 0.027841087341308593, 0.027989696502685547, 0.027826496124267578, 0.028338176727294922, 0.027887136459350585, 0.027668960571289064, 0.027830272674560546, 0.028214879989624023, 0.028036863327026366, 0.027885919570922853, 0.02784492874145508, 0.027867136001586915, 0.02792038345336914, 0.027875328063964845, 0.027807743072509765, 0.027826175689697266, 0.02767616081237793, 0.027668096542358397, 0.027961727142333984, 0.027910879135131836, 0.02770512008666992, 0.027809375762939452, 0.02773151969909668, 0.02769596862792969, 0.027760255813598634, 0.027865631103515625, 0.027905887603759765, 0.027703296661376952, 0.02775052833557129, 0.027867008209228515, 0.028317407608032228, 0.027953088760375975, 0.02790608024597168, 0.02812550354003906, 0.028117088317871092, 0.028852672576904298, 0.0280043830871582, 0.028004352569580077, 0.02768076705932617, 0.02793471908569336, 0.027611135482788086, 0.027815935134887695, 
0.0277359676361084, 0.028149280548095703, 0.028064064025878906, 0.028416255950927734, 0.02816409683227539, 0.028180479049682617, 0.028217344284057616, 0.028237823486328126, 0.028442272186279295, 0.02823740768432617, 0.028248128890991212, 0.028203296661376952, 0.02790991973876953, 0.02800499153137207, 0.028050912857055663, 0.028142112731933594, 0.028026847839355468, 0.028057632446289064, 0.028067840576171874, 0.028073728561401366, 0.027947200775146484, 0.028118688583374022, 0.027830495834350585, 0.027949247360229492, 0.027863040924072265, 0.028932096481323243, 0.028241920471191406, 0.02832793617248535, 0.028221439361572266, 0.028218591690063476, 0.02816076850891113, 0.02848771286010742, 0.028004352569580077, 0.02787308883666992, 0.027860671997070312, 0.027815711975097655, 0.02842428779602051, 0.0277653751373291, 0.027877376556396483, 0.027926271438598632, 0.027859199523925782, 0.027611135482788086, 0.027893760681152343, 0.027660287857055665, 0.027838016510009767, 0.02764182472229004, 0.0278306884765625, 0.027693119049072266, 0.027875328063964845, 0.027729248046875, 0.02792310333251953, 0.027766399383544922, 0.028139904022216798, 0.027756032943725587, 0.027621055603027345, 0.02774304008483887, 0.028835872650146484, 0.028057024002075194, 0.02799398422241211, 0.027677343368530272, 0.027719743728637697, 0.027715776443481447, 0.028087295532226563, 0.027650623321533202, 0.027824127197265625, 0.028188383102416992, 0.02852707290649414, 0.029421567916870117, 0.02836479949951172, 0.028030431747436524, 0.027824159622192382, 0.027747072219848633, 0.027676416397094727, 0.02784992027282715, 0.027744800567626952, 0.027725439071655273, 0.027794080734252928, 0.027715583801269532, 0.02765145683288574, 0.027870943069458008, 0.027808383941650392, 0.02773651123046875, 0.02860630416870117, 0.029122560501098634, 0.02796441650390625, 0.027988576889038087, 0.027918752670288087, 0.02851430320739746, 0.02777292823791504, 0.027897855758666993, 0.028305120468139648, 0.027928287506103516, 0.03185523223876953, 0.02821308708190918, 0.02801263999938965, 0.027807136535644532, 0.027716192245483398, 0.02773580741882324, 0.02787347221374512, 0.02773561668395996, 0.02795516777038574, 0.02776323127746582, 0.02769715118408203, 0.02789580726623535, 0.02806505584716797, 0.028271327972412108, 0.028216575622558592, 0.028681184768676756, 0.02822047996520996, 0.02813395118713379, 0.028170528411865233, 0.028321664810180665, 0.028278783798217775, 0.028368896484375, 0.02808822441101074, 0.028031007766723633, 0.027903167724609376, 0.027851648330688476, 0.027852800369262694, 0.028877216339111327, 0.02817433547973633, 0.028166208267211914, 0.02808143997192383, 0.02880793571472168, 0.028185535430908203, 0.028107744216918945, 0.028130336761474608, 0.028396480560302733, 0.028576000213623047, 0.02833180809020996, 0.028438528060913085, 0.02849523162841797, 0.028596895217895508, 0.02866934394836426, 0.02865023994445801, 0.02864896011352539, 0.028604736328125, 0.02860032081604004, 0.02854617691040039, 0.028443199157714844, 0.028633407592773438, 0.028632959365844726, 0.02873766326904297, 0.028661760330200195, 0.028704767227172853, 0.028383232116699218, 0.028434656143188478, 0.028509279251098633, 0.028308160781860353, 0.02806118392944336, 0.02861926460266113, 0.02765180778503418, 0.027953184127807618, 0.027916543960571288, 0.028332063674926758, 0.02828489685058594, 0.02836275291442871, 0.02831155204772949, 0.02872319984436035, 0.028209184646606444, 0.028374752044677733, 0.028272607803344726, 0.02829280090332031, 0.028121023178100585, 
0.02834499168395996, 0.028333887100219727, 0.028038560867309572, 0.028037919998168945, 0.028014144897460937, 0.028085952758789064, 0.02801308822631836, 0.028201183319091796, 0.027850751876831056, 0.027787200927734373, 0.02783647918701172, 0.027778528213500978, 0.02791084861755371, 0.027856735229492186, 0.027883232116699217, 0.027822271347045898, 0.027973728179931642, 0.028092704772949218]",tokens/s,35.503936525709975,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,2041.79456,2384.330752,0.0,1981.80864,1840.378368,s,1,9.472162109375,9.472162109375,0.0,9.472162109375,9.472162109375,9.472162109375,9.472162109375,[9.472162109375],,kWh,6.443625136662377e-05,7.100519323868427e-06,2.6620576852015576e-05,9.815734754250777e-05,,MB,1521.651712,2401.107968,0.0,1983.905792,1733.29664,s,10,11.645124389648439,1.1645124389648438,0.002093229032189669,1.1652452392578125,1.166498767089844,1.1669936462402344,1.1673895495605469,"[1.1602388916015625, 1.162226806640625, 1.16270263671875, 1.16406005859375, 1.1656666259765625, 1.1648955078125, 1.165861572265625, 1.165594970703125, 1.167488525390625, 1.1663887939453126]",tokens/s,219.83449161570402,kWh,3.4127107767081575e-05,3.763700747444516e-06,2.2767018213601387e-05,6.065782672812748e-05,tokens/kWh,4220395.187374739,MB,1530.621952,2401.107968,0.0,1983.905792,1799.701504,s,10,12.615727783203127,1.2615727783203128,0.01711772904096993,1.2539674682617188,1.2879747680664062,1.291325762939453,1.2940065588378906,"[1.2946767578125, 1.2872301025390624, 1.276220458984375, 1.2506837158203126, 1.2604405517578126, 1.257251220703125, 1.2452310791015626, 1.2494088134765624, 1.247138916015625, 
1.2474461669921875]",tokens/s,49.93766596952073,kWh,4.7036638308334955e-05,5.188497049361341e-06,2.5736992811797863e-05,7.796212816949414e-05,tokens/kWh,808084.6621199768,,s,630,12.609865636825564,0.020015659740992953,0.0004842620058161784,0.019950767517089844,0.02054161205291748,0.02076976203918457,0.021553215465545658,"[0.02100003242492676, 0.0206399040222168, 0.020288991928100585, 0.0203505916595459, 0.02145372772216797, 0.020412416458129884, 0.02049228858947754, 0.020363264083862305, 0.020458528518676758, 0.0207410888671875, 0.020924415588378906, 0.02085273551940918, 0.020832000732421876, 0.020740352630615234, 0.02142624092102051, 0.02158790397644043, 0.020766719818115235, 0.02055743980407715, 0.02059712028503418, 0.02027497673034668, 0.020325855255126955, 0.02023852729797363, 0.02034239959716797, 0.020167615890502928, 0.02021785545349121, 0.02012716865539551, 0.020101696014404296, 0.02019327926635742, 0.020121503829956054, 0.0200930233001709, 0.020235488891601563, 0.020379552841186522, 0.020323200225830076, 0.02012313652038574, 0.021749439239501952, 0.02036409568786621, 0.02031795120239258, 0.020308223724365235, 0.020361215591430663, 0.02037727928161621, 0.02045897674560547, 0.020256767272949217, 0.020261728286743164, 0.02028326416015625, 0.020367328643798827, 0.020758687973022463, 0.02088755226135254, 0.020536863327026367, 0.02063408088684082, 0.020493408203125, 0.02048912048339844, 0.02039193534851074, 0.020526975631713868, 0.020535039901733398, 0.020672895431518554, 0.020717567443847656, 0.020539039611816405, 0.02051020812988281, 0.020456287384033205, 0.0204039363861084, 0.020685375213623045, 0.020692703247070312, 0.02104115104675293, 0.02115836715698242, 0.02097158432006836, 0.02099007987976074, 0.021921791076660157, 0.020797664642333985, 0.020745504379272462, 0.020383840560913087, 0.020359039306640625, 0.02033513641357422, 0.020479999542236327, 0.020348928451538087, 0.02009846305847168, 0.020200031280517578, 0.02005401611328125, 0.02022969627380371, 0.022607776641845705, 0.020236192703247072, 0.020173376083374023, 0.020237728118896483, 0.020094720840454102, 0.020154943466186525, 0.020117439270019532, 0.020152767181396483, 0.02083967971801758, 0.020603328704833983, 0.020193920135498047, 0.020362911224365236, 0.020105215072631837, 0.020150335311889648, 0.020080575942993163, 0.020075839996337892, 0.020173120498657226, 0.020203136444091798, 0.02002796745300293, 0.020092416763305664, 0.020031551361083984, 0.020021728515625, 0.02020128059387207, 0.020914527893066408, 0.020518911361694335, 0.02084864044189453, 0.02045747184753418, 0.020690399169921873, 0.020652416229248047, 0.02054159927368164, 0.020522911071777342, 0.020564064025878907, 0.02054908752441406, 0.020625951766967774, 0.02052060890197754, 0.020375904083251954, 0.020346879959106445, 0.020247552871704103, 0.020632064819335938, 0.020425472259521484, 0.020385536193847656, 0.020231231689453125, 0.02014681625366211, 0.02020185661315918, 0.020096960067749022, 0.020088703155517577, 0.020219551086425782, 0.020148704528808594, 0.021093439102172852, 0.020995071411132812, 0.020714847564697266, 0.020425376892089845, 0.020315839767456056, 0.020183135986328125, 0.020082815170288086, 0.02008412742614746, 0.020238367080688477, 0.020226463317871094, 0.020191328048706055, 0.0200515193939209, 0.019997119903564453, 0.020062175750732422, 0.02002499198913574, 0.020101951599121093, 0.020299488067626954, 0.020185087203979494, 0.02077020835876465, 0.020199199676513672, 0.02019820785522461, 0.02036092758178711, 0.020875551223754882, 
0.020365312576293947, 0.020230144500732423, 0.020074495315551756, 0.020191232681274415, 0.020068511962890626, 0.02016035270690918, 0.02017193603515625, 0.02019209671020508, 0.02024448013305664, 0.02049843215942383, 0.0207193603515625, 0.020515071868896485, 0.020319999694824217, 0.02037139129638672, 0.020398399353027345, 0.020419807434082032, 0.020429599761962892, 0.02047532844543457, 0.020357696533203126, 0.020434751510620117, 0.020711008071899413, 0.020859487533569337, 0.020562143325805665, 0.020257856369018553, 0.020189599990844728, 0.020228416442871093, 0.02004707145690918, 0.020133663177490234, 0.020386592864990234, 0.019896543502807618, 0.01997648048400879, 0.01977734375, 0.019726240158081054, 0.019619840621948242, 0.01963417625427246, 0.019771072387695314, 0.019603263854980468, 0.019888639450073242, 0.019900415420532228, 0.019859039306640625, 0.021467231750488282, 0.020427839279174805, 0.020304895401000975, 0.0202508487701416, 0.020358720779418946, 0.020248832702636718, 0.020214527130126954, 0.020123647689819335, 0.020243967056274414, 0.02029132843017578, 0.020272096633911132, 0.020186912536621093, 0.020096384048461913, 0.02016543960571289, 0.019934112548828126, 0.01984195137023926, 0.01987126350402832, 0.019781600952148436, 0.01993084716796875, 0.01987811279296875, 0.019892704010009764, 0.019949216842651368, 0.019909055709838867, 0.019929088592529298, 0.019775264739990233, 0.019738847732543946, 0.019787776947021486, 0.01967478370666504, 0.01962953567504883, 0.01968832015991211, 0.019984256744384764, 0.019667072296142576, 0.01973606491088867, 0.019685888290405275, 0.019644479751586914, 0.01957062339782715, 0.019664896011352538, 0.01965260887145996, 0.0197491512298584, 0.019635168075561524, 0.019563264846801758, 0.019511295318603517, 0.01947238349914551, 0.019512544631958006, 0.019688224792480467, 0.01946419143676758, 0.01944371223449707, 0.019500192642211915, 0.019645408630371095, 0.01948863983154297, 0.019693567276000978, 0.020240447998046876, 0.019603391647338868, 0.019541248321533203, 0.01951820755004883, 0.01956073570251465, 0.019553695678710938, 0.019492959976196288, 0.01980396842956543, 0.020138399124145508, 0.01977337646484375, 0.019636287689208984, 0.01966659164428711, 0.02033260726928711, 0.020344703674316407, 0.020507200241088867, 0.020164703369140623, 0.020144128799438478, 0.02003913688659668, 0.020068960189819338, 0.0197825927734375, 0.019745759963989258, 0.019623584747314453, 0.020236671447753905, 0.019761152267456054, 0.019762304306030272, 0.0196125431060791, 0.019601503372192384, 0.01964841651916504, 0.019765151977539062, 0.019759199142456055, 0.019859359741210936, 0.019752832412719728, 0.019879871368408204, 0.019783967971801757, 0.019736896514892577, 0.01971331214904785, 0.019726783752441406, 0.019621376037597657, 0.022357664108276366, 0.020038463592529296, 0.02122137641906738, 0.02018873596191406, 0.020769216537475585, 0.019963327407836913, 0.01999724769592285, 0.019838783264160158, 0.0199816951751709, 0.019966655731201172, 0.019955839157104492, 0.020082687377929686, 0.020015167236328124, 0.019986368179321288, 0.020208736419677735, 0.02004400062561035, 0.020120256423950194, 0.019963903427124022, 0.020221952438354493, 0.02003558349609375, 0.019982336044311523, 0.020002815246582033, 0.019998720169067383, 0.019916799545288084, 0.019959808349609375, 0.019914751052856446, 0.020035232543945312, 0.019912799835205077, 0.019867071151733397, 0.0197159366607666, 0.01970479965209961, 0.019597312927246095, 0.019902463912963866, 0.019568639755249022, 0.020035743713378906, 
0.01969113540649414, 0.01981257629394531, 0.02046953582763672, 0.020289663314819337, 0.020287391662597656, 0.020156288146972658, 0.020248191833496094, 0.02003740882873535, 0.019860511779785157, 0.019986303329467773, 0.020188255310058592, 0.020936767578125, 0.02033135986328125, 0.020086336135864257, 0.020059871673583984, 0.02022268867492676, 0.02009059143066406, 0.02011292839050293, 0.020118560791015625, 0.02146828842163086, 0.019923551559448242, 0.019748863220214845, 0.019661983489990233, 0.019874656677246094, 0.019926111221313478, 0.019716224670410155, 0.01963257598876953, 0.01960380744934082, 0.01960483169555664, 0.019587648391723632, 0.019535903930664063, 0.019579103469848633, 0.019623775482177735, 0.019748863220214845, 0.019783679962158202, 0.019949567794799804, 0.019742080688476563, 0.0196942081451416, 0.019655679702758787, 0.02007961654663086, 0.019729536056518556, 0.020097919464111328, 0.01991059112548828, 0.019757055282592775, 0.019946752548217775, 0.01963091278076172, 0.01961974334716797, 0.019529823303222657, 0.01978607940673828, 0.020174495697021483, 0.019982112884521484, 0.019884256362915038, 0.01994870376586914, 0.019815263748168947, 0.019971519470214843, 0.020042591094970703, 0.020434047698974608, 0.02001161575317383, 0.01981020736694336, 0.01967318344116211, 0.020019039154052735, 0.020583648681640625, 0.019805120468139648, 0.019591167449951173, 0.019603456497192383, 0.020438976287841797, 0.020172895431518553, 0.020053087234497072, 0.020083520889282228, 0.02014419174194336, 0.020674560546875, 0.020121055603027342, 0.019978431701660155, 0.01995196723937988, 0.019978239059448243, 0.02001696014404297, 0.019878080368041992, 0.019773439407348634, 0.019709856033325195, 0.019734176635742187, 0.01953228759765625, 0.02000070381164551, 0.019562496185302734, 0.019738624572753907, 0.01943744087219238, 0.01942652893066406, 0.01946291160583496, 0.019462303161621095, 0.01948569679260254, 0.019452543258666993, 0.019525983810424804, 0.01954614448547363, 0.019451744079589845, 0.019366048812866212, 0.019924991607666014, 0.020189184188842774, 0.01959529685974121, 0.019850303649902343, 0.02025712013244629, 0.01966752052307129, 0.01953366470336914, 0.019486688613891603, 0.019732671737670897, 0.01949203109741211, 0.019483455657958983, 0.01952761650085449, 0.019529056549072266, 0.019767776489257812, 0.01978108787536621, 0.019612512588500976, 0.019656896591186523, 0.01960316848754883, 0.01974064064025879, 0.01978988838195801, 0.01970303916931152, 0.01984998321533203, 0.020080223083496093, 0.019971712112426758, 0.019784576416015626, 0.019562400817871094, 0.019466239929199217, 0.019388416290283202, 0.019802207946777343, 0.019990047454833983, 0.019623584747314453, 0.019652671813964843, 0.019527456283569337, 0.019567071914672853, 0.020733951568603515, 0.020069856643676758, 0.0197923526763916, 0.019845184326171876, 0.019993696212768555, 0.019786815643310546, 0.019777183532714845, 0.01962972831726074, 0.019710496902465822, 0.019666976928710937, 0.01969558334350586, 0.019705856323242187, 0.02004732894897461, 0.019857471466064452, 0.019841440200805666, 0.019882047653198242, 0.019975168228149414, 0.019760351181030273, 0.01975823974609375, 0.019910495758056642, 0.019881919860839845, 0.020200288772583008, 0.01992844772338867, 0.019739263534545897, 0.019850879669189452, 0.020078975677490233, 0.020153568267822265, 0.02006096076965332, 0.02004582405090332, 0.019992576599121094, 0.019900415420532228, 0.019899520874023437, 0.019985279083251952, 0.0198175048828125, 0.019852256774902342, 0.019736896514892577, 
0.019883520126342775, 0.019847360610961914, 0.0197871036529541, 0.02002387237548828, 0.020066400527954102, 0.020109312057495117, 0.020049055099487303, 0.020038496017456053, 0.01983897590637207, 0.019982336044311523, 0.019722240447998047, 0.019888416290283203, 0.019955551147460938, 0.01957289505004883, 0.01958678436279297, 0.019582719802856446, 0.019566848754882814, 0.019437088012695312, 0.01934998321533203, 0.019400703430175782, 0.019484607696533204, 0.01951708793640137, 0.019402719497680663, 0.01945363235473633, 0.019572736740112305, 0.019782400131225585, 0.019673088073730468, 0.02027071952819824, 0.02015443229675293, 0.01989049530029297, 0.01964851188659668, 0.019553375244140626, 0.019485536575317382, 0.019464256286621094, 0.01945599937438965, 0.019560447692871095, 0.019535871505737306, 0.0195581111907959, 0.019474367141723632, 0.01955023956298828, 0.019525856018066407, 0.01975059127807617, 0.019718559265136718, 0.019781631469726564, 0.01975494384765625, 0.019865791320800782, 0.019944351196289064, 0.019833824157714845, 0.019827871322631835, 0.020043840408325197, 0.01986623954772949, 0.019756351470947266, 0.01971059226989746, 0.01987401580810547, 0.01969513511657715, 0.019670528411865236, 0.01954300880432129, 0.019654367446899416, 0.01989366340637207, 0.01987264060974121, 0.019611648559570313, 0.019677183151245118, 0.019457279205322267, 0.019444768905639648, 0.019439552307128908, 0.01946396827697754, 0.019351551055908203, 0.019759103775024413, 0.019389759063720702, 0.01948860740661621, 0.02054172706604004, 0.02004560089111328, 0.020200223922729493, 0.019732479095458985, 0.019774848937988282, 0.019730079650878907, 0.019454111099243165, 0.024864896774291993, 0.019845823287963867, 0.0195600643157959, 0.01955878448486328, 0.019436704635620118, 0.019475296020507814, 0.01958502388000488, 0.019515392303466796, 0.01946134376525879, 0.01941075134277344, 0.019945600509643554, 0.019618112564086913, 0.020134111404418946, 0.02064316749572754, 0.020688608169555665, 0.02017171287536621, 0.02006220817565918, 0.020106815338134767, 0.020124095916748047, 0.02008608055114746, 0.019698368072509766, 0.01974460792541504, 0.019669439315795897, 0.01957801628112793, 0.021992000579833984, 0.01964009666442871, 0.01944924736022949, 0.01940768051147461, 0.0194334716796875, 0.019413312911987304, 0.019371423721313476, 0.01943199920654297, 0.019551967620849608, 0.01962598419189453, 0.01981439971923828, 0.019687423706054686, 0.019373567581176757, 0.01937161636352539, 0.01941596794128418, 0.019476095199584962, 0.019382656097412108, 0.019442975997924803, 0.019273536682128906, 0.01933417510986328, 0.01952764892578125, 0.01944566345214844, 0.019536224365234375, 0.020803264617919922, 0.019851232528686525, 0.019961856842041017, 0.01980998420715332, 0.019930463790893554, 0.019868064880371093, 0.019790239334106445, 0.01971625518798828, 0.019566591262817384, 0.01948057556152344, 0.019404800415039062, 0.019441408157348634, 0.019609855651855468, 0.019589120864868165, 0.019863487243652344, 0.019889663696289063, 0.020105791091918946, 0.019759103775024413, 0.019921119689941407, 0.019949344635009764, 0.02026620864868164, 0.020169504165649416, 0.020121599197387697, 0.020479999542236327, 0.01989414405822754, 0.019714176177978517, 0.01977494430541992, 0.019730464935302734, 0.019734176635742187]",tokens/s,49.96088127697115,,, 
4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1883.082752,3063.808,0.0,2661.285888,2598.0928,s,1,9.63490234375,9.63490234375,0.0,9.63490234375,9.63490234375,9.63490234375,9.63490234375,[9.63490234375],,kWh,6.695531435836224e-05,7.3785395435729896e-06,2.5898909608024212e-05,0.00010023276350995945,,MB,2003.607552,3890.085888,0.0,3470.78656,2993.465344,s,10,10.436965087890623,1.0436965087890624,0.00252279392385382,1.0428635253906249,1.0471017089843748,1.0474863403320311,1.0477940454101562,"[1.0400484619140624, 1.0424404296875, 1.0417840576171875, 
1.04166015625, 1.0416038818359374, 1.0470162353515624, 1.0451240234375, 1.0478709716796875, 1.04328662109375, 1.0461302490234374]",tokens/s,245.28203155246848,kWh,3.057353400208134e-05,3.371740946227671e-06,2.015012723119991e-05,5.409540217950891e-05,tokens/kWh,4732380.011715148,MB,2012.278784,3892.18304,0.0,3472.883712,2993.467904,s,10,24.0041416015625,2.40041416015625,0.01581080382458639,2.4028914794921876,2.4209630371093747,2.421169140625,2.4213340234375,"[2.421375244140625, 2.382862548828125, 2.378132080078125, 2.399624267578125, 2.390499755859375, 2.37992578125, 2.40615869140625, 2.4126298828125, 2.41201611328125, 2.420917236328125]",tokens/s,26.24547090486216,kWh,7.835923243167144e-05,8.643651727058709e-06,4.027700444380056e-05,0.00012727988860253075,tokens/kWh,494972.14910940256,,s,630,24.002462524414064,0.03809914686414931,0.0005605621191758052,0.03799241638183594,0.03862527847290039,0.03889157562255859,0.040448724060058595,"[0.03947110366821289, 0.038494144439697266, 0.0387086067199707, 0.03859900665283203, 0.03992611312866211, 0.03890176010131836, 0.03847318267822265, 0.04361577606201172, 0.038642688751220705, 0.03855974578857422, 0.03836723327636719, 0.03909632110595703, 0.03845951843261719, 0.03819712066650391, 0.03829759979248047, 0.03820947265625, 0.03832364654541016, 0.03849689483642578, 0.03939916610717773, 0.038742271423339844, 0.03870105743408203, 0.03855936050415039, 0.038344768524169924, 0.03812588882446289, 0.03814972686767578, 0.03836572647094726, 0.03839929580688477, 0.03820966339111328, 0.03814591979980469, 0.03879743957519531, 0.03827552032470703, 0.03805184173583984, 0.038518016815185546, 0.038576896667480466, 0.03873177719116211, 0.038507774353027345, 0.038313793182373046, 0.03817363357543945, 0.038174270629882816, 0.03809939193725586, 0.038397022247314457, 0.038338558197021484, 0.03862774276733399, 0.03844966506958008, 0.03879731369018555, 0.03864780807495117, 0.03788595199584961, 0.03786751937866211, 0.03777443313598633, 0.0378001594543457, 0.03796607971191406, 0.0377594223022461, 0.03769475173950195, 0.03776115036010742, 0.037767360687255856, 0.03786383819580078, 0.03798361587524414, 0.03779647827148438, 0.03796303939819336, 0.038273761749267575, 0.03777740859985351, 0.03779324722290039, 0.03801142501831055, 0.03887363052368164, 0.037959678649902344, 0.03768524932861328, 0.0376995849609375, 0.037629505157470704, 0.03781881713867188, 0.03813763046264648, 0.038580448150634765, 0.038792640686035156, 0.038497920989990234, 0.037706687927246095, 0.03910224151611328, 0.03763596725463867, 0.03752175903320312, 0.03757670211791992, 0.03754598236083984, 0.037596607208251955, 0.03748614501953125, 0.03762278366088867, 0.039253921508789064, 0.037938335418701175, 0.037841217041015625, 0.038113918304443356, 0.03758694458007812, 0.03760246276855469, 0.037639007568359376, 0.037631999969482424, 0.03862527847290039, 0.03762550354003906, 0.03751545715332031, 0.03751542282104492, 0.03755974578857422, 0.03777724838256836, 0.03792723083496094, 0.03775302505493164, 0.03771004867553711, 0.03768729782104492, 0.03756032180786133, 0.03770163345336914, 0.03756851196289063, 0.037613471984863284, 0.03750096130371094, 0.03750259017944336, 0.03753414535522461, 0.037792766571044925, 0.037681758880615236, 0.037927040100097655, 0.03765001678466797, 0.03761017608642578, 0.03773596954345703, 0.03765910339355469, 0.03760332870483398, 0.03761151885986328, 0.03755417633056641, 0.03788595199584961, 0.03777644729614258, 0.037634048461914066, 0.03780294418334961, 0.03766886520385742, 
0.038164031982421874, 0.03781062316894531, 0.03768083190917969, 0.03769776153564453, 0.03872979354858398, 0.037924800872802734, 0.037693119049072264, 0.037639839172363285, 0.037622718811035155, 0.03758358383178711, 0.03794374465942383, 0.0378721923828125, 0.03768320083618164, 0.037752193450927736, 0.037696128845214845, 0.03767295837402344, 0.03760736083984375, 0.037666336059570316, 0.03767520141601562, 0.03763827133178711, 0.037779678344726564, 0.03760886383056641, 0.037718017578125, 0.03773302459716797, 0.0378223991394043, 0.038217918395996094, 0.03969200134277344, 0.03789007949829101, 0.03783676910400391, 0.03766400146484375, 0.03763884735107422, 0.03789184188842774, 0.037704097747802735, 0.03770163345336914, 0.037647808074951175, 0.037671199798583986, 0.037679393768310546, 0.03751731109619141, 0.037594593048095704, 0.0375972785949707, 0.03760300827026367, 0.03759283065795899, 0.037677696228027344, 0.0376835823059082, 0.03764966583251953, 0.03776163101196289, 0.037770912170410155, 0.03755263900756836, 0.03758899307250976, 0.03768729782104492, 0.03787356948852539, 0.03766281509399414, 0.037641761779785156, 0.03772585678100586, 0.03758572769165039, 0.03767500686645508, 0.03765433502197266, 0.03760761642456055, 0.03783187103271484, 0.03764031982421875, 0.037580703735351564, 0.037604129791259766, 0.03756032180786133, 0.03775897598266602, 0.03769343948364258, 0.037623809814453124, 0.037672256469726564, 0.038783393859863284, 0.03841030502319336, 0.03798371124267578, 0.03777584075927734, 0.038055648803710936, 0.038050079345703126, 0.037822463989257815, 0.03818905639648437, 0.03773235321044922, 0.03775040054321289, 0.037929344177246097, 0.0380682258605957, 0.0379793586730957, 0.038242271423339844, 0.03782854461669922, 0.037857345581054684, 0.03785811233520508, 0.037926910400390625, 0.037994495391845705, 0.03787776184082031, 0.03789619064331055, 0.038037246704101565, 0.038949119567871095, 0.03835903930664063, 0.03802054214477539, 0.03787424087524414, 0.037923873901367186, 0.03876348876953125, 0.03813785552978516, 0.03787776184082031, 0.03797772979736328, 0.03846387100219727, 0.03816175842285156, 0.03791712188720703, 0.03813193511962891, 0.0385035514831543, 0.03846345520019531, 0.03859344100952149, 0.03808454513549805, 0.03799363327026367, 0.0387163200378418, 0.03799225616455078, 0.03798444747924805, 0.03792281723022461, 0.03788390350341797, 0.03778559875488281, 0.03799039840698242, 0.03803110504150391, 0.03797222518920899, 0.037922431945800784, 0.03782284927368164, 0.037894142150878905, 0.038440128326416016, 0.03804857635498047, 0.03797955322265625, 0.0379664306640625, 0.03827433776855469, 0.03826147079467773, 0.03801497650146484, 0.03799039840698242, 0.03788595199584961, 0.037957344055175785, 0.03846377563476563, 0.042599838256835935, 0.038635902404785157, 0.03816470336914062, 0.03789971160888672, 0.0378721923828125, 0.03777264022827148, 0.037692062377929686, 0.037637344360351564, 0.0376409912109375, 0.0378524169921875, 0.03793379211425781, 0.04042111968994141, 0.039112510681152346, 0.03786595153808594, 0.03771340942382812, 0.03788851165771484, 0.037631999969482424, 0.03761356735229492, 0.03799446487426758, 0.03757878494262695, 0.03766067123413086, 0.037889793395996095, 0.037699680328369144, 0.03776243209838867, 0.03784982299804687, 0.037840736389160155, 0.03818652725219727, 0.03777964782714844, 0.037825023651123044, 0.037819679260253904, 0.03765878295898437, 0.03768521499633789, 0.0376346549987793, 0.03778927993774414, 0.03773276901245117, 0.03770102310180664, 0.03807648086547852, 
0.038053855895996094, 0.03779436874389648, 0.03766681671142578, 0.03766681671142578, 0.03779379272460937, 0.03767465591430664, 0.037781856536865235, 0.037760223388671875, 0.03765523147583008, 0.03770479965209961, 0.03787443161010742, 0.037738014221191406, 0.03814028930664062, 0.0376624641418457, 0.03767766571044922, 0.0378359375, 0.037728321075439455, 0.03780064010620117, 0.037693023681640625, 0.038064640045166014, 0.03795935821533203, 0.037781726837158205, 0.0375964469909668, 0.037731136322021484, 0.03768320083618164, 0.0376995849609375, 0.03867843246459961, 0.037819137573242186, 0.037862686157226565, 0.03782928085327148, 0.03768521499633789, 0.037725440979003905, 0.037587745666503906, 0.037558273315429686, 0.037601280212402347, 0.03785276794433594, 0.037570976257324216, 0.03760537719726562, 0.03766067123413086, 0.037730304718017575, 0.03766995239257812, 0.03765958404541016, 0.03762112045288086, 0.03762239837646485, 0.0376545295715332, 0.0384266242980957, 0.03805587387084961, 0.03773855972290039, 0.037719169616699216, 0.03752230453491211, 0.03759667205810547, 0.037644577026367185, 0.037568737030029296, 0.03753283309936523, 0.03757756805419922, 0.03771945571899414, 0.0377165756225586, 0.037556224822998044, 0.03803942489624024, 0.037548160552978514, 0.03759439849853516, 0.03780476760864258, 0.03771350479125977, 0.03763017654418945, 0.03761337661743164, 0.03790192031860352, 0.0377044792175293, 0.03794905471801758, 0.03784684753417969, 0.038882881164550784, 0.038898689270019535, 0.03798543930053711, 0.03795849609375, 0.038150142669677735, 0.037705726623535156, 0.03764166259765625, 0.037644542694091794, 0.037804031372070314, 0.037652801513671875, 0.03775619125366211, 0.0376014404296875, 0.03757104110717773, 0.03755427169799805, 0.037688385009765624, 0.03756524658203125, 0.03772428894042969, 0.037789695739746096, 0.03765414428710938, 0.03781660842895508, 0.03873750305175781, 0.03843692779541016, 0.037771617889404294, 0.03788390350341797, 0.03789823913574219, 0.03834265518188477, 0.038120479583740235, 0.0376657600402832, 0.037639583587646484, 0.03774844741821289, 0.03774892807006836, 0.03800905609130859, 0.038192672729492186, 0.037992385864257815, 0.03819404983520508, 0.03784000015258789, 0.03774563217163086, 0.03799647903442383, 0.037978206634521484, 0.03760332870483398, 0.03759513473510742, 0.03760438537597656, 0.037959712982177735, 0.037787841796875, 0.03778950500488281, 0.037655487060546874, 0.03951580810546875, 0.03855542373657227, 0.0378776969909668, 0.037644798278808594, 0.037933025360107425, 0.03801436614990234, 0.03798912048339844, 0.03811123275756836, 0.03818399810791016, 0.037811134338378904, 0.03788595199584961, 0.03791667175292969, 0.03818086242675781, 0.03804764938354492, 0.03802326583862305, 0.03909222412109375, 0.03832767868041992, 0.0382174072265625, 0.03795395278930664, 0.03808636856079101, 0.03801119995117187, 0.03793695831298828, 0.03793199920654297, 0.03787494277954102, 0.03807279968261719, 0.038539264678955076, 0.03839590454101562, 0.04091686248779297, 0.03852489471435547, 0.038269153594970705, 0.038821823120117185, 0.03884646224975586, 0.0389071044921875, 0.038527008056640624, 0.03897011184692383, 0.039687553405761716, 0.038440929412841794, 0.039196575164794925, 0.038418529510498046, 0.038449153900146485, 0.039823360443115234, 0.038400001525878906, 0.03865171051025391, 0.03871331024169922, 0.03827734375, 0.03818700790405274, 0.03825254440307617, 0.03843183898925781, 0.03815919876098633, 0.03798774337768555, 0.038136478424072265, 0.038223167419433594, 0.03816313552856445, 
0.03826873779296875, 0.038615230560302735, 0.03836928176879883, 0.03833446502685547, 0.03804159927368164, 0.03827097702026367, 0.03793305587768555, 0.03783657455444336, 0.038234336853027344, 0.037806079864501956, 0.03783001708984375, 0.03773299026489258, 0.03847987365722656, 0.038653953552246094, 0.03826454544067383, 0.03838185501098633, 0.03824435043334961, 0.03819724655151367, 0.03806617736816406, 0.03821263885498047, 0.03805904006958008, 0.0381910400390625, 0.038563838958740236, 0.038125568389892575, 0.03816435241699219, 0.03842377471923828, 0.03838825607299805, 0.03823596954345703, 0.03844358444213867, 0.03813785552978516, 0.03902259063720703, 0.03848147201538086, 0.038283710479736326, 0.0381247673034668, 0.03804959869384766, 0.03821670532226563, 0.03806409454345703, 0.038152191162109376, 0.03818905639648437, 0.03819247817993164, 0.038682559967041015, 0.03832463836669922, 0.038088512420654294, 0.03818751907348633, 0.03820278549194336, 0.038115169525146486, 0.03810547256469726, 0.03924153518676758, 0.03838496017456055, 0.03830668640136719, 0.03857980728149414, 0.03829971313476563, 0.03841878509521485, 0.03838771057128906, 0.0380211181640625, 0.038790401458740235, 0.038247169494628905, 0.03807779312133789, 0.038103710174560546, 0.03799244689941406, 0.03828531265258789, 0.038125568389892575, 0.038144001007080076, 0.0380497932434082, 0.03798015975952149, 0.03829683303833008, 0.03811814498901367, 0.03846758270263672, 0.03805593490600586, 0.03822182464599609, 0.04073267364501953, 0.03969971084594726, 0.038376190185546874, 0.038351905822753905, 0.03793148803710938, 0.038131423950195316, 0.038169376373291014, 0.038045631408691404, 0.037949504852294924, 0.038153823852539064, 0.03838198471069336, 0.038845951080322266, 0.03832656097412109, 0.03823535919189453, 0.0381732177734375, 0.03829398345947266, 0.03813516616821289, 0.038185054779052735, 0.03826716613769531, 0.03847772979736328, 0.03821807861328125, 0.03826831817626953, 0.03808111953735351, 0.03820748901367187, 0.038182174682617184, 0.03842873764038086, 0.03815484619140625, 0.03800070571899414, 0.037814273834228515, 0.03811859130859375, 0.03823699188232422, 0.0380313606262207, 0.0380313606262207, 0.03808256149291992, 0.037988353729248046, 0.03815177536010742, 0.03792675018310547, 0.03806880187988281, 0.038325664520263675, 0.0380769271850586, 0.03906700897216797, 0.03957984161376953, 0.03852473449707031, 0.03796809768676758, 0.038529441833496096, 0.03798755264282227, 0.037923614501953126, 0.0380618896484375, 0.03812371063232422, 0.038294910430908206, 0.03842316818237305, 0.038330142974853515, 0.03818723297119141, 0.03817062377929688, 0.03806412887573242, 0.03811318588256836, 0.03811747360229492, 0.037959617614746095, 0.038262847900390626, 0.03818700790405274, 0.03850239944458008, 0.038165855407714847, 0.03810927963256836, 0.03809952163696289, 0.0381399040222168, 0.03823206329345703, 0.03827471923828125, 0.03828489685058594, 0.03805465698242187, 0.03820041656494141, 0.0381572151184082, 0.038215679168701173, 0.0380682258605957, 0.038266880035400394, 0.03827302551269531, 0.03833990478515625, 0.03850515365600586, 0.038246112823486327, 0.038707263946533205, 0.03823843383789063, 0.03837116622924805, 0.03820908737182617, 0.03816067123413086, 0.038168895721435545, 0.038629375457763675, 0.03862527847290039, 0.038422527313232424, 0.03868467330932617, 0.040796161651611325, 0.04092860794067383, 0.040459999084472655, 0.03852588653564453, 0.038340606689453126, 0.03824838256835938, 0.0382314567565918, 0.03820316696166992, 0.03850944137573242, 
0.03820243072509766, 0.038098976135253905, 0.037956512451171875, 0.03801702499389648, 0.03820748901367187, 0.0397918701171875]",tokens/s,26.24730689024914,,, 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,3808.09216,5229.182976,0.0,4833.93536,4546.659328,s,1,11.0329423828125,11.0329423828125,0.0,11.0329423828125,11.0329423828125,11.0329423828125,11.0329423828125,[11.0329423828125],,kWh,0.00011698755016667235,1.2897346935870295e-05,4.96389286000036e-05,0.00017952382570254625,,MB,2035.134464,5260.640256,0.0,4852.809728,4095.212032,s,10,25.266362792968753,2.526636279296875,0.005492580904158527,2.5275803222656252,2.5309092529296873,2.5315973266601564,2.5321477856445314,"[2.511630126953125, 2.524263671875, 2.52607373046875, 2.5275087890625, 2.526899658203125, 2.52872998046875, 2.53075634765625, 2.530563232421875, 2.52765185546875, 2.532285400390625]",tokens/s,101.32047976103667,kWh,7.364710218833332e-05,8.121952807567629e-06,4.891492802080004e-05,0.000130683983016701,tokens/kWh,1958924.070804331,MB,2044.633088,5260.640256,0.0,4852.809728,4197.763584,s,10,21.150250488281248,2.115025048828125,0.008539970125787092,2.114699951171875,2.124815576171875,2.127568310546875,2.129770498046875,"[2.120118896484375, 2.130321044921875, 2.113159423828125, 2.124203857421875, 2.116240478515625, 2.11951318359375, 2.10022412109375, 2.112948974609375, 2.10659716796875, 2.10692333984375]",tokens/s,29.786881264080776,kWh,6.259576008583317e-05,6.905332628904864e-06,4.141061646180007e-05,0.00011091170917653813,tokens/kWh,568019.3774646726,,s,630,21.147305030822746,0.0335671508425758,0.0006349428110996855,0.03345873641967774,0.03397428321838379,0.034314902687072754,0.035135231094360356,"[0.034490623474121095, 0.03379529571533203, 0.03387881469726563, 0.033453342437744144, 0.033535839080810544, 0.03355862426757812, 0.03369055938720703, 0.03366476821899414, 0.03350067138671875, 0.03330300903320312, 0.03340630340576172, 0.03346249771118164, 0.03342598342895508, 0.03328716659545899, 0.03358950424194336, 0.03359939193725586, 0.03370479965209961, 0.03390902328491211, 0.03356265640258789, 0.03363983917236328, 0.03332649612426758, 0.0340417594909668, 0.03332969665527344, 0.03318751907348633, 0.03332172775268555, 0.03328211212158203, 0.033275745391845704, 0.03336553573608399, 0.033317440032958986, 0.033250911712646485, 0.03344425582885742, 0.03344384002685547, 0.03427123260498047, 0.03370348739624023, 0.03368982315063476, 0.03338467025756836, 0.0336814079284668, 0.03350732803344727, 0.03427328109741211, 0.034197376251220706, 0.03367129516601562, 0.033914432525634766, 0.03360550308227539, 0.03353430557250976, 0.035514816284179684, 0.03366799926757812, 0.03403046417236328, 0.03344793701171875, 0.03352579116821289, 0.0337174072265625, 0.033498241424560544, 0.033425086975097655, 0.033544193267822264, 0.033495166778564456, 0.03348672103881836, 0.03349308776855469, 0.0335335693359375, 
0.03349532699584961, 0.03353984069824219, 0.033945854187011716, 0.034681087493896486, 0.03374684906005859, 0.03355657577514649, 0.04509280014038086, 0.034170528411865235, 0.033417152404785155, 0.03335651016235352, 0.03499827194213867, 0.034350719451904294, 0.03334316635131836, 0.03509958267211914, 0.03372003173828125, 0.03326774215698242, 0.03320217514038086, 0.03323206329345703, 0.03536569595336914, 0.03677503967285156, 0.033418113708496094, 0.03345619201660156, 0.033626369476318356, 0.0332957763671875, 0.03360918426513672, 0.033893184661865236, 0.03337577438354492, 0.033376449584960936, 0.034545951843261716, 0.03330047988891602, 0.033436767578125, 0.03337104034423828, 0.03317504119873047, 0.03337094497680664, 0.03319692611694336, 0.033858497619628905, 0.03346540832519531, 0.033286048889160154, 0.033237247467041015, 0.03332777786254883, 0.033791519165039065, 0.03317193603515625, 0.03318076705932617, 0.03323961639404297, 0.03312870407104492, 0.0330830078125, 0.03320995330810547, 0.033260414123535156, 0.03340697479248047, 0.03349270248413086, 0.03368499374389648, 0.0336412467956543, 0.0335945930480957, 0.03375593566894531, 0.033658878326416015, 0.0337367057800293, 0.03363430404663086, 0.033849342346191406, 0.03361734390258789, 0.03354476928710937, 0.03353190231323242, 0.033560928344726564, 0.03361897659301758, 0.033610591888427736, 0.03343974304199219, 0.033405982971191406, 0.03335654449462891, 0.0334543342590332, 0.033365760803222656, 0.03506012725830078, 0.0334719352722168, 0.033178367614746095, 0.033259326934814454, 0.03305376052856445, 0.033842208862304685, 0.033556320190429687, 0.033427486419677736, 0.033355743408203124, 0.03363190460205078, 0.03371868896484375, 0.033753089904785157, 0.033726463317871096, 0.0335261116027832, 0.03377888107299805, 0.0341828498840332, 0.03359545516967773, 0.03363619232177734, 0.0336126708984375, 0.03340492630004883, 0.03357644653320312, 0.03369558334350586, 0.03343366241455078, 0.03335228729248047, 0.033492225646972656, 0.033436641693115235, 0.03344294357299805, 0.03368617630004883, 0.033670913696289065, 0.033708160400390624, 0.033974369049072264, 0.03372851181030274, 0.03680624008178711, 0.03341561508178711, 0.03349913787841797, 0.033486846923828126, 0.03350115203857422, 0.03348275375366211, 0.033484798431396484, 0.03336092758178711, 0.033360992431640625, 0.03313449478149414, 0.033974273681640625, 0.033394111633300784, 0.03319267272949219, 0.033896350860595705, 0.033306110382080076, 0.03321696090698242, 0.033072353363037106, 0.033108894348144534, 0.033113441467285155, 0.03314947128295898, 0.033040382385253905, 0.033056320190429686, 0.03326406478881836, 0.033277950286865234, 0.03331043243408203, 0.03328643035888672, 0.03316326522827148, 0.033454078674316406, 0.03361996841430664, 0.033253185272216795, 0.033237537384033206, 0.03418313598632813, 0.033468353271484376, 0.03337577438354492, 0.033300064086914063, 0.03321977615356445, 0.03333622360229492, 0.033463134765625, 0.03371993637084961, 0.033601055145263674, 0.033412158966064455, 0.03355215835571289, 0.03363670349121094, 0.03353152084350586, 0.03364156723022461, 0.03376460647583008, 0.033975105285644534, 0.03438876724243164, 0.03406038284301758, 0.03411558532714844, 0.03401651382446289, 0.03485897445678711, 0.03726416015625, 0.03417839813232422, 0.034200225830078125, 0.03442457580566406, 0.034181568145751955, 0.034496318817138674, 0.0336814079284668, 0.03373875045776367, 0.03360563278198242, 0.03336806488037109, 0.03337936019897461, 0.03336662292480469, 0.03479180908203125, 0.034914527893066406, 
0.03362691116333008, 0.03374796676635742, 0.03362611389160156, 0.03361497497558594, 0.03343964767456055, 0.034859424591064454, 0.03327558517456055, 0.0331673583984375, 0.03305782318115234, 0.03308889770507813, 0.033173023223876955, 0.03317241668701172, 0.03318531036376953, 0.033185726165771486, 0.033186046600341794, 0.033128032684326174, 0.03332780838012695, 0.033029376983642576, 0.03312511825561523, 0.03316121673583984, 0.03365478515625, 0.034235454559326174, 0.03346089553833008, 0.03338412857055664, 0.033218143463134765, 0.03317452621459961, 0.033960254669189456, 0.03341881561279297, 0.034484222412109376, 0.03362748718261719, 0.03384592056274414, 0.033296607971191404, 0.033303520202636716, 0.033253921508789065, 0.03337801742553711, 0.03365126419067383, 0.03321036911010742, 0.033226593017578125, 0.033434814453125, 0.03325596618652344, 0.03323740768432617, 0.033176609039306644, 0.03334873580932617, 0.03310326385498047, 0.03343750381469727, 0.03369836807250977, 0.03369731140136719, 0.03376595306396484, 0.03384665679931641, 0.03397203063964844, 0.033761856079101565, 0.03354854583740234, 0.033589088439941406, 0.033683902740478514, 0.03338844680786133, 0.0333304328918457, 0.03340864181518555, 0.03358988952636719, 0.03360390472412109, 0.03349216079711914, 0.03366739273071289, 0.033476608276367184, 0.03348249435424805, 0.03349692916870117, 0.03404880142211914, 0.03347420883178711, 0.033462814331054684, 0.03395782470703125, 0.033724414825439454, 0.033726303100585935, 0.03368566513061524, 0.03352323150634766, 0.033685985565185546, 0.033581344604492185, 0.033672927856445316, 0.03372054290771485, 0.03374054336547851, 0.03376521682739258, 0.03363616180419922, 0.033441280364990236, 0.03359423828125, 0.03360153579711914, 0.03354243087768555, 0.0334681282043457, 0.03360153579711914, 0.033656639099121095, 0.03349113464355469, 0.03360943984985352, 0.034086910247802735, 0.03348099136352539, 0.034138111114501955, 0.03448627090454102, 0.03426873779296875, 0.034294368743896485, 0.033761470794677735, 0.03364828872680664, 0.03376537704467773, 0.03377280044555664, 0.03433865737915039, 0.03390262222290039, 0.0336864013671875, 0.03379404830932617, 0.03376265716552734, 0.03376224136352539, 0.033836769104003905, 0.03373897552490234, 0.0335617904663086, 0.0340711669921875, 0.03429782485961914, 0.03377686309814453, 0.033680160522460936, 0.03382681655883789, 0.03389132690429687, 0.033836158752441406, 0.033529472351074216, 0.03340723037719726, 0.03328121566772461, 0.033323104858398435, 0.033297119140625, 0.0334031982421875, 0.03344086456298828, 0.03343523025512695, 0.03369411087036133, 0.03347014236450195, 0.03334054565429687, 0.03325705718994141, 0.03315145492553711, 0.03316707229614258, 0.033067008972167966, 0.03307891082763672, 0.033795680999755856, 0.03431913757324219, 0.03451295852661133, 0.03430972671508789, 0.033804798126220705, 0.03387968063354492, 0.03354262542724609, 0.03336166381835937, 0.033142784118652346, 0.033212448120117186, 0.03312214279174805, 0.03318592071533203, 0.03320230484008789, 0.0330382080078125, 0.033029953002929685, 0.0339633903503418, 0.03471366500854492, 0.03357939147949219, 0.03336640167236328, 0.03324220657348633, 0.03321260833740235, 0.033259712219238284, 0.03360969543457031, 0.03369551849365234, 0.03444595336914062, 0.03356095886230469, 0.033224063873291014, 0.033134559631347656, 0.032978622436523435, 0.03305503845214844, 0.033922496795654296, 0.03317619323730469, 0.03310627365112305, 0.03330252838134766, 0.033181697845458984, 0.03322390365600586, 0.0331558723449707, 0.0331673583984375, 
0.033140735626220705, 0.03315884780883789, 0.033056766510009765, 0.03318387222290039, 0.033140350341796875, 0.03318841552734375, 0.03314041519165039, 0.03339321517944336, 0.03350092697143554, 0.03336995315551758, 0.03380582427978516, 0.033530529022216794, 0.03337372970581055, 0.03355081558227539, 0.033435199737548826, 0.03338694381713867, 0.033287487030029296, 0.03336399841308594, 0.033266334533691405, 0.033261215209960934, 0.03331068801879883, 0.03316595077514648, 0.03319907379150391, 0.03321321487426758, 0.03314067077636719, 0.033255329132080076, 0.03334979248046875, 0.03332092666625976, 0.03470489501953125, 0.03354483032226562, 0.03327091217041016, 0.033898624420166015, 0.03350620651245117, 0.03321855926513672, 0.03326073455810547, 0.03330534362792969, 0.033111297607421875, 0.03314672088623047, 0.03313926315307617, 0.033118526458740236, 0.033748382568359374, 0.033382686614990234, 0.03320598220825195, 0.0332591667175293, 0.03327388763427734, 0.033169727325439456, 0.033199905395507816, 0.03315369415283203, 0.03321241760253906, 0.03433193588256836, 0.03380511856079101, 0.03379619216918945, 0.033950752258300784, 0.033397216796875, 0.03372905731201172, 0.03393430328369141, 0.03356979370117188, 0.03346361541748047, 0.033700576782226564, 0.03366652679443359, 0.03328870391845703, 0.033406814575195315, 0.03310950469970703, 0.03322332763671875, 0.033218719482421874, 0.033242721557617184, 0.033460384368896486, 0.0335975341796875, 0.03367660903930664, 0.03369209671020508, 0.0336096305847168, 0.03379260635375977, 0.03401894378662109, 0.03372969436645508, 0.033524703979492185, 0.03351523208618164, 0.03348918533325195, 0.03348652648925781, 0.03361724853515625, 0.03391791915893555, 0.03344179153442383, 0.03345388793945313, 0.03337648010253906, 0.033441375732421875, 0.03327638244628906, 0.03386934280395508, 0.033765151977539064, 0.03393727874755859, 0.03367599868774414, 0.033646209716796875, 0.03347081756591797, 0.03415657424926758, 0.03329766464233398, 0.033321697235107424, 0.03333491134643555, 0.03336214447021484, 0.03321615982055664, 0.03333766555786133, 0.03340630340576172, 0.03343193435668945, 0.03328287887573242, 0.03336956787109375, 0.03333065414428711, 0.03341769790649414, 0.03332089614868164, 0.03318790435791016, 0.033181343078613285, 0.03332979202270508, 0.033980766296386716, 0.03335452651977539, 0.033427326202392575, 0.03329280090332031, 0.0347061767578125, 0.03382454299926758, 0.033734142303466795, 0.03384396743774414, 0.03392086410522461, 0.03382195281982422, 0.03364064025878906, 0.03369039916992188, 0.033463199615478514, 0.03382374572753906, 0.03344384002685547, 0.03346022415161133, 0.033570816040039066, 0.03332620620727539, 0.03320646286010742, 0.03310768127441406, 0.03330572891235352, 0.03307475280761719, 0.03331660842895508, 0.03337884902954102, 0.03358428955078125, 0.03366588973999023, 0.03376054382324219, 0.03344854354858398, 0.03346444702148438, 0.03320585632324219, 0.03326809692382812, 0.033229183197021485, 0.03322403335571289, 0.03330694580078125, 0.033199264526367185, 0.033283103942871095, 0.03328307342529297, 0.03318767929077148, 0.033218849182128904, 0.033077953338623046, 0.03313792037963867, 0.03313945770263672, 0.03308755111694336, 0.03317343902587891, 0.033172767639160154, 0.03318851089477539, 0.033286209106445315, 0.03342742538452149, 0.03338582229614258, 0.03318035125732422, 0.03312553787231445, 0.03318364715576172, 0.03324409484863281, 0.033259136199951175, 0.03328201675415039, 0.03323945617675781, 0.03321343994140625, 0.03345331192016601, 0.03371366500854492, 
0.03363587188720703, 0.03363913726806641, 0.03374396896362305, 0.03358972930908203, 0.03365548706054688, 0.03413891220092773, 0.033597888946533205, 0.03336422348022461, 0.03432662582397461, 0.03371465682983398, 0.03348223876953125, 0.0333540153503418, 0.03326793670654297, 0.03334966278076172, 0.0351497917175293, 0.03415407943725586, 0.03346713638305664, 0.03325609588623047, 0.0332193603515625, 0.03323926544189453, 0.033107967376708985, 0.03306694412231445, 0.03330227279663086, 0.03329609680175781, 0.03307171249389648, 0.03331651306152344, 0.03334924697875977, 0.03336163330078125, 0.03325417709350586, 0.033143009185791016, 0.0333138542175293, 0.03328710556030273, 0.03337968063354492, 0.03330319976806641, 0.033468414306640625, 0.03352892684936523, 0.03316009521484375, 0.033255199432373046, 0.0332224006652832, 0.033485279083251954, 0.03363401412963867, 0.033450271606445314, 0.033081344604492184, 0.033056766510009765, 0.03346031951904297, 0.03312332916259766, 0.033276927947998046, 0.03325247955322266, 0.03339699172973633, 0.03330841445922852, 0.03329513549804688, 0.0333496322631836, 0.0333375358581543, 0.03369753646850586, 0.03329977416992187, 0.03329455947875976, 0.03340671920776367, 0.03351126480102539, 0.03349708938598633, 0.03365488052368164, 0.03345724868774414, 0.0336176643371582, 0.03364601516723633, 0.03343008041381836, 0.033320735931396485, 0.033624542236328124, 0.0334354248046875, 0.03376534271240234, 0.03358415985107422, 0.034164928436279295, 0.0335153923034668]",tokens/s,29.791030066562072,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return 
next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7160.46336,11301.421056,0.0,10898.898944,10500.155392,s,1,16.0841962890625,16.0841962890625,0.0,16.0841962890625,16.0841962890625,16.0841962890625,16.0841962890625,[16.0841962890625],,kWh,0.00023903515877501605,2.635578892492956e-05,0.00010644591848999602,0.0003718368661899416,,MB,2981.781504,11320.295424,0.0,10903.093248,10049.059328,s,10,58.82700244140624,5.882700244140625,0.004587707694910432,5.88457568359375,5.8869137207031255,5.887116528320313,5.887278774414062,"[5.872099609375, 5.878892578125, 5.87893798828125, 5.88180908203125, 5.88490576171875, 5.88424560546875, 5.88512255859375, 5.88680126953125, 5.8873193359375, 5.88686865234375]",tokens/s,43.51743066544738,kWh,0.0001715680417150016,1.892438748033575e-05,0.00011416198021839893,0.00030465440941373627,tokens/kWh,840296.3885953113,MB,2990.690304,11320.295424,0.0,10903.093248,10072.241664,s,10,34.996525634765625,3.4996525634765625,0.012966653567288045,3.4956083984374997,3.5149693603515626,3.522730651855469,3.5289396850585937,"[3.499573486328125, 3.489941650390625, 3.50587646484375, 3.493220703125, 3.487412353515625, 3.49799609375, 3.487426025390625, 3.49134228515625, 3.51324462890625, 
3.530491943359375]",tokens/s,18.001786993796795,kWh,8.709912472041503e-05,9.60775226422537e-06,5.789665742840164e-05,0.000154603534413042,tokens/kWh,407493.9181641728,,s,630,34.993715587615974,0.05554558029780312,0.0007844414827793098,0.05537020874023438,0.056168786621093744,0.05680088062286377,0.05904880897521973,"[0.05752035140991211, 0.05568723297119141, 0.05533200073242187, 0.05539487838745117, 0.05532185745239258, 0.05565283203125, 0.05611139297485351, 0.06058419036865234, 0.055643489837646484, 0.05541007995605469, 0.055081985473632813, 0.05492464065551758, 0.054833824157714844, 0.054988800048828126, 0.05497241592407227, 0.05585100936889648, 0.056330238342285156, 0.0558546257019043, 0.056334400177001955, 0.05558476638793945, 0.05540496063232422, 0.05532672119140625, 0.05550694274902344, 0.05599014282226562, 0.05616857528686523, 0.05579980850219726, 0.0551649284362793, 0.05569740676879883, 0.055803905487060546, 0.05541888046264649, 0.05506636810302734, 0.05524316787719727, 0.055220062255859376, 0.05512195205688476, 0.055441280364990235, 0.055054431915283204, 0.05497446441650391, 0.05486796951293945, 0.05508832168579102, 0.054983070373535156, 0.05496982574462891, 0.05489670562744141, 0.05505276870727539, 0.05513638305664063, 0.0552973747253418, 0.05524294281005859, 0.05522639846801758, 0.05508784103393555, 0.05557014465332031, 0.05600694274902344, 0.05583052825927735, 0.05527142333984375, 0.056076255798339844, 0.05578905487060547, 0.055715518951416014, 0.055274337768554685, 0.05529395294189453, 0.0554516487121582, 0.055501953125, 0.05534755325317383, 0.0555463981628418, 0.05544537734985352, 0.05550249481201172, 0.05679225540161133, 0.05586822509765625, 0.055313983917236326, 0.05497651290893555, 0.05503184127807617, 0.05538431930541992, 0.05536492919921875, 0.05531644821166992, 0.05510780715942383, 0.0549771842956543, 0.060708000183105466, 0.055829345703125, 0.05586943817138672, 0.055136257171630856, 0.055303871154785154, 0.05519142532348633, 0.05516336059570313, 0.054932926177978514, 0.05778451156616211, 0.056076641082763674, 0.05539632034301758, 0.05518544006347656, 0.05500092697143555, 0.05513999938964844, 0.054977024078369144, 0.05536665725708008, 0.055790592193603515, 0.05495603179931641, 0.05505023956298828, 0.055097278594970704, 0.05549375915527344, 0.05530310440063477, 0.05520329666137695, 0.05505283355712891, 0.05514796829223633, 0.055109760284423825, 0.05474092864990234, 0.055281696319580076, 0.05493561553955078, 0.054935806274414065, 0.05496847915649414, 0.0548289909362793, 0.05559708786010742, 0.05517667388916016, 0.05533462524414062, 0.05476393508911133, 0.05490521621704102, 0.05592067337036133, 0.05585513687133789, 0.055416831970214846, 0.055158302307128905, 0.05524460983276367, 0.055075294494628904, 0.05505199813842773, 0.055373985290527346, 0.05542841720581055, 0.05564630508422851, 0.05524563217163086, 0.055005279541015625, 0.05508121490478515, 0.055226112365722654, 0.05518502426147461, 0.05491545486450195, 0.056815616607666014, 0.055578624725341794, 0.05852569580078125, 0.055801025390625, 0.055530303955078124, 0.0554598388671875, 0.05585919952392578, 0.05538431930541992, 0.05509305572509766, 0.05487187194824219, 0.05538172912597656, 0.05519196701049805, 0.05554742431640625, 0.055763423919677736, 0.05557823944091797, 0.05541238403320312, 0.05543190383911133, 0.05543251037597656, 0.055261505126953124, 0.05525689697265625, 0.05945718383789062, 0.057696640014648436, 0.0562426872253418, 0.05578140640258789, 0.05584076690673828, 0.055524608612060544, 0.05524528121948242, 
0.0551058235168457, 0.05554585647583008, 0.06011904144287109, 0.05586739349365234, 0.05580099105834961, 0.05551395034790039, 0.05529766464233398, 0.05524623870849609, 0.05517820739746094, 0.05505843353271484, 0.05509529495239258, 0.05551103973388672, 0.055193599700927735, 0.05561958312988281, 0.055363040924072265, 0.05562831878662109, 0.05534220886230469, 0.055034751892089846, 0.05537996673583984, 0.055772449493408205, 0.05527331161499023, 0.05505318450927734, 0.05527961730957031, 0.05611471939086914, 0.05525551986694336, 0.055222335815429686, 0.05510483169555664, 0.055050846099853515, 0.05563190460205078, 0.055299072265625, 0.055415264129638674, 0.05504668807983398, 0.055003135681152344, 0.05510758590698242, 0.054973953247070315, 0.055110145568847656, 0.056807937622070315, 0.05591641616821289, 0.05575228881835938, 0.055500545501708985, 0.05521036911010742, 0.05565686416625976, 0.05613935852050781, 0.05547436904907226, 0.0553023681640625, 0.05537996673583984, 0.055037120819091796, 0.055153438568115234, 0.05496620941162109, 0.05514806365966797, 0.05492998504638672, 0.054951934814453124, 0.05489459228515625, 0.054973918914794924, 0.05558480072021484, 0.05579622268676758, 0.05547417449951172, 0.055464191436767576, 0.05518259048461914, 0.05554227066040039, 0.055640064239501956, 0.055211647033691406, 0.05511411285400391, 0.05512310409545899, 0.05501424026489258, 0.05517926406860352, 0.055744510650634765, 0.055795711517333986, 0.0559288330078125, 0.05724140930175781, 0.05543718338012695, 0.05558911895751953, 0.05498220825195312, 0.055228256225585935, 0.055199905395507814, 0.05554201507568359, 0.0568873291015625, 0.056776927947998046, 0.055812095642089846, 0.05485567855834961, 0.05494988632202148, 0.05488230514526367, 0.05491097640991211, 0.05491302490234375, 0.05587968063354492, 0.05557968139648437, 0.0548691520690918, 0.055379169464111325, 0.055091808319091794, 0.05507215881347656, 0.05476003265380859, 0.05554166412353516, 0.05754889678955078, 0.055817569732666016, 0.05533747100830078, 0.05501353454589844, 0.05506390380859375, 0.054892318725585934, 0.05492972946166992, 0.056740222930908205, 0.05543766403198242, 0.055398303985595705, 0.0551563835144043, 0.05532844924926758, 0.05617331314086914, 0.05543027114868164, 0.05537171173095703, 0.055045055389404296, 0.055169025421142576, 0.054910526275634766, 0.055180896759033204, 0.055220287322998045, 0.05500188827514649, 0.05526732635498047, 0.05488435363769531, 0.05498470306396484, 0.055524478912353514, 0.05519996643066406, 0.055314910888671874, 0.05544160079956055, 0.055201793670654295, 0.055076416015625, 0.0552737922668457, 0.05529126358032226, 0.055695934295654295, 0.055119838714599606, 0.05518972778320313, 0.05532262420654297, 0.055619838714599606, 0.05511548614501953, 0.0552470703125, 0.055360416412353515, 0.05559782409667969, 0.05504220962524414, 0.054902782440185545, 0.05503327941894531, 0.05482144165039062, 0.05483481597900391, 0.05473641586303711, 0.05497942352294922, 0.05505843353271484, 0.054957313537597655, 0.05514316940307617, 0.058982078552246096, 0.055949119567871096, 0.05546585464477539, 0.05557276916503906, 0.05503142547607422, 0.055083744049072264, 0.055414783477783204, 0.055431167602539064, 0.058472095489501955, 0.055583072662353517, 0.055094913482666014, 0.05489702224731445, 0.055003135681152344, 0.05509529495239258, 0.05505033493041992, 0.054816574096679685, 0.05484143829345703, 0.05506047821044922, 0.05549260711669922, 0.056691646575927734, 0.05517475128173828, 0.055228160858154296, 0.05478985595703125, 0.05471244812011719, 
0.05509971237182617, 0.05483161544799805, 0.054779712677001956, 0.05536796951293945, 0.055537567138671876, 0.05887945556640625, 0.05546448135375977, 0.05523212814331055, 0.055015583038330075, 0.05507727813720703, 0.05509715270996094, 0.05671513748168945, 0.056516735076904294, 0.055504894256591795, 0.05575411224365234, 0.05560720062255859, 0.0552496337890625, 0.05496012878417969, 0.05500723266601563, 0.055272800445556644, 0.05568915176391601, 0.05624496078491211, 0.055903968811035154, 0.05575094223022461, 0.056149185180664064, 0.05565862274169922, 0.0550366096496582, 0.055408641815185546, 0.05529190444946289, 0.0553779182434082, 0.05513203048706055, 0.05753459167480469, 0.05531363296508789, 0.05546409606933594, 0.0550775032043457, 0.05506662368774414, 0.055019039154052735, 0.05488880157470703, 0.05478412628173828, 0.05535334396362305, 0.05544345474243164, 0.055365631103515625, 0.05540249633789063, 0.055310176849365233, 0.05558028793334961, 0.05544195175170898, 0.055126014709472655, 0.05502099227905274, 0.05558143997192383, 0.0558724479675293, 0.05889555358886719, 0.05606572723388672, 0.05550080108642578, 0.05526063919067383, 0.0549791030883789, 0.054763519287109375, 0.05497817611694336, 0.05540499114990234, 0.056672863006591793, 0.0574463996887207, 0.056643585205078124, 0.05557241439819336, 0.05561139297485351, 0.05496223831176758, 0.055060127258300784, 0.055111457824707034, 0.05497708892822266, 0.05480038452148438, 0.05489622497558594, 0.054895008087158206, 0.05480652618408203, 0.05505187225341797, 0.05496259307861328, 0.05477580642700195, 0.055003135681152344, 0.05504931259155273, 0.05485456085205078, 0.055317920684814455, 0.054935840606689455, 0.055177536010742184, 0.0591278076171875, 0.05507276916503906, 0.054975967407226566, 0.05510982513427734, 0.054935455322265625, 0.05561177444458008, 0.054830528259277346, 0.055618175506591795, 0.05579894256591797, 0.05499580764770508, 0.05488611221313477, 0.05537984085083008, 0.055304607391357424, 0.0550230712890625, 0.0550173454284668, 0.05527619171142578, 0.05518950271606445, 0.05579718399047852, 0.055750720977783205, 0.05546799850463867, 0.055158687591552735, 0.05512870407104492, 0.05541068649291992, 0.055045631408691405, 0.05496268844604492, 0.05513216018676758, 0.05501337432861328, 0.055182689666748046, 0.0549807357788086, 0.0548454704284668, 0.054806846618652344, 0.05569673538208008, 0.054983329772949216, 0.05581638336181641, 0.055200832366943356, 0.055648574829101564, 0.055374080657958985, 0.05558515167236328, 0.055959552764892576, 0.055784671783447266, 0.05564495849609375, 0.05705775833129883, 0.055416831970214846, 0.0553221435546875, 0.055683006286621095, 0.05526950454711914, 0.05533849716186524, 0.055547870635986325, 0.05497897720336914, 0.05495577621459961, 0.054806400299072265, 0.05487913513183594, 0.05485123062133789, 0.0546860466003418, 0.06041705703735351, 0.055234783172607424, 0.05532675170898437, 0.05534281539916992, 0.05563449478149414, 0.05584918212890625, 0.05603942489624023, 0.05521539306640625, 0.055199775695800785, 0.055180191040039066, 0.05550236892700195, 0.05578710556030273, 0.05508163070678711, 0.05485385513305664, 0.05475532913208008, 0.05482291030883789, 0.05475923156738281, 0.0552092170715332, 0.05493443298339844, 0.05469187164306641, 0.0547677116394043, 0.05829935836791992, 0.055661502838134765, 0.0553963508605957, 0.05516435241699219, 0.055917022705078125, 0.05575689697265625, 0.05551449584960937, 0.05573593521118164, 0.055634944915771485, 0.05550233459472656, 0.0551223030090332, 0.05542639923095703, 
0.05532889556884766, 0.055061054229736325, 0.055029537200927736, 0.05553184127807617, 0.05523251342773437, 0.05540249633789063, 0.05499289703369141, 0.05503385543823242, 0.05509939193725586, 0.05495606231689453, 0.0548515510559082, 0.05498371124267578, 0.054893535614013673, 0.055063583374023437, 0.055497695922851566, 0.05585100936889648, 0.05571583938598633, 0.05708278274536133, 0.055787391662597656, 0.05601696014404297, 0.0552182388305664, 0.055572479248046876, 0.05558476638793945, 0.055721343994140624, 0.05522700881958008, 0.05542911911010742, 0.05699599838256836, 0.055444480895996094, 0.05515350341796875, 0.055493888854980467, 0.05539654541015625, 0.05499142456054688, 0.05487411117553711, 0.05496422576904297, 0.05483273696899414, 0.05495849609375, 0.05490687942504883, 0.055109630584716796, 0.05492736053466797, 0.05519705581665039, 0.05524259185791015, 0.058765632629394535, 0.05551699066162109, 0.05523251342773437, 0.05518982315063477, 0.05489904022216797, 0.05546096038818359, 0.05604755020141602, 0.0558743667602539, 0.055414913177490234, 0.05539228820800781, 0.055347198486328124, 0.056737247467041015, 0.05617068862915039, 0.05565039825439453, 0.055494911193847654, 0.055282974243164064, 0.055255775451660154, 0.05601484680175781, 0.05650841522216797, 0.055714977264404296, 0.05562028884887695, 0.05907606506347656, 0.05645177459716797, 0.05583462524414062, 0.056289279937744144, 0.05696688079833984, 0.05606633758544922, 0.05572774505615234, 0.055546241760253905, 0.05544937515258789, 0.055543136596679685, 0.05561798477172852, 0.05579750442504883, 0.056379745483398434, 0.05561993789672852, 0.05573222351074219, 0.05717606353759765, 0.05620028686523437, 0.05578435134887695, 0.05707334518432617, 0.05615622329711914, 0.05643667221069336, 0.05616831970214844, 0.05564876937866211, 0.05588787078857422, 0.05595257568359375, 0.055699966430664063, 0.056186721801757815, 0.05612796783447266, 0.056190975189208986, 0.05639987182617188, 0.0559797134399414, 0.05565670394897461, 0.055809951782226565, 0.055990081787109375, 0.056151424407958984, 0.05577212905883789, 0.055756031036376955, 0.055667457580566404, 0.056027137756347656, 0.05570716857910156, 0.05545750427246094, 0.05607273483276367, 0.05593929672241211, 0.05592601776123047, 0.05609913635253906, 0.0556416015625, 0.055419422149658205, 0.05603193664550781, 0.05536870574951172, 0.05524553680419922, 0.05530828857421875, 0.05545369720458984, 0.05673123168945313, 0.05643036651611328, 0.05591718292236328, 0.05585039901733398, 0.05562223815917969, 0.05612748718261719, 0.05587353515625, 0.055785247802734375, 0.056038753509521484, 0.05636185455322266, 0.056027008056640626, 0.05583270263671875, 0.056183967590332035, 0.05585321426391601, 0.05589676666259766, 0.055932926177978515, 0.056137313842773436, 0.05613951873779297, 0.05638790512084961, 0.056164608001708985, 0.056621150970458986, 0.056360160827636716, 0.058641185760498045, 0.05611520004272461, 0.05553926467895508, 0.05593952178955078, 0.05743199920654297, 0.0559529914855957, 0.05585884857177734]",tokens/s,18.00323256393364,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) 
Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 111026 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,870.170624,617.545728,0.0,222.298112,199.93344,s,1,8.03411474609375,8.03411474609375,0.0,8.03411474609375,8.03411474609375,8.03411474609375,8.03411474609375,[8.03411474609375],,kWh,2.450999462917025e-05,2.69318842136144e-06,7.537506029997587e-06,3.474068908052927e-05,,MB,1211.572224,669.974528,0.0,262.144,220.881408,s,16,0.5669669075012207,0.035435431718826294,0.0010705433270812425,0.035248767852783205,0.03584315109252929,0.036712830543518066,0.03876446552276611,"[0.03927737426757812, 0.03503222274780273, 0.03528144073486328, 0.034322624206542966, 0.03507164764404297, 0.03585798263549805, 0.03582831954956055, 0.035390846252441405, 0.03559091186523437, 0.035216094970703125, 0.035463329315185546, 0.03534169769287109, 0.03486675262451172, 0.034920894622802734, 0.03455049514770508, 0.03495427322387695]",tokens/s,7224.407537385559,kWh,1.2342303559247405e-06,1.3611274231380592e-07,8.165512861518986e-07,2.186894384390445e-06,tokens/kWh,117060980.09454402,MB,1245.51168,684.654592,0.0,276.824064,220.883968,s,16,9.711272521972656,0.606954532623291,0.008585633852691918,0.6104183959960938,0.6147964477539063,0.6172033386230469,0.6172731384277343,"[0.5949608154296875, 0.58690283203125, 0.5928167114257813, 0.6113905639648437, 0.6124186401367188, 0.6122392578125, 0.6092311401367188, 0.6065200805664063, 0.6100323486328125, 0.6172905883789063, 0.6171742553710937, 0.610804443359375, 0.6068065795898437, 0.6120545043945312, 0.6117877807617188, 0.5988419799804687]",tokens/s,103.79690176743638,kWh,1.7168328469075072e-05,1.8933499684787224e-06,6.887743071723286e-06,2.594942150927708e-05,tokens/kWh,2427799.7864991752,,s,1008,9.702431542396553,0.009625428117456892,0.00026042284172965544,0.009625679969787599,0.009851100540161133,0.009908850908279419,0.01048333580970764,"[0.009447872161865234, 0.009883872032165527, 0.009592991828918457, 0.009680928230285645, 0.009769023895263672, 0.009662367820739747, 0.00961945629119873, 0.009543904304504395, 0.009411968231201173, 0.009510944366455078, 0.009316127777099609, 0.00942959976196289, 0.009633536338806153, 0.009889151573181152, 0.009661343574523926, 0.00935308837890625, 0.009390175819396973, 0.009297056198120117, 0.009340800285339356, 0.009318400382995605, 0.009380031585693359, 0.009345824241638184, 0.009279232025146484, 0.009408767700195313, 0.009334303855895997, 0.009332480430603028, 0.009302751541137695, 0.009256159782409668, 0.009249567985534668, 0.009538687705993653, 0.009282431602478027, 0.009288800239562989, 0.009294816017150879, 0.009297856330871582, 0.009354432106018067, 0.009312640190124512, 0.009260671615600586, 
0.00941487979888916, 0.009314144134521484, 0.009253631591796875, 0.00919961643218994, 0.00927948760986328, 0.00922323226928711, 0.009255871772766114, 0.009237919807434082, 0.009220383644104003, 0.009230655670166016, 0.009252256393432617, 0.00960905647277832, 0.009428959846496582, 0.009360063552856445, 0.009312543869018555, 0.00996127986907959, 0.009870559692382812, 0.010033632278442383, 0.009816384315490723, 0.009544832229614258, 0.009667455673217773, 0.009404416084289552, 0.009328255653381348, 0.009349504470825195, 0.009328160285949707, 0.009316831588745118, 0.009525343894958497, 0.009607104301452637, 0.009431232452392578, 0.009420255661010742, 0.009408512115478516, 0.009412351608276368, 0.009341919898986816, 0.009263839721679688, 0.009296863555908204, 0.009326239585876465, 0.009684800148010254, 0.009287327766418456, 0.009227359771728515, 0.009227807998657227, 0.009212160110473632, 0.009289183616638184, 0.009218720436096192, 0.0091278076171875, 0.009142271995544434, 0.00932863998413086, 0.009203776359558106, 0.009279135704040528, 0.009216032028198241, 0.00928384017944336, 0.009214271545410156, 0.009309887886047363, 0.009207776069641113, 0.009220128059387206, 0.009185152053833008, 0.00923408031463623, 0.009265631675720214, 0.009371135711669922, 0.00929635238647461, 0.009306240081787109, 0.009285504341125489, 0.009322527885437012, 0.00929587173461914, 0.009286751747131347, 0.009294848442077636, 0.009238431930541992, 0.009256095886230468, 0.009261919975280761, 0.009430591583251952, 0.009429439544677735, 0.009507871627807618, 0.009454784393310546, 0.009492256164550782, 0.009390080451965332, 0.00931552028656006, 0.009323328018188477, 0.009362848281860351, 0.009230912208557129, 0.009256735801696777, 0.00925107192993164, 0.00942899227142334, 0.009298015594482421, 0.009239935874938964, 0.009159199714660645, 0.009328512191772461, 0.009303232192993165, 0.009180095672607422, 0.009190591812133789, 0.009220064163208008, 0.009267071723937988, 0.00943561553955078, 0.00954643154144287, 0.009376735687255859, 0.009472224235534668, 0.009382335662841797, 0.009531392097473144, 0.009433024406433106, 0.009355680465698242, 0.009446335792541503, 0.009403103828430176, 0.009461759567260742, 0.009308095932006835, 0.009351231575012208, 0.009328479766845704, 0.009283743858337402, 0.00929097557067871, 0.00927619171142578, 0.009274911880493165, 0.009390432357788086, 0.009349247932434083, 0.009261055946350098, 0.009506239891052246, 0.009422464370727539, 0.009231040000915527, 0.00928998374938965, 0.009314016342163086, 0.009215583801269531, 0.009251744270324706, 0.00929155158996582, 0.009220095634460449, 0.009334272384643554, 0.009203328132629394, 0.009170975685119629, 0.009195648193359374, 0.009214688301086425, 0.009293791770935058, 0.009230367660522461, 0.00923852825164795, 0.00927948760986328, 0.00937936019897461, 0.009452159881591798, 0.009427840232849122, 0.009343968391418458, 0.009350848197937012, 0.009389408111572265, 0.009313152313232423, 0.009361311912536622, 0.009377984046936036, 0.009385215759277343, 0.00963167953491211, 0.00966329574584961, 0.009664544105529785, 0.009652192115783692, 0.009612480163574218, 0.009548319816589355, 0.00981430435180664, 0.009943039894104003, 0.009624768257141113, 0.009614144325256348, 0.009523200035095216, 0.009523200035095216, 0.009533439636230469, 0.009478624343872071, 0.009737215995788574, 0.009750528335571289, 0.00968832015991211, 0.009748224258422852, 0.009639936447143555, 0.009650176048278808, 0.009576448440551758, 0.009660415649414063, 0.009524736404418945, 
0.009575967788696288, 0.009534720420837403, 0.009532416343688965, 0.009501407623291016, 0.009538816452026368, 0.009546496391296387, 0.009613311767578125, 0.009697343826293945, 0.009756608009338379, 0.009654272079467773, 0.0097259521484375, 0.009699328422546387, 0.009706720352172852, 0.009686112403869629, 0.00983420753479004, 0.009747584342956543, 0.009736063957214356, 0.009894880294799804, 0.009772959709167481, 0.009694656372070313, 0.009681568145751954, 0.00976460838317871, 0.009732352256774903, 0.009707200050354003, 0.009885536193847657, 0.009771391868591309, 0.009736384391784668, 0.009703328132629394, 0.009703424453735352, 0.009764415740966797, 0.009817983627319336, 0.009847359657287598, 0.009705471992492675, 0.009762080192565917, 0.009691871643066407, 0.009650176048278808, 0.009794560432434082, 0.00993177604675293, 0.009791744232177735, 0.009791199684143066, 0.00973417568206787, 0.009665920257568359, 0.009630335807800292, 0.009607135772705078, 0.009599007606506348, 0.009582592010498046, 0.009631744384765625, 0.009594335556030273, 0.009579039573669434, 0.00962179183959961, 0.009606207847595215, 0.009966367721557618, 0.009864224433898925, 0.009728768348693847, 0.009780991554260253, 0.009867584228515625, 0.009820032119750976, 0.009682815551757812, 0.009683072090148926, 0.00966659164428711, 0.009727775573730469, 0.009598496437072754, 0.009607839584350585, 0.00975158405303955, 0.009662848472595215, 0.009642144203186036, 0.009896384239196778, 0.009752351760864258, 0.009782976150512695, 0.009587231636047364, 0.009539584159851074, 0.009578207969665527, 0.009640064239501953, 0.0097609281539917, 0.009832448005676269, 0.009678848266601562, 0.009674143791198731, 0.009722463607788086, 0.009668352127075196, 0.00967091178894043, 0.00967676830291748, 0.009576479911804199, 0.009721440315246582, 0.009687456130981445, 0.009823583602905273, 0.009754847526550293, 0.009850496292114258, 0.009798175811767578, 0.009890080451965332, 0.00964310359954834, 0.00962384033203125, 0.009656959533691407, 0.009654080390930176, 0.00962390422821045, 0.009645503997802734, 0.009607808113098144, 0.009596128463745118, 0.009746912002563477, 0.009666655540466309, 0.009727935791015625, 0.009897791862487793, 0.009782655715942383, 0.010109855651855468, 0.009703392028808594, 0.00971776008605957, 0.009703424453735352, 0.009557567596435546, 0.00975648021697998, 0.009560288429260254, 0.009544351577758788, 0.009524991989135742, 0.009566399574279785, 0.009518912315368652, 0.010208415985107421, 0.009755647659301758, 0.009953120231628417, 0.009440064430236816, 0.009772319793701172, 0.009629504203796387, 0.00984768009185791, 0.009584287643432617, 0.009733728408813477, 0.009590975761413574, 0.009642720222473145, 0.009518688201904296, 0.00959494400024414, 0.009668319702148438, 0.009644512176513673, 0.009582240104675293, 0.009628000259399415, 0.009624896049499511, 0.00961196804046631, 0.009709280014038086, 0.009664799690246582, 0.009739999771118163, 0.009668512344360352, 0.009656512260437011, 0.00959280014038086, 0.009611392021179199, 0.009633888244628906, 0.009545727729797364, 0.009674752235412597, 0.009637920379638672, 0.00965833568572998, 0.009666463851928712, 0.00971292781829834, 0.00967683219909668, 0.009672896385192871, 0.010103391647338868, 0.009857088088989259, 0.009727295875549316, 0.00967347240447998, 0.00975004768371582, 0.009642335891723633, 0.009559295654296874, 0.010099103927612305, 0.010545503616333008, 0.010921983718872071, 0.010004480361938477, 0.00971776008605957, 0.009684831619262695, 0.009625760078430176, 
0.009709695816040038, 0.009615232467651367, 0.009707200050354003, 0.009793536186218262, 0.009676223754882812, 0.00973299217224121, 0.00988479995727539, 0.009646719932556152, 0.009568511962890625, 0.009506815910339356, 0.009621503829956055, 0.00954543972015381, 0.009546079635620118, 0.009545663833618163, 0.009560064315795898, 0.009516863822937012, 0.009967807769775391, 0.009519776344299317, 0.009664511680603028, 0.0096112642288208, 0.010798912048339843, 0.012132543563842774, 0.009691455841064452, 0.009690912246704101, 0.009564224243164063, 0.009650015830993652, 0.009531328201293945, 0.009705535888671875, 0.009598496437072754, 0.009545472145080567, 0.009618368148803711, 0.009574175834655762, 0.009661888122558594, 0.009585151672363281, 0.009570367813110351, 0.009614527702331543, 0.009620287895202636, 0.009600223541259766, 0.009645024299621581, 0.009611071586608887, 0.009715392112731933, 0.009715423583984376, 0.009599583625793457, 0.009594592094421386, 0.009596384048461915, 0.009569087982177734, 0.00952457618713379, 0.009534367561340332, 0.009571423530578613, 0.009612959861755371, 0.00976319980621338, 0.009531744003295898, 0.009554176330566407, 0.009568287849426269, 0.009592831611633301, 0.009661472320556641, 0.009608160018920899, 0.009574399948120118, 0.009570303916931153, 0.00958182430267334, 0.009619263648986817, 0.009495231628417969, 0.00952575969696045, 0.009541376113891602, 0.00971776008605957, 0.009644031524658203, 0.0095862398147583, 0.009658559799194336, 0.009570143699645995, 0.009585344314575195, 0.0095250883102417, 0.009553119659423829, 0.009638079643249512, 0.009620032310485839, 0.009583871841430664, 0.00970748805999756, 0.009548480033874512, 0.00951734447479248, 0.009573760032653809, 0.009591039657592773, 0.009543680191040039, 0.00970956802368164, 0.009793248176574707, 0.010053919792175294, 0.009944448471069336, 0.00968569564819336, 0.009922623634338379, 0.009713536262512206, 0.009567392349243165, 0.009699616432189941, 0.009695743560791016, 0.009801792144775391, 0.00960102367401123, 0.009666432380676269, 0.009656671524047852, 0.009633888244628906, 0.009620256423950195, 0.00968387222290039, 0.009682656288146973, 0.009783583641052246, 0.009635583877563476, 0.009656479835510254, 0.009541279792785644, 0.009611712455749512, 0.009586688041687011, 0.009625311851501464, 0.009572640419006347, 0.009660415649414063, 0.009564160346984863, 0.009552895545959473, 0.00953990364074707, 0.00965283203125, 0.009556384086608886, 0.009648927688598633, 0.009597855567932129, 0.009565728187561035, 0.009482815742492675, 0.00955401611328125, 0.009505951881408691, 0.009536160469055175, 0.00951910400390625, 0.009491840362548827, 0.009509568214416503, 0.009482111930847168, 0.009525376319885254, 0.00952086353302002, 0.009519328117370605, 0.00951910400390625, 0.009558015823364258, 0.009566176414489745, 0.009531295776367188, 0.009578623771667481, 0.009557663917541504, 0.009505120277404785, 0.009473983764648437, 0.009491552352905273, 0.009593184471130372, 0.009656096458435058, 0.009792351722717285, 0.009664511680603028, 0.009590784072875976, 0.009630816459655762, 0.009575615882873536, 0.009500320434570313, 0.009721152305603028, 0.009724703788757325, 0.009793536186218262, 0.00961740779876709, 0.00970537567138672, 0.009683296203613282, 0.009624959945678711, 0.00970911979675293, 0.009576671600341797, 0.009623647689819336, 0.009660479545593261, 0.009537631988525391, 0.009567935943603516, 0.009535967826843262, 0.00962559986114502, 0.009785247802734374, 0.009834464073181153, 0.009742655754089356, 0.009690688133239747, 
0.009636287689208984, 0.009652095794677734, 0.009656448364257813, 0.009643296241760253, 0.009556703567504883, 0.009797632217407226, 0.009616543769836425, 0.009657183647155761, 0.009613311767578125, 0.009576448440551758, 0.009585760116577148, 0.009597855567932129, 0.00960048007965088, 0.009615903854370117, 0.009562111854553223, 0.009595135688781738, 0.009604063987731934, 0.009709856033325196, 0.009588768005371093, 0.009638208389282227, 0.009702560424804687, 0.009933440208435059, 0.009869215965270996, 0.009767231941223144, 0.009719615936279296, 0.009688575744628907, 0.009654879570007324, 0.00981606388092041, 0.00961945629119873, 0.009653696060180664, 0.009635711669921875, 0.009644991874694824, 0.009623552322387695, 0.009682080268859864, 0.009741151809692383, 0.009689087867736817, 0.009725664138793945, 0.00982806396484375, 0.009787967681884766, 0.009786944389343261, 0.009730496406555177, 0.00972390365600586, 0.009674976348876953, 0.00956287956237793, 0.009942815780639649, 0.00992899227142334, 0.010126272201538085, 0.010017056465148926, 0.009740544319152831, 0.00986460781097412, 0.00966649627685547, 0.009731040000915528, 0.009623488426208497, 0.00972390365600586, 0.00966864013671875, 0.009692768096923828, 0.009736576080322266, 0.009637887954711915, 0.009670656204223632, 0.009690943717956544, 0.009646464347839355, 0.00976467227935791, 0.010141695976257324, 0.009758720397949219, 0.009764863967895507, 0.009704607963562012, 0.009705984115600585, 0.009733983993530274, 0.009763487815856934, 0.009772480010986328, 0.009728416442871094, 0.009709856033325196, 0.009919391632080079, 0.009644576072692871, 0.00964031982421875, 0.009716799736022949, 0.00967353630065918, 0.00975376033782959, 0.009779775619506836, 0.009780768394470216, 0.00971673583984375, 0.009682720184326171, 0.009842880249023437, 0.010102720260620117, 0.009794943809509278, 0.009883999824523926, 0.009904671669006347, 0.009862784385681153, 0.009801728248596191, 0.00979155158996582, 0.009877440452575683, 0.009789440155029297, 0.00987110424041748, 0.009892352104187012, 0.00977894401550293, 0.009884960174560547, 0.009851615905761718, 0.009746527671813965, 0.009746335983276367, 0.009867551803588868, 0.00989087963104248, 0.009789728164672851, 0.009808256149291992, 0.009758015632629395, 0.009848640441894532, 0.009743231773376465, 0.009546112060546875, 0.009905983924865722, 0.009841312408447265, 0.009899423599243165, 0.009830080032348632, 0.009835200309753418, 0.009769503593444823, 0.009813055992126465, 0.00956287956237793, 0.009850655555725098, 0.009641823768615722, 0.009960960388183594, 0.009818943977355957, 0.00989132785797119, 0.009744256019592286, 0.009803647994995118, 0.009762847900390625, 0.009863519668579101, 0.00974448013305664, 0.009750335693359376, 0.009734560012817382, 0.009750528335571289, 0.009672639846801757, 0.009715456008911133, 0.009760640144348144, 0.009836480140686036, 0.009687423706054688, 0.009940223693847656, 0.00968291187286377, 0.009702431678771972, 0.009680000305175782, 0.009728768348693847, 0.009756704330444337, 0.00978940773010254, 0.009752575874328612, 0.00971116828918457, 0.00971731185913086, 0.009851776123046874, 0.00975692844390869, 0.009752320289611817, 0.009836799621582031, 0.00984447956085205, 0.00993660831451416, 0.009849120140075683, 0.009821279525756836, 0.009927488327026368, 0.009797727584838867, 0.00990835189819336, 0.00979475212097168, 0.009806879997253418, 0.009747551918029786, 0.009840288162231446, 0.009851807594299317, 0.009901856422424317, 0.00985324764251709, 0.009880576133728027, 0.009890527725219726, 
0.009778495788574218, 0.009818431854248046, 0.009611840248107911, 0.009684991836547852, 0.00968892765045166, 0.009718048095703124, 0.00955571174621582, 0.010055935859680176, 0.009907839775085449, 0.009775456428527831, 0.00981123161315918, 0.009704192161560059, 0.009858783721923827, 0.009752863883972169, 0.009902239799499512, 0.01001046371459961, 0.009804032325744628, 0.009832127571105957, 0.00969324779510498, 0.009701215744018554, 0.009683327674865723, 0.009790623664855957, 0.009710495948791503, 0.009688384056091308, 0.009709983825683595, 0.009578783988952637, 0.009635552406311036, 0.009619135856628418, 0.009787712097167969, 0.009596927642822266, 0.009591103553771973, 0.009625280380249023, 0.009691360473632813, 0.009634655952453613, 0.009638079643249512, 0.009618176460266113, 0.009539135932922363, 0.009559935569763183, 0.010627103805541991, 0.009675295829772949, 0.009717887878417968, 0.009706687927246094, 0.009699999809265137, 0.009649727821350098, 0.009739871978759765, 0.00962060832977295, 0.009592576026916504, 0.009613311767578125, 0.009567328453063965, 0.009614239692687989, 0.009545408248901367, 0.009650495529174805, 0.009576255798339844, 0.00950819206237793, 0.00986182403564453, 0.009847999572753906, 0.009671648025512696, 0.009841952323913574, 0.00962435245513916, 0.00966211223602295, 0.009511199951171875, 0.009535264015197754, 0.009478367805480958, 0.009441280364990234, 0.009464863777160645, 0.009521216392517089, 0.009451583862304687, 0.009549920082092284, 0.009616127967834473, 0.00946793556213379, 0.00981049633026123, 0.009789055824279785, 0.009763520240783691, 0.00969696044921875, 0.009662336349487304, 0.010201215744018555, 0.01009059238433838, 0.009874367713928222, 0.009585023880004883, 0.009709407806396484, 0.009634655952453613, 0.009622688293457032, 0.009713408470153808, 0.009616512298583985, 0.009625472068786622, 0.009566335678100587, 0.00954150390625, 0.009667712211608887, 0.009665696144104004, 0.009688799858093261, 0.009669952392578125, 0.009570207595825195, 0.009588607788085938, 0.009584832191467284, 0.009519424438476563, 0.00958505630493164, 0.009573760032653809, 0.009697055816650391, 0.009556832313537598, 0.00960102367401123, 0.009555968284606933, 0.009668288230895996, 0.009566816329956054, 0.0096212158203125, 0.009615039825439453, 0.009517472267150879, 0.009524127960205079, 0.009548800468444824, 0.0094967041015625, 0.009534560203552245, 0.00950761604309082, 0.009504768371582031, 0.009524543762207032, 0.009503487586975098, 0.009540896415710449, 0.009869983673095703, 0.009561152458190917, 0.00952620792388916, 0.00961945629119873, 0.009545727729797364, 0.009526816368103028, 0.009525823593139648, 0.0095763521194458, 0.009511967658996583, 0.00963811206817627, 0.009548416137695312, 0.009611071586608887, 0.009576416015625, 0.009695775985717773, 0.00957420825958252, 0.009566240310668946, 0.009603039741516113, 0.00952768039703369, 0.009738719940185547, 0.009861120223999023, 0.009818112373352051, 0.009744383811950684, 0.00962559986114502, 0.009682239532470703, 0.00958672046661377, 0.009664959907531738, 0.009726335525512695, 0.009683903694152831, 0.009587807655334473, 0.009512672424316406, 0.009642080307006836, 0.009613311767578125, 0.009621503829956055, 0.009776255607604981, 0.009474368095397949, 0.009523776054382324, 0.009672736167907715, 0.009891807556152344, 0.009889792442321778, 0.009649984359741211, 0.009689279556274414, 0.009746432304382324, 0.009663711547851563, 0.009643808364868164, 0.009675775527954102, 0.009602656364440918, 0.009586943626403808, 0.009820320129394532, 
0.009684703826904296, 0.009560288429260254, 0.009746560096740723, 0.009849023818969727, 0.009887264251708985, 0.0097193603515625, 0.009724575996398926, 0.009797792434692382, 0.009717599868774414, 0.009834495544433594, 0.009754143714904785, 0.009795104026794434, 0.009737152099609375, 0.009885567665100098, 0.009764991760253906, 0.0097609281539917, 0.009752415657043456, 0.009756352424621582, 0.009711487770080567, 0.009693792343139648, 0.009682784080505371, 0.00974847984313965, 0.009674688339233399, 0.009621888160705567, 0.00977683162689209, 0.009731679916381837, 0.009638015747070312, 0.009693375587463379, 0.009630047798156738, 0.00963161563873291, 0.00981174373626709, 0.00974847984313965, 0.00959718418121338, 0.009842720031738281, 0.009944992065429687, 0.009842687606811524, 0.009942432403564454, 0.009909119606018067, 0.009858688354492188, 0.009818207740783692, 0.009850879669189454, 0.009889535903930664, 0.00989414405822754, 0.009795871734619141, 0.010454527854919434, 0.010950655937194824, 0.010774751663208008, 0.01005568027496338, 0.01006175994873047, 0.009836607933044434, 0.009920512199401856, 0.009981951713562011, 0.009844736099243164, 0.009824352264404297, 0.009877408027648926, 0.009795583724975587, 0.009773216247558593, 0.009760607719421386, 0.009688639640808105, 0.009607872009277344, 0.009682687759399414, 0.009739359855651856, 0.009507871627807618, 0.009461503982543945, 0.009545856475830079, 0.009477343559265137, 0.009415488243103027, 0.009435104370117188, 0.009422656059265136, 0.009462271690368652, 0.009432191848754884, 0.009552448272705079, 0.009549247741699218, 0.009648927688598633, 0.009645855903625489, 0.00963584041595459, 0.009586432456970214, 0.009662943840026856, 0.009497695922851563, 0.00947270393371582, 0.009524255752563477, 0.009488991737365723, 0.009594592094421386, 0.009447263717651368, 0.009540703773498535, 0.009443008422851562, 0.009438976287841797, 0.009400320053100587, 0.009470239639282226, 0.009656319618225098, 0.009555968284606933, 0.00939417552947998, 0.009334783554077148, 0.00944547176361084, 0.009328543663024902, 0.009187328338623046, 0.009523200035095216, 0.009322784423828124, 0.009346495628356934, 0.009333215713500976, 0.009364319801330566, 0.00942409610748291, 0.009347071647644043, 0.00953916835784912, 0.009448863983154298, 0.009579263687133788, 0.009508864402770996, 0.01324169635772705, 0.010420576095581054, 0.010524991989135743, 0.010485504150390625, 0.009701631546020507, 0.009324543952941895, 0.009377216339111328, 0.009525823593139648, 0.00939417552947998, 0.00928553581237793, 0.009527392387390136, 0.00930611228942871, 0.009223648071289062, 0.009226911544799805, 0.009233311653137207, 0.009210047721862792, 0.009220864295959473, 0.009220128059387206, 0.009224320411682129, 0.00934284782409668, 0.009584639549255371, 0.009237855911254882, 0.009636480331420899, 0.009351200103759765, 0.009354720115661621, 0.00937564754486084, 0.009327232360839844, 0.00932585620880127, 0.009260831832885742, 0.00932755184173584, 0.009272480010986328, 0.0093274564743042, 0.009308159828186035, 0.009255231857299805, 0.009359040260314942, 0.00935321617126465, 0.009248543739318847, 0.009302240371704101, 0.00936297607421875, 0.010651712417602538, 0.009429408073425292, 0.00934928035736084, 0.009588031768798829, 0.009392767906188966, 0.009439167976379394, 0.009336064338684082, 0.009327327728271485, 0.009303872108459472, 0.009316512107849121, 0.009338560104370118, 0.009338368415832519]",tokens/s,103.8914828303977,,, 
4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1196.367872,1149.108224,0.0,746.586112,728.01536,s,1,8.62387109375,8.62387109375,0.0,8.62387109375,8.62387109375,8.62387109375,8.62387109375,[8.62387109375],,kWh,3.0236546608375648e-05,3.3209170310235012e-06,9.779730045983426e-06,4.3337193685382574e-05,,MB,1572.92544,1457.389568,0.0,1040.187392,985.00096,s,10,1.589587371826172,0.1589587371826172,0.0011242750814520921,0.15868964385986328,0.15946807403564453,0.16077782974243165,0.16182563430786134,"[0.16208758544921875, 0.15779299926757812, 0.15914378356933595, 0.15866018676757812, 0.15893980407714844, 0.15843463134765626, 0.15803158569335937, 0.15871910095214845, 0.15860067749023438, 0.15917701721191407]",tokens/s,1610.48083633112,kWh,4.852531642554226e-06,5.351485358657892e-07,3.2067284305896596e-06,8.594408609009674e-06,tokens/kWh,29786808.10354194,MB,1605.65248,1505.624064,0.0,1088.421888,985.00352,s,10,11.109411743164065,1.1109411743164062,0.006887684538624719,1.1124687500000001,1.1183736694335937,1.1193114074707031,1.1200615979003907,"[1.101510498046875, 1.11509423828125, 1.118165283203125, 1.1052757568359375, 1.1135023193359375, 1.1061446533203125, 1.10007080078125, 1.1202491455078125, 1.1114351806640625, 1.1179638671875]",tokens/s,56.70867320114019,kWh,3.205926829119471e-05,3.53574678766871e-06,1.4968063887012112e-05,5.056307896587554e-05,tokens/kWh,1245968.427724071,,s,630,11.106172786712637,0.017628845693194676,0.0004051979093477331,0.017573904037475586,0.017849810600280763,0.018003361511230467,0.01855540641784668,"[0.017670688629150392, 0.01768604850769043, 0.01747235107421875, 0.017709056854248048, 0.017524736404418945, 0.01739708709716797, 0.01764998435974121, 0.017381664276123046, 0.01732371139526367, 0.01748134422302246, 0.01731865692138672, 0.017410400390625, 0.017512096405029296, 0.017399808883666993, 0.017292415618896485, 0.017548160552978517, 0.017425504684448243, 0.01723619270324707, 0.01740870475769043, 0.01738956832885742, 0.01742201614379883, 0.01724652862548828, 0.017284095764160155, 0.01741312026977539, 0.017270784378051757, 0.017202272415161132, 0.01740687942504883, 0.017399808883666993, 0.01742438316345215, 0.01764249610900879, 0.01741721534729004, 0.01745305633544922, 0.017495840072631837, 0.017424608230590822, 0.01780860710144043, 0.017707807540893555, 0.017375232696533204, 0.017378463745117187, 0.017437536239624022, 0.017612991333007814, 0.017434431076049806, 0.017516544342041016, 0.01772876739501953, 0.017576704025268553, 0.01734886360168457, 0.017329919815063478, 0.017479232788085938, 0.017762752532958986, 0.017573888778686524, 0.017491071701049805, 0.017357280731201172, 0.017510751724243163, 0.017434688568115236, 0.017350656509399414, 0.017375232696533204, 0.01739068794250488, 0.01733692741394043, 0.017472896575927734, 0.017469919204711915, 0.01800035285949707, 0.017910879135131837, 0.017709503173828123, 
0.01757391929626465, 0.017772544860839845, 0.01761894416809082, 0.017655391693115235, 0.017790687561035155, 0.017722047805786133, 0.0175861759185791, 0.017551359176635743, 0.01760054397583008, 0.01767420768737793, 0.017532928466796875, 0.017754112243652344, 0.017712703704833986, 0.017739423751831053, 0.017573728561401367, 0.017630111694335936, 0.01755459213256836, 0.01746121597290039, 0.01794550323486328, 0.022190080642700196, 0.02043289566040039, 0.017580032348632812, 0.01778659248352051, 0.017614751815795898, 0.01788147163391113, 0.017603935241699217, 0.017535648345947265, 0.01762646484375, 0.017515167236328125, 0.017524736404418945, 0.017514495849609374, 0.017491615295410157, 0.017578336715698244, 0.017556671142578126, 0.017535423278808592, 0.01754764747619629, 0.017725439071655275, 0.017909759521484374, 0.017590272903442384, 0.01760665512084961, 0.017514080047607423, 0.017602975845336915, 0.01745510482788086, 0.017328128814697266, 0.017413728713989256, 0.01766032028198242, 0.01754070472717285, 0.01738153648376465, 0.0174021110534668, 0.017459199905395507, 0.017367040634155274, 0.017294464111328126, 0.017429279327392577, 0.01746895980834961, 0.017578048706054686, 0.017553728103637697, 0.01753926467895508, 0.017620384216308595, 0.017447519302368163, 0.017348608016967772, 0.017516544342041016, 0.017520639419555666, 0.017526784896850587, 0.01759846305847168, 0.018551679611206056, 0.017909759521484374, 0.01841289520263672, 0.017756128311157228, 0.017584831237792968, 0.017690624237060547, 0.01777663993835449, 0.017543167114257813, 0.017530784606933594, 0.01791804885864258, 0.017709056854248048, 0.017587551116943358, 0.017522464752197264, 0.017551712036132813, 0.017578239440917968, 0.017469728469848633, 0.017772544860839845, 0.017760255813598632, 0.01784956741333008, 0.017729440689086915, 0.017711296081542968, 0.017682367324829102, 0.017781408309936523, 0.01810985565185547, 0.018471616744995117, 0.018263168334960937, 0.017653696060180663, 0.017511072158813475, 0.017621280670166016, 0.017612800598144532, 0.01773676872253418, 0.017713792800903322, 0.017527103424072266, 0.01743667221069336, 0.017496063232421876, 0.01786579132080078, 0.017664608001708985, 0.017680736541748048, 0.01797302436828613, 0.017878719329833984, 0.017578847885131837, 0.017729055404663085, 0.018522592544555665, 0.01760630416870117, 0.017489952087402345, 0.01769267272949219, 0.017560928344726563, 0.01765433692932129, 0.01764726448059082, 0.017526720046997072, 0.017576448440551756, 0.018135040283203126, 0.017688575744628905, 0.017547071456909178, 0.017612991333007814, 0.01765171241760254, 0.017633279800415038, 0.017465599060058595, 0.017950464248657226, 0.01784012794494629, 0.017743616104125975, 0.01754751968383789, 0.017837503433227538, 0.01791155242919922, 0.01803241539001465, 0.017768575668334962, 0.017705759048461913, 0.017765600204467772, 0.018167999267578124, 0.017707679748535158, 0.017655807495117186, 0.017711103439331053, 0.017844224929809572, 0.017678272247314452, 0.017587839126586916, 0.017709247589111327, 0.017664255142211913, 0.01764761543273926, 0.017622240066528322, 0.017685344696044922, 0.017510208129882812, 0.017459104537963867, 0.017510080337524415, 0.017433120727539064, 0.017440095901489257, 0.01729097557067871, 0.017238208770751953, 0.017492544174194335, 0.01764476776123047, 0.017558271408081055, 0.017445087432861328, 0.017603904724121093, 0.017394367218017577, 0.017388671875, 0.017486719131469725, 0.0174072322845459, 0.017382144927978516, 0.017534175872802735, 0.017720096588134764, 0.0174202880859375, 
0.017235872268676757, 0.0173089599609375, 0.017481855392456055, 0.017512575149536132, 0.017568031311035157, 0.017466848373413085, 0.017407808303833008, 0.017368064880371094, 0.01737113571166992, 0.017307647705078123, 0.017512351989746093, 0.01746339225769043, 0.017373119354248047, 0.017461055755615233, 0.01742220878601074, 0.017586463928222655, 0.017363040924072266, 0.018005823135375975, 0.01743459129333496, 0.017510623931884767, 0.017536767959594725, 0.017338176727294922, 0.017519039154052736, 0.01739948844909668, 0.017374879837036134, 0.0174182071685791, 0.01754582405090332, 0.017772544860839845, 0.01778892707824707, 0.01781532859802246, 0.017843584060668945, 0.017617759704589845, 0.017557472229003907, 0.01754319953918457, 0.017497760772705078, 0.017682783126831053, 0.01776780891418457, 0.01772198486328125, 0.01806035232543945, 0.017693567276000976, 0.017716991424560548, 0.017822015762329103, 0.017612064361572265, 0.01755731201171875, 0.01761577606201172, 0.017655807495117186, 0.017655807495117186, 0.017770496368408203, 0.01762441635131836, 0.01765033531188965, 0.017889087677001952, 0.01767647933959961, 0.01778825569152832, 0.01776019287109375, 0.018180831909179688, 0.017765535354614257, 0.017496383666992188, 0.017705503463745116, 0.017721343994140625, 0.017604608535766602, 0.01751641654968262, 0.01756787109375, 0.01762505531311035, 0.017625120162963866, 0.0177108154296875, 0.017643808364868164, 0.017639551162719726, 0.017618175506591796, 0.01794646453857422, 0.017568544387817384, 0.01756492805480957, 0.017504383087158203, 0.017745792388916017, 0.01805183982849121, 0.017786880493164063, 0.0178288631439209, 0.017750175476074218, 0.017603008270263672, 0.017618431091308593, 0.0175645751953125, 0.01767616081237793, 0.017583967208862305, 0.017475103378295897, 0.017498880386352538, 0.017457376480102538, 0.01745692825317383, 0.01738956832885742, 0.017435712814331053, 0.017555423736572266, 0.01782156753540039, 0.017715328216552733, 0.017786880493164063, 0.017851999282836914, 0.01825404739379883, 0.0177379207611084, 0.017544992446899416, 0.017857824325561523, 0.017617855072021484, 0.01741209602355957, 0.01755340766906738, 0.017463487625122072, 0.01743827247619629, 0.017524192810058594, 0.017521440505981447, 0.01740943908691406, 0.017411775588989258, 0.017408927917480468, 0.017470495223999023, 0.017417184829711913, 0.017778688430786133, 0.017399808883666993, 0.01755897521972656, 0.017658016204833985, 0.01747599983215332, 0.017343807220458984, 0.017496768951416015, 0.017754112243652344, 0.017502208709716797, 0.01750399971008301, 0.017625343322753905, 0.01754710388183594, 0.017445024490356446, 0.017326080322265625, 0.017686527252197267, 0.017426431655883787, 0.017373184204101562, 0.01741209602355957, 0.017678335189819337, 0.01760051155090332, 0.017633279800415038, 0.017555456161499023, 0.017421951293945314, 0.017483455657958984, 0.017480384826660155, 0.01744063949584961, 0.017469568252563475, 0.01743680000305176, 0.017491615295410157, 0.017532415390014648, 0.01747123146057129, 0.017382368087768555, 0.017450016021728517, 0.017482719421386718, 0.01740572738647461, 0.01772313690185547, 0.017648096084594726, 0.017496063232421876, 0.017649696350097655, 0.017549280166625977, 0.01740185546875, 0.0175861759185791, 0.017793088912963866, 0.0177108154296875, 0.017671167373657228, 0.017665760040283203, 0.01768876838684082, 0.017579967498779298, 0.01743449592590332, 0.017510271072387694, 0.017663808822631837, 0.017408607482910156, 0.017487520217895507, 0.017510175704956055, 0.017363519668579103, 
0.017938432693481447, 0.01738137626647949, 0.017594112396240234, 0.01758233642578125, 0.017735071182250976, 0.017515104293823244, 0.0175918083190918, 0.01751091194152832, 0.017383455276489258, 0.01741334342956543, 0.017521408081054686, 0.017295360565185547, 0.017350048065185548, 0.0175765438079834, 0.017350656509399414, 0.017250303268432618, 0.017391424179077148, 0.017344703674316408, 0.01737932777404785, 0.017514495849609374, 0.0172359676361084, 0.017194623947143554, 0.01753945541381836, 0.01721343994140625, 0.017362943649291994, 0.01742646408081055, 0.01733628845214844, 0.01743667221069336, 0.01743052864074707, 0.017338367462158204, 0.017269920349121094, 0.017285600662231445, 0.017516927719116213, 0.017384672164916994, 0.01734022331237793, 0.01762761688232422, 0.017420799255371093, 0.01739776039123535, 0.01740310478210449, 0.017281824111938477, 0.0173505916595459, 0.017333759307861327, 0.01735478401184082, 0.017279392242431642, 0.01733030319213867, 0.017557247161865235, 0.01757414436340332, 0.01756979179382324, 0.01761894416809082, 0.01749782371520996, 0.017537248611450194, 0.017709280014038088, 0.01768448066711426, 0.018282495498657226, 0.018556928634643553, 0.018386240005493163, 0.017918432235717773, 0.01763555145263672, 0.01749737548828125, 0.017595104217529297, 0.017647008895874023, 0.02306732749938965, 0.017468671798706054, 0.017514432907104492, 0.017639167785644533, 0.017621984481811525, 0.017694719314575197, 0.017542207717895508, 0.017718143463134765, 0.017481792449951173, 0.01759436798095703, 0.017544864654541015, 0.0175947208404541, 0.017917503356933595, 0.01760710334777832, 0.017696512222290038, 0.017782304763793947, 0.01787913513183594, 0.01795110321044922, 0.017899776458740236, 0.017829887390136717, 0.017612224578857423, 0.0177772159576416, 0.017649663925170898, 0.0176680965423584, 0.017589599609375, 0.017604448318481444, 0.01757279968261719, 0.017481599807739258, 0.017604608535766602, 0.017892383575439454, 0.017445856094360352, 0.017475584030151366, 0.017653663635253905, 0.01747977638244629, 0.01745305633544922, 0.017532928466796875, 0.017534719467163087, 0.01751475143432617, 0.01772870445251465, 0.017761087417602538, 0.017592416763305665, 0.017579839706420897, 0.01760598373413086, 0.01772336006164551, 0.01779996871948242, 0.017630783081054688, 0.017579647064208986, 0.017802047729492187, 0.018020448684692384, 0.017601951599121094, 0.01771776008605957, 0.017723392486572266, 0.01756563186645508, 0.017653663635253905, 0.018037120819091798, 0.01763302421569824, 0.017893152236938478, 0.01779555130004883, 0.017709056854248048, 0.01763052749633789, 0.017502239227294922, 0.017469856262207033, 0.017527040481567384, 0.017768447875976562, 0.017493152618408205, 0.01750716781616211, 0.01745929527282715, 0.01758608055114746, 0.017426143646240233, 0.017430816650390625, 0.017537023544311522, 0.01757699203491211, 0.017497055053710936, 0.0178701114654541, 0.01780099105834961, 0.017654272079467775, 0.017580480575561525, 0.01768448066711426, 0.017532928466796875, 0.017710975646972656, 0.017616479873657227, 0.017566240310668946, 0.01765171241760254, 0.017651487350463867, 0.017568992614746093, 0.017722368240356445, 0.017667295455932618, 0.017629535675048828, 0.017623071670532228, 0.017672607421875, 0.01761484718322754, 0.017550783157348634, 0.017700927734375, 0.017844640731811523, 0.017609952926635742, 0.01785683250427246, 0.01764819145202637, 0.018169599533081053, 0.017701087951660158, 0.017532928466796875, 0.017704736709594725, 0.017443071365356444, 0.017507776260375977, 0.01762486457824707, 
0.017492576599121092, 0.017542816162109374, 0.01760310363769531, 0.017612640380859374, 0.017538528442382812, 0.017483999252319336, 0.01758576011657715, 0.017876928329467773, 0.017652896881103514, 0.017649440765380858, 0.017702911376953127, 0.017514495849609374, 0.01971251106262207, 0.018228511810302734, 0.01810032081604004, 0.018188928604125975, 0.01776639938354492, 0.017760255813598632, 0.017952768325805665, 0.017688575744628905, 0.018028543472290038, 0.017670143127441407, 0.02002457618713379, 0.020568832397460938, 0.017804672241210937, 0.01752947235107422, 0.017587263107299803, 0.017445119857788086, 0.017508480072021486, 0.017973312377929686, 0.0179532470703125, 0.017846048355102537, 0.017533151626586915, 0.017537055969238283, 0.017606143951416017, 0.017936479568481444, 0.017531328201293946, 0.017588191986083985, 0.017532928466796875, 0.01775574493408203, 0.01749443244934082, 0.01760665512084961, 0.017479679107666016, 0.01741209602355957, 0.01770832061767578, 0.01759715270996094, 0.01743257522583008, 0.017489919662475584, 0.017688703536987305, 0.017520511627197265, 0.017579551696777343, 0.017617376327514648, 0.017487520217895507, 0.017494367599487304, 0.01743257522583008, 0.017515647888183595, 0.017535871505737304, 0.01743027114868164, 0.01765724754333496, 0.017468255996704103, 0.017270784378051757, 0.017364063262939454, 0.017398591995239257, 0.017340511322021485, 0.01725644874572754, 0.01725644874572754, 0.017346559524536134, 0.017391616821289063, 0.0174653434753418, 0.01743667221069336, 0.017522687911987304, 0.017657855987548828, 0.017517631530761718, 0.018043840408325195, 0.018311168670654295]",tokens/s,56.72521147462499,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in 
start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1516.470272,1531.904,0.0,1136.656384,1111.384576,s,1,8.6451328125,8.6451328125,0.0,8.6451328125,8.6451328125,8.6451328125,8.6451328125,[8.6451328125],,kWh,4.834146508333864e-05,5.307537180176209e-06,1.72866804960016e-05,7.093568275951646e-05,,MB,1583.77984,1601.110016,0.0,1193.279488,1029.126656,s,10,5.371971313476563,0.5371971313476562,0.0013318217229636975,0.53732275390625,0.5388457336425782,0.5390548858642578,0.5392222076416016,"[0.5392640380859375, 0.5355089721679688, 0.5380020751953125, 0.536339111328125, 0.537271240234375, 0.5346407470703125, 0.5373074951171875, 0.5387992553710937, 0.5373380126953125, 0.5375003662109376]",tokens/s,476.5475931671073,kWh,1.5963014583771957e-05,1.7604251500054838e-06,1.0564204357789467e-05,2.8287644091566905e-05,tokens/kWh,9049887.617764484,MB,1591.15264,1603.207168,0.0,1195.37664,1083.4944,s,10,10.96520068359375,1.096520068359375,0.007217433858219241,1.0955146484374998,1.1040343994140625,1.1077592041015625,1.1107390478515624,"[1.0915089111328125, 1.095031982421875, 1.084250244140625, 1.089641845703125, 1.1032066650390624, 1.0956351318359374, 1.1114840087890625, 1.10139501953125, 1.0976527099609374, 1.0953941650390624]",tokens/s,57.45448881228528,kWh,3.385353185497731e-05,3.7339509309515524e-06,1.7515929217410514e-05,5.510341200333938e-05,tokens/kWh,1143304.882757207,,s,630,10.962743856430054,0.01740118072449215,0.000302951764663302,0.017318960189819335,0.017811832809448244,0.01798636655807495,0.01838773988723755,"[0.018071903228759765, 0.017671455383300783, 0.018241312026977537, 0.01757689666748047, 0.017505407333374023, 0.01761087989807129, 0.01747020721435547, 0.01762486457824707, 0.017348672866821287, 0.017549375534057617, 0.017196640014648438, 0.017168479919433592, 0.017242528915405272, 0.0171144962310791, 0.017166015625, 0.017195648193359375, 0.017215232849121093, 0.017097280502319335, 0.017111040115356444, 0.017111040115356444, 0.017113088607788086, 0.01704140853881836, 0.017137664794921875, 0.017032768249511717, 0.017078720092773437, 0.01704140853881836, 0.017061567306518553, 0.01700486373901367, 0.017098367691040037, 0.01711552047729492, 0.017121280670166016, 0.01708790397644043, 0.017341024398803712, 0.0172728328704834, 0.017345792770385744, 0.017300224304199217, 0.017283071517944337, 0.017254400253295898, 0.017540895462036132, 0.01738947105407715, 0.017535263061523438, 0.01775209617614746, 0.01752239990234375, 0.017352800369262695, 0.01743276786804199, 0.01740540885925293, 0.01747420883178711, 0.01742425537109375, 0.017526784896850587, 0.01743257522583008, 0.017456287384033202, 0.017256736755371094, 0.01719353675842285, 0.01718230438232422, 0.017170848846435546, 0.017463424682617187, 0.01725632095336914, 0.017216991424560547, 0.01733603286743164, 0.01722800064086914, 0.017287776947021483, 0.017223039627075196, 0.017173120498657227, 0.01806537628173828, 0.017682432174682617, 0.017311744689941407, 0.017268735885620116, 0.017188800811767577, 0.017184831619262694, 0.017229824066162108, 0.01715590476989746, 0.017205408096313476, 0.017282751083374022, 0.01734223937988281, 0.017289567947387695, 0.017222944259643554, 0.017140256881713868, 0.017209760665893553, 0.017108991622924806, 0.01716419219970703, 0.017154144287109374, 0.017145856857299805, 0.017084415435791016, 0.017225439071655274, 0.017127712249755858, 0.017137664794921875, 0.01752992057800293, 0.01814409637451172, 0.018397279739379883, 0.017494016647338868, 0.017416128158569334, 0.0175350399017334, 0.017328128814697266, 
0.017313440322875975, 0.017422399520874023, 0.017344736099243165, 0.017336383819580078, 0.017397184371948243, 0.017340383529663085, 0.01752943992614746, 0.017333984375, 0.01751273536682129, 0.017473535537719728, 0.017442815780639647, 0.01745715141296387, 0.01742233657836914, 0.017349632263183593, 0.01734668731689453, 0.0174703369140625, 0.01723948860168457, 0.01721196746826172, 0.017338367462158204, 0.01764892768859863, 0.018029279708862304, 0.01740924835205078, 0.017453855514526367, 0.017481727600097655, 0.01726249694824219, 0.01723196792602539, 0.017315839767456053, 0.0172476806640625, 0.017279552459716796, 0.017367040634155274, 0.01719705581665039, 0.01721139144897461, 0.017612800598144532, 0.018023584365844728, 0.017535839080810547, 0.01742624092102051, 0.017234111785888673, 0.017168479919433592, 0.017219488143920898, 0.01724006462097168, 0.017196672439575195, 0.01747609519958496, 0.01711280059814453, 0.01715203285217285, 0.017141632080078125, 0.017112319946289062, 0.017085439682006837, 0.017113088607788086, 0.01720729637145996, 0.01722287940979004, 0.01717737579345703, 0.017200767517089845, 0.017031551361083985, 0.017203168869018556, 0.017102367401123048, 0.01715177536010742, 0.017150688171386718, 0.0172806396484375, 0.017283456802368164, 0.01739548873901367, 0.01726076889038086, 0.01719705581665039, 0.017194080352783202, 0.01718771171569824, 0.017100831985473634, 0.017124671936035157, 0.01710736083984375, 0.017185056686401367, 0.017176544189453125, 0.017127712249755858, 0.017153535842895508, 0.01714201545715332, 0.017059648513793945, 0.017204864501953125, 0.01722764778137207, 0.017811487197875977, 0.017113759994506837, 0.01715974426269531, 0.01704185676574707, 0.017118623733520508, 0.017220191955566407, 0.017146879196166993, 0.017081344604492187, 0.017160192489624023, 0.017152000427246093, 0.017246208190917968, 0.017243295669555663, 0.01728803253173828, 0.01709174346923828, 0.017159008026123048, 0.017141759872436522, 0.01722777557373047, 0.017176576614379883, 0.017125375747680666, 0.01707827186584473, 0.01713968086242676, 0.018052223205566407, 0.01759228706359863, 0.01737615966796875, 0.017348031997680664, 0.01732268714904785, 0.01728499221801758, 0.01715545654296875, 0.01712745666503906, 0.01708435249328613, 0.017124000549316405, 0.017188863754272463, 0.017119232177734374, 0.017115135192871094, 0.017115135192871094, 0.01720729637145996, 0.017127424240112304, 0.017270368576049806, 0.017136032104492188, 0.01761484718322754, 0.017102495193481445, 0.017850719451904296, 0.017221120834350585, 0.017433088302612306, 0.01742438316345215, 0.01725984001159668, 0.017287103652954102, 0.01736163139343262, 0.0174366397857666, 0.01745033645629883, 0.01754390335083008, 0.017434623718261717, 0.017391616821289063, 0.017480928421020506, 0.017554208755493163, 0.01746112060546875, 0.01743827247619629, 0.017272640228271484, 0.01727769660949707, 0.01732419204711914, 0.017424224853515625, 0.01721343994140625, 0.017516544342041016, 0.017121280670166016, 0.017049280166625977, 0.01732793617248535, 0.017252864837646483, 0.01728499221801758, 0.0173384952545166, 0.01733987236022949, 0.017168928146362304, 0.017136959075927733, 0.017213600158691406, 0.017098623275756834, 0.01705027198791504, 0.0170949764251709, 0.017094335556030273, 0.017106943130493164, 0.017293312072753905, 0.017291263580322267, 0.0172227840423584, 0.017136512756347658, 0.017118976593017577, 0.017126815795898438, 0.018485248565673826, 0.01760665512084961, 0.01720319938659668, 0.01723756790161133, 0.01717673683166504, 0.017258783340454102, 
0.017141727447509764, 0.01711836814880371, 0.017154016494750977, 0.01709542465209961, 0.01834614372253418, 0.017579296112060546, 0.017231712341308592, 0.01708121681213379, 0.017123327255249024, 0.017321184158325197, 0.017526784896850587, 0.01748227119445801, 0.017410655975341797, 0.01719606399536133, 0.017246847152709962, 0.01769267272949219, 0.01728335952758789, 0.017227487564086916, 0.01722572708129883, 0.017085504531860352, 0.017158432006835936, 0.017150623321533203, 0.01715193557739258, 0.017129535675048827, 0.017152000427246093, 0.01708201599121094, 0.01717692756652832, 0.017321983337402345, 0.017643007278442382, 0.017719999313354492, 0.017778495788574218, 0.017709056854248048, 0.01785379219055176, 0.017732255935668944, 0.01781760025024414, 0.017804479598999022, 0.017805408477783204, 0.01772755241394043, 0.01779574394226074, 0.01780531120300293, 0.017745439529418944, 0.017732063293457032, 0.017733631134033204, 0.017747167587280274, 0.017785152435302733, 0.017957088470458984, 0.017867263793945314, 0.01783782386779785, 0.017901216506958008, 0.017737888336181642, 0.017637567520141603, 0.017559551239013673, 0.01758415985107422, 0.01750217628479004, 0.01754854393005371, 0.01750502395629883, 0.017503583908081054, 0.01791801643371582, 0.017827327728271485, 0.01740451240539551, 0.017173759460449217, 0.017134239196777343, 0.017139711380004884, 0.01707827186584473, 0.017354751586914064, 0.017328479766845702, 0.017307296752929687, 0.017337568283081056, 0.01726748847961426, 0.01735443115234375, 0.0174466552734375, 0.017684255599975586, 0.01736390495300293, 0.01753487968444824, 0.017388959884643555, 0.01741423988342285, 0.017457599639892577, 0.01742438316345215, 0.017421344757080078, 0.0174748477935791, 0.017464096069335938, 0.017484703063964845, 0.017473440170288086, 0.01726473617553711, 0.017164287567138673, 0.01721548843383789, 0.017129472732543945, 0.017096864700317384, 0.018331071853637696, 0.017461664199829103, 0.017239839553833007, 0.01722390365600586, 0.017100799560546876, 0.017147775650024413, 0.017107072830200194, 0.017121280670166016, 0.017055744171142577, 0.01715782356262207, 0.017140031814575196, 0.017177824020385743, 0.017113887786865234, 0.01725619125366211, 0.017162303924560546, 0.017219072341918946, 0.017855167388916016, 0.0188272647857666, 0.01728102493286133, 0.017315839767456053, 0.017310911178588868, 0.017401952743530274, 0.01735875129699707, 0.017427263259887697, 0.017350656509399414, 0.01741823959350586, 0.017520639419555666, 0.017360895156860352, 0.017854463577270507, 0.017620384216308595, 0.017469919204711915, 0.017447040557861327, 0.01835385513305664, 0.017809791564941405, 0.017452032089233398, 0.017320032119750976, 0.01722051239013672, 0.017180192947387696, 0.017320415496826173, 0.017111040115356444, 0.017141759872436522, 0.017266687393188478, 0.017202880859375, 0.01722956848144531, 0.017360639572143555, 0.01715283203125, 0.01718681526184082, 0.017292287826538084, 0.017317888259887695, 0.01764614486694336, 0.017899967193603514, 0.017588224411010742, 0.017573888778686524, 0.017321119308471678, 0.017285984039306642, 0.017485567092895508, 0.017311296463012694, 0.01727964782714844, 0.017205535888671877, 0.01712892723083496, 0.017213312149047852, 0.017210912704467774, 0.017212127685546873, 0.017492128372192384, 0.017703168869018553, 0.017780479431152345, 0.017881088256835938, 0.01794867134094238, 0.018126848220825196, 0.01790140724182129, 0.01797318458557129, 0.017914079666137697, 0.017975296020507812, 0.018013248443603514, 0.0180982723236084, 0.01791472053527832, 
0.018052959442138673, 0.01795907211303711, 0.017946624755859376, 0.017995424270629883, 0.017968832015991212, 0.01804876708984375, 0.018364383697509767, 0.018135583877563477, 0.018501535415649414, 0.018157567977905274, 0.017862752914428712, 0.017656543731689452, 0.017690303802490235, 0.01801398468017578, 0.01779475212097168, 0.017834367752075194, 0.017626527786254884, 0.01734137535095215, 0.017297216415405273, 0.018059072494506837, 0.01771254348754883, 0.01744361686706543, 0.017299455642700197, 0.017164287567138673, 0.01714995193481445, 0.017156095504760743, 0.017211519241333007, 0.017474559783935546, 0.01742323112487793, 0.01744691276550293, 0.017354272842407228, 0.017275360107421874, 0.017133567810058595, 0.017396928787231446, 0.01717740821838379, 0.01726233673095703, 0.017133279800415038, 0.017121824264526367, 0.017182048797607423, 0.017261215209960937, 0.01722777557373047, 0.017296512603759764, 0.017326976776123045, 0.017253568649291992, 0.01723846435546875, 0.017242303848266603, 0.017282560348510743, 0.017650367736816407, 0.01762713623046875, 0.017618207931518554, 0.01752751922607422, 0.017505632400512696, 0.017426816940307618, 0.018227487564086913, 0.018348031997680665, 0.018647039413452148, 0.017680383682250975, 0.018019744873046875, 0.01774620819091797, 0.017766719818115236, 0.01755340766906738, 0.017438528060913085, 0.01785843276977539, 0.017565599441528322, 0.01748543930053711, 0.01745590400695801, 0.017495264053344728, 0.017227680206298827, 0.0171539192199707, 0.017162975311279298, 0.017139999389648438, 0.01715171241760254, 0.017313888549804687, 0.01728531265258789, 0.01742367935180664, 0.017254816055297852, 0.01729676818847656, 0.017177440643310546, 0.01710905647277832, 0.017880191802978517, 0.01988083267211914, 0.017346879959106446, 0.01783184051513672, 0.017525184631347657, 0.017344512939453126, 0.01738515281677246, 0.017242431640625, 0.018280288696289063, 0.01773923110961914, 0.01762348747253418, 0.017535232543945314, 0.01759436798095703, 0.017837312698364256, 0.017338720321655274, 0.017437088012695313, 0.017469440460205078, 0.01718272018432617, 0.017327680587768554, 0.017777088165283204, 0.01745715141296387, 0.017827840805053712, 0.017487360000610352, 0.017259008407592775, 0.017072128295898437, 0.017136640548706054, 0.017177183151245116, 0.017160608291625978, 0.017164287567138673, 0.017118816375732423, 0.01711555290222168, 0.017301055908203126, 0.01716473579406738, 0.017256799697875976, 0.017256128311157228, 0.017405920028686524, 0.017127424240112304, 0.017217504501342774, 0.017242143630981446, 0.017357919692993166, 0.017632160186767578, 0.01756979179382324, 0.01738956832885742, 0.017454751968383787, 0.01741606330871582, 0.017473760604858397, 0.01743017578125, 0.017472095489501953, 0.017467391967773437, 0.01741619110107422, 0.01740595245361328, 0.017516544342041016, 0.017373184204101562, 0.017434623718261717, 0.01743257522583008, 0.017462848663330078, 0.017842559814453124, 0.017413440704345702, 0.01725926399230957, 0.017292448043823242, 0.017383871078491212, 0.01725257682800293, 0.017368511199951173, 0.01747430419921875, 0.017438720703125, 0.017546911239624024, 0.018438207626342774, 0.017780672073364256, 0.017487871170043946, 0.01743052864074707, 0.017295360565185547, 0.017293312072753905, 0.01725644874572754, 0.017231647491455077, 0.017193183898925782, 0.017575935363769533, 0.017473535537719728, 0.017373184204101562, 0.01721139144897461, 0.01719705581665039, 0.01722083282470703, 0.017181472778320314, 0.017340063095092773, 0.017256736755371094, 0.017203264236450196, 
0.017094655990600584, 0.017310783386230467, 0.01718092727661133, 0.017407743453979493, 0.017252416610717775, 0.017240959167480467, 0.017309696197509765, 0.01753651237487793, 0.017418752670288085, 0.017814943313598633, 0.017590879440307617, 0.017547264099121093, 0.01724844741821289, 0.017250112533569336, 0.01718272018432617, 0.017252351760864256, 0.01720524787902832, 0.017315839767456053, 0.017207359313964842, 0.017289152145385744, 0.017252256393432617, 0.01744086456298828, 0.017215103149414063, 0.017277311325073243, 0.017108991622924806, 0.01718284797668457, 0.017245792388916017, 0.017448480606079102, 0.01757651138305664, 0.017518335342407227, 0.017438976287841797, 0.017479871749877928, 0.017518592834472657, 0.01747990417480469, 0.01756070327758789, 0.01747420883178711, 0.01745894432067871, 0.01742464065551758, 0.01766579246520996, 0.017550592422485353, 0.017503231048583985, 0.01744076728820801, 0.01750956726074219, 0.017289344787597655]",tokens/s,57.467364762926735,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 96.12 MiB is free. Process 291209 has 14.64 GiB memory in use. Of the allocated memory 14.23 GiB is allocated by PyTorch, and 323.24 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4088, in from_pretrained hf_quantizer.postprocess_model(model) File 
""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model return self._process_model_after_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 456, in post_init_awq_exllama_modules model = exllama_post_init(model) File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 133, in exllama_post_init submodule.post_init() File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 73, in post_init self.q4 = exl_ext.make_q4( RuntimeError: scales and qweight have incompatible shapes " 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1480.830976,1823.408128,0.0,1428.160512,1322.516992,s,1,9.0790751953125,9.0790751953125,0.0,9.0790751953125,9.0790751953125,9.0790751953125,9.0790751953125,[9.0790751953125],,kWh,4.854528277079074e-05,5.330303647538533e-06,1.8832515065986266e-05,7.270810148431555e-05,,MB,1523.392512,1842.282496,0.0,1434.451968,1320.892416,s,10,5.394534423828125,0.5394534423828125,0.0015562314511154997,0.5389523620605469,0.5410210266113281,0.542272232055664,0.5432731964111328,"[0.5435234375, 0.5383721313476563, 0.5386351318359375, 0.5384713745117188, 0.5396533203125, 0.5378704833984375, 0.5407429809570312, 0.5386964111328125, 0.53936083984375, 0.5392083129882812]",tokens/s,474.55439132842656,kWh,1.6081650892326186e-05,1.773542507303471e-06,1.0747742516316255e-05,2.8602935915945914e-05,tokens/kWh,8950130.180772178,MB,1527.717888,1842.282496,0.0,1434.451968,1373.031936,s,10,13.756556762695311,1.3756556762695311,0.008469377521908704,1.3762078857421876,1.3836558349609374,1.3867905639648437,1.3892983471679687,"[1.382959228515625, 1.3754798583984376, 1.3769359130859375, 1.38992529296875, 1.35745751953125, 1.374190673828125, 1.3677625732421874, 1.38070458984375, 1.3712379150390626, 1.3799031982421874]",tokens/s,45.79634358129633,kWh,3.9714110341008635e-05,4.380432631009805e-06,1.953750393408206e-05,6.36320469061005e-05,tokens/kWh,990067.1605451702,,s,630,13.754047189712523,0.021831820936051623,0.00044146758512300676,0.02171879959106445,0.022216588401794433,0.022392360305786133,0.02359489480972292,"[0.02289436721801758, 0.02641001510620117, 0.02230054473876953, 0.022132095336914064, 0.022047359466552733, 0.02209190368652344, 0.021983104705810545, 0.021936128616333008, 0.021938175201416017, 0.021868320465087892, 0.02159756851196289, 0.021566303253173828, 0.021570880889892577, 0.023116159439086913, 0.021795135498046875, 0.021960704803466798, 0.02183359909057617, 0.021584415435791017, 0.021737056732177733, 0.02147532844543457, 0.021600255966186522, 
0.021560319900512694, 0.02166681671142578, 0.021626880645751953, 0.02152448081970215, 0.02148761558532715, 0.02170585632324219, 0.021988224029541016, 0.021835775375366212, 0.02187059211730957, 0.02184806442260742, 0.02186454391479492, 0.022320575714111328, 0.02202003288269043, 0.022766111373901367, 0.02197212791442871, 0.022512447357177733, 0.023052288055419923, 0.021954591751098634, 0.021936128616333008, 0.02184806442260742, 0.02198121643066406, 0.02196847915649414, 0.021666175842285158, 0.02157766342163086, 0.0217457275390625, 0.021733375549316408, 0.02166988754272461, 0.021655168533325195, 0.021579456329345704, 0.02187129592895508, 0.021710687637329102, 0.021581632614135742, 0.02147324752807617, 0.02159654426574707, 0.021733375549316408, 0.021544960021972655, 0.021643264770507813, 0.02164735984802246, 0.02189689636230469, 0.02212486457824707, 0.02166969680786133, 0.021823680877685547, 0.02243132781982422, 0.021907743453979493, 0.021675359725952147, 0.021564064025878907, 0.02166374397277832, 0.0215644474029541, 0.02158060836791992, 0.021532768249511718, 0.021700672149658203, 0.021579296112060546, 0.02166831970214844, 0.02147942352294922, 0.02162073516845703, 0.02215936088562012, 0.021534719467163087, 0.021555200576782226, 0.0215285758972168, 0.021703680038452147, 0.021787647247314454, 0.021590015411376954, 0.02150156784057617, 0.02161497688293457, 0.021569536209106444, 0.021540864944458008, 0.02145884895324707, 0.021561439514160157, 0.021554431915283202, 0.021557024002075195, 0.021530879974365234, 0.021610847473144533, 0.021598495483398438, 0.021642911911010743, 0.021469535827636718, 0.02207753562927246, 0.022587392807006838, 0.022265247344970703, 0.02207548713684082, 0.021600191116333007, 0.02183558464050293, 0.021625600814819335, 0.022115488052368164, 0.0218570556640625, 0.021648639678955077, 0.02152454376220703, 0.021604736328125, 0.021706239700317383, 0.021705120086669923, 0.021682655334472656, 0.021727231979370116, 0.021741535186767576, 0.02167184066772461, 0.02182566452026367, 0.021755903244018555, 0.022105951309204102, 0.024666271209716796, 0.022394880294799805, 0.022292255401611328, 0.02208176040649414, 0.022323200225830078, 0.02266111946105957, 0.022039840698242188, 0.022088159561157228, 0.021897472381591798, 0.022680160522460937, 0.02209382438659668, 0.022060831069946288, 0.022089759826660157, 0.022026432037353515, 0.0219238395690918, 0.022007776260375977, 0.022054943084716797, 0.022024383544921877, 0.02183558464050293, 0.021809152603149414, 0.021651424407958985, 0.02171014404296875, 0.02156959915161133, 0.021765792846679687, 0.021804128646850586, 0.021823392868041993, 0.021740800857543947, 0.021697280883789062, 0.021594112396240234, 0.02150115203857422, 0.02163587188720703, 0.021659648895263672, 0.021550752639770507, 0.02146544075012207, 0.02150809669494629, 0.021600255966186522, 0.021691551208496095, 0.021652320861816406, 0.021708799362182618, 0.021960704803466798, 0.021968704223632812, 0.021813440322875976, 0.021927743911743163, 0.02194438362121582, 0.022171775817871095, 0.02202137565612793, 0.02188569641113281, 0.021804031372070314, 0.0220765438079834, 0.021831552505493165, 0.0218666877746582, 0.021956544876098633, 0.02182649612426758, 0.02208393669128418, 0.02175644874572754, 0.021608160018920897, 0.021500255584716795, 0.021572704315185546, 0.021681407928466796, 0.021769056320190428, 0.021764511108398436, 0.021688512802124024, 0.0237194881439209, 0.02326697540283203, 0.021738784790039063, 0.021693151473999025, 0.021609472274780273, 0.02164735984802246, 
0.021555200576782226, 0.021665056228637694, 0.02164556884765625, 0.021723615646362306, 0.022250591278076173, 0.021885215759277345, 0.022164096832275392, 0.02175551986694336, 0.021551488876342773, 0.021476800918579102, 0.021465311050415038, 0.02212019157409668, 0.0218100471496582, 0.02149081611633301, 0.02150163269042969, 0.02140457534790039, 0.02147123146057129, 0.021626720428466795, 0.021558687210083007, 0.02174847984313965, 0.02189107131958008, 0.022155263900756835, 0.022437887191772463, 0.022708223342895507, 0.022375839233398438, 0.022221343994140625, 0.02234988784790039, 0.022226943969726562, 0.02231737518310547, 0.022171327590942383, 0.022428991317749024, 0.02225632095336914, 0.02227596855163574, 0.022130815505981446, 0.022158624649047852, 0.02218671989440918, 0.02212620735168457, 0.02201366424560547, 0.022061695098876954, 0.022310848236083983, 0.022134880065917968, 0.02208358383178711, 0.022157344818115234, 0.022206655502319338, 0.022230239868164064, 0.022127168655395508, 0.021981184005737304, 0.022071647644042968, 0.02201753616333008, 0.02217385673522949, 0.021997568130493163, 0.022368255615234374, 0.022207775115966798, 0.02229487991333008, 0.022161792755126954, 0.021964384078979493, 0.022784128189086914, 0.022044960021972655, 0.022025983810424806, 0.022415456771850587, 0.022016096115112304, 0.022038591384887694, 0.022221824645996095, 0.022021120071411132, 0.021940223693847655, 0.021927871704101563, 0.021961984634399415, 0.022281152725219727, 0.02176540756225586, 0.021535455703735353, 0.02152448081970215, 0.02150297546386719, 0.021878015518188475, 0.021577119827270508, 0.02158585548400879, 0.021481536865234376, 0.021987680435180665, 0.021458080291748047, 0.021500768661499022, 0.021602304458618164, 0.021501951217651367, 0.021497024536132812, 0.02156422424316406, 0.02188083267211914, 0.021531999588012694, 0.021456703186035157, 0.021433408737182618, 0.021518112182617188, 0.02160153579711914, 0.02150783920288086, 0.0214866886138916, 0.021772159576416015, 0.02170992088317871, 0.021707712173461916, 0.0214835205078125, 0.021395456314086913, 0.021424287796020507, 0.02151203155517578, 0.02148524856567383, 0.021433696746826172, 0.021470176696777345, 0.021538816452026367, 0.02146505546569824, 0.021517568588256836, 0.02150480079650879, 0.021551103591918946, 0.021481792449951173, 0.02147225570678711, 0.02145964813232422, 0.021458879470825195, 0.021477439880371093, 0.021414976119995117, 0.021502687454223634, 0.021465311050415038, 0.021355712890625, 0.021416032791137695, 0.021500640869140625, 0.021450080871582032, 0.021507072448730468, 0.021616128921508788, 0.021558528900146486, 0.021502880096435546, 0.021524576187133788, 0.021425247192382812, 0.021625663757324217, 0.021559295654296876, 0.02144256019592285, 0.021452064514160155, 0.021465824127197265, 0.02146303939819336, 0.022627040863037108, 0.021825536727905274, 0.021417695999145506, 0.02146544075012207, 0.021523872375488282, 0.021460832595825194, 0.02149622344970703, 0.021353759765625, 0.02243881607055664, 0.021430303573608398, 0.02158585548400879, 0.021579904556274412, 0.021577407836914062, 0.02154323196411133, 0.021445823669433595, 0.021450847625732423, 0.021573856353759767, 0.021508415222167968, 0.021461183547973633, 0.02141798400878906, 0.021893280029296875, 0.023903488159179687, 0.02158064079284668, 0.021572864532470704, 0.021591552734375, 0.02170572853088379, 0.021655647277832032, 0.021774240493774414, 0.021790719985961913, 0.02207744026184082, 0.02590105628967285, 0.022167295455932618, 0.022152576446533203, 0.022475648880004882, 
0.022040639877319336, 0.022054079055786133, 0.021960800170898437, 0.021674655914306642, 0.02171494483947754, 0.02146303939819336, 0.021536928176879883, 0.021671775817871095, 0.021670976638793946, 0.021533632278442384, 0.02149344062805176, 0.02152275276184082, 0.02147635269165039, 0.021445632934570313, 0.021476831436157227, 0.022389280319213868, 0.02149488067626953, 0.021566368103027343, 0.021517440795898436, 0.021838272094726562, 0.021801599502563475, 0.02158777618408203, 0.022114112854003908, 0.02214726448059082, 0.02188083267211914, 0.021683616638183592, 0.02159676742553711, 0.021571584701538086, 0.021609823226928712, 0.022196287155151366, 0.021909408569335938, 0.021610591888427736, 0.021492767333984374, 0.021428512573242187, 0.021520063400268553, 0.021572608947753907, 0.021522432327270507, 0.02156716728210449, 0.02149407958984375, 0.02147327995300293, 0.021514240264892577, 0.02150339126586914, 0.021535263061523438, 0.021891136169433594, 0.02195235252380371, 0.021743776321411133, 0.021798976898193358, 0.021669824600219725, 0.021499807357788087, 0.02155276870727539, 0.021467424392700194, 0.021532447814941406, 0.021431711196899413, 0.02147020721435547, 0.021379072189331053, 0.02147737693786621, 0.02147020721435547, 0.021511167526245118, 0.021618623733520508, 0.021727296829223634, 0.021630975723266603, 0.0216407356262207, 0.021602783203125, 0.02164121627807617, 0.021649728775024413, 0.02163475227355957, 0.021771520614624024, 0.021678207397460937, 0.02166579246520996, 0.021658239364624025, 0.021544960021972655, 0.021659648895263672, 0.02168627166748047, 0.02162483215332031, 0.0216760311126709, 0.021640512466430666, 0.02198294448852539, 0.021563711166381835, 0.02144937515258789, 0.02149113655090332, 0.021434112548828124, 0.021467967987060545, 0.02190540885925293, 0.02328985595703125, 0.022039871215820312, 0.021566144943237303, 0.02211020851135254, 0.021532928466796875, 0.021443552017211914, 0.02188163185119629, 0.023802879333496094, 0.022623231887817383, 0.022318431854248047, 0.022086591720581056, 0.022245376586914063, 0.022183359146118162, 0.022246015548706054, 0.02215116882324219, 0.02221670341491699, 0.02207472038269043, 0.02219468879699707, 0.022210464477539063, 0.022181760787963866, 0.02220070457458496, 0.021989376068115234, 0.021714336395263673, 0.021626815795898438, 0.021671808242797852, 0.021811071395874022, 0.022137760162353515, 0.022337087631225584, 0.022216575622558594, 0.021805631637573243, 0.02196512031555176, 0.02164908790588379, 0.02180905532836914, 0.021700511932373046, 0.02159404754638672, 0.021647615432739256, 0.02167398452758789, 0.0219237117767334, 0.021808832168579102, 0.021917535781860353, 0.021912128448486327, 0.02225155258178711, 0.021972991943359374, 0.02191974449157715, 0.02163711929321289, 0.021576927185058593, 0.021553279876708985, 0.02153334426879883, 0.021594112396240234, 0.02157360076904297, 0.02152787208557129, 0.021597183227539063, 0.02153388786315918, 0.021475008010864258, 0.02168284797668457, 0.02156563186645508, 0.021452192306518555, 0.021510080337524416, 0.021813631057739258, 0.021885183334350584, 0.022249343872070313, 0.022140832901000978, 0.022128896713256838, 0.022245376586914063, 0.02210767936706543, 0.02203081512451172, 0.022171648025512695, 0.022263744354248046, 0.022082944869995118, 0.022080448150634764, 0.022123903274536134, 0.021938560485839843, 0.02262828826904297, 0.022160160064697267, 0.021970655441284178, 0.021778976440429688, 0.02172083282470703, 0.021651456832885742, 0.02163711929321289, 0.021966848373413086, 0.02239897537231445, 
0.02191276741027832, 0.021852447509765626, 0.021764223098754882, 0.021811456680297853, 0.02170275115966797, 0.021665855407714842, 0.021777631759643555, 0.021779232025146485, 0.021741567611694337, 0.02162646484375, 0.022229087829589843, 0.021588287353515624, 0.021755456924438477, 0.02200783920288086, 0.0217174072265625, 0.021508256912231447, 0.021607999801635743, 0.021682464599609375, 0.021612543106079102, 0.021696287155151366, 0.02153606414794922, 0.021611423492431642, 0.02149567985534668, 0.02144883155822754, 0.02161065673828125, 0.02153251266479492, 0.021492031097412108, 0.021582752227783202, 0.021490463256835936, 0.0215285758972168, 0.021555200576782226, 0.02156745529174805, 0.021622079849243164, 0.021668479919433593, 0.021583520889282226, 0.02168876838684082, 0.021757951736450197, 0.021760000228881835, 0.02176185607910156, 0.02176838493347168, 0.02169036865234375, 0.02165760040283203, 0.021772064208984376, 0.021680351257324218, 0.02179017639160156, 0.02185641670227051, 0.021922176361083984, 0.021908544540405275, 0.021994272232055665, 0.021931903839111328, 0.02201772880554199, 0.02192854309082031, 0.02194972801208496, 0.021872928619384766, 0.022202783584594727, 0.021967103958129883, 0.021839872360229492, 0.022194175720214843, 0.02267750358581543, 0.02230067253112793, 0.022144704818725585, 0.02216569519042969, 0.02201612854003906, 0.021900768280029296, 0.021923999786376953, 0.02213052749633789, 0.022867712020874023, 0.022124671936035158, 0.022082559585571288, 0.022183584213256835, 0.022214656829833986, 0.022155263900756835, 0.02188287925720215, 0.021855968475341797, 0.021924127578735353, 0.021847679138183595, 0.021730783462524415, 0.021699039459228516, 0.021714815139770506, 0.021692991256713867, 0.021811199188232423, 0.021720191955566407, 0.021552000045776367, 0.02145894432067871, 0.021573888778686524, 0.02181497573852539, 0.021726560592651368, 0.02166815948486328, 0.021657695770263673, 0.02175820732116699, 0.021725248336791993, 0.021764095306396485, 0.021793888092041015, 0.021810079574584963, 0.021673919677734375, 0.021553216934204103, 0.021563392639160156, 0.02184163284301758, 0.02173753547668457, 0.021758176803588866, 0.02167807960510254, 0.021647071838378905, 0.02166422462463379, 0.02158777618408203, 0.021834943771362306, 0.02431830406188965, 0.02178611183166504, 0.02158243179321289, 0.021534175872802735, 0.02164374351501465, 0.021731584548950196, 0.02194246482849121, 0.021828704833984375, 0.021805984497070312, 0.021892160415649415, 0.021904512405395506, 0.02184582328796387]",tokens/s,45.80469961388636,,, 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1551.089664,1823.408128,0.0,1428.160512,1322.516992,s,1,8.658736328125,8.658736328125,0.0,8.658736328125,8.658736328125,8.658736328125,8.658736328125,[8.658736328125],,kWh,4.8651675595783675e-05,5.3587232173526815e-06,1.8511681475999464e-05,7.252208028913582e-05,,MB,1581.109248,1842.282496,0.0,1434.451968,1320.892416,s,10,5.406551818847656,0.5406551818847657,0.0018826069571755957,0.5399053039550781,0.5420017639160156,0.5439743316650391,0.5455523858642578,"[0.5459468994140625, 0.5397939453125, 0.5397861938476562, 0.5399390258789063, 0.5397224731445313, 0.5403477783203126, 0.5389154663085938, 0.53987158203125, 0.5415634155273438, 0.5406650390625]",tokens/s,473.49957713817565,kWh,1.6123777890129133e-05,1.778195864228297e-06,1.0764438436105498e-05,2.8666412190462927e-05,tokens/kWh,8930311.833204193,MB,1588.71552,1842.282496,0.0,1434.451968,1373.031936,s,10,13.761650512695315,1.3761650512695311,0.012817914920764084,1.3816153564453124,1.388759130859375,1.3899785034179688,1.3909540014648436,"[1.387599609375, 1.36535986328125, 1.362013427734375, 1.3884881591796876, 1.3596466064453125, 1.3572025146484374, 1.3869117431640625, 1.3838553466796875, 1.3793753662109376, 1.3911978759765624]",tokens/s,45.7793924804889,kWh,3.9648751469035636e-05,4.373167839454321e-06,1.952119690349422e-05,6.354311621198417e-05,tokens/kWh,991452.7923029098,,s,630,13.758981756210328,0.021839653581286235,0.00047273162063906005,0.021813504219055174,0.022227411842346192,0.022362587451934812,0.023115124950408936,"[0.022732927322387696, 0.02229055976867676, 0.022194175720214843, 0.02215488052368164, 0.022108543395996095, 0.02215116882324219, 0.02215884780883789, 0.022047231674194336, 0.022034431457519533, 0.022158624649047852, 0.022198047637939453, 0.02199852752685547, 0.022642303466796875, 0.02233996772766113, 0.0221200008392334, 0.022086080551147462, 0.021977088928222657, 0.021987327575683592, 0.021929983139038087, 0.021877983093261718, 0.021990175247192382, 0.022074367523193358, 0.02205183982849121, 0.02214297676086426, 0.02205286407470703, 0.022256927490234377, 0.022110944747924806, 0.0218603515625, 0.0218984317779541, 0.021882944107055664, 0.02193280029296875, 0.021741567611694337, 0.02198944091796875, 0.021686208724975585, 0.02184169578552246, 0.02183788871765137, 0.02205302429199219, 0.022338623046875, 0.021892032623291015, 0.02186240005493164, 0.021952512741088868, 0.022035808563232423, 0.022062847137451172, 0.022008544921875, 0.022433183670043946, 0.022162208557128905, 0.02211020851135254, 0.02181475257873535, 0.02178236770629883, 0.021785280227661134, 0.021991424560546875, 0.021917695999145507, 0.021968896865844727, 0.021804767608642577, 0.021894655227661132, 0.02188368034362793, 0.021996959686279297, 0.021502559661865234, 0.022294015884399415, 0.02218239974975586, 0.021835775375366212, 0.02166169548034668, 0.02159414482116699, 0.022390560150146486, 0.021788543701171875, 0.02145948791503906, 0.02142799949645996, 0.021549535751342774, 0.02152556800842285, 0.021461952209472657, 0.021558624267578125, 0.021744287490844727, 0.021548128128051756, 0.02155641555786133, 0.021513952255249023, 0.02176326370239258, 0.021537599563598634, 0.02156883239746094, 0.02148624038696289, 0.02143235206604004, 0.021972383499145508, 0.021589599609375, 0.021505023956298826, 0.021550304412841798, 0.021590688705444335, 0.021573631286621094, 0.02153615951538086, 0.02148137664794922, 0.02144499206542969, 0.02152707290649414, 0.021484479904174805, 0.021455072402954103, 
0.021498207092285157, 0.021509599685668946, 0.02144268798828125, 0.021506879806518556, 0.021865535736083984, 0.021571968078613283, 0.021571744918823244, 0.021923871994018556, 0.021454816818237306, 0.02149622344970703, 0.02151219177246094, 0.021716991424560548, 0.021843967437744142, 0.02159008026123047, 0.021374624252319337, 0.021610784530639648, 0.021411840438842773, 0.02143020820617676, 0.021456064224243163, 0.021568384170532227, 0.021540864944458008, 0.021626880645751953, 0.028711999893188477, 0.021480384826660155, 0.02139311981201172, 0.021430559158325195, 0.021357791900634766, 0.02151641654968262, 0.021504255294799806, 0.021391775131225584, 0.0214835205078125, 0.021388639450073244, 0.021406368255615236, 0.02151628875732422, 0.022323392868041993, 0.021788671493530275, 0.021395456314086913, 0.02134601593017578, 0.02140166473388672, 0.021393632888793944, 0.021426111221313476, 0.021405759811401366, 0.021536447525024413, 0.021333696365356446, 0.02143824005126953, 0.02131235122680664, 0.021409631729125977, 0.021681535720825196, 0.021392160415649415, 0.02147737693786621, 0.021553152084350585, 0.021355712890625, 0.021819360733032228, 0.02155196762084961, 0.02147327995300293, 0.021397247314453124, 0.021975072860717773, 0.0214932804107666, 0.021492351531982423, 0.02139491271972656, 0.02139187240600586, 0.02141606330871582, 0.021422048568725587, 0.021429952621459962, 0.02146131134033203, 0.021401472091674804, 0.02133932876586914, 0.02142617607116699, 0.021398271560668945, 0.021440992355346678, 0.021495264053344728, 0.02139084815979004, 0.021424896240234376, 0.02159187126159668, 0.02139308738708496, 0.021527040481567384, 0.02146672058105469, 0.021402015686035156, 0.021566495895385743, 0.021586912155151367, 0.021729280471801758, 0.02133795166015625, 0.021384864807128905, 0.021385440826416014, 0.021597471237182617, 0.021590944290161132, 0.02170844841003418, 0.02198476791381836, 0.02207427215576172, 0.02220444869995117, 0.022140928268432617, 0.022216384887695312, 0.022279903411865233, 0.02229216003417969, 0.02230521583557129, 0.022499296188354494, 0.02236672019958496, 0.02249932861328125, 0.022130624771118164, 0.022227008819580077, 0.022287519454956054, 0.0221210880279541, 0.021995712280273437, 0.0220098876953125, 0.023076864242553712, 0.021917695999145507, 0.021898880004882812, 0.02189072036743164, 0.021869440078735352, 0.021883968353271485, 0.021932832717895506, 0.021812768936157228, 0.021905311584472655, 0.02187321662902832, 0.021942272186279296, 0.021977088928222657, 0.02183286476135254, 0.021875551223754883, 0.021803007125854493, 0.021857280731201172, 0.02189411163330078, 0.022249216079711913, 0.021819679260253907, 0.021970943450927736, 0.022188032150268554, 0.02192793655395508, 0.021876735687255858, 0.02191360092163086, 0.021962751388549806, 0.02187059211730957, 0.021816991806030275, 0.022091264724731444, 0.021862592697143555, 0.022067520141601564, 0.021899839401245118, 0.021927711486816406, 0.022129728317260743, 0.021965408325195314, 0.02183612823486328, 0.021805055618286134, 0.02197491264343262, 0.02208576011657715, 0.022122495651245116, 0.021991424560546875, 0.022486047744750978, 0.023318784713745117, 0.022468639373779298, 0.021936832427978517, 0.021925888061523437, 0.022003456115722655, 0.022155519485473632, 0.02228223991394043, 0.021978431701660157, 0.022098560333251954, 0.022263103485107422, 0.021723264694213866, 0.0216408634185791, 0.022010847091674803, 0.022099967956542968, 0.021968896865844727, 0.02210358428955078, 0.021953216552734377, 0.021868064880371095, 0.021566112518310546, 
0.021503231048583985, 0.021520639419555666, 0.021477888107299805, 0.021386592864990235, 0.02145552062988281, 0.021403648376464843, 0.021370336532592772, 0.021404191970825194, 0.021360288619995116, 0.021329504013061523, 0.021325664520263674, 0.021418912887573242, 0.021448703765869142, 0.021381120681762695, 0.021481184005737303, 0.02135478401184082, 0.02147737693786621, 0.021376031875610352, 0.022205215454101562, 0.021833087921142576, 0.022274784088134766, 0.02146108818054199, 0.02161039924621582, 0.021391456604003906, 0.021389312744140625, 0.021559007644653322, 0.021623071670532228, 0.02135759925842285, 0.021814239501953124, 0.02154697608947754, 0.021538719177246094, 0.021579904556274412, 0.021514240264892577, 0.02149580764770508, 0.02135183906555176, 0.02129078483581543, 0.02143836784362793, 0.02127964782714844, 0.02170675277709961, 0.022154943466186523, 0.021844287872314454, 0.021556863784790038, 0.021458976745605467, 0.0215978889465332, 0.02149033546447754, 0.021706560134887695, 0.021409984588623046, 0.021489664077758788, 0.021397504806518555, 0.021728319168090822, 0.02165241622924805, 0.021743616104125976, 0.02282700729370117, 0.021932031631469725, 0.02149580764770508, 0.021211135864257814, 0.021394975662231447, 0.02148963165283203, 0.02158847999572754, 0.022292287826538085, 0.021743711471557618, 0.021350496292114256, 0.02128281593322754, 0.02141798400878906, 0.021569408416748048, 0.021389696121215822, 0.021673055648803712, 0.021422752380371092, 0.021352447509765626, 0.02149068832397461, 0.021351423263549805, 0.02167919921875, 0.021492095947265626, 0.021463584899902344, 0.02156300735473633, 0.021485855102539062, 0.02147689628601074, 0.021402175903320313, 0.021602304458618164, 0.02167788887023926, 0.02170460891723633, 0.021507808685302734, 0.021553695678710936, 0.02134223937988281, 0.021387231826782226, 0.02143951988220215, 0.021351039886474608, 0.021451200485229492, 0.021409727096557616, 0.02146099281311035, 0.021391359329223633, 0.021348127365112303, 0.02150147247314453, 0.021559776306152342, 0.0215185604095459, 0.021406015396118163, 0.021614271163940428, 0.021630975723266603, 0.021581279754638673, 0.02155695915222168, 0.021373247146606444, 0.021422592163085938, 0.021366783142089844, 0.021428224563598632, 0.021436128616333008, 0.021448991775512696, 0.021366783142089844, 0.021421695709228517, 0.021481855392456055, 0.021444351196289062, 0.02144895935058594, 0.021583871841430666, 0.02146713638305664, 0.021853952407836913, 0.021426111221313476, 0.021565759658813476, 0.02155107116699219, 0.022298656463623046, 0.021936128616333008, 0.02188697624206543, 0.021966848373413086, 0.0218603515625, 0.022382848739624022, 0.022038816452026367, 0.02203209686279297, 0.021842208862304688, 0.021766143798828123, 0.02169148826599121, 0.02182032012939453, 0.02187059211730957, 0.021991424560546875, 0.022155263900756835, 0.02230659294128418, 0.022089696884155272, 0.022496736526489258, 0.02226460838317871, 0.022221887588500976, 0.02218079948425293, 0.022231039047241212, 0.02235753631591797, 0.02220460891723633, 0.02210812759399414, 0.022091392517089845, 0.02233145523071289, 0.022061695098876954, 0.021968832015991212, 0.022100351333618165, 0.022066303253173828, 0.022179391860961913, 0.02209689521789551, 0.022101280212402343, 0.02207606315612793, 0.022141088485717775, 0.02204457664489746, 0.02209587287902832, 0.022033727645874024, 0.02186310386657715, 0.021941919326782227, 0.0217030086517334, 0.021700607299804688, 0.021730783462524415, 0.02182601547241211, 0.021897279739379882, 0.021961023330688476, 
0.02224892807006836, 0.02221897506713867, 0.021992895126342775, 0.021899839401245118, 0.02185215950012207, 0.021723072052001954, 0.02176211166381836, 0.022271999359130858, 0.021953567504882813, 0.021851104736328127, 0.021839391708374022, 0.021895647048950195, 0.02173891258239746, 0.02186422348022461, 0.02186854362487793, 0.022131519317626955, 0.02197465515136719, 0.021764480590820312, 0.021946367263793946, 0.021825088500976562, 0.021975488662719728, 0.02232150459289551, 0.022534208297729494, 0.02195382308959961, 0.021738208770751954, 0.021739519119262696, 0.021570655822753908, 0.02169036865234375, 0.021733503341674804, 0.021594623565673828, 0.02169990348815918, 0.021857248306274415, 0.02216307258605957, 0.022047103881835936, 0.022265951156616212, 0.02210758399963379, 0.02241539192199707, 0.02245849609375, 0.022432064056396483, 0.022206464767456056, 0.022109600067138673, 0.022280799865722657, 0.022203903198242187, 0.02214246368408203, 0.022150144577026368, 0.021938175201416017, 0.021893119812011717, 0.021798912048339843, 0.02194793510437012, 0.021952543258666992, 0.021936288833618166, 0.02209542465209961, 0.021946720123291016, 0.022083967208862305, 0.021876735687255858, 0.02190745544433594, 0.021716991424560548, 0.021776384353637695, 0.021677343368530274, 0.02213475227355957, 0.021623008728027342, 0.0216724796295166, 0.021675264358520508, 0.02178643226623535, 0.02196575927734375, 0.02199942398071289, 0.022276063919067383, 0.02212681579589844, 0.02251366424560547, 0.022112159729003905, 0.022044767379760744, 0.021982303619384767, 0.022094335556030274, 0.0220546875, 0.02185241508483887, 0.02199795150756836, 0.02193222427368164, 0.021753664016723632, 0.02184601593017578, 0.021763296127319337, 0.021762624740600586, 0.021559167861938476, 0.021690336227416993, 0.02143395233154297, 0.02251363182067871, 0.022097759246826172, 0.02167344093322754, 0.021642112731933595, 0.021679807662963867, 0.022042367935180666, 0.022069280624389648, 0.021778976440429688, 0.021812671661376952, 0.022970592498779297, 0.022690271377563475, 0.021696224212646484, 0.021788576126098632, 0.021741823196411134, 0.021729280471801758, 0.02179852867126465, 0.0217706241607666, 0.021802400588989256, 0.021889503479003907, 0.02537295913696289, 0.022071136474609374, 0.022003231048583986, 0.021711328506469726, 0.021932031631469725, 0.022697984695434572, 0.021491424560546875, 0.023031967163085938, 0.02153875160217285, 0.021563232421875, 0.02159222412109375, 0.021632352828979493, 0.021604192733764647, 0.021445280075073243, 0.021592416763305665, 0.021596160888671875, 0.021736928939819336, 0.021794815063476563, 0.02173187255859375, 0.022340639114379883, 0.021736160278320312, 0.021530879974365234, 0.02149580764770508, 0.022005151748657227, 0.021895551681518556, 0.021725343704223632, 0.02161235237121582, 0.021616416931152342, 0.021498016357421875, 0.02161257553100586, 0.021661983489990235, 0.021651456832885742, 0.022063039779663087, 0.022046592712402342, 0.021790304183959962, 0.021865055084228514, 0.02168832015991211, 0.021784576416015625, 0.021704704284667968, 0.021594112396240234, 0.021761215209960938, 0.021719871520996095, 0.02165328025817871, 0.021745887756347657, 0.022271999359130858, 0.02210406494140625, 0.02221571159362793, 0.022119392395019533, 0.022124544143676757, 0.021991519927978515, 0.021970848083496093, 0.02190745544433594, 0.02203225517272949, 0.02192313575744629, 0.02266604804992676, 0.021987327575683592, 0.022017183303833007, 0.02194927978515625, 0.021976543426513673, 0.022034975051879884, 0.021966848373413086, 
0.02205695915222168, 0.022153215408325197, 0.022058176040649413, 0.022296960830688477, 0.022066719055175783, 0.02186537551879883, 0.021855520248413085, 0.021960575103759764, 0.0218438720703125, 0.021783775329589843, 0.021744960784912108, 0.02162719917297363, 0.021547071456909178, 0.02160438346862793, 0.021643264770507813, 0.02161769676208496, 0.021674240112304687, 0.021815519332885742, 0.021922304153442384, 0.021978559494018553, 0.02203001594543457, 0.02213158416748047, 0.022132415771484375, 0.022081855773925782, 0.023324031829833985, 0.02313075256347656, 0.023174816131591797, 0.022206815719604492, 0.022177791595458983, 0.0221343994140625, 0.02210380744934082, 0.021926528930664064, 0.021745664596557617, 0.021782527923583983, 0.02185558319091797, 0.021990047454833985, 0.021946367263793946, 0.021972991943359374, 0.021766143798828123, 0.022494527816772462, 0.0219202880859375, 0.021719200134277344, 0.02168627166748047, 0.02186444854736328, 0.022248767852783204, 0.024951488494873046]",tokens/s,45.788272065673745,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 26351 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,6652.2112,9171.828736,0.0,8776.58112,8188.314112,s,1,13.7148662109375,13.7148662109375,0.0,13.7148662109375,13.7148662109375,13.7148662109375,13.7148662109375,[13.7148662109375],,kWh,0.00019776101324584942,2.180726305868196e-05,8.863895979999148e-05,0.0003082072361045229,,MB,1611.063296,9184.411648,0.0,8776.58112,7663.089664,s,10,52.020645507812496,5.20206455078125,0.004295593668816398,5.203471435546875,5.2058587890625,5.20640107421875,5.20683490234375,"[5.1930009765625, 5.1960810546875, 5.19998583984375, 5.20151123046875, 5.20285498046875, 5.204087890625, 5.2052236328125, 5.20521826171875, 5.206943359375, 5.20573828125]",tokens/s,49.21123094513576,kWh,0.000151715859933751,1.673465758453569e-05,0.00010085163623680116,0.00026930215375508784,tokens/kWh,950605.0970272401,MB,1615.388672,9184.411648,0.0,8776.58112,7906.289152,s,10,27.204001953125,2.7204001953125,0.005295004331255583,2.719349853515625,2.7275160644531247,2.7290044921875,2.730195234375,"[2.730492919921875, 2.720167236328125, 2.72054541015625, 2.712332275390625, 2.718532470703125, 2.7180791015625, 2.718348876953125, 2.7242158203125, 
2.727185302734375, 2.7141025390625]",tokens/s,23.15835740217737,kWh,7.942551686416889e-05,8.759270436297493e-06,5.2842431162797285e-05,0.00014102721846326376,tokens/kWh,446722.2759300957,,s,630,27.200278923034652,0.04317504590957884,0.001029309602478756,0.04311952018737793,0.04354717712402344,0.04367663974761963,0.044888739929199215,"[0.044818687438964847, 0.04318556976318359, 0.042661857604980466, 0.04239555358886719, 0.04245932769775391, 0.04239238357543945, 0.04240758514404297, 0.04236528015136719, 0.04238934326171875, 0.04231129455566406, 0.04267472076416016, 0.042624542236328125, 0.04263103866577148, 0.042783329010009766, 0.042718910217285154, 0.04242243194580078, 0.042463390350341794, 0.042602497100830077, 0.0427334098815918, 0.042920097351074216, 0.042807296752929686, 0.042788864135742184, 0.043043903350830075, 0.04311750411987305, 0.043179264068603514, 0.04288131332397461, 0.04304291152954102, 0.04286880111694336, 0.042764190673828126, 0.04274198532104492, 0.04274335861206055, 0.04271142578125, 0.042905857086181644, 0.04291936111450195, 0.04287539291381836, 0.043031841278076174, 0.042961536407470705, 0.042874366760253906, 0.042887775421142575, 0.04275820922851563, 0.06539222717285156, 0.04241206359863281, 0.04287731170654297, 0.04307558441162109, 0.043268096923828124, 0.04322889709472656, 0.043165313720703126, 0.04331996917724609, 0.043304576873779296, 0.04323113632202148, 0.04316937637329102, 0.04300479888916016, 0.043153408050537106, 0.0430439682006836, 0.04330995178222656, 0.04322304153442383, 0.04343711853027344, 0.0434079360961914, 0.0433790397644043, 0.04329856109619141, 0.04425142288208008, 0.044365825653076174, 0.04395212936401367, 0.04469139099121094, 0.043185344696044924, 0.04275289535522461, 0.042610527038574215, 0.04248710250854492, 0.04231439971923828, 0.04239583969116211, 0.04247504043579101, 0.04245888137817383, 0.042500831604003905, 0.04270284652709961, 0.04273152160644531, 0.043069438934326174, 0.0466874885559082, 0.04258483123779297, 0.04294652938842773, 0.0431426887512207, 0.0437314567565918, 0.04306534576416016, 0.04292800140380859, 0.04302451324462891, 0.043251712799072264, 0.04347289657592773, 0.044889408111572264, 0.043261920928955075, 0.043052833557128904, 0.042955711364746095, 0.04319641494750977, 0.04309196853637695, 0.04300115203857422, 0.04294521713256836, 0.04299980926513672, 0.043044864654541014, 0.04322099304199219, 0.04318207931518555, 0.04308777618408203, 0.043259998321533204, 0.04319641494750977, 0.04310419082641601, 0.043164798736572266, 0.043117279052734374, 0.04301846313476562, 0.0429854736328125, 0.043194366455078126, 0.04326604843139648, 0.04340262222290039, 0.04330950546264648, 0.04329081726074219, 0.043337535858154294, 0.04318220901489258, 0.043179393768310544, 0.043162303924560545, 0.043019454956054685, 0.04294960021972656, 0.04308518218994141, 0.04320099258422851, 0.04322089767456055, 0.043120193481445315, 0.04331164932250976, 0.043294689178466794, 0.043431968688964845, 0.04343555068969727, 0.04349359893798828, 0.045954593658447264, 0.04321571350097656, 0.042686561584472656, 0.04260240173339844, 0.04250783920288086, 0.042396095275878905, 0.04259430313110352, 0.04260454559326172, 0.042471424102783206, 0.04243791961669922, 0.04267475128173828, 0.043077056884765624, 0.04263302230834961, 0.04262742233276367, 0.042774879455566406, 0.04279318237304688, 0.043371742248535156, 0.042711841583251954, 0.04295459365844727, 0.042731422424316406, 0.04287513732910156, 0.043081729888916016, 0.043169792175292966, 0.04296089553833008, 0.04301824188232422, 
0.04301126480102539, 0.042875713348388675, 0.04285638427734375, 0.04290351867675781, 0.04292617416381836, 0.04300172805786133, 0.04277670288085938, 0.04258822250366211, 0.04273740768432617, 0.042894561767578124, 0.042863582611083986, 0.042913791656494144, 0.04302627182006836, 0.04298688125610352, 0.043837440490722655, 0.04973852920532226, 0.042571136474609375, 0.043243263244628904, 0.043398017883300784, 0.04322873687744141, 0.04338035202026367, 0.043338272094726564, 0.043256095886230465, 0.04330873489379883, 0.04333804702758789, 0.04362041473388672, 0.04332128143310547, 0.043174049377441404, 0.04308268737792969, 0.04338188934326172, 0.043450145721435546, 0.04333324813842773, 0.04328275299072266, 0.04324723052978516, 0.04338937759399414, 0.04359360122680664, 0.043910560607910154, 0.04346748733520508, 0.044711071014404295, 0.043092830657958985, 0.042708606719970704, 0.04256204986572266, 0.04248767852783203, 0.04247449493408203, 0.0427407341003418, 0.04258611297607422, 0.042627071380615236, 0.04251846313476562, 0.04239558410644531, 0.04252060699462891, 0.04258415985107422, 0.0425082893371582, 0.04279430389404297, 0.04303737640380859, 0.042866687774658206, 0.04270073699951172, 0.04267628860473633, 0.04267827224731445, 0.04280319976806641, 0.042858592987060545, 0.043224990844726564, 0.043245567321777346, 0.04317184066772461, 0.04290150451660156, 0.042872833251953124, 0.04278988647460937, 0.04284928131103516, 0.04300815963745117, 0.043040607452392576, 0.04281760025024414, 0.04310214233398438, 0.04286054229736328, 0.0429752311706543, 0.04298076629638672, 0.04290204620361328, 0.043060928344726565, 0.04306367874145508, 0.04298137664794922, 0.04295065689086914, 0.04322099304199219, 0.04331315231323242, 0.043263233184814454, 0.04352486419677734, 0.043491329193115234, 0.0433616943359375, 0.043298847198486326, 0.04332191848754883, 0.04305920028686523, 0.04320012664794922, 0.04355340957641601, 0.043599006652832034, 0.043517822265625, 0.043375328063964845, 0.04328611373901367, 0.043173343658447265, 0.04349004745483399, 0.04346303939819336, 0.04349747085571289, 0.043593536376953124, 0.043319297790527345, 0.04336588668823242, 0.04476918411254883, 0.043241119384765624, 0.04278112030029297, 0.042517696380615234, 0.042549758911132815, 0.042489696502685546, 0.04260665512084961, 0.04238787078857422, 0.04252057647705078, 0.04266995239257813, 0.042616958618164065, 0.043055103302001956, 0.04300595092773438, 0.04292540740966797, 0.04297798538208008, 0.042784126281738284, 0.04290595245361328, 0.04292633438110351, 0.042891231536865235, 0.04323331069946289, 0.04332134246826172, 0.04316073608398437, 0.0433974723815918, 0.043299327850341796, 0.04333929443359375, 0.04308835220336914, 0.043118015289306644, 0.04307321548461914, 0.04299638366699219, 0.04293273544311523, 0.04304067230224609, 0.04319750213623047, 0.04307839965820313, 0.04309811019897461, 0.04332476806640625, 0.043118846893310546, 0.04307190322875976, 0.04326544189453125, 0.04319907379150391, 0.04310630416870117, 0.04326435089111328, 0.04323907089233398, 0.04342784118652344, 0.043401214599609376, 0.043276287078857424, 0.04331315231323242, 0.043621952056884766, 0.04346681594848633, 0.04331686401367187, 0.04335388946533203, 0.04330582427978515, 0.043186302185058596, 0.043053054809570314, 0.04321203231811523, 0.043375553131103514, 0.043329151153564456, 0.043346111297607424, 0.043276287078857424, 0.04325392150878906, 0.04348419189453125, 0.043533119201660156, 0.043484928131103516, 0.04356940841674805, 0.04481635284423828, 0.04324969482421875, 0.04271104049682617, 
0.04259392166137695, 0.04245951843261719, 0.042590206146240234, 0.04264550399780274, 0.042602497100830077, 0.04275404739379883, 0.042692607879638675, 0.04270182418823242, 0.04266912078857422, 0.04276627349853516, 0.04289913558959961, 0.042893630981445316, 0.042856224060058595, 0.04278908920288086, 0.04279075241088867, 0.04280131149291992, 0.04284963226318359, 0.04297385787963867, 0.043224288940429685, 0.04346480178833008, 0.043374622344970706, 0.04333955383300781, 0.04315631866455078, 0.04293756866455078, 0.04297401428222656, 0.0429936637878418, 0.043063297271728515, 0.04308377456665039, 0.04312063980102539, 0.042894783020019533, 0.04293280029296875, 0.04296236801147461, 0.04306796646118164, 0.04342156982421875, 0.04318217468261719, 0.0432243537902832, 0.04318899154663086, 0.0430571517944336, 0.04324777603149414, 0.043300640106201174, 0.04349708938598633, 0.04344380950927734, 0.04354339218139648, 0.04346384048461914, 0.043342689514160156, 0.043243518829345705, 0.04353638458251953, 0.0434277458190918, 0.04330915069580078, 0.04349116897583008, 0.04358486557006836, 0.04325049591064453, 0.04328857421875, 0.04333318328857422, 0.0433422737121582, 0.04335747146606445, 0.04342227172851563, 0.04360153579711914, 0.043510208129882814, 0.043380767822265624, 0.04488710403442383, 0.04327110290527344, 0.042850303649902347, 0.042548225402832034, 0.04247859191894531, 0.04238336181640625, 0.04239353561401367, 0.04236275100708008, 0.0426416015625, 0.04258201599121094, 0.04265369415283203, 0.042684417724609375, 0.042686656951904295, 0.04278252792358399, 0.042987518310546875, 0.042864639282226565, 0.04295859146118164, 0.043030784606933596, 0.04296307373046875, 0.04297488021850586, 0.04280137634277344, 0.042889217376708984, 0.04426137542724609, 0.04341958236694336, 0.043081470489501957, 0.04308367919921875, 0.043136577606201175, 0.04305929565429688, 0.04302684783935547, 0.04291004943847656, 0.042866622924804684, 0.04299987030029297, 0.04358873748779297, 0.043159839630126956, 0.04296969604492187, 0.042807296752929686, 0.04290150451660156, 0.04319174575805664, 0.04342022323608399, 0.04346268844604492, 0.04341347122192383, 0.04322099304199219, 0.04323328018188476, 0.04333539199829101, 0.04333596801757812, 0.04331488037109375, 0.04328851318359375, 0.043481472015380856, 0.043597599029541016, 0.04347926330566406, 0.04372480010986328, 0.04339900970458985, 0.043315361022949216, 0.0431484489440918, 0.04346147155761719, 0.04319382476806641, 0.043231777191162106, 0.043222625732421874, 0.0432275505065918, 0.04334121704101562, 0.04356771087646484, 0.043627967834472654, 0.04376361465454102, 0.04480051040649414, 0.04333766555786133, 0.0427786865234375, 0.042608383178710935, 0.042668289184570315, 0.04263740921020508, 0.042665889739990234, 0.042724769592285154, 0.04272598266601563, 0.043190078735351564, 0.04269456100463867, 0.04280972671508789, 0.042796958923339845, 0.042737598419189456, 0.04276611328125, 0.0430533447265625, 0.043063297271728515, 0.042979328155517575, 0.04313497543334961, 0.04318956756591797, 0.043420352935791016, 0.04355072021484375, 0.04340531158447265, 0.04334796905517578, 0.04318172836303711, 0.04331145477294922, 0.04328857421875, 0.043730945587158204, 0.04348659133911133, 0.04294924926757813, 0.04304281616210937, 0.0429936637878418, 0.042921184539794925, 0.043205406188964846, 0.04329414367675781, 0.043151039123535156, 0.042920799255371095, 0.04310540771484375, 0.04328131103515625, 0.04356464004516602, 0.043430305480957034, 0.04342704010009766, 0.0433221435546875, 0.043464702606201173, 0.04322918319702149, 
0.04360332870483399, 0.04364905548095703, 0.04365167999267578, 0.043578369140625, 0.04335923385620117, 0.04338675308227539, 0.04326969528198242, 0.043405376434326175, 0.043442272186279295, 0.04346102523803711, 0.04340252685546875, 0.04338966369628906, 0.04322304153442383, 0.04345446395874023, 0.043546783447265626, 0.04338467025756836, 0.04350537490844727, 0.043701728820800784, 0.044947265625, 0.04333014297485351, 0.04287078475952148, 0.04266556930541992, 0.04267180633544922, 0.04258070373535156, 0.04272537612915039, 0.042657791137695314, 0.042583934783935545, 0.04254528045654297, 0.043055103302001956, 0.04290057754516602, 0.042947040557861325, 0.0427729263305664, 0.042755966186523435, 0.04272281646728516, 0.04265347290039063, 0.04262793731689453, 0.04307558441162109, 0.04309196853637695, 0.043052318572998044, 0.043340511322021484, 0.043655105590820316, 0.043509822845458984, 0.04333977508544922, 0.04327423858642578, 0.04306710433959961, 0.043090206146240234, 0.04318207931518555, 0.043172958374023435, 0.04300483322143555, 0.04288425445556641, 0.04306396865844726, 0.04305657577514648, 0.04334662246704102, 0.04322918319702149, 0.0432386245727539, 0.04326895904541016, 0.04335411071777344, 0.04348441696166992, 0.045204448699951175, 0.04349724960327148, 0.04374649429321289, 0.04343020629882813, 0.043536895751953124, 0.04372275161743164, 0.04366889572143555, 0.043596382141113284, 0.043655166625976564, 0.04327590560913086, 0.04330124664306641, 0.04333488082885742, 0.04358367919921875, 0.043587966918945314, 0.04341385650634766, 0.04350553512573242, 0.04362022399902344, 0.04429414367675781, 0.043606369018554685, 0.04352182388305664, 0.04348640060424805, 0.04357593536376953, 0.04379020690917969, 0.04454604721069336, 0.04319232177734375, 0.04280476760864258, 0.042585727691650394, 0.042420127868652346, 0.04225529479980469, 0.042262527465820314, 0.04246527862548828, 0.04242227172851563, 0.04231897735595703, 0.04242015838623047, 0.04282371139526367, 0.0427344970703125, 0.04262297439575195, 0.042641407012939454, 0.04299760055541992, 0.04286649703979492, 0.04266428756713867, 0.04273971176147461, 0.04291340637207031, 0.04291417694091797, 0.043253761291503906, 0.04335935974121094, 0.043322238922119144, 0.04321843338012695, 0.04301465606689453, 0.04293632125854492, 0.043028480529785154, 0.04302643203735351, 0.04298342514038086, 0.04292716979980469, 0.04282668685913086, 0.04274176025390625, 0.04284415817260742, 0.04295427322387695, 0.042938720703125, 0.04294591903686523, 0.043246337890625, 0.04317184066772461, 0.04307878494262695, 0.04289785766601562, 0.04314502334594727, 0.043391616821289065, 0.04365107345581055, 0.043461982727050784, 0.04334787368774414, 0.04328729629516601, 0.04323097610473633, 0.04325958251953125, 0.04329891204833984, 0.04360444641113281, 0.04341145706176758, 0.04347843170166016, 0.04324208068847656, 0.04334592056274414, 0.04345446395874023, 0.0435214729309082, 0.04355673599243164, 0.04333795166015625, 0.04341939163208008, 0.043541217803955076, 0.04368588638305664, 0.04368297576904297]",tokens/s,23.16152719546129,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, 
Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3021.12768,3923.64032,0.0,3521.118208,3488.072192,s,1,11.58998046875,11.58998046875,0.0,11.58998046875,11.58998046875,11.58998046875,11.58998046875,[11.58998046875],,kWh,0.00011052027029166613,1.2150332954955648e-05,4.572253657797454e-05,0.00016839313982459632,,MB,2937.622528,4045.275136,0.0,3628.07296,3524.635648,s,10,19.04846887207031,1.904846887207031,0.005313792226101908,1.9050657958984374,1.9107308227539062,1.9120947204589844,1.913185838623047,"[1.895146728515625, 1.900818359375, 1.9003414306640625, 1.90470849609375, 1.90698291015625, 1.9010838623046875, 1.91007763671875, 1.905423095703125, 1.9134586181640625, 1.910427734375]",tokens/s,134.39400390619232,kWh,5.565790272583702e-05,6.1377972980287196e-06,3.6894973960399645e-05,9.869067398426538e-05,tokens/kWh,2593963.4381341347,MB,2944.032768,4045.275136,0.0,3628.07296,3524.638208,s,10,21.410410888671873,2.1410410888671874,0.008899961068873283,2.1395426025390627,2.1529890869140624,2.1560216430664063,2.1584476879882812,"[2.152315185546875, 2.15905419921875, 2.132346923828125, 2.128715576171875, 2.13490673828125, 2.144057861328125, 2.1447236328125, 2.14220458984375, 2.136880615234375, 2.13520556640625]",tokens/s,29.424937394981495,kWh,6.452964651582685e-05,7.11906386794752e-06,4.027589333180093e-05,0.00011192460371557533,tokens/kWh,562878.9194563213,,s,630,21.405550262451182,0.033977063908652655,0.00044023899551987923,0.03389873504638672,0.03430062522888184,0.034564273643493654,0.035896623229980475,"[0.03399270248413086, 0.033964031219482424, 0.034261249542236326, 0.0341165771484375, 0.03412844848632812, 0.03419478225708008, 0.0344134407043457, 0.03392272186279297, 0.03427503967285156, 0.034079360961914065, 0.03401039886474609, 0.034151294708251956, 0.03414934539794922, 0.03410361480712891, 0.034005599975585936, 0.034557918548583984, 0.034569473266601564, 0.034133953094482423, 0.033929119110107424, 0.033947647094726564, 0.03419638442993164, 0.034127872467041014, 0.033731998443603514, 0.03416284942626953, 0.03427078247070312, 0.03393974304199219, 0.033866336822509766, 0.03444915390014648, 0.034185470581054686, 0.034135391235351566, 0.03443983840942383, 0.034609153747558595, 0.03405414581298828, 0.034069950103759766, 0.0336607666015625, 0.03411017608642578, 0.033955841064453124, 0.033871166229248045, 0.03401180648803711, 0.03394563293457031, 0.033941505432128906, 0.034100223541259765, 0.03407974243164062, 0.03408006286621094, 0.03424940872192383, 0.03426844787597656, 0.03413398361206055, 0.0342289924621582, 0.03424233627319336, 0.03405231857299805, 0.034236064910888674, 0.034150558471679686, 0.03395731353759766, 0.03423513412475586, 0.033972225189208984, 0.034154495239257815, 0.03412771224975586, 0.034183326721191405, 0.03403744125366211, 0.03459513473510742, 0.03596083068847656, 0.03421593475341797, 0.03392134475708008, 0.0337718391418457, 0.034111839294433594, 0.03406576156616211, 0.03451996612548828, 0.033955841064453124, 0.034291072845458986, 0.034095745086669925, 0.03394355010986328, 0.03403104019165039, 0.03388063812255859, 0.03398601531982422, 0.033856033325195316, 0.03387715148925781, 0.03381264114379883, 0.03474502563476563, 0.034246143341064454, 0.03426355361938477, 0.034225505828857423, 0.034345630645751954, 0.03410432052612305, 0.033884384155273437, 0.03430003356933594, 0.03433126449584961, 0.03397596740722656, 
0.034010814666748046, 0.03433132934570313, 0.03435059356689453, 0.03422259140014648, 0.034724929809570315, 0.034707904815673825, 0.03497216033935547, 0.03430595016479492, 0.03422627258300781, 0.034118751525878906, 0.03443958282470703, 0.03439603042602539, 0.0340814094543457, 0.03411558532714844, 0.033965824127197265, 0.03405363082885742, 0.033863903045654294, 0.034035518646240236, 0.03390537643432617, 0.0339128303527832, 0.03476889419555664, 0.034170528411865235, 0.03782486343383789, 0.03421084976196289, 0.03454457473754883, 0.034983966827392576, 0.03413721466064453, 0.03370435333251953, 0.033890079498291016, 0.03378656005859375, 0.033906688690185545, 0.034723712921142576, 0.03396387100219726, 0.03376976013183594, 0.034225505828857423, 0.03568502426147461, 0.034332481384277344, 0.034395774841308596, 0.034132545471191406, 0.0338702392578125, 0.033604736328125, 0.033888641357421874, 0.033939552307128903, 0.03377363204956055, 0.03417327880859375, 0.03377151870727539, 0.0340316162109375, 0.03371987152099609, 0.03370963287353516, 0.03363724899291992, 0.03386163330078125, 0.03395993423461914, 0.03397372817993164, 0.03388470458984375, 0.03431436920166016, 0.03387081527709961, 0.0341943359375, 0.03373875045776367, 0.033605377197265626, 0.034118942260742184, 0.03391177749633789, 0.03370089721679687, 0.033806625366210936, 0.03391968154907227, 0.03423638534545898, 0.03425408172607422, 0.0339730224609375, 0.03380796813964844, 0.034120063781738284, 0.03369577789306641, 0.03367107009887695, 0.03399884796142578, 0.03383065414428711, 0.033616222381591794, 0.033826656341552734, 0.03369385528564453, 0.03376128005981445, 0.033808383941650394, 0.03382271957397461, 0.0335928955078125, 0.03425734329223633, 0.033799358367919925, 0.03369804763793945, 0.03386966323852539, 0.03411017608642578, 0.03355033493041992, 0.03363382339477539, 0.03371366500854492, 0.03358000183105469, 0.03368316650390625, 0.03373651123046875, 0.033702335357666015, 0.0337674560546875, 0.033791999816894534, 0.033664127349853516, 0.0336759033203125, 0.03382707214355469, 0.03382067108154297, 0.03387372970581055, 0.03377532958984375, 0.03366060638427734, 0.03399350357055664, 0.03427104187011719, 0.03370822525024414, 0.03384035110473633, 0.03364739227294922, 0.033544193267822264, 0.033729663848876955, 0.03390256118774414, 0.0339796142578125, 0.03476841735839844, 0.033732769012451175, 0.03382812881469727, 0.034011871337890624, 0.03400672149658203, 0.03387628936767578, 0.03378915023803711, 0.03376025772094727, 0.03384707260131836, 0.03380223846435547, 0.03384729766845703, 0.03403772735595703, 0.03418537521362305, 0.033656192779541017, 0.033634750366210935, 0.03371014404296875, 0.03368960189819336, 0.033564254760742186, 0.03435356903076172, 0.033845439910888675, 0.03394950485229492, 0.03360988616943359, 0.033455966949462894, 0.03370348739624023, 0.03337260818481445, 0.03339263916015625, 0.03347251129150391, 0.03397343826293946, 0.033430335998535156, 0.03333910369873047, 0.033483039855957034, 0.03331686401367188, 0.03354009628295898, 0.03411721420288086, 0.03355279922485352, 0.03345721435546875, 0.03454457473754883, 0.0335912971496582, 0.033447681427001955, 0.0337441291809082, 0.033637374877929685, 0.03387596893310547, 0.03388111877441406, 0.03371430587768555, 0.03372889709472656, 0.0338191032409668, 0.03370169448852539, 0.033794143676757815, 0.03459417724609375, 0.03375996780395508, 0.033719585418701174, 0.03379916763305664, 0.03373007965087891, 0.033880352020263675, 0.033555934906005856, 0.03402915191650391, 0.0341385269165039, 0.03371747207641602, 
0.03387043380737305, 0.033812671661376956, 0.0338197135925293, 0.03377199935913086, 0.03379657745361328, 0.03371734237670899, 0.03372476959228515, 0.033692222595214844, 0.03406966400146484, 0.033685375213623046, 0.033942081451416015, 0.03421868896484375, 0.03371798324584961, 0.03371417617797851, 0.036020320892333986, 0.03401718521118164, 0.033586719512939456, 0.033815006256103515, 0.033710079193115236, 0.03379404830932617, 0.03410233688354492, 0.033863712310791015, 0.03411360168457031, 0.03389692687988281, 0.03435760116577148, 0.033982208251953125, 0.033599262237548826, 0.03372902297973633, 0.033675262451171875, 0.03365817642211914, 0.03360383987426758, 0.03369823837280273, 0.033730239868164064, 0.033689247131347654, 0.0334752311706543, 0.03376332855224609, 0.03402371215820312, 0.03411734390258789, 0.03374873733520508, 0.03371033477783203, 0.0344859504699707, 0.03381484985351563, 0.03359878540039062, 0.03377967834472656, 0.03365932846069336, 0.033706272125244144, 0.033817760467529295, 0.03384604644775391, 0.03365484619140625, 0.0338158073425293, 0.033954559326171876, 0.03385948944091797, 0.033923168182373044, 0.034022689819335934, 0.03431222534179688, 0.033796703338623044, 0.03409929656982422, 0.03373622512817383, 0.03374067306518555, 0.033874526977539066, 0.03398115158081055, 0.0337501106262207, 0.03369798278808594, 0.03397683334350586, 0.033956062316894534, 0.033887584686279296, 0.03379062271118164, 0.03402547073364258, 0.03383500671386719, 0.03385343933105469, 0.034266334533691406, 0.03434316635131836, 0.033669086456298826, 0.03391904067993164, 0.03373516845703125, 0.03370355224609375, 0.033553825378417966, 0.03389948654174805, 0.033837345123291014, 0.03370880126953125, 0.03388924789428711, 0.03424774551391602, 0.03403462219238281, 0.034205825805664065, 0.03427081680297851, 0.034227615356445314, 0.034208641052246094, 0.03441049575805664, 0.034543617248535156, 0.03408249664306641, 0.0337344970703125, 0.033662593841552735, 0.03372959899902344, 0.033705184936523434, 0.03357088088989258, 0.03373926544189453, 0.03352166366577149, 0.03383705520629883, 0.033607967376708986, 0.034182880401611326, 0.03383091354370117, 0.0337872314453125, 0.03361859130859375, 0.033880062103271484, 0.03422963333129883, 0.03399331283569336, 0.03377667236328125, 0.03376025772094727, 0.03393535995483398, 0.033744735717773436, 0.03430995178222656, 0.03416617584228516, 0.03386991882324219, 0.034703392028808594, 0.035138336181640625, 0.03434806442260742, 0.034687873840332034, 0.034944801330566405, 0.034527584075927736, 0.034481727600097656, 0.0346126708984375, 0.034200576782226565, 0.034194976806640624, 0.033671489715576174, 0.033893054962158206, 0.03365241622924805, 0.033794143676757815, 0.03352371215820313, 0.0339128303527832, 0.03362406539916992, 0.0336732177734375, 0.03381174468994141, 0.03391910552978516, 0.03376172637939453, 0.033822879791259766, 0.034080768585205076, 0.03376947021484375, 0.033783809661865234, 0.035282943725585936, 0.03366902542114258, 0.033552478790283204, 0.0344002571105957, 0.03386777496337891, 0.033667072296142575, 0.03377667236328125, 0.03409609603881836, 0.034100608825683595, 0.03374348831176758, 0.033895648956298825, 0.03401398468017578, 0.0337154541015625, 0.03374684906005859, 0.033878654479980466, 0.033879520416259766, 0.03417913436889648, 0.034187393188476564, 0.03397046279907227, 0.033952030181884765, 0.033944801330566404, 0.03395356750488281, 0.034267646789550785, 0.03400140762329101, 0.03404185485839844, 0.03637247848510742, 0.0343205451965332, 0.03406217575073242, 
0.03389984130859375, 0.03396883010864258, 0.03381875228881836, 0.03372224044799805, 0.0339865608215332, 0.03396745681762695, 0.03411244964599609, 0.03396531295776367, 0.03389487838745117, 0.033914302825927736, 0.03651436614990235, 0.03421184158325195, 0.033933311462402346, 0.034136062622070314, 0.034057281494140626, 0.03401020812988281, 0.03426083374023438, 0.033973758697509765, 0.0342474250793457, 0.03438361740112305, 0.03399225616455078, 0.0338559341430664, 0.03487279891967773, 0.03405878448486328, 0.034196544647216796, 0.03416364669799805, 0.034283233642578126, 0.03400527954101563, 0.034151744842529294, 0.034094879150390625, 0.03399929428100586, 0.034057727813720705, 0.03390153503417969, 0.03464531326293945, 0.03412412643432617, 0.03410160064697266, 0.03379814529418945, 0.03412108612060547, 0.03386022567749023, 0.033726303100585935, 0.03471379089355469, 0.033992671966552736, 0.03399270248413086, 0.03387721633911133, 0.0339730224609375, 0.03395375823974609, 0.03398604965209961, 0.0339830093383789, 0.03427532958984375, 0.03390627288818359, 0.03381481552124024, 0.03379206466674805, 0.0342196159362793, 0.034029121398925784, 0.03408284759521484, 0.03397452926635742, 0.03573942565917969, 0.033909217834472656, 0.03397036743164063, 0.033777854919433595, 0.0335994873046875, 0.0337589111328125, 0.03367555236816406, 0.03372854232788086, 0.03395747375488281, 0.033505504608154296, 0.03355257415771484, 0.033772735595703124, 0.033624095916748045, 0.03375593566894531, 0.0339024658203125, 0.033653919219970706, 0.03386252975463867, 0.03385353469848633, 0.033873920440673826, 0.0340945930480957, 0.033853950500488283, 0.03400908660888672, 0.034050048828125, 0.03398451232910156, 0.03388550567626953, 0.033798271179199216, 0.03400352096557617, 0.03370761489868164, 0.03379561614990234, 0.03399951934814453, 0.0340739517211914, 0.03361676788330078, 0.03413372802734375, 0.033845535278320314, 0.03380633544921875, 0.0336297607421875, 0.03394604873657227, 0.03376249694824219, 0.03390342330932617, 0.03385958480834961, 0.03386092758178711, 0.033882816314697264, 0.0337918701171875, 0.033645919799804684, 0.03382508850097656, 0.033898975372314455, 0.03405363082885742, 0.0339788818359375, 0.033890270233154295, 0.03405849456787109, 0.03392646408081055, 0.034038017272949216, 0.033966304779052735, 0.03398860931396484, 0.03375718307495117, 0.03381657409667969, 0.03394355010986328, 0.033898494720458985, 0.03401113510131836, 0.03399436950683594, 0.0338656005859375, 0.034068065643310545, 0.03388713455200195, 0.0340049934387207, 0.03410124969482422, 0.03402342224121094, 0.03390054321289063, 0.033820289611816406, 0.03388358306884766, 0.03391171264648438, 0.033706016540527343, 0.033879966735839845, 0.03388835144042969, 0.033990238189697264, 0.03391494369506836, 0.03388419342041016, 0.033853729248046874, 0.03397840118408203, 0.033856609344482425, 0.03423324966430664, 0.033949535369873045, 0.0339859504699707, 0.03399452972412109, 0.03391990280151367, 0.03398252868652344, 0.03404912185668945, 0.033837024688720706, 0.03378681564331055, 0.033972225189208984, 0.03395372772216797, 0.03390259170532227, 0.03367731094360352, 0.033710079193115236, 0.03362569427490234, 0.03372825622558594, 0.03385001754760742, 0.03400431823730469, 0.03387664031982422, 0.03642777633666992, 0.03500646209716797, 0.03379337692260742, 0.03368399810791016, 0.03555859375, 0.033668033599853514, 0.03350732803344727, 0.03416438293457031, 0.03348038482666016, 0.033685951232910155, 0.033728736877441406, 0.033658878326416015, 0.033519615173339845, 0.033774913787841795, 
0.03356662368774414, 0.03383347320556641, 0.03351958465576172, 0.03347078323364258, 0.03366608047485352, 0.033536128997802735, 0.0334961929321289, 0.0334947509765625, 0.033600513458251956, 0.033609855651855466, 0.03345225524902344, 0.033811103820800784, 0.033650688171386715, 0.03369276809692383, 0.03376995086669922, 0.03367571258544922, 0.03382271957397461, 0.03871686553955078, 0.0337468147277832, 0.03354393768310547, 0.03352217483520508, 0.03332547378540039, 0.03372671890258789, 0.033654399871826175, 0.03360502243041992, 0.03388454437255859, 0.033717918395996097, 0.03377046585083008, 0.0337528305053711, 0.034052223205566404, 0.03379391860961914, 0.033793792724609376, 0.034198783874511716, 0.034202625274658206, 0.033708030700683594, 0.03384134292602539, 0.03374406433105469, 0.03371401596069336, 0.03369244766235351, 0.0335810546875, 0.03377923202514648]",tokens/s,29.431619008884944,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4259.328,6981.287936,0.0,6578.765824,6136.103424,s,1,12.0058994140625,12.0058994140625,0.0,12.0058994140625,12.0058994140625,12.0058994140625,12.0058994140625,[12.0058994140625],,kWh,0.000134835615687507,1.4861517126734972e-05,5.800004639999745e-05,0.00020769717921423944,,MB,2046.029824,7000.162304,0.0,6582.960128,5870.414848,s,10,29.515345703125003,2.9515345703125,0.0034030400772638615,2.9522696533203128,2.95500615234375,2.955571069335938,2.9560230029296877,"[2.943960693359375, 2.947252685546875, 2.9508291015625, 2.951962646484375, 2.952800048828125, 2.951331787109375, 2.95257666015625, 2.954880615234375, 2.953615478515625, 2.956135986328125]",tokens/s,86.73454228689432,kWh,8.621986961290834e-05,9.509290615126602e-06,5.723149022959784e-05,0.00015296065045763277,tokens/kWh,1673633.0502916316,MB,2059.051008,7000.162304,0.0,6582.960128,5870.417408,s,10,22.37218603515625,2.237218603515625,0.005200439537022995,2.235488037109375,2.2438154052734376,2.2462564575195314,2.2482092993164065,"[2.241233642578125, 2.2348818359375, 2.23609423828125, 2.231322509765625, 2.24327294921875, 2.233241455078125, 2.2329970703125, 2.248697509765625, 2.2364072265625, 2.23403759765625]",tokens/s,28.15996608512021,kWh,6.188950665042739e-05,6.82755052887102e-06,4.1298199705203095e-05,0.00011001525688450147,tokens/kWh,572647.8470721564,,s,630,22.36760069656373,0.03550412808978368,0.0006281188439021833,0.0354195671081543,0.03595920829772949,0.036331660270690916,0.03740516849517823,"[0.03742377471923828, 0.03597747039794922, 0.03513958358764648, 0.03511686325073242, 0.035426494598388675, 0.03561836624145508, 0.035584030151367185, 0.035418529510498044, 0.035168254852294925, 0.03555737686157227, 0.03531967926025391, 0.035525760650634765, 0.03554611206054688, 0.035491233825683595, 0.035019359588623046, 0.03534019088745117, 0.035732608795166015, 0.035535839080810545, 0.03531987380981445, 0.03522467041015625, 0.03551219177246094, 0.03592243194580078, 
0.035662303924560546, 0.03575603103637695, 0.035335296630859374, 0.03509721755981445, 0.03541017532348633, 0.036253376007080076, 0.035909374237060546, 0.03558425521850586, 0.03523411178588867, 0.03489580917358399, 0.03553286361694336, 0.03537715148925781, 0.03566723251342773, 0.035406558990478516, 0.03508838272094727, 0.035149822235107424, 0.034953216552734374, 0.04114022445678711, 0.035344001770019534, 0.035766654968261716, 0.035418113708496096, 0.03521267318725586, 0.03516889572143555, 0.03551846313476562, 0.03562496185302735, 0.03574288177490234, 0.035076831817626955, 0.03537126541137695, 0.03571494293212891, 0.035522560119628906, 0.03529638290405274, 0.035146400451660155, 0.035309791564941406, 0.036016128540039063, 0.035571617126464845, 0.035680000305175784, 0.03575228881835937, 0.0354007682800293, 0.035094623565673826, 0.03541897583007812, 0.035224990844726564, 0.03732515335083008, 0.035853633880615236, 0.0348740463256836, 0.03491839981079101, 0.03491430282592774, 0.0352454719543457, 0.034861663818359374, 0.03499935913085937, 0.03516921615600586, 0.0352064323425293, 0.0352283821105957, 0.0348504638671875, 0.03489212799072266, 0.0348172492980957, 0.034659103393554686, 0.03496252822875977, 0.03500460815429687, 0.03493142318725586, 0.03510067367553711, 0.03535996627807617, 0.034945598602294924, 0.03573987197875977, 0.03654655838012695, 0.035225601196289064, 0.035133438110351564, 0.03510681533813476, 0.035885055541992186, 0.036944992065429685, 0.035920799255371096, 0.03544595336914062, 0.03599814224243164, 0.0355997428894043, 0.03529558563232422, 0.035802047729492185, 0.03558118438720703, 0.03556809616088867, 0.035020286560058594, 0.03606739044189453, 0.035062206268310546, 0.0351907844543457, 0.03542345428466797, 0.035859390258789064, 0.035561248779296874, 0.03589945602416992, 0.03546931076049804, 0.03521513748168945, 0.03506403350830078, 0.03587100982666016, 0.03573321533203125, 0.035843486785888674, 0.03546707153320312, 0.036542240142822265, 0.03733382415771484, 0.03530316925048828, 0.03578515243530273, 0.035704097747802734, 0.0351376953125, 0.034953601837158205, 0.0353875846862793, 0.03529638290405274, 0.03571100616455078, 0.035181407928466794, 0.03545292663574219, 0.03646806335449219, 0.03564313507080078, 0.03496438217163086, 0.03532799911499023, 0.03536272048950195, 0.035055679321289064, 0.03502902221679687, 0.035192832946777344, 0.03506790542602539, 0.03508841705322266, 0.03537097549438477, 0.03530092620849609, 0.03514745712280273, 0.035568382263183596, 0.03506380844116211, 0.03517379379272461, 0.03507043075561524, 0.036516128540039064, 0.03505340957641601, 0.035060768127441404, 0.035334625244140626, 0.03521177673339844, 0.035573760986328126, 0.03518054580688477, 0.03539923095703125, 0.035615169525146484, 0.03528908920288086, 0.03490102386474609, 0.03533030319213867, 0.03482287979125977, 0.03783449554443359, 0.03558425521850586, 0.035396800994873044, 0.03596160125732422, 0.03574995040893555, 0.03538534545898438, 0.03572848129272461, 0.0358364143371582, 0.03552652740478516, 0.03576176071166992, 0.03540812683105469, 0.035103424072265625, 0.03527167892456055, 0.035848705291748044, 0.03581593704223633, 0.03555846405029297, 0.035971744537353516, 0.035385631561279295, 0.035288192749023437, 0.03504422378540039, 0.03533926391601563, 0.03646156692504883, 0.035692543029785154, 0.0357212142944336, 0.03560857772827149, 0.03606118392944336, 0.03575193786621094, 0.035639297485351565, 0.03491020965576172, 0.0352542724609375, 0.03592601776123047, 0.035528446197509764, 0.035092735290527345, 
0.0365780143737793, 0.03553071975708008, 0.03536899185180664, 0.03490816116333008, 0.035053440093994144, 0.03501068878173828, 0.035225025177001955, 0.034974273681640626, 0.034912353515625, 0.03497564697265625, 0.035231166839599606, 0.03483017730712891, 0.03480863952636719, 0.03510233688354492, 0.035039520263671874, 0.03467385482788086, 0.03493356704711914, 0.03527884674072266, 0.034928638458251955, 0.03551846313476562, 0.035786975860595704, 0.035347904205322266, 0.03506963348388672, 0.03500899124145508, 0.035014846801757815, 0.03521331024169922, 0.035917823791503906, 0.035778560638427735, 0.03560246276855469, 0.036429790496826174, 0.03550207901000976, 0.03505347061157227, 0.03524822235107422, 0.03576006317138672, 0.03568159866333008, 0.03552905654907226, 0.035103328704833986, 0.03520185470581055, 0.035378177642822264, 0.03589529418945313, 0.035639392852783204, 0.03570064163208008, 0.0359444465637207, 0.035678207397460936, 0.03594566345214844, 0.035375713348388675, 0.03514921569824219, 0.03513123321533203, 0.035719390869140624, 0.03570355224609375, 0.03713836669921875, 0.03530672073364258, 0.03507081604003906, 0.0349595832824707, 0.03564223861694336, 0.03571599960327149, 0.0358172492980957, 0.03551238250732422, 0.03579667282104492, 0.035624767303466795, 0.03517494583129883, 0.035011905670166016, 0.03570691299438476, 0.03707868957519531, 0.036112766265869144, 0.035349918365478517, 0.03533062362670898, 0.035014942169189454, 0.035245792388916015, 0.0356346549987793, 0.03550467300415039, 0.03544268798828125, 0.03538438415527344, 0.0351343994140625, 0.03599478530883789, 0.03507081604003906, 0.03520716857910156, 0.035425312042236326, 0.036049888610839846, 0.03553891372680664, 0.03577142333984375, 0.03561289596557617, 0.03553564834594727, 0.03848739242553711, 0.03513616180419922, 0.03565276718139648, 0.035652446746826175, 0.034928478240966794, 0.035090591430664064, 0.03518979263305664, 0.03487200164794922, 0.03521974563598633, 0.034996353149414065, 0.03545443344116211, 0.03564585494995117, 0.035555328369140625, 0.0353037109375, 0.03519171142578125, 0.035531585693359374, 0.03863542556762695, 0.03532588958740234, 0.035691967010498045, 0.0358787841796875, 0.035668830871582034, 0.03565727996826172, 0.03546486282348633, 0.0353548469543457, 0.03532243347167969, 0.03533129501342774, 0.03559708786010742, 0.035573760986328126, 0.03511500930786133, 0.035052993774414065, 0.03609849548339844, 0.035776641845703124, 0.03564134216308594, 0.03560038375854492, 0.035060062408447265, 0.035339935302734375, 0.035778209686279296, 0.035598304748535155, 0.03563062286376953, 0.036026206970214844, 0.03567308807373047, 0.03569868850708008, 0.035875904083251954, 0.03663683319091797, 0.035649375915527345, 0.035110912322998046, 0.03499404907226562, 0.035103904724121095, 0.03476364898681641, 0.03499836730957031, 0.03511705780029297, 0.03463497543334961, 0.03495401763916016, 0.03523929595947266, 0.035475200653076175, 0.03560537719726563, 0.03564966583251953, 0.035192703247070314, 0.03529251098632812, 0.03532252883911133, 0.035688449859619144, 0.035743518829345705, 0.03609193420410156, 0.03523104095458984, 0.03540377426147461, 0.03554102325439453, 0.035353214263916015, 0.0355371208190918, 0.03533571243286133, 0.035160545349121095, 0.03548364639282227, 0.03615299224853516, 0.03586492919921875, 0.035639297485351565, 0.03507334518432617, 0.03510335922241211, 0.03520927810668945, 0.03545907211303711, 0.035266559600830076, 0.03501465606689453, 0.0353361930847168, 0.03581721496582031, 0.03652428817749023, 0.035915775299072264, 
0.03542015838623047, 0.035222881317138674, 0.03550271987915039, 0.03545296096801758, 0.03548476791381836, 0.03541494369506836, 0.03520003128051758, 0.035352767944335936, 0.03583260726928711, 0.03550931167602539, 0.03486816024780273, 0.03510681533813476, 0.03509657669067383, 0.03499174499511719, 0.03516774368286133, 0.03496025466918945, 0.035885150909423826, 0.03612662506103516, 0.035700096130371096, 0.035707809448242187, 0.03662124633789063, 0.035485889434814455, 0.03690665435791016, 0.03504611206054688, 0.0351181755065918, 0.034941856384277346, 0.03484390258789063, 0.03509280014038086, 0.034832832336425784, 0.034813953399658204, 0.03531161499023437, 0.035141632080078124, 0.037026912689208984, 0.034998943328857425, 0.03507839965820313, 0.03524198532104492, 0.0353004150390625, 0.0351077766418457, 0.0350731201171875, 0.03541289520263672, 0.035192832946777344, 0.03542835235595703, 0.035730430603027344, 0.03579532623291016, 0.03549568176269531, 0.035478401184082034, 0.0353546257019043, 0.03576422500610352, 0.03591167831420899, 0.035622528076171875, 0.0357256965637207, 0.0353422737121582, 0.035063873291015624, 0.035739009857177734, 0.03562969589233399, 0.03526646423339844, 0.03484668731689453, 0.035029121398925785, 0.035706497192382815, 0.03523388671875, 0.034953121185302735, 0.03544105529785156, 0.034960865020751956, 0.03514419174194336, 0.03546931076049804, 0.03558956909179688, 0.035545024871826175, 0.03551500701904297, 0.03522918319702149, 0.03524166488647461, 0.0357322883605957, 0.0355360336303711, 0.0352182731628418, 0.03496361541748047, 0.0351165771484375, 0.035208641052246095, 0.035519359588623046, 0.03588710403442383, 0.03573347091674805, 0.036046878814697265, 0.03558803176879883, 0.03609526443481445, 0.03565027236938476, 0.03661612701416016, 0.035856224060058596, 0.03661004638671875, 0.03546316909790039, 0.035563392639160155, 0.03561305618286133, 0.035604225158691404, 0.0357022705078125, 0.03500697708129883, 0.03506892776489258, 0.03501321411132813, 0.03519071960449219, 0.034898368835449216, 0.03526249694824219, 0.03493622589111328, 0.03575868988037109, 0.03511094284057617, 0.035272510528564456, 0.035352287292480467, 0.03528953552246094, 0.03499769592285156, 0.035631294250488284, 0.03557548904418945, 0.03533193588256836, 0.03480259323120117, 0.03519071960449219, 0.04316758346557617, 0.03472137451171875, 0.03504528045654297, 0.03505340957641601, 0.03546003341674805, 0.03622694396972656, 0.03554316711425781, 0.03579190444946289, 0.03586703872680664, 0.03531721496582031, 0.03576726531982422, 0.03592819213867188, 0.03567116928100586, 0.03551641464233399, 0.035178241729736326, 0.03520000076293945, 0.035520511627197264, 0.035827713012695314, 0.03558812713623047, 0.035137504577636716, 0.035266559600830076, 0.03498735809326172, 0.035508895874023436, 0.03596255874633789, 0.03679212951660156, 0.0359552001953125, 0.037359615325927735, 0.03572870254516602, 0.03560732650756836, 0.035337791442871094, 0.035488094329833984, 0.035487743377685545, 0.03596492767333984, 0.03616704177856445, 0.035865215301513674, 0.03576150512695313, 0.03591167831420899, 0.036262176513671876, 0.037036415100097655, 0.03923676681518555, 0.03588915252685547, 0.03525116729736328, 0.03525353622436524, 0.034976127624511716, 0.035506431579589846, 0.03547955322265625, 0.035765281677246095, 0.035115039825439454, 0.03535148620605469, 0.03516998291015625, 0.03545119857788086, 0.03510393524169922, 0.03500038528442383, 0.035222145080566404, 0.0353570556640625, 0.03477231979370117, 0.03496755218505859, 0.03491881561279297, 
0.03502412796020508, 0.03490044784545898, 0.03492310333251953, 0.035232574462890624, 0.0350318717956543, 0.035049537658691406, 0.03545087814331055, 0.03571660614013672, 0.03626639938354492, 0.03626198577880859, 0.0357314567565918, 0.03516419219970703, 0.03506582260131836, 0.03570499038696289, 0.03623920059204101, 0.03571014404296875, 0.03497036743164063, 0.03534368133544922, 0.03604352188110352, 0.03574556732177735, 0.03570457458496094, 0.034978271484375, 0.03564748764038086, 0.035777694702148435, 0.03589616012573242, 0.03542396926879883, 0.03535696029663086, 0.03562246322631836, 0.0353076171875, 0.035418689727783205, 0.035116832733154295, 0.03575193786621094, 0.035643329620361326, 0.03515193557739258, 0.035350528717041016, 0.03521542358398438, 0.03608569717407226, 0.03587071990966797, 0.036052574157714845, 0.03568454360961914, 0.03563951873779297, 0.03513983917236328, 0.035109825134277343, 0.03566412734985352, 0.03704396820068359, 0.035604736328125, 0.0354854736328125, 0.03542812728881836, 0.03534688186645508, 0.03533824157714844, 0.0351231689453125, 0.03490409469604492, 0.03501385498046875, 0.03479017639160156, 0.03498735809326172, 0.035197601318359376, 0.035688449859619144, 0.03587481689453125, 0.03499622344970703, 0.034985984802246094, 0.035148990631103515, 0.035257312774658205, 0.03547644805908203, 0.03497663879394531, 0.03539558410644531, 0.03559833526611328, 0.035743743896484374, 0.035432449340820314, 0.03508019256591797, 0.035538944244384765, 0.03578252792358398, 0.03541004943847656, 0.03504742431640625, 0.034875232696533205, 0.035143199920654296, 0.03527043151855469, 0.03638505554199219, 0.03609430313110352, 0.03549193572998047, 0.03577459335327148, 0.03557785415649414, 0.03519190216064453, 0.03497872161865234, 0.03674521636962891, 0.03598928070068359, 0.03542038345336914, 0.035520511627197264, 0.035811233520507815, 0.03501475143432617, 0.03525558471679688, 0.03560111999511719, 0.03553647994995117, 0.035238304138183595, 0.03503308868408203, 0.03533561706542969, 0.035746368408203125, 0.03542015838623047, 0.03570070266723633, 0.035950206756591795, 0.03553731155395508, 0.0357314567565918, 0.03550848007202148, 0.0354356803894043, 0.03515555191040039, 0.03542723083496094, 0.03499967956542969, 0.03595894241333008]",tokens/s,28.16573885355461,,, 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,8211.062784,12133.00736,0.0,11737.759744,11171.24352,s,1,16.47221875,16.47221875,0.0,16.47221875,16.47221875,16.47221875,16.47221875,[16.47221875],,kWh,0.0002681633353000355,2.9572969375535115e-05,0.00011389814667406295,0.0004116344513496336,,MB,4054.339584,12147.687424,0.0,11739.856896,10924.35968,s,10,58.20302490234375,5.820302490234375,0.0018394682126367818,5.820822265625,5.822356005859374,5.8225693603515625,5.822740043945312,"[5.817763671875, 5.8176748046875, 5.81830908203125, 5.81912646484375, 5.8204892578125, 5.82135888671875, 5.82205615234375, 
5.8211552734375, 5.82278271484375, 5.82230859375]",tokens/s,43.98396826101924,kWh,0.00016975931377415842,1.8724993909382963e-05,0.00011283195137660185,0.0003013162590601432,tokens/kWh,849605.6628291737,MB,4058.60352,12151.881728,0.0,11741.954048,10924.36224,s,10,29.493633300781255,2.9493633300781257,0.0030624496666022006,2.9485103759765625,2.95195400390625,2.9544826171875,2.9565055078125,"[2.947757568359375, 2.945792724609375, 2.95139208984375, 2.947404052734375, 2.946928466796875, 2.949522705078125, 2.94754345703125, 2.94926318359375, 2.95701123046875, 2.951017822265625]",tokens/s,21.360542242291725,kWh,8.640699360418382e-05,9.531341060527186e-06,5.753701825179645e-05,0.0001534753529165074,tokens/kWh,410489.36394544615,,s,630,29.49085386276244,0.04681087914724198,0.0004593725109467762,0.04678553581237793,0.04721825866699219,0.047367992210388184,0.048674534416198734,"[0.04866867065429688, 0.046413822174072264, 0.04618345642089844, 0.04651516723632813, 0.04608572769165039, 0.04632783889770508, 0.046125438690185545, 0.04614339065551758, 0.04617942428588867, 0.04675888061523437, 0.04648361587524414, 0.0464087028503418, 0.046726016998291015, 0.04683327865600586, 0.04625241470336914, 0.04638505554199219, 0.04637868881225586, 0.04636016082763672, 0.04645151901245117, 0.04685004806518555, 0.04707328033447265, 0.046903297424316405, 0.046712833404541014, 0.046407550811767576, 0.04621939086914063, 0.047035808563232424, 0.04671139144897461, 0.04629199981689453, 0.046486526489257815, 0.04717772674560547, 0.04672918319702148, 0.04671897506713867, 0.04680089569091797, 0.046683872222900394, 0.046765567779541016, 0.04692863845825195, 0.04672707366943359, 0.046833599090576175, 0.04679478454589844, 0.04696035385131836, 0.04716527938842773, 0.04724787139892578, 0.04719011306762695, 0.04705254364013672, 0.046885120391845704, 0.04702412796020508, 0.04683980941772461, 0.04697907257080078, 0.04659811019897461, 0.04688598251342774, 0.047166015625, 0.04697238540649414, 0.0468427848815918, 0.04683331298828125, 0.04686617660522461, 0.04737699127197265, 0.04708563232421875, 0.04683564758300781, 0.046832958221435544, 0.04742828750610351, 0.04734956741333008, 0.04744790267944336, 0.047097408294677734, 0.04867692947387695, 0.04647366333007812, 0.04609417724609375, 0.04623987197875976, 0.04605132675170898, 0.04640768051147461, 0.04640476989746094, 0.04623846435546875, 0.04634838485717773, 0.04682060623168945, 0.04675417709350586, 0.046487136840820314, 0.046445343017578126, 0.04641996765136719, 0.046440448760986325, 0.04660960006713867, 0.046319873809814456, 0.04663929748535156, 0.046817569732666015, 0.04691567993164063, 0.046688255310058595, 0.046903297424316405, 0.04663100814819336, 0.046342048645019535, 0.0462479362487793, 0.04661372756958008, 0.04653100967407227, 0.04643203353881836, 0.04660076904296875, 0.04679679870605469, 0.046486751556396484, 0.0469587516784668, 0.046913505554199215, 0.04678118515014648, 0.046655391693115236, 0.0466429443359375, 0.04671100616455078, 0.0467108154296875, 0.047065086364746093, 0.04709523010253906, 0.04705046463012695, 0.046965599060058594, 0.04717977523803711, 0.047134719848632815, 0.04689715194702149, 0.046680065155029295, 0.04660224151611328, 0.04666163253784179, 0.04664476776123047, 0.04689750289916992, 0.04720243072509766, 0.04700495910644531, 0.04681596755981445, 0.04680089569091797, 0.046747711181640624, 0.047248481750488285, 0.04703228759765625, 0.0468645133972168, 0.04677299118041992, 0.04712262344360352, 0.04713862228393555, 0.04715068817138672, 0.047429759979248046, 
0.04886294555664063, 0.0467092170715332, 0.04633411026000977, 0.046499839782714845, 0.04631350326538086, 0.046235614776611325, 0.04637865447998047, 0.04649324798583984, 0.0463056640625, 0.04674806213378906, 0.04651753616333008, 0.046367454528808597, 0.04650608062744141, 0.04646083068847656, 0.04641753768920898, 0.04683200073242187, 0.04684799957275391, 0.04668627166748047, 0.04635836791992187, 0.046589344024658204, 0.04654473495483399, 0.04640444946289062, 0.0465715217590332, 0.04648287963867188, 0.046405406951904295, 0.04699625778198242, 0.046721023559570314, 0.04642201614379883, 0.04657561492919922, 0.046992576599121094, 0.04655718231201172, 0.04666041564941406, 0.04822220611572266, 0.046723072052001956, 0.04652032089233398, 0.04722073745727539, 0.046960033416748044, 0.04709404754638672, 0.0467542724609375, 0.04675363159179687, 0.047177345275878906, 0.04733779144287109, 0.04723686218261719, 0.04687699127197266, 0.04723497772216797, 0.04703241729736328, 0.046843902587890625, 0.046680065155029295, 0.04712243270874023, 0.04709785461425781, 0.046677696228027345, 0.046765567779541016, 0.046885025024414065, 0.04709872055053711, 0.04678047943115234, 0.046849246978759765, 0.04732368087768555, 0.0474683837890625, 0.04727619171142578, 0.047675392150878904, 0.04728400039672852, 0.04720374298095703, 0.047075969696044925, 0.048652256011962894, 0.04674291229248047, 0.04641241455078125, 0.04623974227905273, 0.04607385635375977, 0.046415870666503906, 0.04646883010864258, 0.04625187301635742, 0.04642211151123047, 0.04685654449462891, 0.04661164855957031, 0.04655596923828125, 0.046784446716308596, 0.04651219177246094, 0.0463296012878418, 0.04679724884033203, 0.04680275344848633, 0.04639289474487305, 0.046338687896728514, 0.04690719985961914, 0.04664115142822266, 0.046632190704345704, 0.04668806457519531, 0.0465880012512207, 0.046500320434570315, 0.04649155044555664, 0.04653104019165039, 0.04674745559692383, 0.04694355010986328, 0.04675859069824219, 0.04644473648071289, 0.046769729614257814, 0.04692559814453125, 0.046502655029296874, 0.04638300704956055, 0.04673535919189453, 0.04670259094238281, 0.0466247673034668, 0.046524417877197265, 0.04655516815185547, 0.04703638458251953, 0.04714713668823242, 0.04692892837524414, 0.04720441436767578, 0.0471429443359375, 0.04685696029663086, 0.04677552032470703, 0.04713865661621094, 0.046934623718261716, 0.04724361419677734, 0.046960639953613284, 0.04677017593383789, 0.04685619354248047, 0.04731635284423828, 0.04694899368286133, 0.046884864807128904, 0.04680704116821289, 0.0473675537109375, 0.0471385612487793, 0.04702230453491211, 0.04691603088378906, 0.04737014389038086, 0.047129951477050784, 0.04873878479003906, 0.04679471969604492, 0.04648537445068359, 0.04627878570556641, 0.04614352035522461, 0.04615126419067383, 0.04634048080444336, 0.04620268630981445, 0.046018753051757816, 0.046208446502685546, 0.046524673461914065, 0.046303550720214845, 0.04640284729003906, 0.0467279052734375, 0.046565376281738284, 0.046780193328857425, 0.0467371826171875, 0.04668451309204102, 0.04640572738647461, 0.04663849639892578, 0.04688956832885742, 0.04665139389038086, 0.04647292709350586, 0.04689894485473633, 0.046373409271240236, 0.04635772705078125, 0.04659462356567383, 0.04681740951538086, 0.04655728149414062, 0.0467322883605957, 0.046437374114990236, 0.04658111953735351, 0.04693635177612305, 0.046790111541748045, 0.04639628982543945, 0.04705484771728516, 0.047031646728515626, 0.04678662490844727, 0.04703907012939453, 0.046857662200927734, 0.046641727447509766, 0.047239166259765625, 
0.04693734359741211, 0.04687497711181641, 0.04672710418701172, 0.04677862548828125, 0.046618526458740234, 0.046839553833007815, 0.04721516799926758, 0.04703641510009766, 0.04683366394042969, 0.04681926345825195, 0.04695251083374023, 0.04719411087036133, 0.047115585327148435, 0.047000255584716794, 0.04730579376220703, 0.04722710418701172, 0.047185985565185544, 0.04742399978637695, 0.04728438568115234, 0.04703641510009766, 0.04700284957885742, 0.04905023956298828, 0.04683967971801758, 0.04607804870605469, 0.04619468688964844, 0.04604431915283203, 0.04626313781738281, 0.04627046585083008, 0.04629094314575195, 0.04619862365722656, 0.046440608978271486, 0.04656243133544922, 0.046519168853759764, 0.046176254272460936, 0.046368255615234374, 0.046793216705322264, 0.046774303436279294, 0.04680643081665039, 0.046647872924804684, 0.046516223907470705, 0.04653055953979492, 0.046935264587402346, 0.04692031860351562, 0.04660847854614258, 0.04704022216796875, 0.046920032501220704, 0.04698316955566406, 0.046639102935791016, 0.04653670501708984, 0.046433311462402344, 0.04691247940063477, 0.047034366607666016, 0.046475265502929686, 0.04665884780883789, 0.04693267059326172, 0.046569503784179685, 0.046852096557617184, 0.046925857543945314, 0.04678857421875, 0.04684358215332031, 0.04704412841796875, 0.047368350982666015, 0.04733737564086914, 0.046975711822509765, 0.04695654296875, 0.04716252899169922, 0.04703228759765625, 0.04678473663330078, 0.04681772613525391, 0.04659222412109375, 0.04653875350952148, 0.04697248077392578, 0.047292865753173825, 0.04692115020751953, 0.04692025756835937, 0.04679270553588867, 0.047484127044677735, 0.047266590118408204, 0.046956127166748046, 0.046975265502929686, 0.04735110473632813, 0.04731711959838867, 0.047049217224121094, 0.04698559951782227, 0.048882720947265625, 0.04644758224487305, 0.04625139236450195, 0.046276832580566404, 0.04634460830688476, 0.046432254791259765, 0.04643635177612305, 0.0461781120300293, 0.046259967803955075, 0.04688032150268555, 0.046501953125, 0.04623443222045898, 0.046383102416992186, 0.046620670318603515, 0.04625423812866211, 0.04638089752197266, 0.04673126220703125, 0.04660019302368164, 0.04645478439331055, 0.0468746223449707, 0.04690739059448242, 0.047124000549316404, 0.04655875015258789, 0.04657843017578125, 0.046260414123535154, 0.04657929611206055, 0.046940353393554686, 0.04646899032592773, 0.04632598495483398, 0.04671017456054687, 0.04702057647705078, 0.04691782379150391, 0.04654006576538086, 0.04677913665771485, 0.047011550903320314, 0.047026432037353516, 0.046701953887939456, 0.046731136322021485, 0.04654975891113281, 0.046755840301513675, 0.047002880096435544, 0.04715971374511719, 0.046774688720703124, 0.04688435363769531, 0.046712993621826175, 0.046997791290283204, 0.04714700698852539, 0.047110145568847656, 0.046773536682128906, 0.04689788818359375, 0.046795936584472654, 0.047008609771728514, 0.04683958435058594, 0.04685372924804688, 0.04699216079711914, 0.047056640625, 0.04725347137451172, 0.047083648681640625, 0.04695654296875, 0.047392704010009765, 0.047179840087890626, 0.04739276885986328, 0.04711196899414063, 0.048344127655029295, 0.04673529434204102, 0.046445568084716796, 0.046344192504882815, 0.0459417610168457, 0.046235904693603516, 0.04639411163330078, 0.04638671875, 0.04633430480957031, 0.04691571044921875, 0.0464257926940918, 0.04695199966430664, 0.04660857772827148, 0.04642387390136719, 0.04646915054321289, 0.04685491180419922, 0.046827167510986326, 0.04656159973144531, 0.0466063346862793, 0.04688076782226563, 
0.046685791015625, 0.046534847259521485, 0.046567649841308595, 0.046637054443359374, 0.046465023040771485, 0.04649273681640625, 0.04666476821899414, 0.04668403244018555, 0.04670259094238281, 0.04687033462524414, 0.046700736999511716, 0.047050750732421875, 0.046565025329589844, 0.0465145263671875, 0.046704639434814454, 0.046919166564941404, 0.04685260772705078, 0.04714707183837891, 0.047052734375, 0.04679884719848633, 0.047156448364257815, 0.047012287139892577, 0.04668412780761719, 0.04719244766235352, 0.04690943908691406, 0.04670374298095703, 0.046636959075927735, 0.04671526336669922, 0.04672572708129883, 0.047324928283691406, 0.04713894271850586, 0.04669353485107422, 0.04694720077514648, 0.04696227264404297, 0.04673766326904297, 0.04735820770263672, 0.047185726165771484, 0.04712419128417969, 0.0473864631652832, 0.0474582405090332, 0.04705472183227539, 0.04748166275024414, 0.04711139297485351, 0.04863577651977539, 0.04684812927246094, 0.04640768051147461, 0.046176254272460936, 0.04609209442138672, 0.04646873474121094, 0.04643692779541016, 0.04636671829223633, 0.052022911071777346, 0.04558067321777344, 0.046456127166748046, 0.04674956893920899, 0.04658448028564453, 0.04659574508666992, 0.04642828750610352, 0.04681270217895508, 0.04681814575195312, 0.04677427291870117, 0.04638105773925781, 0.046617984771728516, 0.04688272094726562, 0.04701660919189453, 0.046609886169433595, 0.046639713287353515, 0.04668620681762695, 0.04647731018066406, 0.04665273666381836, 0.046792510986328126, 0.04674035263061523, 0.04675945663452148, 0.04683209609985352, 0.04652844619750977, 0.04671920013427734, 0.04699081420898438, 0.046537086486816405, 0.04678192138671875, 0.04714275360107422, 0.04692464065551758, 0.04711103820800781, 0.047158241271972656, 0.04685785675048828, 0.04689686584472656, 0.047186016082763675, 0.04699363327026367, 0.046903518676757815, 0.04690752029418945, 0.04694220733642578, 0.046849822998046874, 0.04672463989257813, 0.04695667266845703, 0.047343936920166016, 0.04715340805053711, 0.04701388931274414, 0.04707328033447265, 0.04721798324584961, 0.047130561828613284, 0.04701686477661133, 0.04744927978515625, 0.04709238433837891, 0.04749926376342774, 0.04741939163208008, 0.047351806640625, 0.04750140762329102, 0.04920111846923828, 0.04674150466918945, 0.046599937438964845, 0.046352577209472653, 0.046182464599609375, 0.046475265502929686, 0.046712833404541014, 0.04674355316162109, 0.04636569595336914, 0.0469486083984375, 0.04654265594482422, 0.04623455810546875, 0.046698593139648435, 0.04640143966674805, 0.04634406280517578, 0.04679212951660156, 0.04658844757080078, 0.046610591888427734, 0.046458881378173826, 0.04651827239990235, 0.046911102294921875, 0.04702777481079102, 0.046938209533691405, 0.046897598266601566, 0.04673974227905273, 0.046671424865722656, 0.046690750122070315, 0.04667596817016602, 0.0470384635925293, 0.04697907257080078, 0.04678656005859375, 0.04669440078735351, 0.04678633499145508, 0.04656560134887695, 0.04672512054443359, 0.047376415252685544, 0.046575584411621095, 0.047132671356201174, 0.04680019378662109, 0.046448543548583986, 0.04674828720092773, 0.046975135803222656, 0.04703619384765625, 0.047223007202148434, 0.04700364685058594, 0.046655487060546875, 0.04681932830810547, 0.04698128128051758, 0.04682735824584961, 0.047263904571533205, 0.04724671936035156, 0.04694803237915039, 0.04688566589355469, 0.047082687377929686, 0.0467869758605957, 0.046973342895507815, 0.04691353607177735, 0.04690124893188476, 0.04735145568847656, 0.04720793533325195, 0.04681404876708985, 
0.04702572631835938, 0.04707980728149414]",tokens/s,21.362555419105355,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = 
scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,8557.305856,12045.910016,0.0,11643.387904,10774.352384,s,1,15.449345703125,15.449345703125,0.0,15.449345703125,15.449345703125,15.449345703125,15.449345703125,[15.449345703125],,kWh,0.00024450217612916275,2.6962865104855526e-05,0.00011183731169199518,0.00038330235292601347,,MB,1673.777152,12077.367296,0.0,11660.16512,9610.865152,s,10,72.2507099609375,7.22507099609375,0.003126629139248643,7.22534326171875,7.22931923828125,7.22991865234375,7.23039818359375,"[7.22107666015625, 7.222177734375, 7.22446240234375, 7.22128857421875, 7.22258740234375, 7.22651953125, 7.22666943359375, 7.22622412109375, 7.23051806640625, 
7.22918603515625]",tokens/s,35.432177779070535,kWh,0.00021072569176167007,2.3243946275738666e-05,0.00014018705659400027,0.000374156694631409,tokens/kWh,684205.3173796394,MB,1685.491712,12077.367296,0.0,11660.16512,9610.867712,s,10,32.54028857421875,3.2540288574218748,0.001619967803782302,3.2536656494140628,3.256328466796875,3.2564135009765622,3.256481528320313,"[3.252347900390625, 3.252013916015625, 3.2529052734375, 3.253246826171875, 3.252628173828125, 3.2541767578125, 3.25408447265625, 3.2563095703125, 3.25649853515625, 3.2560771484375]",tokens/s,19.360615028446333,kWh,9.514654453791384e-05,1.0495374447632376e-05,6.313163383860065e-05,0.00016877355282414702,tokens/kWh,373281.23361628,,s,630,32.5375793762207,0.051646951390826513,0.0004361819623780889,0.051651472091674804,0.052203572845458984,0.0523054313659668,0.052800942497253416,"[0.05284374237060547, 0.05141177749633789, 0.05083750534057617, 0.05091516876220703, 0.050727039337158206, 0.05127171325683594, 0.050915328979492185, 0.05110067367553711, 0.05130259323120117, 0.051264320373535156, 0.051451904296875, 0.05136553573608398, 0.05137369537353516, 0.05123049545288086, 0.05128268814086914, 0.05102201461791992, 0.05152544021606445, 0.050985153198242185, 0.05161471939086914, 0.05129523086547851, 0.051373184204101564, 0.05153606414794922, 0.05121299362182617, 0.05156454467773437, 0.051353599548339846, 0.05142118453979492, 0.05154598236083984, 0.05165478515625, 0.05114879989624024, 0.05164976119995117, 0.05175321578979492, 0.051692062377929685, 0.05210726547241211, 0.051515392303466793, 0.05177958297729492, 0.05141708755493164, 0.05184102249145508, 0.051320831298828126, 0.05166223907470703, 0.051937889099121094, 0.05170771026611328, 0.05191289520263672, 0.05142937469482422, 0.05192499160766602, 0.051714046478271485, 0.052004863739013675, 0.05198393630981445, 0.05170016098022461, 0.052365310668945314, 0.05182799911499023, 0.0515868148803711, 0.05163897705078125, 0.05211142349243164, 0.0522437744140625, 0.05207542419433594, 0.05230182266235352, 0.051787551879882814, 0.05195798492431641, 0.05216211318969727, 0.05207459259033203, 0.05225676727294922, 0.05220521545410156, 0.051890880584716796, 0.05287606430053711, 0.051294208526611325, 0.050816158294677734, 0.051149566650390624, 0.050939998626708984, 0.051306079864501954, 0.05080105590820312, 0.0509378547668457, 0.05095401763916016, 0.05128812789916992, 0.0511899185180664, 0.05136383819580078, 0.05110940933227539, 0.0511058235168457, 0.05144601440429687, 0.05125068664550781, 0.05145052719116211, 0.0514716796875, 0.05143619155883789, 0.051775585174560546, 0.05157680130004883, 0.05101772689819336, 0.05122662353515625, 0.05148672103881836, 0.05161743927001953, 0.0514727668762207, 0.051518753051757814, 0.05173728179931641, 0.05132051086425781, 0.05171027374267578, 0.051195903778076174, 0.05136383819580078, 0.051324161529541015, 0.051792545318603514, 0.05176073455810547, 0.05149542236328125, 0.05212723159790039, 0.051829246520996096, 0.05201919937133789, 0.05171571350097656, 0.051416641235351564, 0.05189510345458984, 0.05168896102905273, 0.051632640838623046, 0.051501056671142575, 0.05199052810668945, 0.05150825500488281, 0.051993217468261715, 0.051990016937255856, 0.05181647872924805, 0.05223507308959961, 0.05187740707397461, 0.05209750366210938, 0.05166396713256836, 0.05220854568481445, 0.05216665649414062, 0.052077632904052734, 0.05255260848999024, 0.052262943267822264, 0.052111358642578126, 0.05175001525878906, 0.051954177856445315, 0.05209740829467773, 0.05279948806762695, 0.051046398162841795, 
0.05065324783325195, 0.05105430221557617, 0.05098905563354492, 0.05101385498046875, 0.05101772689819336, 0.05124710464477539, 0.05101059341430664, 0.05128895950317383, 0.050835071563720705, 0.051122657775878905, 0.05133926391601563, 0.05130649566650391, 0.051714046478271485, 0.05122396850585938, 0.05142297744750977, 0.05143228912353515, 0.05133724975585938, 0.05158031845092773, 0.051288673400878906, 0.050853790283203124, 0.05109049606323242, 0.05151795196533203, 0.05162035369873047, 0.051795391082763674, 0.05164908981323242, 0.05165670394897461, 0.05139990234375, 0.051405376434326175, 0.05122022247314453, 0.051660865783691404, 0.051941791534423826, 0.05184716796875, 0.051781375885009764, 0.051276031494140624, 0.05180825424194336, 0.05130649566650391, 0.05188608169555664, 0.05187091064453125, 0.05176755142211914, 0.0520948486328125, 0.05171065521240235, 0.051986431121826174, 0.05175436782836914, 0.05184774398803711, 0.05176435089111328, 0.051856319427490236, 0.05205526351928711, 0.051585823059082034, 0.052103168487548826, 0.05170156860351562, 0.05212588882446289, 0.052156257629394534, 0.05187152099609375, 0.052687232971191406, 0.05224038314819336, 0.052029441833496094, 0.05222528076171875, 0.05223705673217773, 0.05209206390380859, 0.05212793731689453, 0.052318878173828125, 0.05280153656005859, 0.051046783447265626, 0.050757312774658205, 0.05133760070800781, 0.05126569747924805, 0.051261566162109376, 0.05073894500732422, 0.05130047988891601, 0.050743297576904295, 0.05110784149169922, 0.051296257019042966, 0.05122566223144531, 0.051641281127929685, 0.05118361663818359, 0.05146182250976562, 0.05104671859741211, 0.05152716827392578, 0.05114316940307617, 0.05148665618896484, 0.05125120162963867, 0.05121440124511719, 0.05152127838134766, 0.051552513122558596, 0.05172016143798828, 0.05158639907836914, 0.0516420783996582, 0.051104736328125, 0.05165878295898438, 0.051206111907958984, 0.05147443389892578, 0.05173964691162109, 0.05146726226806641, 0.05174639892578125, 0.05168988800048828, 0.051918846130371094, 0.05183488082885742, 0.05179391860961914, 0.05144371032714844, 0.051444862365722654, 0.051821441650390626, 0.05168694305419922, 0.0519334716796875, 0.051603168487548826, 0.05191932678222656, 0.051714046478271485, 0.051988479614257815, 0.05201919937133789, 0.05171814346313477, 0.05243289566040039, 0.051608734130859375, 0.05180633544921875, 0.051650432586669924, 0.052244449615478514, 0.05210611343383789, 0.05217839813232422, 0.05240057754516601, 0.05168751907348633, 0.052348926544189454, 0.05228470230102539, 0.052003551483154296, 0.05229919815063477, 0.052120128631591794, 0.05201919937133789, 0.052746238708496096, 0.05105244827270508, 0.05054665756225586, 0.05090111923217774, 0.05061017608642578, 0.05127104187011719, 0.05095196914672852, 0.050864192962646486, 0.05113935852050781, 0.05160332870483399, 0.051609729766845705, 0.05144985580444336, 0.05142323303222656, 0.05122457504272461, 0.051212287902832034, 0.05103363037109375, 0.05141449737548828, 0.05117567825317383, 0.05147721481323242, 0.05157891082763672, 0.05134643173217773, 0.051528446197509764, 0.05123696136474609, 0.05127388763427734, 0.051138561248779295, 0.051652416229248044, 0.051189952850341794, 0.05127372741699219, 0.05127782440185547, 0.05197113418579102, 0.05190335845947266, 0.05185747146606445, 0.051660030364990235, 0.05174502563476562, 0.05184880065917969, 0.05148969650268555, 0.051556385040283204, 0.05144518280029297, 0.05182313537597656, 0.05193318557739258, 0.05165260696411133, 0.05198220825195313, 0.051702911376953126, 
0.05196083068847656, 0.05144771194458008, 0.05173244857788086, 0.051748992919921875, 0.05202272033691406, 0.051979904174804685, 0.05213689422607422, 0.05235263824462891, 0.05209868621826172, 0.05211417770385742, 0.05211340713500977, 0.051988479614257815, 0.05215436935424805, 0.05180767822265625, 0.05223225784301758, 0.05220403289794922, 0.05194268798828125, 0.05246550369262695, 0.052150657653808594, 0.05193164825439453, 0.05289871978759766, 0.05139427185058594, 0.05124739074707031, 0.05094150543212891, 0.05120172882080078, 0.05102588653564453, 0.051305248260498044, 0.05102592086791992, 0.05134092712402344, 0.05099292755126953, 0.05114255905151367, 0.05127648162841797, 0.05112556838989258, 0.0513825912475586, 0.05141132736206055, 0.05117747116088867, 0.05126553726196289, 0.05165654373168945, 0.05125446319580078, 0.051563488006591794, 0.051136417388916014, 0.05154150390625, 0.051542625427246094, 0.05126505661010742, 0.05151587295532226, 0.0513966064453125, 0.05165811157226562, 0.05116175842285156, 0.05138991928100586, 0.051507198333740234, 0.05166745758056641, 0.051568641662597656, 0.05195916748046875, 0.05180070495605469, 0.051500446319580076, 0.05146825790405273, 0.051210880279541016, 0.05192499160766602, 0.05165465545654297, 0.05191999816894531, 0.0518869743347168, 0.05165052795410156, 0.05227727890014648, 0.051955711364746096, 0.05194659042358398, 0.05188406372070312, 0.052380542755126956, 0.052152191162109375, 0.05181248092651367, 0.052094974517822266, 0.05168649673461914, 0.05211433410644531, 0.052077983856201174, 0.051847774505615236, 0.05188528060913086, 0.05173123168945312, 0.05200838470458984, 0.05184524917602539, 0.05223574447631836, 0.05222870254516602, 0.05229369735717773, 0.05263600158691406, 0.05187343978881836, 0.053098846435546875, 0.05140889739990234, 0.0510013427734375, 0.05093075180053711, 0.051033023834228516, 0.05122048187255859, 0.05099657440185547, 0.05121295928955078, 0.050733055114746094, 0.05112329483032227, 0.050890785217285156, 0.05132918548583985, 0.05107328033447266, 0.05121481704711914, 0.05096857452392578, 0.0514252815246582, 0.051593215942382815, 0.05159526443481445, 0.05149603271484375, 0.051192737579345705, 0.051535007476806644, 0.05126947021484375, 0.051128704071044924, 0.05111417770385742, 0.051577278137207035, 0.051691520690917966, 0.051713729858398436, 0.05202902221679687, 0.05146083068847656, 0.05156249618530274, 0.05172540664672851, 0.05151631927490234, 0.05116108703613281, 0.05177695846557617, 0.05175558471679687, 0.05159280014038086, 0.051931488037109376, 0.05174483108520508, 0.05191452789306641, 0.051697887420654294, 0.052013057708740235, 0.05200076675415039, 0.0517400016784668, 0.05190310287475586, 0.051500926971435546, 0.05180185699462891, 0.051681697845458986, 0.05183488082885742, 0.05194956970214844, 0.05206016159057617, 0.05186073684692383, 0.05209312057495117, 0.05211958312988281, 0.05162652969360351, 0.05221152114868164, 0.05222195053100586, 0.05223241424560547, 0.05257164764404297, 0.05201708984375, 0.05231024169921875, 0.05217516708374023, 0.05200691223144531, 0.052436798095703126, 0.05276467132568359, 0.05110105514526367, 0.05080070495605469, 0.050976478576660156, 0.05084364700317383, 0.051294944763183595, 0.051046527862548825, 0.05127782440185547, 0.05129171371459961, 0.05132128143310547, 0.051707904815673826, 0.051436798095703125, 0.05141376113891601, 0.05105254364013672, 0.05145526504516602, 0.05119811248779297, 0.05129888153076172, 0.05109145736694336, 0.05153545761108398, 0.05153833770751953, 0.051310592651367185, 
0.0519595832824707, 0.05142095947265625, 0.051380542755126955, 0.051694942474365235, 0.05165881729125977, 0.05146847915649414, 0.05126361465454102, 0.05171651077270508, 0.051775489807128906, 0.051680446624755856, 0.0519277458190918, 0.05184115219116211, 0.05122969436645508, 0.05158556747436523, 0.05161590576171875, 0.051848575592041014, 0.05189270401000977, 0.051648990631103515, 0.05223014450073242, 0.05178275299072266, 0.05197043228149414, 0.05195328140258789, 0.051882911682128906, 0.052365310668945314, 0.051891902923583984, 0.05175475311279297, 0.051363807678222656, 0.052014976501464846, 0.05183766555786133, 0.052148193359375, 0.052137119293212894, 0.052015998840332034, 0.05220284652709961, 0.05192508697509766, 0.051896705627441406, 0.052195518493652344, 0.05216435241699219, 0.052045440673828124, 0.052265247344970706, 0.05220991897583008, 0.052024799346923827, 0.05239817428588867, 0.05287996673583984, 0.051310592651367185, 0.05120198440551758, 0.05117958450317383, 0.051501056671142575, 0.051351551055908204, 0.05105868911743164, 0.051305950164794924, 0.05078796768188477, 0.05112105560302734, 0.051097599029541016, 0.05139865493774414, 0.05120323181152344, 0.05105750274658203, 0.05150310516357422, 0.051480289459228515, 0.05170000076293945, 0.051596416473388675, 0.051553150177001954, 0.05113407897949219, 0.05154991912841797, 0.051100318908691406, 0.0515494384765625, 0.051589889526367186, 0.05142323303222656, 0.05161369705200195, 0.05140383911132813, 0.05136671829223633, 0.05119968032836914, 0.051448257446289065, 0.051713951110839845, 0.05147046279907227, 0.05175398254394531, 0.05178822326660156, 0.051633953094482425, 0.0515404167175293, 0.05192736053466797, 0.051736030578613285, 0.051980289459228515, 0.05200467300415039, 0.05171273422241211, 0.052350334167480465, 0.05216444778442383, 0.052056865692138674, 0.05153302383422852, 0.05170012664794922, 0.052066688537597654, 0.051908576965332034, 0.05210524749755859, 0.05189427185058594, 0.052142078399658204, 0.05161062240600586, 0.0519681282043457, 0.05221625518798828, 0.05201331329345703, 0.05223632049560547, 0.0519530258178711, 0.05240911865234375, 0.052152320861816405, 0.052144126892089845, 0.05237926483154297, 0.05216704177856445, 0.05210076904296875, 0.053418014526367186, 0.0510648307800293, 0.05097411346435547, 0.05076028823852539, 0.05126057434082031, 0.05093404769897461, 0.051282112121582034, 0.050859519958496094, 0.05123158264160156, 0.05115087890625, 0.051374015808105467, 0.05139462280273437, 0.05144553756713867, 0.051789249420166016, 0.05125392150878906, 0.05141107177734375, 0.05118975830078125, 0.05102182388305664, 0.05116867065429687, 0.051477088928222656, 0.051244480133056644, 0.051536094665527346, 0.05171577453613281, 0.05151401519775391, 0.051636062622070315, 0.05128179168701172, 0.05168563079833984, 0.051286048889160156, 0.051707904815673826, 0.051329025268554686, 0.05179190444946289, 0.0517529296875, 0.051797439575195316, 0.052152896881103514, 0.05166694259643555, 0.0518389778137207, 0.05145600128173828, 0.0518553581237793, 0.0522239990234375, 0.051625694274902344, 0.05203587341308594, 0.05172220611572265, 0.051951648712158204, 0.05135699081420898, 0.051552127838134766, 0.051974849700927736, 0.051856510162353514, 0.05204851150512695, 0.051714431762695315, 0.052203521728515626, 0.051778881072998044, 0.051974849700927736, 0.05228950500488281, 0.05211958312988281, 0.05229363250732422, 0.05203353500366211, 0.05184441757202148, 0.0519785270690918, 0.05230838394165039, 0.0525656623840332, 0.05254095840454102, 0.05223651123046875, 
0.05181500625610352]",tokens/s,19.36222706414418,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4595.003392,7907.24608,0.0,7511.998464,6895.682048,s,1,12.65923046875,12.65923046875,0.0,12.65923046875,12.65923046875,12.65923046875,12.65923046875,[12.65923046875],,kWh,0.00016308059054586011,1.798142303511062e-05,6.664338664802427e-05,0.000247705400228995,,MB,2460.782592,7921.926144,0.0,7514.095616,6822.139904,s,10,29.046709716796876,2.9046709716796877,0.0033588276174065443,2.9054093017578126,2.9084703857421874,2.9084865356445313,2.9084994555664063,"[2.89770849609375, 2.900326416015625, 2.9035615234375, 
2.90337646484375, 2.904469970703125, 2.9063486328125, 2.908466796875, 2.90701220703125, 2.908502685546875, 2.9069365234375]",tokens/s,88.13390655808516,kWh,8.473676948625477e-05,9.34597383415778e-06,5.638348955120387e-05,0.0001504662328716164,tokens/kWh,1701378.4097221938,MB,2465.046528,7926.120448,0.0,7516.192768,6822.142464,s,10,21.404966064453124,2.1404966064453124,0.02118520790531577,2.1430899658203124,2.1637234375000003,2.1649804931640624,2.1659861376953122,"[2.11697216796875, 2.106642578125, 2.12462939453125, 2.160172119140625, 2.15317333984375, 2.166237548828125, 2.159627685546875, 2.121060546875, 2.163444091796875, 2.133006591796875]",tokens/s,29.4324222754191,kWh,6.286705930166284e-05,6.935121888016265e-06,4.151392209999494e-05,0.00011131610328967404,tokens/kWh,565955.8512936558,,s,630,21.402253734588616,0.033971831324743844,0.0005589987614556571,0.034013088226318364,0.03447806396484375,0.034646830940246585,0.035267693634033204,"[0.03500239944458008, 0.03394144058227539, 0.03366620635986328, 0.03341747283935547, 0.033616416931152346, 0.03364054489135742, 0.03351551818847656, 0.033586849212646486, 0.03360726547241211, 0.03352576065063476, 0.034168865203857424, 0.03337862396240234, 0.03338668823242188, 0.033340705871582034, 0.033645503997802736, 0.03369574356079102, 0.0336363525390625, 0.033441150665283206, 0.03334563064575195, 0.03340547180175781, 0.03340185546875, 0.03329945755004883, 0.03320832061767578, 0.033173503875732424, 0.03340288162231445, 0.033323009490966796, 0.03328204727172852, 0.03341519927978515, 0.033456096649169924, 0.033484798431396484, 0.034079936981201174, 0.033421535491943356, 0.033280609130859375, 0.03350223922729492, 0.0333076171875, 0.03377897644042969, 0.03347324752807617, 0.0335810546875, 0.03337625503540039, 0.03459619140625, 0.03340118408203125, 0.03331439971923828, 0.03341107177734375, 0.03323065567016602, 0.033530208587646486, 0.033605728149414066, 0.03351542282104492, 0.03338246536254883, 0.033388641357421874, 0.03365478515625, 0.03358761596679687, 0.03325360107421875, 0.033255199432373046, 0.033560192108154294, 0.03408268737792969, 0.03397011184692383, 0.03395593643188476, 0.03388991928100586, 0.03411235046386719, 0.03408224105834961, 0.034027584075927736, 0.03391670227050781, 0.033790687561035156, 0.034800510406494144, 0.03368255996704102, 0.03335667037963867, 0.033298431396484376, 0.03364195251464844, 0.033245471954345705, 0.033198558807373046, 0.03317465591430664, 0.033813152313232425, 0.03335145568847656, 0.033234622955322264, 0.0331822395324707, 0.03349094390869141, 0.03321446228027344, 0.03327366256713867, 0.033530048370361325, 0.033298431396484376, 0.03320611190795898, 0.03327606582641601, 0.0332674560546875, 0.03336832046508789, 0.033308734893798826, 0.03336150360107422, 0.03311840057373047, 0.033495201110839846, 0.03314412689208984, 0.033077953338623046, 0.0331960334777832, 0.033142784118652346, 0.03322265625, 0.033083393096923826, 0.033157119750976564, 0.03315017700195313, 0.03301011276245117, 0.033028030395507814, 0.03307347106933594, 0.03324937438964844, 0.033658878326416015, 0.03389158248901367, 0.03388083267211914, 0.03379715347290039, 0.033745697021484375, 0.033720512390136716, 0.033866817474365235, 0.03376022338867188, 0.033734622955322265, 0.03421388626098633, 0.03343065643310547, 0.03345878219604492, 0.03365827178955078, 0.03370470428466797, 0.03340505599975586, 0.033853214263916014, 0.033265888214111326, 0.033337249755859374, 0.033314910888671875, 0.033355777740478515, 0.033232288360595705, 0.033265758514404296, 
0.03398015975952148, 0.03367808151245117, 0.03344384002685547, 0.03303807830810547, 0.034748382568359375, 0.033763263702392576, 0.033626590728759766, 0.033559776306152346, 0.03338729476928711, 0.03365228652954102, 0.03406480026245117, 0.033740833282470704, 0.03383500671386719, 0.03405171203613281, 0.03426342391967773, 0.034108638763427734, 0.03425769424438477, 0.03384921646118164, 0.03382476806640625, 0.03382284927368164, 0.0339865608215332, 0.0338034553527832, 0.03413423919677734, 0.03358371353149414, 0.03375513458251953, 0.0335994873046875, 0.03350515365600586, 0.03337638473510742, 0.03360153579711914, 0.03346825790405274, 0.033829025268554684, 0.03375667190551758, 0.0338600959777832, 0.033849342346191406, 0.033392929077148435, 0.03341299057006836, 0.0332770881652832, 0.03326393508911133, 0.03324720001220703, 0.03319030380249023, 0.033339393615722655, 0.0333496322631836, 0.03418931198120117, 0.03362166213989258, 0.03379808044433594, 0.03333161544799805, 0.033468414306640625, 0.0334881591796875, 0.033321567535400394, 0.033546302795410154, 0.033475936889648436, 0.03345072174072266, 0.03414425659179687, 0.03382681655883789, 0.033753089904785157, 0.03386368179321289, 0.03379404830932617, 0.03388809585571289, 0.03403174209594727, 0.03401299285888672, 0.03385718536376953, 0.03389702224731445, 0.03395993423461914, 0.033678558349609374, 0.03373136138916016, 0.03354982376098633, 0.03354201507568359, 0.035786014556884765, 0.03423100662231445, 0.03401657485961914, 0.03455244827270508, 0.03406649780273437, 0.03456800079345703, 0.0344384651184082, 0.03427587127685547, 0.03409340667724609, 0.03433420944213867, 0.034294143676757816, 0.034574462890625, 0.03500236892700195, 0.0343548469543457, 0.03414255905151367, 0.03418316650390625, 0.03416883087158203, 0.033957889556884766, 0.033973758697509765, 0.03408537673950195, 0.03388838577270508, 0.03391068649291992, 0.03388412857055664, 0.0340206413269043, 0.033946334838867186, 0.03417292785644531, 0.033947647094726564, 0.034136062622070314, 0.03417497634887695, 0.03402956771850586, 0.03401318359375, 0.03425075149536133, 0.0341044807434082, 0.03409187316894531, 0.03421388626098633, 0.034228446960449216, 0.03438979339599609, 0.03433062362670898, 0.0344268798828125, 0.03434451293945313, 0.03440796661376953, 0.03431673431396484, 0.034689502716064455, 0.034236415863037106, 0.034103294372558594, 0.034275169372558596, 0.034412254333496095, 0.03442678451538086, 0.03449091339111328, 0.03430611038208008, 0.03431612777709961, 0.034340190887451175, 0.03488790512084961, 0.03427824020385742, 0.034246047973632815, 0.03431248092651367, 0.03440822219848633, 0.03461539077758789, 0.034121856689453126, 0.03424611282348633, 0.03414435195922851, 0.03420182418823242, 0.034466014862060544, 0.03509657669067383, 0.03429580688476563, 0.0342852783203125, 0.03396636962890625, 0.0339947509765625, 0.0340555534362793, 0.03386841583251953, 0.0337367057800293, 0.033898239135742185, 0.03400518417358398, 0.03394566345214844, 0.0344637451171875, 0.034277374267578126, 0.034097152709960936, 0.03411372756958008, 0.034033470153808594, 0.03400646209716797, 0.033965694427490235, 0.03376224136352539, 0.03387360000610352, 0.03388857650756836, 0.033828414916992185, 0.033882080078125, 0.033699745178222655, 0.0337632942199707, 0.03363849639892578, 0.03452320098876953, 0.034268703460693356, 0.03417161560058594, 0.033990081787109376, 0.034003711700439455, 0.034203647613525394, 0.03412335968017578, 0.0339890251159668, 0.0340805778503418, 0.03506563186645508, 0.034006561279296875, 0.03407478332519531, 
0.03404873657226563, 0.03421990585327148, 0.034578144073486326, 0.03410163116455078, 0.03410943984985351, 0.034240543365478514, 0.03424870300292969, 0.03414425659179687, 0.034514942169189454, 0.03416223907470703, 0.03424489593505859, 0.034313377380371095, 0.03430047988891602, 0.03448428726196289, 0.03447356796264649, 0.03429024124145508, 0.034212062835693356, 0.034551807403564457, 0.034344959259033206, 0.03435696029663086, 0.03435507202148438, 0.034292129516601565, 0.034408447265625, 0.03464614486694336, 0.03427622222900391, 0.035020225524902346, 0.034320960998535155, 0.034176097869873044, 0.034210399627685545, 0.03420191955566406, 0.0341514892578125, 0.034212799072265626, 0.03452844619750976, 0.03414422225952148, 0.034810718536376954, 0.03463359832763672, 0.034238174438476564, 0.03423478317260742, 0.034204830169677736, 0.03441712188720703, 0.03424499130249024, 0.03424870300292969, 0.034307903289794925, 0.03428575897216797, 0.03427689743041992, 0.03447382354736328, 0.03422793579101562, 0.03438454437255859, 0.034253150939941406, 0.03440630340576172, 0.034309825897216796, 0.034277694702148434, 0.03420310211181641, 0.03409564971923828, 0.03422412872314453, 0.03437977600097656, 0.034549758911132815, 0.03411084747314453, 0.034284160614013674, 0.03435715103149414, 0.03432457733154297, 0.03454313659667969, 0.03427779388427735, 0.03461536026000977, 0.03445753479003906, 0.034412609100341794, 0.03445935821533203, 0.0352770881652832, 0.03451084899902344, 0.034616992950439456, 0.034385696411132816, 0.034259201049804684, 0.034355518341064456, 0.03447760009765625, 0.03484924697875977, 0.03433881759643555, 0.03432447814941406, 0.03434249496459961, 0.03473859024047852, 0.03430160140991211, 0.03440470504760742, 0.03440367889404297, 0.034390304565429686, 0.03426134490966797, 0.034231903076171875, 0.03420204925537109, 0.03428761672973633, 0.034460800170898434, 0.035250175476074216, 0.03441766357421875, 0.03496243286132812, 0.034408481597900394, 0.03512112045288086, 0.03460505676269531, 0.03437158584594727, 0.034320384979248046, 0.03424665451049805, 0.03441209411621094, 0.03436793518066406, 0.03423231887817383, 0.034231681823730466, 0.03446028900146485, 0.03427840042114258, 0.034229248046875, 0.03417292785644531, 0.034328575134277346, 0.03453952026367187, 0.0343361930847168, 0.03434092712402344, 0.034867710113525394, 0.034515071868896484, 0.03443699264526367, 0.03407417678833008, 0.03434118270874023, 0.034395870208740235, 0.03437609481811523, 0.03422003173828125, 0.034364513397216793, 0.03427612686157227, 0.03601011276245117, 0.03429897689819336, 0.034182048797607424, 0.0342999038696289, 0.03428054428100586, 0.03455683135986328, 0.03428665542602539, 0.034149375915527344, 0.03411289596557617, 0.03457622528076172, 0.03435977554321289, 0.03449472045898438, 0.03427503967285156, 0.034224414825439455, 0.03440550231933594, 0.034380672454833984, 0.03460710525512695, 0.03420774459838867, 0.03396588897705078, 0.03401110458374024, 0.03387321472167969, 0.03392399978637695, 0.03374899291992187, 0.03348876953125, 0.03364672088623047, 0.0332042236328125, 0.03327910232543945, 0.033361824035644534, 0.03328464126586914, 0.034138240814208985, 0.03478764724731445, 0.03347568130493164, 0.03527484893798828, 0.034372222900390624, 0.03425804901123047, 0.03433561706542969, 0.03430604934692383, 0.03477443313598633, 0.03472035217285156, 0.03597942352294922, 0.034928478240966794, 0.033920448303222654, 0.0339359359741211, 0.03390780639648437, 0.03376118469238281, 0.03358924865722656, 0.033459201812744144, 0.03343324661254883, 
0.033487201690673825, 0.03333740615844726, 0.03357894515991211, 0.03329036712646485, 0.03357430267333984, 0.03333091354370117, 0.03328623962402344, 0.03325404739379883, 0.033247230529785156, 0.033271583557128906, 0.03329782485961914, 0.03331676864624023, 0.033395614624023434, 0.033500385284423825, 0.03346518325805664, 0.03352681732177734, 0.03366089630126953, 0.03336521530151367, 0.03365859222412109, 0.03371417617797851, 0.033355777740478515, 0.03434617614746094, 0.033365985870361325, 0.033410049438476565, 0.03376726531982422, 0.03330780792236328, 0.033129310607910155, 0.03342131042480469, 0.03334348678588867, 0.03330188751220703, 0.03348912048339844, 0.03327632141113281, 0.03312998580932617, 0.03342489624023438, 0.033842174530029294, 0.033183135986328126, 0.033269824981689455, 0.03313308715820312, 0.03321673583984375, 0.034359073638916014, 0.033562625885009766, 0.033490177154541015, 0.03346022415161133, 0.0333924789428711, 0.0333034553527832, 0.03336732864379883, 0.033638816833496094, 0.03481068801879883, 0.03409036636352539, 0.034017566680908204, 0.03417478561401367, 0.03442073440551758, 0.03409952163696289, 0.034328800201416015, 0.034295646667480466, 0.03442617416381836, 0.03410409545898437, 0.03448223876953125, 0.03413804626464844, 0.03417007827758789, 0.0343704948425293, 0.034231937408447266, 0.03426537704467773, 0.03427939224243164, 0.03417910385131836, 0.03410662460327148, 0.03482495880126953, 0.034123966217041016, 0.03419472122192383, 0.03399939346313476, 0.03383295822143555, 0.03394124984741211, 0.03405235290527344, 0.03406380844116211, 0.034056640625, 0.034081920623779294, 0.034339839935302735, 0.03422563171386719, 0.03438617706298828, 0.03414863967895508, 0.03458854293823242, 0.03447001647949219, 0.034359294891357424, 0.034285568237304685, 0.03430099105834961, 0.03416979217529297, 0.034362400054931644, 0.03420428848266602, 0.03404947280883789, 0.03400182342529297, 0.03416438293457031, 0.03437807846069336, 0.03464739227294922, 0.03425347137451172, 0.03416585540771484, 0.034460575103759765, 0.03450470352172851, 0.03418931198120117, 0.034317375183105465, 0.03456857681274414, 0.03444601440429688, 0.03466227340698242, 0.034187137603759764, 0.033963329315185545, 0.03397046279907227, 0.04120630264282227, 0.03450998306274414, 0.03369424057006836, 0.033357440948486326, 0.03351827239990234, 0.03487311935424805, 0.03375740814208984, 0.03383859252929688, 0.03356313705444336, 0.03348070526123047, 0.03325279998779297, 0.03333283233642578, 0.03326819229125977, 0.0334730224609375, 0.033500831604003904, 0.0336530876159668, 0.03349708938598633, 0.0335093765258789, 0.03338854217529297, 0.034167999267578124, 0.0334587516784668, 0.03346604919433594, 0.034011390686035155, 0.03369929504394531, 0.03356665420532227, 0.033344223022460935, 0.03343584060668945, 0.0337367057800293, 0.033533119201660154, 0.033741184234619144, 0.033837505340576175, 0.03388393783569336, 0.0338741455078125, 0.03387801742553711, 0.03371942520141601, 0.03374310302734375, 0.033954433441162106, 0.03439804840087891, 0.03408707046508789, 0.034238208770751954, 0.03411299133300781, 0.03413190460205078, 0.03420560073852539, 0.034344928741455075, 0.034227169036865235, 0.034256481170654295, 0.03434947204589844, 0.03528511810302734, 0.034396095275878905, 0.03398630523681641, 0.03406988906860352, 0.034286529541015624, 0.03405401611328125, 0.03389004898071289, 0.03397228622436523, 0.033695934295654296, 0.03362319946289063, 0.03382563018798828, 0.03387302398681641, 0.033825248718261716, 0.03382236862182617, 0.03356121444702148, 
0.03360371017456055, 0.03361587142944336, 0.03378995132446289, 0.03385331344604492, 0.034087039947509765, 0.03377318572998047]",tokens/s,29.436152276890542,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,11118.22336,14586.6752,0.0,14191.427584,13325.783552,s,1,18.100052734375,18.100052734375,0.0,18.100052734375,18.100052734375,18.100052734375,18.100052734375,[18.100052734375],,kWh,0.00032731011127499793,3.609732983239266e-05,0.00014919789713599418,0.0005126053382433848,,MB,2054.074368,14599.258112,0.0,14191.427584,12591.007744,s,10,93.63611035156251,9.36361103515625,0.0020158699324482636,9.36380419921875,9.36573466796875,9.366105126953125,9.366401494140625,"[9.3626689453125, 9.362556640625, 9.3612734375, 9.360896484375, 9.3611318359375, 9.36565234375, 9.364939453125, 9.365037109375, 9.365478515625, 9.3664755859375]",tokens/s,27.339879779161304,kWh,0.0002731969215091669,3.0134654500767466e-05,0.00018165378421180003,0.0004849853602217344,tokens/kWh,527850.9847863393,MB,2060.218368,14599.258112,0.0,14191.427584,12934.19776,s,10,43.45631689453125,4.345631689453125,0.003924766702764901,4.344840576171874,4.35171328125,4.351717236328125,4.351720400390626,"[4.3444501953125, 4.34523095703125, 4.34166162109375, 4.340126953125, 4.34248779296875, 4.3425625, 4.34802001953125, 4.35171240234375, 4.34834326171875, 4.35172119140625]",tokens/s,14.497316961513649,kWh,0.00012696021212291729,1.4005015315773754e-05,8.450056760039989e-05,0.00022546579503909082,tokens/kWh,279421.5414762899,,s,630,43.452540962219246,0.06897228724161784,0.0005116757379802847,0.06898217391967773,0.06963376541137696,0.0697523811340332,0.07028416740417481,"[0.07021110534667968, 0.06841190338134766, 
0.06775328063964844, 0.06799836730957032, 0.0680796127319336, 0.06837203216552734, 0.06838931274414063, 0.06815478515625, 0.06797523498535156, 0.06823990631103516, 0.0686204833984375, 0.06868716430664062, 0.06849177551269531, 0.06866738891601562, 0.06910313415527344, 0.06851181030273437, 0.06861020660400391, 0.06846208190917968, 0.0685739517211914, 0.06850764465332031, 0.0688493423461914, 0.06874317169189453, 0.06875481414794922, 0.06890386962890625, 0.06897241973876952, 0.06911347198486328, 0.0686556167602539, 0.06891315460205077, 0.06926950073242187, 0.06906470489501954, 0.06896633911132813, 0.0689767074584961, 0.06913024139404297, 0.06895938873291016, 0.0689303970336914, 0.06890086364746094, 0.06876096343994141, 0.06893350219726563, 0.06906259155273438, 0.0689755859375, 0.06918128204345703, 0.06931686401367188, 0.06910950469970703, 0.06900691223144531, 0.06898323059082032, 0.06907698822021484, 0.06951321411132813, 0.06929996490478516, 0.06898700714111328, 0.06901773071289062, 0.06913843536376953, 0.06928998565673829, 0.0690871353149414, 0.06985282897949219, 0.06942355346679688, 0.0694128646850586, 0.06966886138916016, 0.0696463394165039, 0.06963350677490235, 0.06954415893554687, 0.06991686248779297, 0.06968742370605469, 0.06965414428710938, 0.07080188751220703, 0.06823321533203125, 0.0677027816772461, 0.06811750030517578, 0.06774681854248046, 0.06848477172851562, 0.06858377838134766, 0.06816563415527344, 0.06829603576660156, 0.06853849792480468, 0.0689874267578125, 0.06848716735839844, 0.06823321533203125, 0.06833971405029297, 0.06899830627441406, 0.06875392150878906, 0.06843631744384765, 0.0684656982421875, 0.06835913848876952, 0.06884761810302735, 0.06873673248291015, 0.06884905242919923, 0.06856924438476562, 0.06877046203613281, 0.0688128662109375, 0.06898057556152344, 0.06881705474853515, 0.0689459228515625, 0.06911328125, 0.06860591888427735, 0.06891375732421876, 0.06860594940185546, 0.06852819061279297, 0.06892537689208984, 0.06888784027099609, 0.06910851287841797, 0.06886531066894531, 0.06934185791015625, 0.06953164672851563, 0.06935884857177735, 0.06928665924072265, 0.06925721740722657, 0.06966595458984375, 0.0692457275390625, 0.06918985748291015, 0.06931177520751954, 0.0692455062866211, 0.06962515258789062, 0.0691792984008789, 0.06921705627441406, 0.0692838363647461, 0.06964979553222657, 0.06942076873779297, 0.06925212860107421, 0.06970918273925782, 0.06998067474365234, 0.06954598236083985, 0.06941801452636719, 0.0690739517211914, 0.06934630584716797, 0.06940153503417969, 0.06919577789306641, 0.06955411529541015, 0.0703597412109375, 0.06841753387451172, 0.06867356872558594, 0.06779824066162109, 0.06808425903320313, 0.06829395294189453, 0.06835238647460938, 0.06868870544433593, 0.06826512145996094, 0.06838944244384766, 0.06848636627197266, 0.0689142074584961, 0.06865382385253906, 0.06874332427978516, 0.06898111724853516, 0.06910205078125, 0.06861619567871094, 0.06815948486328124, 0.06830054473876954, 0.06850774383544922, 0.06821699523925781, 0.06869529724121094, 0.06850153350830078, 0.0682298583984375, 0.0685998077392578, 0.06890419006347656, 0.06892435455322266, 0.06910342407226562, 0.06894544219970702, 0.06910559844970703, 0.06880719757080078, 0.06861004638671875, 0.06890850830078125, 0.06877347564697266, 0.06857823944091797, 0.06872182464599609, 0.06897135925292969, 0.06902793884277343, 0.06892124938964844, 0.06915424346923828, 0.06894445037841797, 0.06917475128173828, 0.0690898208618164, 0.06942515563964843, 0.06920915222167968, 0.06907177734375, 0.06913632202148437, 
0.06896649932861328, 0.06908502197265624, 0.06924918365478516, 0.06907289886474609, 0.06903990173339844, 0.06932819366455079, 0.06927657318115234, 0.06951321411132813, 0.06953778839111328, 0.0694775390625, 0.06959804534912109, 0.06952754974365234, 0.06980812835693359, 0.06912627410888672, 0.06965638732910157, 0.06952543640136719, 0.07057859039306641, 0.0686286392211914, 0.06774153900146485, 0.06746173095703124, 0.06849740600585938, 0.06850355529785156, 0.06833974456787109, 0.06820787048339844, 0.06819843292236329, 0.06828707122802734, 0.06850160217285156, 0.06827552032470703, 0.06829119873046875, 0.0687043228149414, 0.06885078430175781, 0.06884444427490234, 0.06808499145507813, 0.06842985534667968, 0.06825244903564454, 0.06872073364257812, 0.06868889617919922, 0.06858223724365234, 0.06858751678466797, 0.06873897552490234, 0.0687096939086914, 0.06885865783691406, 0.06870832061767577, 0.06884150695800781, 0.06877536010742187, 0.06878470611572265, 0.06880802917480469, 0.0690264663696289, 0.06857341003417969, 0.06863235473632813, 0.06881622314453124, 0.06857724761962891, 0.06846125030517577, 0.06895206451416015, 0.0685869140625, 0.06914054107666015, 0.06884143829345703, 0.06930899047851563, 0.06946771240234376, 0.0691655044555664, 0.06973235321044922, 0.06957615661621094, 0.0692741470336914, 0.06934073638916016, 0.06920646667480469, 0.06895616149902344, 0.06909677124023438, 0.06906745910644531, 0.06947840118408204, 0.06985533142089843, 0.06956022644042968, 0.06929203033447266, 0.06915058898925781, 0.06965055847167968, 0.06948454284667968, 0.06959327697753906, 0.0694345932006836, 0.06952953338623047, 0.069429443359375, 0.07030985260009766, 0.06835369873046875, 0.06812054443359375, 0.0679141082763672, 0.06810332489013672, 0.06809276580810547, 0.06788902282714844, 0.06830502319335938, 0.06850559997558593, 0.06857926177978516, 0.06832032012939453, 0.06810111999511718, 0.06850252532958985, 0.06846361541748047, 0.06895158386230468, 0.06873955535888672, 0.06817791748046875, 0.06833766174316407, 0.06856108856201172, 0.0685299835205078, 0.06856294250488282, 0.06861209869384766, 0.06871858978271485, 0.06874931335449219, 0.06887375640869141, 0.06889724731445312, 0.0688864974975586, 0.06905228424072266, 0.06922255706787109, 0.06915891265869141, 0.06903132629394532, 0.06884822082519532, 0.06872176361083984, 0.06884854125976562, 0.06886003112792968, 0.06887615966796876, 0.06925721740722657, 0.06862384033203126, 0.06892598724365234, 0.06913142395019531, 0.06896428680419922, 0.06984591674804687, 0.0690723876953125, 0.06904640197753906, 0.06972783660888672, 0.06924313354492187, 0.06898537445068359, 0.06887379455566406, 0.06911001586914063, 0.06910988616943359, 0.06974674987792968, 0.06918962860107422, 0.06917324829101562, 0.06945760345458984, 0.06946409606933594, 0.06958735656738281, 0.06944140625, 0.06990009307861328, 0.06947449493408203, 0.06939443206787109, 0.06966230773925781, 0.06949417877197266, 0.06946841430664062, 0.07042867279052735, 0.06888768005371093, 0.06800678253173828, 0.06775398254394531, 0.06796288299560548, 0.06825574493408203, 0.0681553955078125, 0.06809731292724609, 0.06822064208984376, 0.06819737243652343, 0.0685465316772461, 0.06860598754882813, 0.06852995300292969, 0.06886844635009766, 0.06909085083007813, 0.06886640167236328, 0.06850355529785156, 0.06849110412597656, 0.06804291534423829, 0.06820044708251953, 0.06872418975830077, 0.06877964782714843, 0.06847135925292969, 0.06830850982666016, 0.06855709075927735, 0.06860829162597656, 0.06883968353271484, 0.06952896118164062, 
0.06911564636230469, 0.06909426879882813, 0.06904630279541016, 0.06911318206787109, 0.06903250885009765, 0.06910572814941406, 0.06905651092529297, 0.06884307098388671, 0.06913581085205078, 0.06882406616210937, 0.06871046447753906, 0.06879225921630859, 0.06918931579589843, 0.06918994903564453, 0.06942931365966797, 0.06964640045166015, 0.06911325073242187, 0.06958879852294922, 0.06911020660400391, 0.06893961334228516, 0.06912783813476563, 0.0694873275756836, 0.06909542083740235, 0.06898438262939453, 0.06900166320800781, 0.06928089904785156, 0.0692580795288086, 0.06976924896240234, 0.06927359771728515, 0.06975698852539063, 0.06952076721191407, 0.0694400634765625, 0.06964617919921876, 0.06942121887207031, 0.06955792236328125, 0.07022128295898437, 0.0684867172241211, 0.06797583770751953, 0.06842195129394531, 0.06780313873291016, 0.06829017639160156, 0.06851519775390626, 0.06856396484375, 0.06842777252197266, 0.068421630859375, 0.06841958618164062, 0.06885190582275391, 0.0686316146850586, 0.06890982055664062, 0.06894319915771484, 0.06886688232421875, 0.06867279815673828, 0.06875398254394531, 0.06842918395996093, 0.06866188812255859, 0.06880665588378906, 0.06863257598876953, 0.06909283447265625, 0.06856246185302735, 0.06887731170654297, 0.0692061767578125, 0.06915833282470703, 0.06968748474121093, 0.07014012908935546, 0.0693043212890625, 0.06922361755371094, 0.06905017852783203, 0.06895942687988281, 0.06898668670654297, 0.0686981430053711, 0.06908515167236329, 0.06880067443847657, 0.06921337890625, 0.06908195495605468, 0.06903788757324218, 0.06902579498291016, 0.06924012756347656, 0.0691739501953125, 0.06919923400878907, 0.06918931579589843, 0.06912095642089844, 0.06903545379638672, 0.06889734649658204, 0.06907891082763672, 0.06904025268554688, 0.06906896209716797, 0.06967836761474609, 0.06938066864013671, 0.06935142517089844, 0.06953164672851563, 0.06949807739257813, 0.06932947540283203, 0.0695031967163086, 0.06963609313964844, 0.06943743896484375, 0.06937395477294922, 0.06929203033447266, 0.06972313690185547, 0.07016454315185547, 0.0684416961669922, 0.06782768249511718, 0.06789715576171874, 0.06831948852539063, 0.06851417541503907, 0.0684051513671875, 0.06845244598388672, 0.06828844451904297, 0.06853564453125, 0.06841027069091797, 0.06909113311767578, 0.06915814208984375, 0.06861305236816406, 0.06879750061035156, 0.06869888305664062, 0.06852607727050782, 0.06850895690917969, 0.06876448059082031, 0.06841744232177735, 0.06891891479492188, 0.06907862091064453, 0.06954035186767578, 0.06872502136230468, 0.06886188507080078, 0.06905657958984375, 0.06931401824951172, 0.0695588150024414, 0.06910361480712891, 0.06912204742431641, 0.06896038055419922, 0.06886182403564453, 0.06875564575195313, 0.06867273712158203, 0.06865776062011719, 0.0690708465576172, 0.06932044982910156, 0.0691673583984375, 0.06957465362548829, 0.0693595199584961, 0.06956764984130859, 0.06934374237060546, 0.0692350082397461, 0.06972633361816406, 0.06918553924560547, 0.06902169799804687, 0.0693259506225586, 0.06924527740478516, 0.0689299545288086, 0.06956249237060547, 0.06966041564941407, 0.06981561279296875, 0.06970368194580077, 0.06941289520263672, 0.06951744079589844, 0.06974646759033203, 0.06946713256835937, 0.0696269760131836, 0.06936863708496094, 0.06951741027832031, 0.0697069091796875, 0.06936252593994141, 0.06968524932861328, 0.07047212982177735, 0.06828313446044922, 0.06796463775634766, 0.06796192169189454, 0.06801708984375, 0.06823101043701171, 0.06837398529052735, 0.06819296264648438, 0.06838377380371094, 
0.06891209411621094, 0.06865833282470703, 0.06862659454345703, 0.068797119140625, 0.06901747131347656, 0.06891327667236329, 0.06890032196044922, 0.06865360260009766, 0.06844342041015625, 0.06851248168945312, 0.06849238586425781, 0.06897452545166016, 0.06883017730712891, 0.06874652862548829, 0.06871113586425781, 0.06875341033935548, 0.06899712371826172, 0.06908108520507812, 0.06899107360839844, 0.0692325439453125, 0.06890444946289062, 0.06878189086914062, 0.06865376281738281, 0.06895410919189453, 0.06863660430908203, 0.06863468933105468, 0.06901760101318359, 0.06916460418701172, 0.06903759765625, 0.0691223373413086, 0.06908992004394532, 0.0694824981689453, 0.06992009735107421, 0.06916547393798828, 0.06984320068359375, 0.0693411865234375, 0.06903193664550782, 0.06907698822021484, 0.06899097442626953, 0.06911727905273438, 0.06936377716064453, 0.0695591049194336, 0.06951283264160156, 0.06950313568115235, 0.0695050277709961, 0.06921968078613282, 0.0692804183959961, 0.06972211456298828, 0.06980406188964844, 0.06964425659179688, 0.06967862701416015, 0.06977584075927734, 0.06936716461181641, 0.06987449645996094, 0.07034265899658203, 0.06847257232666015, 0.06819859313964843, 0.06797727966308593, 0.06829875183105469, 0.06802227020263672, 0.0681426544189453, 0.06834585571289062, 0.06838886260986328, 0.06872077178955079, 0.06871183776855469, 0.06922128295898437, 0.06903561401367188, 0.06907433319091796, 0.06927632141113281, 0.06876399993896484, 0.0686173095703125, 0.06825667572021485, 0.06835199737548828, 0.06868396759033203, 0.06851673889160156, 0.06852294158935547, 0.06880662536621093, 0.06869612884521484, 0.06882275390625, 0.06869427490234375, 0.068951904296875, 0.06926966094970703, 0.06922000122070313, 0.06919190216064453, 0.0689644775390625, 0.068780029296875, 0.06896575927734375, 0.06906237030029297, 0.06891567993164062, 0.06895014190673829, 0.06907526397705079, 0.0689560317993164, 0.06913193511962891, 0.0691204833984375, 0.0694901123046875, 0.06949945831298829, 0.06946320343017579, 0.06964514923095703, 0.06928998565673829, 0.06939190673828124, 0.06922879791259766, 0.06968329620361328, 0.06908489227294921, 0.06962422180175781, 0.06953984069824219, 0.06937948608398438, 0.06911622619628906, 0.06945410919189453, 0.06965042877197265, 0.0697786865234375, 0.07013616180419922, 0.06978924560546874, 0.06944649505615234, 0.07017472076416016, 0.06986252593994141, 0.06921884918212891, 0.06978173065185547]",tokens/s,14.498576747163469,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = 
launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,3582.152704,5177.737216,0.0,4775.215104,4427.072512,s,1,11.3964912109375,11.3964912109375,0.0,11.3964912109375,11.3964912109375,11.3964912109375,11.3964912109375,[11.3964912109375],,kWh,0.00011449419642499899,1.2617917603133284e-05,4.6580870598e-05,0.00017369298462613229,,MB,1546.592256,5211.291648,0.0,4794.089472,4030.239232,s,10,24.800694091796874,2.4800694091796873,0.007708678859187906,2.4799436035156246,2.489766137695313,2.490499035644531,2.491085354003906,"[2.46824169921875, 2.4697705078125, 2.474000244140625, 2.476288330078125, 2.47737890625, 2.48250830078125, 2.484843994140625, 2.486826904296875, 2.489603271484375, 2.49123193359375]",tokens/s,103.2229174927306,kWh,7.283007262416674e-05,8.032972356036021e-06,4.833278866620003e-05,0.00012919583364640278,tokens/kWh,1981488.0462836649,MB,1559.896064,5219.680256,0.0,4802.47808,4030.241792,s,10,21.36642431640625,2.1366424316406247,0.016240443250495337,2.134020263671875,2.159182763671875,2.1597437255859377,2.1601924951171876,"[2.150485595703125, 2.147908203125, 2.1603046875, 2.15905810546875, 2.14230078125, 2.123168212890625, 2.118420166015625, 2.121210205078125, 2.11782861328125, 2.12573974609375]",tokens/s,29.48551384502148,kWh,6.266203498333316e-05,6.912070600589083e-06,3.997025419839998e-05,0.00010954435978232223,tokens/kWh,575109.4819047604,,s,630,21.363492374420165,0.033910305356222484,0.0006341888598051476,0.03383174324035645,0.03424395332336426,0.03471952705383301,0.03741173374176029,"[0.03494268798828125, 0.034172382354736325, 0.03407462310791016, 0.034851104736328124, 0.0344601936340332, 0.03412908935546875, 
0.03404179382324219, 0.0340365104675293, 0.0340399055480957, 0.03397945785522461, 0.03417184066772461, 0.034094432830810546, 0.033966751098632814, 0.03392102432250976, 0.03485068893432617, 0.03605721664428711, 0.03413593673706055, 0.03426931381225586, 0.03408281707763672, 0.03408486557006836, 0.034344959259033206, 0.034119678497314454, 0.0341167984008789, 0.03405497741699219, 0.03406028747558594, 0.03400294494628906, 0.034506752014160154, 0.03406643295288086, 0.03406643295288086, 0.03422208023071289, 0.034045951843261715, 0.03401113510131836, 0.03416604614257813, 0.03394838333129883, 0.03477280044555664, 0.03407059097290039, 0.034021503448486326, 0.034162593841552735, 0.03406857681274414, 0.033984001159667966, 0.03401779174804687, 0.03393667221069336, 0.03405184173583985, 0.03393990325927734, 0.033943870544433596, 0.03372409439086914, 0.03393552017211914, 0.033839488983154295, 0.03395993423461914, 0.03398870468139648, 0.03390044784545899, 0.034170879364013675, 0.033939456939697264, 0.034102977752685545, 0.03402975845336914, 0.03404796981811523, 0.033888416290283205, 0.03391827011108398, 0.03389692687988281, 0.03391644668579102, 0.03392758560180664, 0.033995040893554686, 0.033955265045166015, 0.03478812789916992, 0.03396198272705078, 0.03404214477539062, 0.03410710525512695, 0.03856588745117188, 0.033853408813476565, 0.033847328186035155, 0.033837120056152345, 0.033815486907958985, 0.033807361602783206, 0.03378585433959961, 0.03383705520629883, 0.033914878845214845, 0.03391017532348633, 0.03400969696044922, 0.03550003051757813, 0.03386748886108398, 0.03382505416870117, 0.03382624053955078, 0.033808383941650394, 0.03373932647705078, 0.03380633544921875, 0.03387094497680664, 0.03379702377319336, 0.03444271850585937, 0.034660064697265625, 0.0338636474609375, 0.03377033615112305, 0.033841022491455076, 0.033734272003173825, 0.033724929809570314, 0.033751041412353515, 0.03385343933105469, 0.03403366470336914, 0.03846144104003906, 0.03407462310791016, 0.03384524917602539, 0.034000511169433596, 0.03382847976684571, 0.033853759765625, 0.03384336090087891, 0.034183456420898435, 0.034180511474609376, 0.03399740982055664, 0.03406438446044922, 0.03399884796142578, 0.03376128005981445, 0.0339128303527832, 0.033825790405273434, 0.033847423553466795, 0.033935550689697266, 0.03375302505493164, 0.033892097473144533, 0.033893375396728515, 0.033941505432128906, 0.03374643325805664, 0.03438547134399414, 0.033807296752929684, 0.03370585632324219, 0.033708160400390624, 0.03374675369262695, 0.03385158538818359, 0.03376067352294922, 0.034807167053222655, 0.033890750885009764, 0.033860862731933596, 0.03420665740966797, 0.03439369583129883, 0.03397468948364258, 0.03386732864379883, 0.03382838439941406, 0.033807262420654294, 0.0353171501159668, 0.033989215850830076, 0.03388633728027344, 0.034078590393066405, 0.033963489532470706, 0.03392768096923828, 0.03454364776611328, 0.03401318359375, 0.033953216552734374, 0.03511510467529297, 0.03420927810668945, 0.038582687377929685, 0.03411804962158203, 0.0340973129272461, 0.03406419372558594, 0.03417417526245117, 0.03568124771118164, 0.03394559860229492, 0.034095104217529294, 0.03387187194824219, 0.03417702484130859, 0.034050048828125, 0.033976318359375, 0.0341847038269043, 0.033971870422363284, 0.034037921905517576, 0.03411014556884766, 0.0355893440246582, 0.03530364990234375, 0.034258846282958985, 0.034362014770507814, 0.03409920120239258, 0.03405414581298828, 0.03411561584472656, 0.034199520111083986, 0.034095104217529294, 0.03415779113769531, 0.033950496673583984, 
0.0339128303527832, 0.034277374267578126, 0.034080768585205076, 0.038547328948974606, 0.03389452743530273, 0.033968097686767576, 0.03379404830932617, 0.03401321411132813, 0.033982463836669925, 0.03379747009277344, 0.033825439453125, 0.03378303909301758, 0.03388902282714844, 0.03376079940795899, 0.033716705322265624, 0.033817665100097656, 0.0353364143371582, 0.03402726364135742, 0.03399423980712891, 0.034054401397705075, 0.03391955184936524, 0.038016094207763675, 0.034204574584960935, 0.03405619049072266, 0.03405583953857422, 0.033953376770019535, 0.03384588623046875, 0.03380031967163086, 0.03386777496337891, 0.03402124786376953, 0.03397439956665039, 0.03385548782348633, 0.03392217636108399, 0.03380313491821289, 0.033906494140625, 0.03452127838134766, 0.033966079711914066, 0.03393119812011719, 0.03396323013305664, 0.03400102233886719, 0.03403440093994141, 0.03383500671386719, 0.033899551391601564, 0.034064960479736325, 0.033978752136230465, 0.034065601348876956, 0.034003807067871095, 0.03422208023071289, 0.03440380859375, 0.03414070510864258, 0.034186302185058595, 0.034449665069580075, 0.038728382110595705, 0.03426665496826172, 0.03416064071655273, 0.03414169692993164, 0.03413628768920898, 0.03401331329345703, 0.03419571304321289, 0.03428799819946289, 0.034113536834716796, 0.034154239654541015, 0.0363185920715332, 0.03408339309692383, 0.034162208557128905, 0.03406108856201172, 0.0340316162109375, 0.0340398063659668, 0.03424256134033203, 0.034147617340087894, 0.03413679885864258, 0.0345354232788086, 0.033981727600097655, 0.034011871337890624, 0.03443225479125977, 0.03397299194335938, 0.03398857498168945, 0.03411929702758789, 0.03401670455932617, 0.03534617614746094, 0.034286399841308594, 0.034144031524658204, 0.0340560302734375, 0.03397465515136719, 0.03404012680053711, 0.03395961761474609, 0.0339865608215332, 0.034057376861572265, 0.033979232788085935, 0.03393280029296875, 0.0340483512878418, 0.03390275192260742, 0.033972225189208984, 0.03403776168823242, 0.034127872467041014, 0.03431628799438476, 0.03395116806030273, 0.03386220932006836, 0.03399270248413086, 0.03404508972167969, 0.03394601440429688, 0.03398233413696289, 0.034087486267089843, 0.03401241683959961, 0.03443276977539062, 0.03413913726806641, 0.03395174407958984, 0.03399187088012695, 0.034111648559570315, 0.033936031341552736, 0.03395305633544922, 0.03413679885864258, 0.03392921447753906, 0.033955841064453124, 0.034078720092773435, 0.03379814529418945, 0.03413369750976562, 0.03399507141113281, 0.033844608306884766, 0.034089599609375, 0.033699840545654294, 0.03390991973876953, 0.036722591400146484, 0.03399980926513672, 0.03385903930664062, 0.0339288330078125, 0.033803169250488284, 0.033898494720458985, 0.03395395278930664, 0.0341295051574707, 0.03353216171264648, 0.03406028747558594, 0.03344998550415039, 0.033462272644042966, 0.03342131042480469, 0.03328764724731445, 0.03356111907958984, 0.033873920440673826, 0.03458214569091797, 0.03342716979980469, 0.033603294372558594, 0.03330524826049805, 0.037693214416503903, 0.03375711822509766, 0.0338807373046875, 0.033476158142089846, 0.03355177688598633, 0.0336475830078125, 0.0335186882019043, 0.03409193420410156, 0.03338415908813477, 0.03338703918457031, 0.033316352844238284, 0.033482112884521485, 0.033498046875, 0.033447265625, 0.0335994873046875, 0.0335447998046875, 0.03451264190673828, 0.03416864013671875, 0.03341353607177734, 0.03368889617919922, 0.0335753288269043, 0.0336629753112793, 0.034055713653564454, 0.03347945785522461, 0.033546241760253906, 0.03335097503662109, 
0.0335076789855957, 0.03344163131713867, 0.034836990356445316, 0.03347398376464844, 0.033486846923828126, 0.033481281280517576, 0.0335810546875, 0.03359743881225586, 0.0336445426940918, 0.03354412841796875, 0.03346233749389648, 0.033538047790527346, 0.03387801742553711, 0.033519615173339845, 0.0340398063659668, 0.03398355102539063, 0.03345094299316406, 0.033476608276367184, 0.03355648040771484, 0.03358687973022461, 0.03334380722045899, 0.0335093765258789, 0.033480766296386716, 0.03407823944091797, 0.03371868896484375, 0.03368755340576172, 0.033484447479248045, 0.03378931045532227, 0.03347964859008789, 0.03360563278198242, 0.03355648040771484, 0.033675262451171875, 0.03349094390869141, 0.03341884613037109, 0.033406593322753905, 0.03403833770751953, 0.033306846618652346, 0.034603073120117185, 0.03551609420776367, 0.03441279983520508, 0.03366912078857422, 0.03356982421875, 0.03365372848510742, 0.03342316818237305, 0.033549697875976565, 0.033407329559326175, 0.034113983154296874, 0.03365276718139648, 0.033539070129394534, 0.03360015869140625, 0.03395004653930664, 0.03349913787841797, 0.03374054336547851, 0.034078975677490235, 0.03363603210449219, 0.03375750350952148, 0.03334143829345703, 0.033500350952148435, 0.0333768310546875, 0.03350486373901367, 0.033909088134765626, 0.0334851188659668, 0.0334840316772461, 0.03329702377319336, 0.033425537109375, 0.03335168075561523, 0.033333438873291016, 0.033599296569824216, 0.03392102432250976, 0.03373020935058594, 0.033422847747802735, 0.033552288055419925, 0.0335984001159668, 0.033476608276367184, 0.03338163375854492, 0.03361868667602539, 0.03342851257324219, 0.033597728729248044, 0.0333765754699707, 0.03355481719970703, 0.03349654388427734, 0.03338499069213867, 0.03344095993041992, 0.033565311431884765, 0.033466560363769535, 0.03381657409667969, 0.033501182556152344, 0.033527198791503905, 0.033464897155761716, 0.03327388763427734, 0.0334062728881836, 0.03352441787719727, 0.03352163314819336, 0.03342953491210938, 0.03385331344604492, 0.03350444793701172, 0.033514015197753905, 0.03337286376953125, 0.03378966522216797, 0.03361996841430664, 0.034947200775146486, 0.03400703811645508, 0.033724929809570314, 0.03352166366577149, 0.0334969596862793, 0.033792129516601564, 0.033501182556152344, 0.033588737487792966, 0.03349657440185547, 0.03363942337036133, 0.033478656768798826, 0.033454078674316406, 0.03341907119750977, 0.03371846389770508, 0.033617790222167966, 0.03353395080566406, 0.03359865570068359, 0.03346294403076172, 0.03350147247314453, 0.03363372802734375, 0.03349993515014649, 0.03349071884155273, 0.03360153579711914, 0.03358230209350586, 0.03596537780761719, 0.033511775970458985, 0.03342131042480469, 0.033538047790527346, 0.03353500747680664, 0.03357753753662109, 0.033968544006347655, 0.03357040023803711, 0.03359075164794922, 0.033518783569335936, 0.03362092971801758, 0.03364761734008789, 0.03359494400024414, 0.033452415466308595, 0.03353177642822266, 0.034256481170654295, 0.03517452621459961, 0.03353219223022461, 0.03411920166015625, 0.03384368133544922, 0.033498592376708984, 0.03350284957885742, 0.033428382873535153, 0.033323009490966796, 0.0335250244140625, 0.03343769454956055, 0.033501182556152344, 0.03345072174072266, 0.0334266242980957, 0.033394943237304686, 0.034535999298095706, 0.03335286331176758, 0.033343776702880856, 0.033395263671875, 0.03353961563110352, 0.03340745544433594, 0.033544193267822264, 0.03355033493041992, 0.03348886489868164, 0.03459481430053711, 0.03358319854736328, 0.033551647186279294, 0.03356140899658203, 
0.033562145233154296, 0.03360956954956055, 0.03351801681518555, 0.03355852890014648, 0.0335093765258789, 0.033424991607666016, 0.03348112106323242, 0.03343769454956055, 0.03345612716674805, 0.0335810546875, 0.033584320068359375, 0.0334692497253418, 0.03355852890014648, 0.03335683059692383, 0.03342435073852539, 0.03350527954101563, 0.03345529556274414, 0.03361993789672851, 0.03351017761230469, 0.03347040176391602, 0.03344182586669922, 0.03388396835327148, 0.03408230209350586, 0.034734142303466796, 0.03347119903564453, 0.03341068649291992, 0.03353129577636719, 0.033649631500244144, 0.03363430404663086, 0.03355350494384766, 0.03402640151977539, 0.033783679962158204, 0.033536128997802735, 0.03363225555419922, 0.033609375, 0.0335909423828125, 0.03363705444335938, 0.03350889587402344, 0.03363068771362305, 0.033613662719726566, 0.0335750732421875, 0.03351551818847656, 0.03371212768554688, 0.03359558486938476, 0.03347004699707031, 0.03423235321044922, 0.03355871963500977, 0.03345344161987305, 0.03343833541870117, 0.03350851058959961, 0.033454944610595706, 0.033759231567382815, 0.033482078552246095, 0.03351990509033203, 0.03362787246704101, 0.03347455978393555, 0.03366924667358399, 0.03350991821289063, 0.0336363525390625, 0.03470166397094727, 0.034136062622070314, 0.033933311462402346, 0.03387334442138672, 0.03353190231323242, 0.033546817779541015, 0.035023872375488284, 0.03402035140991211, 0.033451713562011716, 0.03463008117675781, 0.033476478576660154, 0.03367513656616211, 0.03362188720703125, 0.03353414535522461, 0.03372003173828125, 0.033347457885742185, 0.03354262542724609, 0.033525054931640624, 0.033613697052001956, 0.03367200088500977, 0.033560577392578124, 0.03361382293701172, 0.03362390518188477, 0.033601406097412106, 0.033769439697265625, 0.033466560363769535, 0.03355660629272461, 0.03367679977416992, 0.03408127975463867, 0.03365811157226563, 0.03369241714477539, 0.03361382293701172, 0.03366204833984375, 0.03362499237060547, 0.03379193496704101, 0.033826431274414065, 0.033554561614990236, 0.033789409637451175, 0.03374313735961914, 0.03361820983886719, 0.03362025451660156, 0.03349913787841797, 0.0338935661315918, 0.033635135650634765, 0.033533374786376954, 0.033685470581054686, 0.03357756805419922, 0.033628128051757813, 0.03566185760498047, 0.03367116928100586, 0.03360768127441406, 0.03346176147460937, 0.03360764694213867, 0.03373519897460937, 0.03363174438476563, 0.033591808319091795, 0.0335912971496582, 0.03351347351074219, 0.03355350494384766, 0.03350620651245117, 0.03363225555419922, 0.03363840103149414, 0.03385139083862305]",tokens/s,29.48956045942835,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,5169.020928,6234.701824,0.0,5832.179712,5645.103616,s,1,11.83770703125,11.83770703125,0.0,11.83770703125,11.83770703125,11.83770703125,11.83770703125,[11.83770703125],,kWh,0.0001503739561333153,1.6576530495309934e-05,6.749755399799995e-05,0.00023444804062662517,,MB,1726.070784,6291.324928,0.0,5874.122752,5138.454528,s,10,40.41888793945312,4.041888793945312,0.005693735657651986,4.042459716796875,4.047507617187501,4.047914331054687,4.048239702148437,"[4.029178466796875, 4.034541015625, 4.040712646484375, 4.04206103515625, 4.044652099609375, 4.0470849609375, 4.042512451171875, 4.047417236328125, 4.042406982421875, 4.048321044921875]",tokens/s,63.33672524179391,kWh,0.00011776025953124721,1.2989124830392766e-05,7.833031266420016e-05,0.00020907969702584013,tokens/kWh,1224413.4827130584,MB,1728.991232,6310.199296,0.0,5892.99712,5138.457088,s,10,28.47827978515625,2.847827978515625,0.0050528570952255965,2.849043701171875,2.8527834716796874,2.852992321777344,2.8531594018554687,"[2.852737060546875, 2.848662109375, 2.8387421875, 2.842830078125, 2.849322021484375, 2.851498291015625, 2.84023486328125, 2.853201171875, 2.85228662109375, 2.848765380859375]",tokens/s,22.122122710809773,kWh,8.487492424542172e-05,9.362374535520935e-06,5.625643389399885e-05,0.00015049373267494152,tokens/kWh,418622.0839912095,,s,630,28.47606952667236,0.0452001103597974,0.000614137314015824,0.04506484794616699,0.04549851379394532,0.04578562488555908,0.04803146789550782,"[0.04760870361328125, 0.046610015869140625, 0.04572611236572265, 0.04544102478027344, 0.045213695526123046, 0.045055999755859374, 0.04513542556762695, 0.045660606384277345, 0.045114879608154294, 0.04494982528686523, 0.04813177490234375, 0.045158367156982425, 0.04490908813476562, 0.04500428771972656, 0.04496953582763672, 0.045118335723876954, 0.04517472076416015, 0.045789310455322266, 0.04511948776245117, 0.045125633239746096, 0.045328384399414064, 0.04533248138427735, 0.04519504165649414, 0.04523846435546875, 0.045168670654296875, 0.04524851226806641, 0.04496511840820312, 0.045058624267578125, 0.04504707336425781, 0.04536540985107422, 0.045394527435302735, 0.045737663269042966, 0.04536892700195312, 0.045060993194580075, 0.04488806533813477, 0.04508051300048828, 0.045512672424316405, 0.04514416122436524, 0.045028671264648434, 0.045005054473876954, 0.045085121154785156, 0.04510924911499024, 0.04516454315185547, 0.04534473419189453, 0.04507183837890625, 0.04512006378173828, 0.04513587188720703, 0.0450621452331543, 0.04492902374267578, 0.04516793441772461, 0.044917438507080076, 0.045195262908935545, 0.04550559997558594, 0.04519372940063476, 0.0449950065612793, 0.045058048248291016, 0.044980224609375, 0.04519935989379883, 0.04549772644042969, 0.04485388946533203, 0.04473651123046875, 0.04513587188720703, 0.044873409271240235, 0.046785152435302735, 0.045624992370605466, 0.04508691024780274, 0.045029537200927734, 0.045271041870117185, 0.04505190277099609, 0.04507424163818359, 0.044986560821533204, 0.04489318466186523, 0.045031585693359376, 0.04489302444458008, 0.04514985656738281, 0.04503792190551758, 0.044943359375, 0.0451212158203125, 0.04497798538208008, 0.04484966278076172, 0.044955646514892575, 0.04486348724365234, 0.04500867080688477, 0.04502447891235352, 0.04499267196655273, 0.04492988967895508, 0.045238273620605465, 0.04493024063110351, 0.04494009780883789, 0.04472217559814453, 0.04500787353515625, 0.044895233154296874, 0.0451371841430664, 0.044948192596435545, 0.04461772918701172, 
0.04623116683959961, 0.04486790466308594, 0.04580767822265625, 0.04497612762451172, 0.0449389762878418, 0.044921119689941405, 0.044972030639648435, 0.044971839904785156, 0.04487785720825195, 0.04477872085571289, 0.044925888061523436, 0.04498566436767578, 0.044933822631835936, 0.044984321594238284, 0.045021183013916014, 0.044940574645996094, 0.04504374313354492, 0.04478543853759766, 0.045061023712158206, 0.045000705718994144, 0.044967937469482425, 0.04482175827026367, 0.0449277458190918, 0.04478265762329101, 0.05314451217651367, 0.04510105514526367, 0.04507161712646485, 0.04482944107055664, 0.04519286346435547, 0.045189472198486326, 0.04734716796875, 0.04658752059936523, 0.04572761535644531, 0.04515625762939453, 0.045343326568603515, 0.04524236679077148, 0.04495974349975586, 0.04500998306274414, 0.04499552154541016, 0.04499209594726562, 0.04507894515991211, 0.04532191848754883, 0.045066558837890625, 0.04509491348266602, 0.044974079132080076, 0.04504780960083008, 0.04485087966918945, 0.044935489654541014, 0.04505587387084961, 0.04479398345947266, 0.04507436752319336, 0.04502700805664062, 0.04503587341308594, 0.04506832122802734, 0.04509471893310547, 0.0451278076171875, 0.045529151916503904, 0.045174785614013675, 0.044883968353271485, 0.04489804840087891, 0.04504092788696289, 0.04498688125610351, 0.04475747299194336, 0.045217025756835935, 0.044808414459228514, 0.04505001449584961, 0.04483110427856445, 0.04511862564086914, 0.044907009124755856, 0.04480441665649414, 0.04477523040771485, 0.04490467071533203, 0.04476716613769531, 0.04485456085205078, 0.04485919952392578, 0.044937374114990235, 0.0449318733215332, 0.04494246292114258, 0.044960670471191407, 0.044935169219970705, 0.04472422409057617, 0.04487145614624023, 0.044728286743164064, 0.044891998291015624, 0.044790176391601565, 0.04481209564208984, 0.04512172698974609, 0.044802047729492187, 0.04516044616699219, 0.0447977294921875, 0.04698339080810547, 0.04506009674072266, 0.04534886550903321, 0.04489420700073242, 0.04663123321533203, 0.0454964485168457, 0.04505619049072265, 0.044843006134033206, 0.04490854263305664, 0.04503283309936523, 0.045146751403808597, 0.045176830291748044, 0.045017055511474606, 0.0450002555847168, 0.04484137725830078, 0.044894271850585935, 0.04494745635986328, 0.04498636627197266, 0.0449986572265625, 0.04560486221313476, 0.04505190277099609, 0.04522588729858398, 0.04493670272827149, 0.04503612899780273, 0.04512768173217773, 0.044988414764404294, 0.04499216079711914, 0.04480649566650391, 0.0449617919921875, 0.04500275039672851, 0.044988414764404294, 0.04506617736816406, 0.04500076675415039, 0.045168609619140626, 0.045053985595703124, 0.04502268981933594, 0.0449716796875, 0.04487628936767578, 0.04496217727661133, 0.045058048248291016, 0.04515414428710938, 0.04533407974243164, 0.045478496551513675, 0.045030975341796876, 0.045101505279541015, 0.04505583953857422, 0.0449189453125, 0.04501737594604492, 0.04510198211669922, 0.045197311401367186, 0.04498720169067383, 0.04501299285888672, 0.04500889587402344, 0.04509036636352539, 0.044886463165283205, 0.04503756713867187, 0.0457625617980957, 0.04605295944213867, 0.045101470947265625, 0.04491443252563477, 0.044980480194091794, 0.04500275039672851, 0.044972030639648435, 0.04530995178222656, 0.04527465438842773, 0.04569715118408203, 0.04524272155761719, 0.046884544372558595, 0.04565599822998047, 0.045176929473876956, 0.04494364929199219, 0.044859390258789066, 0.04487724685668945, 0.044935745239257814, 0.04498406219482422, 0.04494156646728516, 0.044980224609375, 
0.04486300659179687, 0.04489878463745117, 0.044938335418701174, 0.04503414535522461, 0.04521599960327148, 0.0449060173034668, 0.044740478515625, 0.04616252899169922, 0.045039264678955075, 0.04507254409790039, 0.04516998291015625, 0.04498726272583008, 0.04523417663574219, 0.04488191986083984, 0.04518835067749023, 0.0451407356262207, 0.04495302581787109, 0.045057697296142576, 0.04493401718139649, 0.04484076690673828, 0.04492924880981446, 0.04512521743774414, 0.04493142318725586, 0.04489542388916016, 0.04516339111328125, 0.04505395126342773, 0.04500214385986328, 0.045259296417236326, 0.04507859039306641, 0.045000705718994144, 0.045352031707763675, 0.045472671508789066, 0.04531184005737305, 0.04513808059692383, 0.044990463256835936, 0.045143039703369144, 0.045246654510498044, 0.04519712066650391, 0.0450978889465332, 0.048941150665283206, 0.04524835205078125, 0.04501686477661133, 0.04513625717163086, 0.045178878784179685, 0.04528876876831055, 0.04722707366943359, 0.045483936309814454, 0.04494585418701172, 0.04498419189453125, 0.045123233795166015, 0.04516230392456055, 0.045138751983642575, 0.04533808135986328, 0.04664566421508789, 0.045536640167236325, 0.04520336151123047, 0.04492771148681641, 0.04505593490600586, 0.04506351852416992, 0.045032161712646485, 0.04590950393676758, 0.045054145812988285, 0.04505427169799805, 0.045217376708984375, 0.045098880767822265, 0.04500230407714844, 0.04506313705444336, 0.04505190277099609, 0.044984321594238284, 0.045041664123535156, 0.045125633239746096, 0.045072383880615234, 0.04511881637573242, 0.04505974578857422, 0.045781120300292966, 0.04567494583129883, 0.045054241180419924, 0.04509711837768555, 0.04517644882202149, 0.044900737762451175, 0.0449617919921875, 0.04491059112548828, 0.0449268798828125, 0.0450305290222168, 0.045110015869140624, 0.04496201705932617, 0.044849151611328124, 0.04488499069213867, 0.04502771377563477, 0.04496236801147461, 0.04493683242797852, 0.044840641021728515, 0.044905216217041015, 0.04492268753051758, 0.04495692825317383, 0.04504991912841797, 0.04506623840332031, 0.050135711669921874, 0.04547391891479492, 0.04529510498046875, 0.04981411361694336, 0.04502937698364258, 0.04505753707885742, 0.04495820617675781, 0.04506009674072266, 0.0449854736328125, 0.04510809707641601, 0.04504780960083008, 0.044974079132080076, 0.0449536018371582, 0.04493721771240235, 0.045090080261230465, 0.045005535125732424, 0.04515225601196289, 0.044942657470703126, 0.044947711944580075, 0.046993057250976564, 0.04569187164306641, 0.045229377746582033, 0.04493145751953125, 0.04488739013671875, 0.04515939331054687, 0.045008544921875, 0.0449150390625, 0.044956993103027344, 0.04498451232910156, 0.04484966278076172, 0.045077632904052735, 0.044810497283935546, 0.045095550537109376, 0.04487372970581055, 0.04475043106079102, 0.044792224884033206, 0.04482048034667969, 0.04508262252807617, 0.04489529418945312, 0.04503647994995117, 0.044935169219970705, 0.045156352996826174, 0.04521295928955078, 0.045330528259277345, 0.04505254364013672, 0.045089950561523436, 0.04503590393066406, 0.04488550567626953, 0.04510556793212891, 0.04493324661254883, 0.044824928283691404, 0.045021438598632814, 0.04490140914916992, 0.04485990524291992, 0.04513004684448242, 0.04499251174926758, 0.045010047912597655, 0.0450384635925293, 0.045074432373046876, 0.044985694885253905, 0.044886688232421874, 0.0447977294921875, 0.044952831268310546, 0.04492297744750977, 0.04505062484741211, 0.045559070587158204, 0.045179744720458985, 0.04545238494873047, 0.04521052932739258, 0.04511334228515625, 
0.0451932144165039, 0.04532403182983399, 0.04501497650146485, 0.04505756759643555, 0.04512553787231445, 0.04509990310668945, 0.04509273529052735, 0.045286720275878906, 0.04513670349121094, 0.045231838226318356, 0.04488745498657227, 0.04502412796020508, 0.046685760498046874, 0.04560736083984375, 0.04656262588500976, 0.04527110290527344, 0.04531443023681641, 0.04514147186279297, 0.045085311889648434, 0.04531584167480469, 0.04515603256225586, 0.04507907104492188, 0.04492512130737305, 0.04500681686401367, 0.04490758514404297, 0.04517577743530273, 0.04497817611694336, 0.045011009216308594, 0.045152191162109376, 0.04506623840332031, 0.045049854278564457, 0.04506828689575195, 0.045074432373046876, 0.04498627090454101, 0.04502703857421875, 0.045043071746826174, 0.04507519912719726, 0.04510892868041992, 0.04543292617797851, 0.045168926239013675, 0.04482271957397461, 0.044865535736083983, 0.04499456024169922, 0.044990463256835936, 0.04483420944213867, 0.04478217697143555, 0.04486892700195313, 0.04572985458374024, 0.047785888671875, 0.045376224517822264, 0.04542668914794922, 0.0453939208984375, 0.048451454162597656, 0.04558860778808594, 0.04523382568359375, 0.045187423706054684, 0.0449536018371582, 0.04485276794433594, 0.04492652893066406, 0.045404384613037106, 0.04492950439453125, 0.04579555130004883, 0.04604108810424805, 0.04518000030517578, 0.0450978889465332, 0.04510220718383789, 0.045050750732421874, 0.04499660873413086, 0.045093921661376955, 0.0450918083190918, 0.04510617446899414, 0.04523519897460938, 0.04530995178222656, 0.0450252799987793, 0.044979423522949216, 0.04651193618774414, 0.04555702209472656, 0.04502211380004883, 0.045006847381591795, 0.04483212661743164, 0.04499078369140625, 0.04541795349121094, 0.044915550231933596, 0.044908287048339844, 0.04490265655517578, 0.044936576843261716, 0.045017536163330076, 0.04523641586303711, 0.04500185775756836, 0.04509763336181641, 0.04488623809814453, 0.045041664123535156, 0.04523417663574219, 0.045297664642333986, 0.04552703857421875, 0.04538982391357422, 0.04535500717163086, 0.045178878784179685, 0.045343841552734375, 0.04521052932739258, 0.045211647033691404, 0.04515225601196289, 0.04573183822631836, 0.04548966217041016, 0.04815513610839844, 0.04552908706665039, 0.045301055908203124, 0.04509356689453125, 0.04516972732543945, 0.04530659103393555, 0.04517644882202149, 0.04509308624267578, 0.04514031982421875, 0.04505807876586914, 0.04516864013671875, 0.04539187240600586, 0.04511670303344727, 0.04511401748657227, 0.04647436904907227, 0.045128639221191404, 0.04526079940795898, 0.045303359985351566, 0.04498067092895508, 0.045115360260009764, 0.04496294403076172, 0.0454931526184082, 0.045328033447265624, 0.04528566360473633, 0.04520937728881836, 0.04493340682983398, 0.045115264892578125, 0.045066368103027346, 0.04531942367553711, 0.04568550491333008, 0.04499660873413086, 0.04507852935791016, 0.04505395126342773, 0.04506009674072266, 0.04694015884399414, 0.04562124633789062, 0.04516659164428711, 0.04516188812255859, 0.045081184387207034, 0.045238143920898435, 0.0450274543762207, 0.045244415283203124, 0.04510515213012695, 0.04527065658569336, 0.04504409790039063, 0.04507385635375977, 0.04517331314086914, 0.0448614387512207, 0.04511539077758789, 0.0452749137878418, 0.0450964469909668, 0.04501116943359375, 0.0448455696105957, 0.04512960052490234, 0.0453790397644043, 0.04505855941772461, 0.045004959106445315, 0.045297664642333986, 0.04524851226806641, 0.045279232025146485, 0.045123008728027346, 0.04511187362670899, 0.04506009674072266, 
0.04517472076416015, 0.04502748870849609, 0.04560870361328125, 0.04507183837890625, 0.04497030258178711, 0.04508412933349609, 0.045047969818115235, 0.04510960006713867, 0.045513118743896484, 0.045125633239746096, 0.045182849884033205, 0.04512575912475586, 0.04501913452148437, 0.04538163375854492, 0.04515340805053711, 0.04525353622436523, 0.04496585464477539, 0.04503756713867187, 0.04507852935791016, 0.04568473434448242, 0.045049854278564457, 0.04524176025390625, 0.04517929458618164, 0.045439296722412106, 0.045241950988769535, 0.04522198486328125, 0.045004352569580075, 0.04510083389282227, 0.04527177429199219, 0.04530598449707031, 0.045557758331298825, 0.04539328002929687, 0.04530377578735351, 0.045571807861328126]",tokens/s,22.123839787998303,,, 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,861.9008,690.946048,0.0,295.698432,277.263872,s,1,8.0635947265625,8.0635947265625,0.0,8.0635947265625,8.0635947265625,8.0635947265625,8.0635947265625,[8.0635947265625],,kWh,2.460304224586404e-05,2.7061199168236418e-06,7.568061610008225e-06,3.487722377269591e-05,,MB,1191.817216,743.374848,0.0,335.54432,313.833472,s,14,0.4949744300842286,0.03535531643458775,0.0012292449171760196,0.03517835235595704,0.035903210830688474,0.03715352516174316,0.03892500129699707,"[0.039367870330810545, 0.03465631866455078, 0.03466857528686523, 0.035565631866455075, 0.035257183074951175, 0.03448144149780273, 0.035961185455322266, 0.035099521636962894, 0.03540876770019531, 0.03440915298461914, 0.035350238800048825, 0.034116863250732425, 0.03486374282836914, 0.03576793670654297]",tokens/s,7240.778072899887,kWh,1.2312160439345533e-06,1.3578023718483657e-07,8.150780078986655e-07,2.1820742890180554e-06,tokens/kWh,117319562.07375567,MB,1225.547776,768.540672,0.0,360.710144,313.836032,s,14,10.121002380371094,0.7229287414550781,0.00635519996754101,0.7221958007812499,0.7312540771484375,0.7341511932373047,0.7368073382568359,"[0.7176253662109375, 0.7140076904296875, 0.7209071655273438, 0.72089501953125, 0.7236705932617188, 0.7178732299804688, 0.72321728515625, 0.7286656494140625, 0.7237313232421875, 0.72117431640625, 0.7139846801757812, 0.7323634033203125, 0.7374713745117187, 0.725415283203125]",tokens/s,87.1455184824945,kWh,2.0780348703982563e-05,2.2917244849225763e-06,8.175504581388059e-06,3.12475777702932e-05,tokens/kWh,2016156.2749959312,,s,882,10.113854816436769,0.011466955574191346,0.00022937992220901338,0.011431695938110352,0.01170497932434082,0.01181095232963562,0.012260950794219969,"[0.011403488159179688, 0.011644927978515626, 0.0115316162109375, 0.011442432403564453, 0.011473440170288085, 0.0114335355758667, 0.011442463874816894, 0.011468159675598144, 0.011324064254760742, 0.011296992301940917, 0.011361056327819825, 0.01130780792236328, 0.011359904289245605, 0.011339808464050293, 0.011444448471069336, 0.011673439979553222, 0.011359968185424806, 0.011336223602294922, 
0.011352319717407227, 0.011333727836608886, 0.01260758399963379, 0.013354144096374511, 0.011467391967773438, 0.011323391914367676, 0.01133743953704834, 0.011254048347473145, 0.011313376426696777, 0.011245344161987305, 0.011288576126098633, 0.011302495956420898, 0.011268095970153809, 0.011286016464233398, 0.01129155158996582, 0.011314528465270996, 0.011327775955200195, 0.01144870376586914, 0.011360095977783204, 0.011386112213134765, 0.011315168380737305, 0.011229887962341308, 0.011311615943908691, 0.011306752204895019, 0.011230239868164063, 0.011289504051208496, 0.011214912414550781, 0.01126195240020752, 0.011260095596313477, 0.011236224174499512, 0.011250144004821777, 0.011237183570861816, 0.011293343544006348, 0.011284640312194824, 0.011264863967895508, 0.01123356819152832, 0.011227871894836426, 0.011220992088317871, 0.01125376033782959, 0.011231295585632324, 0.011275775909423828, 0.011335743904113769, 0.011250047683715821, 0.011274304389953613, 0.0113372802734375, 0.011330112457275391, 0.01147209644317627, 0.011428192138671875, 0.011411199569702149, 0.01130355167388916, 0.011405247688293457, 0.01128172779083252, 0.011393600463867188, 0.011323519706726074, 0.011408800125122071, 0.011369407653808594, 0.01132464027404785, 0.01136684799194336, 0.01131926441192627, 0.011272576332092285, 0.011351519584655762, 0.011419903755187989, 0.01136633586883545, 0.011435775756835938, 0.011355551719665527, 0.011346079826354981, 0.011326144218444825, 0.011337727546691894, 0.011345919609069824, 0.011345919609069824, 0.01140236759185791, 0.011309727668762207, 0.011190303802490234, 0.011291839599609376, 0.01121337604522705, 0.01121507167816162, 0.01124988842010498, 0.01120787239074707, 0.011440959930419922, 0.011239744186401368, 0.012365504264831543, 0.011552576065063477, 0.011360063552856445, 0.011300928115844726, 0.011266112327575684, 0.01124777603149414, 0.011228608131408692, 0.011326111793518067, 0.01123532772064209, 0.011172096252441406, 0.011218688011169434, 0.011181440353393555, 0.011229696273803711, 0.01124675178527832, 0.011240256309509278, 0.011159711837768555, 0.011259903907775879, 0.01129417610168457, 0.011256352424621582, 0.01129593563079834, 0.011239744186401368, 0.011225888252258301, 0.011259008407592773, 0.01123151969909668, 0.01130732822418213, 0.011308927536010743, 0.011290399551391602, 0.011388928413391113, 0.011482560157775878, 0.011681695938110352, 0.011526816368103028, 0.011593728065490723, 0.011624447822570801, 0.011556672096252442, 0.0116779842376709, 0.01159158420562744, 0.011636735916137696, 0.011517248153686523, 0.011556608200073242, 0.011480192184448242, 0.01146275234222412, 0.011404159545898438, 0.011451231956481933, 0.011536383628845214, 0.011481087684631347, 0.011493696212768554, 0.011534015655517578, 0.011474143981933594, 0.011467840194702148, 0.011396832466125488, 0.011404864311218262, 0.011401791572570801, 0.01139724826812744, 0.011450176239013671, 0.011431455612182617, 0.011364768028259278, 0.011393024444580077, 0.011313152313232423, 0.011290847778320312, 0.012256031990051269, 0.011883808135986327, 0.012088800430297851, 0.011522303581237793, 0.011390303611755371, 0.011479167938232422, 0.011391615867614746, 0.011430015563964844, 0.011533408164978028, 0.011322367668151855, 0.011304351806640625, 0.011335968017578125, 0.011358048439025879, 0.011309087753295899, 0.011308768272399902, 0.011266112327575684, 0.011313023567199708, 0.011321824073791504, 0.011324864387512207, 0.011278911590576172, 0.011325440406799316, 0.011284671783447265, 0.011266880035400391, 0.01124998378753662, 
0.011247296333312989, 0.011187423706054687, 0.011220704078674317, 0.011242655754089356, 0.01119651222229004, 0.011250335693359375, 0.011190784454345704, 0.011285759925842286, 0.011575231552124024, 0.011571040153503418, 0.011489119529724121, 0.011396575927734375, 0.011297696113586426, 0.011331680297851563, 0.011224127769470215, 0.011281215667724609, 0.01123680019378662, 0.011340319633483887, 0.011176159858703613, 0.011200384140014648, 0.011235296249389648, 0.011216896057128906, 0.011278335571289062, 0.011333919525146485, 0.011385600090026855, 0.011286687850952149, 0.011305312156677245, 0.011327263832092285, 0.011418304443359374, 0.01136796760559082, 0.011430368423461915, 0.011419551849365234, 0.012865407943725586, 0.011578751564025878, 0.01147100830078125, 0.01140118408203125, 0.011432607650756836, 0.011395199775695801, 0.011441856384277345, 0.01144972801208496, 0.011476160049438477, 0.011587264060974121, 0.011507840156555175, 0.011484383583068847, 0.011430815696716308, 0.011513343811035156, 0.01143126392364502, 0.011440799713134766, 0.01140940761566162, 0.011530495643615723, 0.011558239936828613, 0.01160371208190918, 0.011573151588439941, 0.011569952011108399, 0.011495200157165528, 0.011482912063598633, 0.011414175987243652, 0.011422847747802735, 0.011434880256652832, 0.011519359588623048, 0.011436672210693359, 0.0114171199798584, 0.011534079551696778, 0.011498208045959473, 0.011470848083496094, 0.011420736312866211, 0.011387871742248536, 0.011331680297851563, 0.01136787223815918, 0.011262175559997558, 0.01124687957763672, 0.011088607788085937, 0.011468768119812012, 0.01138252830505371, 0.011340031623840332, 0.01134598445892334, 0.011241151809692382, 0.011415552139282227, 0.011268095970153809, 0.011351231575012208, 0.01121065616607666, 0.011311455726623535, 0.011274815559387207, 0.011770112037658691, 0.011855615615844726, 0.011703519821166993, 0.01183414363861084, 0.011359935760498046, 0.011292991638183593, 0.011280384063720703, 0.011249695777893066, 0.011220959663391114, 0.011225088119506836, 0.011243519783020019, 0.011216896057128906, 0.011234592437744141, 0.011248160362243652, 0.01118841552734375, 0.011231295585632324, 0.01126147174835205, 0.011248031616210937, 0.011356160163879395, 0.011566271781921387, 0.011623231887817383, 0.011593728065490723, 0.011503392219543458, 0.011526368141174316, 0.011777536392211914, 0.01170307159423828, 0.011705056190490722, 0.011633664131164552, 0.011556703567504883, 0.011509535789489746, 0.011535840034484863, 0.01151692771911621, 0.011590592384338379, 0.011651424407958985, 0.01161894416809082, 0.011617664337158204, 0.011775775909423828, 0.011679712295532227, 0.011608223915100097, 0.01166425609588623, 0.011669440269470215, 0.011741087913513184, 0.011821056365966797, 0.011821056365966797, 0.011597408294677734, 0.011573375701904297, 0.011591967582702636, 0.011632991790771484, 0.011294367790222169, 0.011347359657287597, 0.011409055709838868, 0.011405311584472656, 0.011651040077209473, 0.011698207855224609, 0.01165721607208252, 0.011586560249328613, 0.01154355239868164, 0.011553824424743652, 0.011398112297058106, 0.01145241641998291, 0.011576607704162597, 0.011461024284362792, 0.011354432106018067, 0.011395071983337402, 0.011447775840759277, 0.01131935977935791, 0.011526111602783204, 0.011369183540344238, 0.01131497573852539, 0.011371999740600587, 0.011373087882995606, 0.011346112251281739, 0.011360063552856445, 0.011386879920959473, 0.011419615745544433, 0.011374624252319335, 0.011337887763977051, 0.011381759643554687, 0.011508671760559083, 
0.011468704223632813, 0.011585536003112793, 0.011557951927185058, 0.011334591865539551, 0.011310463905334472, 0.011267904281616212, 0.01127507209777832, 0.011332703590393066, 0.01130793571472168, 0.011272192001342773, 0.011259903907775879, 0.011263999938964844, 0.011261311531066895, 0.011276000022888184, 0.011248543739318847, 0.01125369644165039, 0.011231103897094726, 0.011288224220275879, 0.011293472290039063, 0.011259231567382812, 0.011260319709777832, 0.011262271881103516, 0.011361760139465332, 0.011584768295288086, 0.011307040214538574, 0.011324383735656739, 0.011296704292297363, 0.01134012794494629, 0.011361984252929688, 0.011298815727233886, 0.011330975532531738, 0.011333919525146485, 0.011348511695861816, 0.0114169282913208, 0.011497152328491212, 0.012027487754821778, 0.011574111938476562, 0.0116428804397583, 0.011390975952148438, 0.011563008308410644, 0.0116428804397583, 0.011363743782043457, 0.011469247817993165, 0.011325599670410157, 0.01152950382232666, 0.011551199913024902, 0.011322655677795411, 0.011316320419311524, 0.011231103897094726, 0.011314623832702637, 0.011254688262939454, 0.01140662384033203, 0.011229439735412598, 0.011254112243652343, 0.011211615562438965, 0.011282431602478027, 0.011326399803161621, 0.011257184028625489, 0.011262016296386719, 0.011207263946533203, 0.011232383728027344, 0.011305407524108886, 0.011262399673461914, 0.011304960250854493, 0.01124556827545166, 0.011244864463806152, 0.011252415657043458, 0.011378080368041991, 0.01124617576599121, 0.011280287742614746, 0.011660927772521973, 0.012281920433044434, 0.012151519775390624, 0.011476672172546387, 0.011294015884399414, 0.011532992362976073, 0.011392448425292969, 0.011419679641723633, 0.01149392032623291, 0.011507328033447266, 0.011538816452026366, 0.011731103897094726, 0.01148031997680664, 0.011518560409545898, 0.011707648277282715, 0.011471263885498047, 0.011491616249084473, 0.011447360038757324, 0.01148412799835205, 0.01168182373046875, 0.012041312217712402, 0.011553343772888183, 0.011510111808776856, 0.011593215942382813, 0.011566975593566894, 0.011558655738830567, 0.011801088333129883, 0.011587264060974121, 0.011406880378723144, 0.011686367988586426, 0.011440128326416015, 0.011627840042114259, 0.011637439727783203, 0.011429887771606445, 0.011537440299987793, 0.011457344055175782, 0.011647263526916503, 0.011495295524597169, 0.011575200080871583, 0.011534239768981934, 0.011588895797729492, 0.011454591751098633, 0.011549471855163574, 0.011757568359375, 0.011622400283813476, 0.011550623893737793, 0.011527968406677245, 0.011519424438476563, 0.011487839698791504, 0.011532575607299804, 0.0116779842376709, 0.011550399780273437, 0.011546655654907227, 0.01155081558227539, 0.01157145595550537, 0.011563936233520507, 0.011567584037780762, 0.011536800384521484, 0.011599488258361816, 0.011583744049072265, 0.011562911987304688, 0.011529888153076172, 0.011477439880371094, 0.01144422435760498, 0.011487232208251954, 0.011521535873413086, 0.011497535705566407, 0.011549087524414062, 0.011487263679504395, 0.011661312103271485, 0.011429887771606445, 0.01145036792755127, 0.011460800170898438, 0.01144927978515625, 0.01141756820678711, 0.011411840438842774, 0.011421440124511718, 0.01155907154083252, 0.011528736114501953, 0.012009568214416504, 0.011638976097106934, 0.011622207641601563, 0.011583488464355468, 0.01160912036895752, 0.011588255882263183, 0.011632960319519043, 0.011699423789978028, 0.011747103691101074, 0.011604991912841797, 0.011628543853759766, 0.01157744026184082, 0.011522144317626952, 0.011870559692382812, 
0.01173459243774414, 0.011749216079711914, 0.011684736251831055, 0.011573311805725097, 0.011674495697021484, 0.011569952011108399, 0.011797696113586427, 0.011747232437133789, 0.011868831634521484, 0.011695648193359375, 0.011712320327758789, 0.011699104309082031, 0.011755135536193848, 0.01174892807006836, 0.011548959732055664, 0.011411999702453613, 0.011452735900878907, 0.011359071731567383, 0.011346783638000488, 0.01132953643798828, 0.011319104194641114, 0.011277983665466309, 0.011364895820617676, 0.011281439781188965, 0.011302176475524903, 0.011283167839050293, 0.011279328346252442, 0.011243519783020019, 0.011241472244262696, 0.011304448127746582, 0.011274784088134766, 0.01127939224243164, 0.01125062370300293, 0.01132153606414795, 0.01126585578918457, 0.011243776321411133, 0.011277888298034668, 0.011392704010009766, 0.011475520133972168, 0.011506848335266114, 0.011553567886352539, 0.011544575691223144, 0.011504672050476073, 0.011539423942565918, 0.011489279747009277, 0.01148249626159668, 0.011401856422424316, 0.011388192176818848, 0.011440863609313965, 0.011406975746154785, 0.011524479866027832, 0.011514016151428223, 0.011478879928588866, 0.011489279747009277, 0.011495776176452636, 0.011515616416931152, 0.011445216178894043, 0.011524224281311035, 0.011443103790283203, 0.011438143730163575, 0.011558624267578125, 0.01145088005065918, 0.011946432113647461, 0.012744159698486328, 0.013918304443359375, 0.011760064125061035, 0.011454239845275879, 0.011450592041015626, 0.01135148811340332, 0.011385408401489258, 0.011295999526977539, 0.0112991361618042, 0.01128934383392334, 0.011409088134765625, 0.011450207710266114, 0.011407872200012208, 0.011488032341003419, 0.011385727882385253, 0.011243040084838868, 0.011333727836608886, 0.011247039794921875, 0.011389887809753418, 0.01143398380279541, 0.011334848403930665, 0.011411904335021972, 0.01136473560333252, 0.011292672157287598, 0.011271583557128905, 0.01128825569152832, 0.011278719902038574, 0.011726911544799806, 0.011370623588562011, 0.011608415603637696, 0.011376832008361816, 0.011354080200195312, 0.011351967811584473, 0.011384767532348633, 0.011304127693176269, 0.011352928161621093, 0.011325504302978516, 0.011401120185852051, 0.01155072021484375, 0.011531776428222656, 0.011538944244384765, 0.011388319969177246, 0.011485631942749024, 0.011309215545654297, 0.011259615898132324, 0.011268256187438964, 0.011288127899169921, 0.01122976016998291, 0.01128825569152832, 0.011302847862243653, 0.011351840019226074, 0.011283040046691895, 0.01130239963531494, 0.011294560432434082, 0.011342720031738281, 0.011259679794311523, 0.011272192001342773, 0.011261792182922363, 0.011323712348937988, 0.011277376174926758, 0.011351872444152832, 0.011060768127441407, 0.01130083179473877, 0.011373087882995606, 0.011223039627075194, 0.011313152313232423, 0.011272192001342773, 0.011229056358337403, 0.01152012825012207, 0.011280384063720703, 0.01134335994720459, 0.011264639854431152, 0.01119206428527832, 0.011238783836364745, 0.011266143798828124, 0.011231776237487792, 0.011243359565734863, 0.011309344291687012, 0.011237343788146973, 0.011253791809082031, 0.011226176261901855, 0.01130406379699707, 0.011236895561218261, 0.011174240112304688, 0.011255743980407715, 0.011229184150695801, 0.011157504081726074, 0.011214112281799317, 0.011211135864257812, 0.011233632087707519, 0.011228192329406738, 0.011156255722045899, 0.01122537612915039, 0.011193471908569336, 0.011192223548889161, 0.011273088455200196, 0.011206656455993653, 0.011258208274841308, 0.01126959991455078, 
0.011335871696472167, 0.011396575927734375, 0.011434528350830078, 0.011341823577880859, 0.011403264045715332, 0.011432255744934082, 0.011663040161132813, 0.011469887733459474, 0.011458975791931152, 0.011501279830932618, 0.011456928253173827, 0.01139958381652832, 0.011423744201660157, 0.011406368255615234, 0.01171174430847168, 0.01147056007385254, 0.011601920127868653, 0.011392640113830567, 0.011360671997070313, 0.011433856010437012, 0.011335935592651367, 0.011564736366271972, 0.011388223648071289, 0.01135206413269043, 0.011373408317565918, 0.011412896156311036, 0.011710656166076661, 0.011675583839416503, 0.01176419162750244, 0.01172480010986328, 0.011798848152160644, 0.011900704383850097, 0.011863903999328613, 0.012111712455749511, 0.01194985580444336, 0.011942336082458496, 0.011911168098449706, 0.011839232444763183, 0.011695679664611816, 0.011718848228454589, 0.011803135871887208, 0.011666912078857422, 0.011622336387634278, 0.011560544013977051, 0.011570015907287597, 0.011670720100402833, 0.011633631706237792, 0.012658880233764649, 0.011720288276672363, 0.011671775817871094, 0.01166323184967041, 0.01162662410736084, 0.011562656402587891, 0.011684415817260742, 0.011495200157165528, 0.011542688369750977, 0.011478879928588866, 0.011486720085144043, 0.011510272026062012, 0.011491392135620117, 0.011511743545532227, 0.011468799591064453, 0.011578559875488281, 0.011553600311279296, 0.011573247909545899, 0.011522047996520996, 0.011537631988525391, 0.011473695755004882, 0.011417920112609864, 0.01140937614440918, 0.011400544166564941, 0.011421855926513671, 0.011374272346496583, 0.01138742446899414, 0.011400351524353028, 0.011523072242736816, 0.011544416427612305, 0.011511551856994628, 0.011464768409729004, 0.011424256324768066, 0.011446047782897949, 0.011374496459960937, 0.011367456436157226, 0.01142796802520752, 0.01181987190246582, 0.011470656394958496, 0.011558879852294921, 0.011698271751403809, 0.011947936058044433, 0.012320768356323243, 0.011929920196533203, 0.01181446361541748, 0.011773344039916991, 0.011782719612121581, 0.011998720169067383, 0.011981439590454102, 0.011710176467895508, 0.011808671951293946, 0.011704287528991698, 0.011635135650634766, 0.011674847602844238, 0.011926303863525391, 0.01163468837738037, 0.011686016082763671, 0.011825056076049804, 0.01174665641784668, 0.011768159866333007, 0.011624064445495606, 0.011600543975830078, 0.011790335655212402, 0.011699872016906738, 0.011752960205078124, 0.011708383560180663, 0.01176159954071045, 0.011840448379516602, 0.011638784408569336, 0.011569087982177734, 0.011511808395385742, 0.011429951667785645, 0.011514080047607423, 0.011498944282531738, 0.011546719551086425, 0.01162880039215088, 0.011642463684082031, 0.011549087524414062, 0.011648063659667968, 0.011690336227416992, 0.011649279594421387, 0.01178048038482666, 0.011672608375549317, 0.01165510368347168, 0.01176579189300537, 0.011651264190673829, 0.011694879531860351, 0.01164083194732666, 0.011595775604248047, 0.011849408149719238, 0.011887040138244629, 0.011870368003845215, 0.011785216331481933, 0.011793120384216309, 0.011653120040893555, 0.01176531219482422, 0.01154032039642334, 0.011472672462463378, 0.011524928092956544, 0.011470848083496094, 0.011583488464355468, 0.011517951965332032, 0.011391039848327636, 0.011452447891235352, 0.011499263763427735, 0.01184547233581543, 0.011791872024536134, 0.011622912406921386, 0.01146662425994873, 0.011416095733642578, 0.011463680267333985, 0.011471232414245606, 0.011428480148315429, 0.011526144027709961, 0.011431936264038087, 
0.011519840240478516, 0.011452608108520508, 0.0116080322265625, 0.01155081558227539, 0.011462143898010254, 0.011532704353332519, 0.01162649631500244, 0.01164902400970459, 0.01181107234954834, 0.011560704231262207, 0.01189254379272461, 0.011679936408996582, 0.011687328338623047, 0.011589664459228516, 0.011514431953430175, 0.011530400276184082, 0.011517024040222168, 0.011432543754577636, 0.01145257568359375, 0.011462656021118164, 0.011502880096435547, 0.011419391632080078, 0.011514528274536133, 0.011438400268554687, 0.011436032295227052, 0.011448543548583984, 0.011387680053710937, 0.011536416053771972, 0.01142416000366211, 0.011557215690612794, 0.011524319648742676, 0.011521504402160645, 0.011532832145690917, 0.011560959815979004, 0.011745280265808105, 0.011413503646850585, 0.011466464042663575, 0.011520159721374513, 0.011449503898620605, 0.01150051212310791, 0.011435263633728028, 0.011440095901489257, 0.01132857608795166, 0.011366111755371094, 0.011436063766479493, 0.011390463829040527, 0.011350496292114258, 0.011357312202453613, 0.011274463653564453, 0.011427680015563964, 0.011375295639038085, 0.011364480018615723]",tokens/s,87.20710510562175,,, 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4242.948096,5784.928256,0.0,5389.68064,5000.446464,s,1,11.290736328125,11.290736328125,0.0,11.290736328125,11.290736328125,11.290736328125,11.290736328125,[11.290736328125],,kWh,0.000129021958274969,1.4224751504962389e-05,5.5991711460007254e-05,0.00019923842123993865,,MB,1386.84416,5801.705472,0.0,5393.874944,4706.596864,s,10,29.3511875,2.93511875,0.0039120314747161915,2.9365336914062503,2.9385712646484374,2.938667834472656,2.938745090332031,"[2.9254248046875, 2.931824951171875, 2.933055419921875, 2.9367763671875, 2.93485791015625, 2.936291015625, 2.938044189453125, 2.9375986328125, 2.938764404296875, 2.9385498046875]",tokens/s,87.21963975052117,kWh,8.559857491208428e-05,9.441424095514069e-06,5.6893462181398234e-05,0.0001519334611889966,tokens/kWh,1684948.1213460315,MB,1390.985216,5801.705472,0.0,5393.874944,4876.091904,s,10,18.76966467285156,1.8769664672851563,0.0130080446991286,1.8743575439453126,1.8922148071289062,1.8949106872558594,1.8970673913574219,"[1.876090087890625, 1.8976065673828124, 1.872625, 1.886764404296875, 1.8672735595703125, 1.864760009765625, 1.8615469970703125, 1.89161572265625, 1.8606595458984374, 1.8907227783203124]",tokens/s,33.56479782567622,kWh,5.547119228875015e-05,6.118897369139303e-06,3.672366826780183e-05,9.831375792569128e-05,tokens/kWh,640805.5325035733,,s,630,18.766633272171003,0.029788306781223843,0.0004898927835957104,0.029722991943359374,0.030298616790771483,0.030615639686584473,0.03148992485046387,"[0.03096950340270996, 0.03009971237182617, 0.029877632141113282, 0.02973155212402344, 0.02972640037536621, 0.029688127517700197, 0.029478912353515626, 0.029432863235473634, 0.029373311996459962, 0.029357471466064454, 
0.02940764808654785, 0.02943132781982422, 0.02942848014831543, 0.02953411293029785, 0.029824447631835938, 0.02971023941040039, 0.02960051155090332, 0.02959769630432129, 0.02936627197265625, 0.029566303253173828, 0.029614271163940428, 0.02994428825378418, 0.029580959320068358, 0.029720672607421873, 0.030179391860961913, 0.029755584716796873, 0.029554655075073242, 0.02964892768859863, 0.02953388786315918, 0.029495328903198243, 0.029458719253540037, 0.029462528228759766, 0.029461727142333985, 0.029479711532592774, 0.029829120635986327, 0.029886463165283202, 0.029970432281494142, 0.029914432525634766, 0.029776063919067383, 0.029704704284667968, 0.029783424377441407, 0.030356096267700194, 0.02977340888977051, 0.029850015640258788, 0.030011392593383788, 0.030130176544189452, 0.029877376556396485, 0.029818784713745116, 0.030100032806396483, 0.029915168762207033, 0.02983359909057617, 0.029857215881347657, 0.029904575347900392, 0.02983616065979004, 0.029847551345825195, 0.02993961524963379, 0.029698144912719725, 0.02967296028137207, 0.029788448333740235, 0.030008991241455077, 0.030212671279907226, 0.030191328048706053, 0.030187231063842773, 0.03135897636413574, 0.030502912521362304, 0.030570240020751954, 0.030398719787597655, 0.030327999114990234, 0.03054470443725586, 0.030469696044921876, 0.03066268730163574, 0.030642463684082032, 0.030802047729492188, 0.03066166305541992, 0.030614496231079102, 0.030616575241088868, 0.030299135208129883, 0.032126976013183595, 0.031229951858520507, 0.030052352905273437, 0.030100704193115235, 0.029686559677124025, 0.030021631240844726, 0.029566976547241212, 0.029665279388427734, 0.029405183792114258, 0.02957926368713379, 0.031498239517211916, 0.03035308837890625, 0.029557056427001953, 0.029846944808959962, 0.02959971237182617, 0.032943904876708986, 0.030569311141967773, 0.029965568542480468, 0.029805311203002928, 0.029853696823120116, 0.029566303253173828, 0.029633119583129884, 0.030142528533935547, 0.029800447463989257, 0.029472639083862304, 0.02965894317626953, 0.02961235237121582, 0.029681663513183593, 0.029406944274902345, 0.02962460708618164, 0.02956287956237793, 0.029652639389038084, 0.02941129684448242, 0.029808223724365233, 0.02966966438293457, 0.029827583312988282, 0.029548320770263672, 0.02968160057067871, 0.029566719055175782, 0.02982761573791504, 0.030015487670898438, 0.029894655227661132, 0.02970195198059082, 0.02978220748901367, 0.02991923141479492, 0.030111743927001954, 0.030238719940185548, 0.03034316825866699, 0.030159872055053712, 0.030890207290649414, 0.029956832885742187, 0.029879295349121093, 0.029702943801879884, 0.029702367782592772, 0.029800447463989257, 0.029861888885498046, 0.02952396774291992, 0.029296319961547853, 0.029714752197265625, 0.02963206481933594, 0.029530559539794922, 0.029636608123779298, 0.02962784004211426, 0.029520448684692384, 0.029980287551879883, 0.029688192367553712, 0.029493215560913087, 0.029382688522338867, 0.02958336067199707, 0.02956598472595215, 0.029539295196533203, 0.029499391555786132, 0.02962985610961914, 0.02927881622314453, 0.029560352325439455, 0.029409759521484374, 0.029534048080444335, 0.02934377670288086, 0.029237056732177736, 0.029284223556518555, 0.029366720199584962, 0.02939904022216797, 0.030125375747680663, 0.029455039978027345, 0.02959769630432129, 0.029564544677734374, 0.02948748779296875, 0.02968272018432617, 0.029496288299560545, 0.029392608642578123, 0.029499679565429687, 0.029429759979248047, 0.029572736740112304, 0.029713855743408204, 0.03208287811279297, 0.02960383987426758, 
0.029594688415527343, 0.030145471572875976, 0.02990880012512207, 0.029892799377441406, 0.029881856918334962, 0.029942272186279296, 0.03026095962524414, 0.029951360702514647, 0.029805471420288086, 0.029875648498535155, 0.029905471801757812, 0.029892608642578124, 0.02984502410888672, 0.029669855117797853, 0.03002899169921875, 0.029940351486206055, 0.031337087631225585, 0.029970176696777345, 0.029931840896606447, 0.0302838077545166, 0.029593599319458007, 0.029812736511230467, 0.029747200012207032, 0.029900800704956054, 0.03031449508666992, 0.03041823959350586, 0.030325439453125, 0.030324735641479493, 0.030227807998657225, 0.030114463806152344, 0.030212095260620117, 0.030208000183105467, 0.030402559280395508, 0.030283008575439453, 0.03036851119995117, 0.030445568084716795, 0.030334047317504883, 0.03019254493713379, 0.030552064895629883, 0.03001747131347656, 0.02990246391296387, 0.02982956886291504, 0.0299233283996582, 0.03062579154968262, 0.029724672317504884, 0.029708127975463867, 0.029423776626586913, 0.02969375991821289, 0.029675264358520508, 0.030298559188842774, 0.02972876739501953, 0.029772863388061524, 0.02967033576965332, 0.0295731201171875, 0.0332861442565918, 0.029284351348876952, 0.029362112045288085, 0.030754144668579102, 0.02983919906616211, 0.029365119934082032, 0.02982707214355469, 0.029826879501342773, 0.029724159240722657, 0.029651647567749025, 0.029482656478881836, 0.02964240074157715, 0.02953696060180664, 0.029739007949829102, 0.02975849533081055, 0.02961712074279785, 0.02933964729309082, 0.029646848678588866, 0.029652992248535157, 0.029431264877319337, 0.02939753532409668, 0.029394336700439453, 0.029358688354492186, 0.02939289665222168, 0.029315071105957033, 0.03098467254638672, 0.029911327362060546, 0.029859840393066408, 0.029912927627563476, 0.029708063125610352, 0.029405120849609376, 0.02958585548400879, 0.02982707214355469, 0.029876224517822264, 0.02968288040161133, 0.02955961608886719, 0.029583200454711914, 0.029237407684326172, 0.029198335647583007, 0.029222911834716796, 0.029158687591552733, 0.02956723213195801, 0.029333984375, 0.029612031936645508, 0.02979840087890625, 0.02978201675415039, 0.02973695945739746, 0.030238719940185548, 0.029808639526367187, 0.02974070358276367, 0.02972003173828125, 0.02973731231689453, 0.029632896423339845, 0.029879680633544924, 0.029749343872070313, 0.0297192325592041, 0.030117887496948242, 0.029822975158691405, 0.029652639389038084, 0.029783456802368165, 0.029766592025756836, 0.029707263946533204, 0.029674495697021484, 0.02964409637451172, 0.029340351104736328, 0.02939084815979004, 0.029286399841308593, 0.02944000053405762, 0.029400127410888672, 0.02967238426208496, 0.029773344039916994, 0.029415903091430665, 0.029419519424438476, 0.029698047637939453, 0.029405183792114258, 0.029310976028442383, 0.029258783340454102, 0.02959459114074707, 0.02954854393005371, 0.029460479736328125, 0.02966691207885742, 0.029484735488891602, 0.029403871536254882, 0.029607936859130858, 0.029716480255126954, 0.029501440048217774, 0.0295280647277832, 0.029726303100585938, 0.030853151321411133, 0.029883615493774413, 0.029442655563354493, 0.029151424407958985, 0.029171712875366212, 0.029057024002075195, 0.029255327224731446, 0.02918844795227051, 0.0293621768951416, 0.029247488021850586, 0.02918796730041504, 0.02985487937927246, 0.02961302375793457, 0.02942060852050781, 0.029385663986206054, 0.029255327224731446, 0.029624671936035157, 0.029284351348876952, 0.02939289665222168, 0.029136159896850585, 0.0291232967376709, 0.029159423828125, 0.02931219291687012, 
0.029299520492553712, 0.029270015716552734, 0.02919161605834961, 0.02924323272705078, 0.02916012763977051, 0.02932329559326172, 0.029521472930908205, 0.02920697593688965, 0.029454336166381836, 0.02927804756164551, 0.029296607971191407, 0.02933884811401367, 0.029554655075073242, 0.029312000274658204, 0.029400447845458984, 0.02935366439819336, 0.0294532470703125, 0.03081625556945801, 0.030511104583740234, 0.029421472549438478, 0.029513824462890626, 0.029353183746337892, 0.029487903594970704, 0.029321216583251954, 0.02960383987426758, 0.03000115203857422, 0.029868032455444334, 0.029829120635986327, 0.029673471450805664, 0.03219660949707031, 0.0295031681060791, 0.02977619171142578, 0.03146956825256347, 0.02997987174987793, 0.02957801628112793, 0.029691520690917968, 0.02970252799987793, 0.02981888008117676, 0.029999103546142578, 0.03035136032104492, 0.030580543518066407, 0.029860063552856444, 0.02958847999572754, 0.029158336639404297, 0.029320480346679688, 0.029205280303955077, 0.029267936706542968, 0.02910825538635254, 0.02918764877319336, 0.029170112609863283, 0.0293353271484375, 0.029296863555908204, 0.029435903549194335, 0.029288448333740235, 0.029367616653442383, 0.029307199478149415, 0.029347232818603516, 0.029346784591674804, 0.029419519424438476, 0.02940015983581543, 0.02955548858642578, 0.02935737609863281, 0.029406015396118163, 0.029179231643676758, 0.029278879165649415, 0.029410528182983398, 0.029233951568603516, 0.029152544021606445, 0.02926665687561035, 0.02934169578552246, 0.029290496826171877, 0.0294072322845459, 0.02935603141784668, 0.029582656860351563, 0.02929270362854004, 0.0306376953125, 0.02951875114440918, 0.029343360900878905, 0.029548095703125, 0.029406015396118163, 0.0295546875, 0.029812511444091798, 0.02999932861328125, 0.02982268714904785, 0.029860031127929686, 0.029743200302124025, 0.029814783096313476, 0.029812736511230467, 0.02979840087890625, 0.02980454444885254, 0.029782112121582032, 0.02979827117919922, 0.029855712890625, 0.030183488845825196, 0.030074880599975585, 0.029681663513183593, 0.029722623825073242, 0.02933884811401367, 0.02958620834350586, 0.029915136337280275, 0.029693952560424806, 0.0295133113861084, 0.029555103302001954, 0.031035104751586915, 0.029963808059692384, 0.029759328842163087, 0.029594112396240234, 0.029593088150024413, 0.030112672805786132, 0.030044160842895507, 0.029569023132324217, 0.02976563262939453, 0.029896703720092774, 0.029949888229370118, 0.029511743545532227, 0.029607648849487304, 0.02967296028137207, 0.029884992599487306, 0.029917407989501953, 0.029898752212524415, 0.029906784057617188, 0.029979808807373047, 0.030766080856323243, 0.02988627243041992, 0.029914688110351563, 0.02981747245788574, 0.029792255401611328, 0.030043872833251953, 0.02980892753601074, 0.02976972770690918, 0.03005606460571289, 0.029867935180664062, 0.029706592559814452, 0.02960358428955078, 0.029552928924560545, 0.03001968002319336, 0.030195711135864257, 0.029841407775878907, 0.0301527042388916, 0.029746431350708008, 0.029757183074951173, 0.03013734436035156, 0.029998815536499024, 0.02984783935546875, 0.029906944274902345, 0.02980601692199707, 0.02983788871765137, 0.02983907127380371, 0.029829248428344727, 0.029763744354248046, 0.029837312698364257, 0.030271488189697264, 0.029894655227661132, 0.029924352645874022, 0.030122047424316407, 0.031144800186157225, 0.03132953643798828, 0.030155616760253905, 0.030158016204833986, 0.030061344146728515, 0.030371871948242188, 0.032761856079101564, 0.030304256439208983, 0.03014441680908203, 0.02999510383605957, 
0.0299233283996582, 0.030814207077026368, 0.03004185676574707, 0.02946668815612793, 0.029333696365356446, 0.029273216247558593, 0.029151647567749024, 0.029328927993774415, 0.029170591354370116, 0.029194271087646485, 0.02926710319519043, 0.029227872848510743, 0.02922822380065918, 0.029299135208129882, 0.029328927993774415, 0.029682527542114256, 0.02919628715515137, 0.02918400001525879, 0.029478143692016602, 0.029324031829833983, 0.029237056732177736, 0.029236480712890624, 0.02968057632446289, 0.02923520088195801, 0.02940889549255371, 0.029622623443603516, 0.0294932804107666, 0.029313024520874024, 0.029310976028442383, 0.02947068786621094, 0.029284032821655273, 0.029828672409057618, 0.02934864044189453, 0.029205503463745116, 0.029459455490112304, 0.029442047119140623, 0.02960588836669922, 0.029437952041625977, 0.02920243263244629, 0.029207775115966797, 0.029480991363525392, 0.029569311141967772, 0.029862079620361328, 0.029773536682128905, 0.029733440399169923, 0.02974515151977539, 0.029550592422485353, 0.029392192840576172, 0.029863744735717773, 0.030069631576538088, 0.029356000900268554, 0.029468704223632812, 0.029388799667358398, 0.029429279327392577, 0.030171615600585937, 0.0297574405670166, 0.029880319595336914, 0.029863935470581054, 0.030064640045166017, 0.029814783096313476, 0.029758655548095703, 0.02961827278137207, 0.02972336006164551, 0.02996019172668457, 0.03140940856933594, 0.03031260871887207, 0.03015875244140625, 0.030266048431396485, 0.030004480361938476, 0.02998944091796875, 0.029997247695922852, 0.029894655227661132, 0.029885824203491212, 0.02979193687438965, 0.030939680099487304, 0.02976358413696289, 0.02983772850036621, 0.02976083183288574, 0.029662912368774413, 0.029417695999145507, 0.029713184356689452, 0.029378559112548826, 0.02956185531616211, 0.029631488800048827, 0.031044736862182617, 0.029725536346435547, 0.02989468765258789, 0.030007295608520508, 0.029863935470581054, 0.029911039352416992, 0.030007295608520508, 0.03006780815124512, 0.03013929557800293, 0.029943231582641602, 0.02993414306640625, 0.030069791793823242, 0.030095935821533203, 0.0300097599029541, 0.029962240219116212, 0.029648895263671874, 0.030195711135864257, 0.029863199234008788, 0.030173023223876952, 0.030091968536376953, 0.03042729568481445, 0.029728031158447264, 0.030200351715087892, 0.030167167663574218, 0.029611200332641602, 0.029680543899536133, 0.02972876739501953, 0.029847423553466798, 0.029644927978515624, 0.03016499137878418, 0.0305930233001709, 0.030357503890991212, 0.029937440872192383, 0.03039254379272461, 0.030044160842895507, 0.030039968490600585, 0.030125343322753906, 0.02991391944885254, 0.030023679733276368, 0.030064640045166017, 0.030089216232299806, 0.02976959991455078, 0.029855871200561525]",tokens/s,33.57021959470082,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7114.584064,9046.982656,0.0,8644.460544,8128.172544,s,1,14.6313193359375,14.6313193359375,0.0,14.6313193359375,14.6313193359375,14.6313193359375,14.6313193359375,[14.6313193359375],,kWh,0.0002047841478958162,2.2581312061304883e-05,9.387868621399398e-05,0.0003212441461711151,,MB,2960.13824,9061.66272,0.0,8644.460544,7350.14144,s,10,58.019437499999995,5.8019437499999995,0.005131407392645799,5.80232421875,5.8084748046875,5.808785498046875,5.809034052734376,"[5.79199755859375, 5.7949072265625, 5.7997998046875, 5.80165576171875, 5.80113232421875, 5.80299267578125, 5.804568359375, 5.80840576171875, 5.8048818359375, 5.80909619140625]",tokens/s,44.12314407563845,kWh,0.00016929640906791898,1.8673866270088877e-05,0.0001126287845474,0.00030059905988540786,tokens/kWh,851632.736634607,MB,2972.24192,9061.66272,0.0,8644.460544,7612.392448,s,10,27.652163085937502,2.76521630859375,0.008448875935972263,2.765815185546875,2.7712907470703123,2.7772309448242187,2.781983103027344,"[2.768875244140625, 2.769970703125, 2.769178466796875, 2.763192138671875, 2.767791748046875, 2.763838623046875, 2.757003662109375, 2.758634765625, 2.750506591796875, 2.783171142578125]",tokens/s,22.78302778853443,kWh,8.051136240874937e-05,8.881069780126944e-06,5.338898715560036e-05,0.0001427814193444767,tokens/kWh,441233.88245640846,,s,630,27.642104255676287,0.0438763559613909,0.00048118867023250574,0.043823007583618165,0.044306482315063477,0.04469192943572998,0.04604657787322998,"[0.04487721633911133, 0.04397711944580078, 0.04347308731079102, 0.04333276748657226, 0.043202945709228516, 0.04351174545288086, 0.04347548675537109, 0.04353023910522461, 0.043270145416259766, 0.043596897125244144, 0.043528385162353515, 0.04372348785400391, 0.04378607940673828, 0.04367375946044922, 0.04360723114013672, 0.043541313171386715, 0.0435643196105957, 0.04424163055419922, 0.04488566589355469, 0.04370671844482422, 0.04385993576049805, 0.04400336074829102, 0.044060672760009766, 0.04386611175537109, 0.043777599334716796, 0.04364720153808594, 0.043589855194091795, 0.04381695938110351, 0.044025856018066405, 0.04416713714599609, 0.04382252883911133, 0.04376432037353516, 0.04386611175537109, 0.04353638458251953, 0.0438476791381836, 0.04402988815307617, 0.044103038787841796, 0.044036800384521485, 0.044068382263183596, 0.04428236770629883, 0.044177375793457034, 0.04400332641601563, 0.04383129501342774, 0.04389683151245117, 0.04399273681640625, 0.044082622528076175, 0.04412303924560547, 0.04422172927856445, 0.044157665252685545, 0.043943935394287106, 0.04415488052368164, 0.04440063858032227, 0.04436787033081055, 0.044066368103027345, 0.044047840118408205, 0.04416780853271484, 0.04397030258178711, 0.043966625213623045, 0.044442272186279295, 0.04430947113037109, 0.044233535766601564, 0.04416825485229492, 0.04434579086303711, 0.044838848114013674, 0.04396892929077149, 0.04323942565917969, 0.046399486541748046, 0.04325785446166992, 0.043393024444580076, 0.04345651245117187, 0.04336844635009766, 0.04354227066040039, 0.04381254577636719, 0.04357587051391602, 0.04352783966064453, 0.04359766387939453, 0.04366368103027344, 0.04347308731079102, 0.04358720016479492, 0.04364940643310547, 0.04359167861938477, 0.0438476791381836, 0.04382822418212891, 0.04350694274902344, 0.043928638458251956, 0.044108478546142575, 0.044126113891601565, 0.04418979263305664, 0.043649215698242184, 0.04362630462646484, 0.04373196792602539, 0.04364534378051758, 0.043942272186279295, 0.0438581428527832, 0.04374528121948242, 
0.043851936340332034, 0.04381846237182617, 0.044042495727539065, 0.04389900970458984, 0.043843582153320314, 0.04373715209960938, 0.0438675537109375, 0.04388230514526367, 0.043947967529296875, 0.04393040084838867, 0.043882560729980466, 0.044189632415771486, 0.04430847930908203, 0.0442716178894043, 0.04442480087280273, 0.044312992095947266, 0.04406169509887695, 0.04399353790283203, 0.04469359970092773, 0.04428249740600586, 0.043997024536132814, 0.04440278244018555, 0.0444681282043457, 0.04410572814941406, 0.04431872177124024, 0.04421174240112305, 0.044230911254882814, 0.04414822387695312, 0.044161472320556644, 0.044134529113769534, 0.04442665481567383, 0.044985950469970705, 0.043968769073486326, 0.04348339080810547, 0.043440383911132814, 0.043237377166748046, 0.043401214599609376, 0.04358348846435547, 0.043394241333007816, 0.04345734405517578, 0.04380057525634766, 0.0437760009765625, 0.04363468933105469, 0.043499519348144534, 0.04354457473754883, 0.04384902572631836, 0.043623104095458984, 0.043699710845947266, 0.04364457702636719, 0.043475200653076175, 0.04373062515258789, 0.04389276885986328, 0.043823486328125, 0.04382566452026367, 0.043829120635986325, 0.04395635223388672, 0.04385603332519531, 0.04383916854858398, 0.04386345672607422, 0.043755966186523436, 0.043506046295166016, 0.043734977722167966, 0.04397055816650391, 0.043810462951660155, 0.04502924728393555, 0.043727489471435545, 0.04375126266479492, 0.044770431518554685, 0.0447918701171875, 0.04386854553222656, 0.04380841445922851, 0.044050910949707034, 0.04385823822021485, 0.044028926849365234, 0.04429663848876953, 0.04406035232543945, 0.04410995101928711, 0.04460825729370117, 0.04422041702270508, 0.044128128051757816, 0.04402188873291016, 0.044162528991699215, 0.044093345642089846, 0.04408793640136719, 0.04408115386962891, 0.04404838562011719, 0.04411391830444336, 0.04416307067871094, 0.044036094665527346, 0.04439651107788086, 0.04429971313476563, 0.04444793701171875, 0.043934078216552735, 0.04418902587890625, 0.04482870483398437, 0.04402793502807617, 0.043417598724365236, 0.04329062271118164, 0.04323507308959961, 0.04330316925048828, 0.04326838302612305, 0.04353200149536133, 0.04352819061279297, 0.04361830520629883, 0.04364102554321289, 0.04374444961547851, 0.04360460662841797, 0.04381695938110351, 0.04352342224121094, 0.043641502380371094, 0.043852863311767576, 0.043776416778564455, 0.04359843063354492, 0.04347833633422851, 0.043833984375, 0.04410131072998047, 0.04398710250854492, 0.043909374237060546, 0.043706497192382815, 0.043585025787353515, 0.04366476821899414, 0.043738014221191404, 0.043652801513671874, 0.04364511871337891, 0.04385395050048828, 0.04376780700683594, 0.04398255920410156, 0.043833023071289064, 0.04401369476318359, 0.043811294555664064, 0.0437589111328125, 0.04375798416137695, 0.04376707077026367, 0.04398591995239258, 0.04395827102661133, 0.04392086410522461, 0.04393423843383789, 0.044107200622558594, 0.044210750579833986, 0.044238208770751956, 0.04399782562255859, 0.04388355255126953, 0.044158977508544923, 0.04410367965698242, 0.04399801635742188, 0.04388470458984375, 0.04390911865234375, 0.043872257232666016, 0.043988800048828124, 0.04399484634399414, 0.043999614715576174, 0.04409673690795898, 0.04403308868408203, 0.044592960357666016, 0.04415078353881836, 0.043918785095214845, 0.044028480529785155, 0.04500649642944336, 0.04442310333251953, 0.04468988800048828, 0.04362031936645508, 0.04320463943481445, 0.0431487045288086, 0.04343235015869141, 0.043536575317382815, 0.04351087951660156, 0.04369295883178711, 
0.04364716720581055, 0.043372352600097655, 0.044299774169921875, 0.044034561157226565, 0.04374665451049805, 0.04380944061279297, 0.04379238510131836, 0.04373503875732422, 0.043996448516845706, 0.043903327941894534, 0.04376764678955078, 0.043753856658935546, 0.04391952133178711, 0.04382080078125, 0.04391686248779297, 0.043923999786376955, 0.04397795104980469, 0.04382787322998047, 0.043615966796875, 0.04379030227661133, 0.04366912078857422, 0.044175678253173825, 0.04394371032714844, 0.04372979354858399, 0.04401356887817383, 0.04368320083618164, 0.04392819213867188, 0.04390252685546875, 0.04390342330932617, 0.043974655151367184, 0.043837440490722655, 0.043943008422851565, 0.04384656143188476, 0.04405452728271484, 0.04403200149536133, 0.044034046173095705, 0.04419379043579102, 0.04389254379272461, 0.04405062484741211, 0.04397260665893555, 0.04377395248413086, 0.04365116882324219, 0.04400259017944336, 0.044065406799316406, 0.04397260665893555, 0.04408115386962891, 0.04395212936401367, 0.043943935394287106, 0.044306430816650394, 0.044254337310791016, 0.04430694580078125, 0.04430633544921875, 0.0443171501159668, 0.04490467071533203, 0.04402431869506836, 0.0432531852722168, 0.043408191680908204, 0.043439998626708984, 0.043292671203613284, 0.043476993560791016, 0.043390975952148435, 0.04343724822998047, 0.04351878356933594, 0.044133758544921874, 0.043686527252197266, 0.04348825454711914, 0.04361523056030273, 0.04357529449462891, 0.043593055725097654, 0.043595775604248044, 0.04352809524536133, 0.04353305435180664, 0.043509025573730466, 0.043565792083740236, 0.044160064697265626, 0.04420684814453125, 0.043991233825683596, 0.043936832427978516, 0.04395513534545899, 0.04355891036987305, 0.04353433609008789, 0.04347420883178711, 0.0438135986328125, 0.043646976470947264, 0.04379046249389648, 0.04349734497070312, 0.04369615936279297, 0.04398076629638672, 0.044856414794921876, 0.043849758148193356, 0.04356556701660156, 0.04376553726196289, 0.043527870178222655, 0.04375606536865234, 0.04371484756469726, 0.04383139038085938, 0.04394598388671875, 0.04433622360229492, 0.04537436676025391, 0.0443939208984375, 0.04438483047485352, 0.04428799819946289, 0.04378009414672852, 0.04430847930908203, 0.04381478500366211, 0.04404646301269531, 0.044042240142822264, 0.04382659149169922, 0.04388051223754883, 0.044128799438476564, 0.04413644790649414, 0.043985153198242186, 0.044306175231933594, 0.04371660614013672, 0.04393369674682617, 0.04397881698608398, 0.045026561737060544, 0.04361260986328125, 0.043129150390625, 0.043413089752197265, 0.043339710235595706, 0.043143646240234375, 0.043020286560058595, 0.04313865661621094, 0.04394230270385742, 0.043585281372070316, 0.04349568176269531, 0.0434312973022461, 0.04353907012939453, 0.043401214599609376, 0.04354601669311523, 0.0436102409362793, 0.04351776123046875, 0.043477088928222656, 0.04350009536743164, 0.043579391479492184, 0.04353376007080078, 0.044012096405029295, 0.04402995300292969, 0.04377622222900391, 0.04356995010375977, 0.043578369140625, 0.043606014251708985, 0.04349923324584961, 0.043617889404296874, 0.043573505401611326, 0.04351839828491211, 0.0434884147644043, 0.04371542358398438, 0.04362358474731445, 0.043964607238769535, 0.04367792129516602, 0.0437457275390625, 0.043853214263916016, 0.04386857604980469, 0.04385929489135742, 0.04390326309204102, 0.04384415817260742, 0.04397260665893555, 0.0442716178894043, 0.04452150344848633, 0.044324832916259764, 0.04425475311279297, 0.04380627059936523, 0.04374620819091797, 0.043650177001953124, 0.04361846542358398, 
0.04385472106933594, 0.04368572616577148, 0.04379852676391602, 0.043937793731689455, 0.04388454437255859, 0.04409270477294922, 0.04402044677734375, 0.04404633712768555, 0.04396140670776367, 0.04405923080444336, 0.043977054595947265, 0.043980480194091794, 0.04482640075683594, 0.043673824310302735, 0.04316928100585937, 0.04327475357055664, 0.043184127807617184, 0.04314886474609375, 0.043254302978515624, 0.043351680755615234, 0.04352233505249024, 0.043633697509765625, 0.0438256950378418, 0.043526592254638674, 0.04338278579711914, 0.04364492797851562, 0.04374118423461914, 0.043679649353027344, 0.04357948684692383, 0.04356697463989258, 0.04394134521484375, 0.04425116729736328, 0.04353043365478516, 0.045480384826660156, 0.04352227020263672, 0.04332028961181641, 0.043690303802490234, 0.043827713012695314, 0.04418374252319336, 0.0435931510925293, 0.04354816055297851, 0.04351382446289063, 0.04397526550292969, 0.044085567474365234, 0.04361011123657227, 0.04377190399169922, 0.043593406677246094, 0.04360192108154297, 0.04375568008422852, 0.043595935821533205, 0.043587711334228514, 0.04376972961425781, 0.04371046447753906, 0.043691520690917966, 0.044071422576904294, 0.044001182556152346, 0.044021728515625, 0.04384780883789063, 0.04360348892211914, 0.043475425720214844, 0.04376553726196289, 0.04389091110229492, 0.04378009414672852, 0.044122047424316406, 0.044533824920654295, 0.043744926452636716, 0.04394953536987305, 0.043740192413330076, 0.04360111999511719, 0.04379302215576172, 0.04448419189453125, 0.044022174835205076, 0.043902976989746094, 0.04389068984985352, 0.04409654235839844, 0.04525804901123047, 0.04401017761230469, 0.043275360107421876, 0.04309468841552734, 0.04309401702880859, 0.043118144989013674, 0.04302918243408203, 0.04299161529541016, 0.04441088104248047, 0.04333900833129883, 0.04312924957275391, 0.04317398452758789, 0.04324787139892578, 0.04353638458251953, 0.04341964721679688, 0.043324737548828124, 0.04333401489257813, 0.0431926383972168, 0.043224769592285155, 0.043243553161621096, 0.043397407531738284, 0.04381033706665039, 0.043923839569091794, 0.04392559814453125, 0.0437916488647461, 0.04343267059326172, 0.043579391479492184, 0.04343603134155274, 0.04373708724975586, 0.043415550231933595, 0.043401214599609376, 0.04344627380371094, 0.043498687744140625, 0.04343888092041016, 0.043552799224853514, 0.04347897720336914, 0.04356512069702148, 0.043603553771972656, 0.04348348617553711, 0.043518016815185544, 0.0433070068359375, 0.04364486312866211, 0.04350729751586914, 0.04355039978027344, 0.04372172927856445, 0.04405977630615234, 0.043666080474853514, 0.04408224105834961, 0.044034400939941404, 0.04361891174316406, 0.04366950225830078, 0.044660736083984375, 0.04560486221313476, 0.043679744720458984, 0.04347289657592773, 0.04355491256713867, 0.044007328033447264, 0.04399945449829101, 0.04417238235473633, 0.04412015914916992, 0.0444661750793457, 0.04380937576293945, 0.043812862396240236, 0.04527308654785156, 0.044020767211914065, 0.04354556655883789, 0.04339712142944336, 0.04317593765258789, 0.0431487045288086, 0.04339292907714844, 0.043555809020996095, 0.04342345428466797, 0.04357535934448242, 0.04345439910888672, 0.04343523025512695, 0.04343209457397461, 0.04364761734008789, 0.043499519348144534, 0.04360156631469726, 0.043485023498535155, 0.043510433197021484, 0.04357107162475586, 0.04374256134033203, 0.04376947021484375, 0.04402025604248047, 0.04388083267211914, 0.04379862213134766, 0.04390611267089844, 0.04369865417480469, 0.04366902542114258, 0.043659774780273435, 0.04379692840576172, 
0.04339712142944336, 0.04380435180664063, 0.04375532913208008, 0.04356966400146484, 0.044763137817382816, 0.04386159896850586, 0.04366172790527344, 0.043921409606933595, 0.04404019165039062, 0.04397375869750977, 0.04380352020263672, 0.04370608139038086, 0.04362460708618164, 0.0451646728515625, 0.0440709114074707, 0.044106014251708986, 0.04399004745483399, 0.04401017761230469, 0.04386816024780273, 0.04396646499633789, 0.044018753051757814, 0.043928512573242186, 0.04380672073364258, 0.04384972763061523, 0.04631363296508789, 0.046038303375244144, 0.04582364654541016, 0.046039966583251955, 0.04604927825927734, 0.04603903961181641, 0.04624998474121094, 0.04634009552001953, 0.046206432342529295, 0.046150177001953126]",tokens/s,22.791318423981068,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4019.245056,5295.177728,0.0,4892.655616,4450.714112,s,1,11.121876953125,11.121876953125,0.0,11.121876953125,11.121876953125,11.121876953125,11.121876953125,[11.121876953125],,kWh,0.00011472648681669093,1.2647819483617766e-05,5.074587393000263e-05,0.00017812018023031134,,MB,2013.446144,5309.857792,0.0,4892.655616,3973.526016,s,10,29.36771435546875,2.9367714355468753,0.004354115109419825,2.937447143554688,2.940952197265625,2.9423509765625,2.94347,"[2.928671875, 2.932039306640625, 2.933560546875, 2.934386474609375, 2.936850830078125, 2.93804345703125, 2.939215087890625, 2.940641357421875, 2.943749755859375, 2.9405556640625]",tokens/s,87.17055638084705,kWh,8.569944044958371e-05,9.452558327825413e-06,5.7016628946601754e-05,0.0001521686277240109,tokens/kWh,1682344.1456297329,MB,2022.023168,5309.857792,0.0,4892.655616,4123.167744,s,10,16.690455200195313,1.6690455200195313,0.0037946185355252453,1.6684172973632814,1.6734431884765626,1.6749025024414061,1.6760699536132813,"[1.6714803466796875, 1.668669677734375, 1.66193798828125, 1.673118896484375, 1.67636181640625, 1.66640380859375, 1.6702271728515625, 1.6681649169921875, 1.667302001953125, 1.66678857421875]",tokens/s,37.746124503100894,kWh,4.876358629625046e-05,5.378988630104439e-06,3.227244248459882e-05,8.641501741095376e-05,tokens/kWh,729039.9503178747,,s,630,16.6820188522339,0.026479395003545854,0.00029843480094449734,0.02643937587738037,0.026742912483215333,0.026932418632507324,0.027790650558471682,"[0.027729312896728517, 0.027053760528564452, 0.02655302429199219, 0.026350751876831054, 0.026137439727783204, 0.026232831954956053, 0.026220544815063477, 0.02611814308166504, 0.026261503219604493, 0.026201248168945312, 0.0262544002532959, 0.026269472122192383, 0.026251039505004882, 0.026271520614624025, 0.0262128963470459, 0.026449119567871094, 0.02630726432800293, 0.026284032821655274, 0.02649465560913086, 0.026413375854492188, 0.026383615493774413, 0.02658176040649414, 0.0265863037109375, 0.02630931282043457, 0.026406976699829103, 0.026509376525878908, 0.02648678398132324, 0.026400768280029296, 0.02676736068725586, 
0.02656870460510254, 0.026550271987915038, 0.026673152923583986, 0.0265031681060791, 0.026592639923095702, 0.026616447448730467, 0.026662912368774414, 0.02682019233703613, 0.026965503692626954, 0.02669046401977539, 0.026711040496826172, 0.02657587242126465, 0.026468351364135743, 0.026464256286621093, 0.026554336547851564, 0.026566688537597655, 0.026430591583251953, 0.026406816482543945, 0.02640380859375, 0.02649497604370117, 0.02646124839782715, 0.02649203109741211, 0.026387775421142578, 0.026673568725585937, 0.026687583923339843, 0.026804224014282226, 0.026644479751586913, 0.02655436706542969, 0.026615808486938477, 0.02671820831298828, 0.026729631423950195, 0.026692447662353517, 0.02675302314758301, 0.026671104431152344, 0.027844127655029298, 0.02701907157897949, 0.026497663497924803, 0.026349599838256837, 0.026089471817016603, 0.026238975524902345, 0.026218496322631835, 0.02621779251098633, 0.026104511260986327, 0.0262893123626709, 0.02615996742248535, 0.026262624740600586, 0.0262706241607666, 0.026267648696899414, 0.02637414360046387, 0.02612633514404297, 0.026189952850341796, 0.026251136779785158, 0.02636358451843262, 0.026464576721191405, 0.02644915199279785, 0.02636406326293945, 0.026388191223144532, 0.02627008056640625, 0.026257919311523437, 0.026281984329223632, 0.02641836738586426, 0.026383167266845704, 0.02644953536987305, 0.02641974449157715, 0.026627328872680663, 0.026475103378295898, 0.026354879379272462, 0.02644051170349121, 0.026591232299804687, 0.026718015670776366, 0.02679622459411621, 0.026945535659790038, 0.026632192611694337, 0.026565664291381835, 0.026498271942138673, 0.026505023956298827, 0.026517440795898437, 0.02650726318359375, 0.026634239196777345, 0.026385503768920897, 0.026407615661621094, 0.026525087356567383, 0.026479616165161132, 0.02649273681640625, 0.02651136016845703, 0.026564607620239256, 0.026488639831542968, 0.026527936935424806, 0.026625696182250976, 0.02650351905822754, 0.026611936569213866, 0.027047712326049803, 0.026465599060058593, 0.026483104705810546, 0.026451711654663087, 0.026580831527709962, 0.02655302429199219, 0.027521215438842773, 0.026808319091796876, 0.02636185646057129, 0.02624502372741699, 0.026110048294067382, 0.026140703201293945, 0.02610345649719238, 0.026114208221435547, 0.02614259147644043, 0.026064352035522462, 0.02603225517272949, 0.026063552856445314, 0.026220352172851562, 0.02616953659057617, 0.026148416519165038, 0.02613654327392578, 0.02628246307373047, 0.02613043212890625, 0.026193344116210937, 0.026164928436279298, 0.026183679580688478, 0.02614691162109375, 0.026157024383544922, 0.02626233673095703, 0.026238975524902345, 0.02626918411254883, 0.02621286392211914, 0.02627174377441406, 0.026462207794189452, 0.026284032821655274, 0.026204063415527345, 0.0261243839263916, 0.02615091133117676, 0.02626083183288574, 0.02634614372253418, 0.02655232048034668, 0.02653388786315918, 0.02654742431640625, 0.02653878402709961, 0.02655232048034668, 0.026687488555908204, 0.026697696685791014, 0.026582048416137694, 0.026647552490234375, 0.026652671813964843, 0.026758272171020506, 0.026616735458374022, 0.026552480697631838, 0.026519359588623045, 0.02643574333190918, 0.02640185546875, 0.026458911895751953, 0.026523040771484374, 0.026438240051269532, 0.026403871536254883, 0.02636899185180664, 0.02633113670349121, 0.026351615905761717, 0.026531679153442383, 0.026519712448120118, 0.02649907112121582, 0.02650931167602539, 0.02665657615661621, 0.028159807205200196, 0.02700841522216797, 0.02666111946105957, 0.02636016082763672, 0.02629974365234375, 
0.026436256408691405, 0.026189823150634766, 0.026236928939819337, 0.026159008026123046, 0.026216255187988282, 0.0261942081451416, 0.026161151885986327, 0.026221599578857422, 0.02636294364929199, 0.026283424377441408, 0.026278175354003907, 0.02631625556945801, 0.026264320373535155, 0.02647987174987793, 0.026292991638183594, 0.0264368953704834, 0.026343360900878906, 0.028549760818481446, 0.027059904098510744, 0.02625321578979492, 0.02632147216796875, 0.02631020736694336, 0.026378271102905273, 0.02639094352722168, 0.026752063751220703, 0.026404895782470704, 0.026409887313842775, 0.026410623550415038, 0.026417535781860353, 0.02651750373840332, 0.02674483108520508, 0.026572799682617186, 0.026744895935058594, 0.02664031982421875, 0.026646623611450194, 0.026676895141601563, 0.026571008682250978, 0.026564607620239256, 0.026574848175048828, 0.026463359832763673, 0.026389375686645507, 0.026550271987915038, 0.026580991744995116, 0.02652886390686035, 0.026506143569946287, 0.02647260856628418, 0.026482656478881837, 0.026593151092529296, 0.02660905647277832, 0.02664918327331543, 0.026648767471313478, 0.026711872100830078, 0.02670159912109375, 0.026632415771484376, 0.026626047134399415, 0.026627328872680663, 0.02657494354248047, 0.02653558349609375, 0.027858720779418946, 0.027137535095214844, 0.026751359939575194, 0.026380640029907226, 0.026429088592529296, 0.026320959091186525, 0.026327327728271486, 0.026258495330810545, 0.026317535400390626, 0.026265823364257812, 0.026413055419921876, 0.02624220848083496, 0.02646019172668457, 0.02634796714782715, 0.026247039794921875, 0.026231168746948242, 0.026422719955444336, 0.02647520065307617, 0.026492544174194336, 0.026484895706176757, 0.026566879272460937, 0.026517087936401368, 0.02643302345275879, 0.026362783432006837, 0.026342432022094728, 0.02652390480041504, 0.026442464828491212, 0.02637353515625, 0.026546783447265625, 0.026621952056884765, 0.02649609565734863, 0.026578847885131835, 0.026452991485595705, 0.026595327377319337, 0.026638336181640625, 0.026969247817993165, 0.026792800903320313, 0.02692639923095703, 0.026835615158081055, 0.026662399291992187, 0.02670031929016113, 0.026617311477661134, 0.026664543151855468, 0.02649363136291504, 0.02660089683532715, 0.026675872802734375, 0.026587295532226562, 0.026570751190185548, 0.02671820831298828, 0.02670182418823242, 0.02677689552307129, 0.02666975975036621, 0.026660863876342773, 0.026614847183227538, 0.026702783584594728, 0.02705129623413086, 0.026735328674316407, 0.02674278450012207, 0.026658432006835937, 0.026795520782470703, 0.0266997127532959, 0.02665116882324219, 0.026691871643066405, 0.02754969596862793, 0.02674892807006836, 0.026414304733276366, 0.026223392486572267, 0.026058464050292968, 0.026120479583740235, 0.02613043212890625, 0.026191520690917968, 0.026100223541259765, 0.02622857666015625, 0.026287904739379885, 0.026200288772583007, 0.026187776565551758, 0.026294111251831054, 0.026179744720458985, 0.02623632049560547, 0.02626121520996094, 0.02617740821838379, 0.026227872848510744, 0.026275680541992186, 0.02632863998413086, 0.026259904861450196, 0.026234783172607423, 0.02628236770629883, 0.026244415283203124, 0.026257823944091797, 0.026275840759277344, 0.026417152404785156, 0.027035648345947266, 0.026886144638061524, 0.026390527725219725, 0.026351615905761717, 0.026365280151367188, 0.026399520874023436, 0.02662182426452637, 0.02672435188293457, 0.026774784088134766, 0.026780191421508788, 0.026769632339477538, 0.02668339157104492, 0.026443775177001954, 0.02643382453918457, 0.02641632080078125, 
0.026443456649780272, 0.02629654312133789, 0.02639731216430664, 0.026388320922851562, 0.02648431968688965, 0.026663488388061523, 0.026588319778442383, 0.026602336883544922, 0.02661311912536621, 0.02648742485046387, 0.026406911849975585, 0.026417152404785156, 0.026480640411376953, 0.026428863525390624, 0.026380895614624023, 0.026554336547851564, 0.02670755195617676, 0.026538400650024413, 0.026523231506347656, 0.026532543182373046, 0.02746486473083496, 0.02683958435058594, 0.026537792205810547, 0.026222816467285158, 0.026146207809448242, 0.02631603240966797, 0.02734230422973633, 0.027844831466674803, 0.02596633529663086, 0.02620182418823242, 0.026071584701538086, 0.026286079406738282, 0.026167295455932618, 0.026140224456787108, 0.026167743682861327, 0.027744255065917968, 0.026244543075561524, 0.026216512680053712, 0.026241024017333983, 0.02618828773498535, 0.026259456634521484, 0.026253311157226563, 0.026179584503173828, 0.02628812789916992, 0.026443552017211915, 0.026378463745117188, 0.02617462348937988, 0.02623369598388672, 0.026251264572143555, 0.026424352645874023, 0.026417503356933592, 0.026366592407226563, 0.026376031875610353, 0.026402975082397463, 0.026595327377319337, 0.026482688903808595, 0.026619136810302733, 0.026698495864868162, 0.026642431259155275, 0.02665648078918457, 0.02674502372741699, 0.0266910400390625, 0.026667648315429688, 0.02652297592163086, 0.026456735610961915, 0.026343423843383788, 0.026351615905761717, 0.02648678398132324, 0.026429439544677736, 0.02663203239440918, 0.026509471893310547, 0.026429439544677736, 0.026520927429199218, 0.026561183929443358, 0.026529792785644532, 0.026538047790527344, 0.026562496185302733, 0.026488351821899413, 0.026459680557250977, 0.02672489547729492, 0.026702112197875976, 0.026613727569580078, 0.026814624786376952, 0.027501535415649415, 0.026728288650512695, 0.026491039276123046, 0.02643507194519043, 0.026331647872924805, 0.026249216079711913, 0.026327199935913086, 0.02615500831604004, 0.026064735412597656, 0.02603990364074707, 0.026015743255615235, 0.02622083282470703, 0.02635759925842285, 0.026272159576416015, 0.026186752319335937, 0.026252159118652342, 0.02621343994140625, 0.026182144165039063, 0.02635638427734375, 0.026584863662719727, 0.026402015686035157, 0.02629097557067871, 0.026267648696899414, 0.02636198425292969, 0.02622630310058594, 0.026331071853637696, 0.026335552215576173, 0.026228639602661134, 0.026179103851318358, 0.026228351593017576, 0.026270175933837892, 0.026319328308105468, 0.026394496917724608, 0.02637401580810547, 0.026429695129394533, 0.026611040115356446, 0.026647199630737306, 0.026812416076660156, 0.02672230339050293, 0.02674406433105469, 0.026566848754882813, 0.026443359375, 0.026560480117797852, 0.026435935974121094, 0.026419776916503906, 0.02640675163269043, 0.02647884750366211, 0.026417152404785156, 0.026406911849975585, 0.026472448348999023, 0.026673343658447264, 0.026679040908813477, 0.02677894401550293, 0.02666067123413086, 0.02664339256286621, 0.026594751358032225, 0.02653775978088379, 0.027085599899291993, 0.027258495330810546, 0.026761600494384766, 0.02656051254272461, 0.0265350399017334, 0.026655616760253905, 0.027461631774902344, 0.026730495452880858, 0.02657689666748047, 0.026214399337768556, 0.02612633514404297, 0.026245119094848633, 0.02611404800415039, 0.026169151306152345, 0.02624025535583496, 0.026231679916381836, 0.026110015869140624, 0.026167295455932618, 0.02627993583679199, 0.0261441593170166, 0.026198623657226562, 0.02611404800415039, 0.026406911849975585, 0.026191808700561522, 
0.026162496566772463, 0.027038528442382814, 0.026374080657958984, 0.026460159301757814, 0.02625766372680664, 0.026400831222534178, 0.026317728042602538, 0.026321695327758788, 0.026343423843383788, 0.026389759063720705, 0.0263045768737793, 0.02630726432800293, 0.026327039718627928, 0.026263296127319338, 0.026259008407592772, 0.026296159744262696, 0.026358623504638672, 0.0267325439453125, 0.028233728408813476, 0.02650111961364746, 0.026492767333984375, 0.026345632553100587, 0.026406911849975585, 0.026584320068359375, 0.026480512619018556, 0.026380416870117187, 0.02657731246948242, 0.026421600341796875, 0.027809600830078125, 0.026304704666137695, 0.026357088088989258, 0.026423967361450196, 0.02667670440673828, 0.026597919464111327, 0.026488832473754883, 0.02669977569580078, 0.026583040237426758, 0.02629971122741699, 0.026311071395874023, 0.02633296012878418, 0.02657855987548828, 0.02654707145690918, 0.026426944732666015, 0.026431936264038086, 0.026460159301757814, 0.027562463760375976, 0.026945375442504884, 0.02659328079223633, 0.026255359649658205, 0.02622489547729492, 0.026109119415283204, 0.025975360870361328, 0.026009599685668947, 0.026061920166015624, 0.02610678482055664, 0.0261345272064209, 0.026215871810913085, 0.02611667251586914, 0.026236255645751952, 0.026237600326538085, 0.026314783096313476, 0.026380256652832033, 0.026260608673095702, 0.026270048141479492, 0.02622431945800781, 0.02622073554992676, 0.026194719314575194, 0.026291423797607422, 0.026225311279296875, 0.026345407485961914, 0.026343488693237306, 0.026306560516357422, 0.026724031448364258, 0.026468671798706055, 0.026276927947998047, 0.02639148712158203, 0.02645590400695801, 0.02639683151245117, 0.026302463531494142, 0.026504735946655273, 0.02653027153015137, 0.026654495239257812, 0.026773727416992188, 0.02677555274963379, 0.02677555274963379, 0.02660710334777832, 0.02658764839172363, 0.02651955223083496, 0.026407039642333985, 0.026363775253295897, 0.02632499122619629, 0.02639580726623535, 0.026344287872314454, 0.02644144058227539, 0.026515199661254884, 0.026695295333862303, 0.026704191207885742, 0.02669219207763672, 0.02693734359741211, 0.026557695388793944, 0.026640384674072266, 0.026465024948120117, 0.026587135314941408, 0.02651750373840332, 0.026589183807373046, 0.026654720306396484, 0.026529792785644532, 0.026513376235961915]",tokens/s,37.76521328625862,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,2188.152832,2907.635712,0.0,2512.388096,2240.694784,s,1,9.295291015625,9.295291015625,0.0,9.295291015625,9.295291015625,9.295291015625,9.295291015625,[9.295291015625],,kWh,6.9199416970802e-05,7.625468309715327e-06,2.827668928798155e-05,0.00010510157456849888,,MB,2235.039744,2922.315776,0.0,2514.485248,2227.003904,s,10,11.443010864257813,1.1443010864257812,0.0009773683644739432,1.1445831298828124,1.1452647338867188,1.1453716857910157,1.145457247314453,"[1.1451075439453124, 1.143201904296875, 1.1424122314453125, 1.1437933349609375, 1.1447080078125, 1.1434659423828124, 1.144458251953125, 1.145240966796875, 1.1454786376953126, 1.14514404296875]",tokens/s,223.71734418221584,kWh,3.36507683370822e-05,3.7112161071116853e-06,2.2440184618799376e-05,5.980216906299326e-05,tokens/kWh,4280781.182541048,MB,2246.934528,2922.315776,0.0,2514.485248,2334.959104,s,10,18.52756018066406,1.852756018066406,0.012863735451607414,1.8540548706054687,1.8657800903320314,1.870597869873047,1.8744520935058595,"[1.8308460693359374, 1.85354541015625, 1.86470947265625, 1.84231201171875, 1.8545643310546875, 1.8624730224609376, 1.8384434814453126, 1.845342041015625, 1.85990869140625, 1.8754156494140626]",tokens/s,34.00339784930169,kWh,5.4726120434997937e-05,6.036620997139593e-06,2.994935729279946e-05,9.0712098724937e-05,tokens/kWh,694504.9324791019,,s,630,18.524415981292716,0.029403834890940832,0.00044869455187966965,0.02934132766723633,0.029830815505981444,0.030034862422943114,0.03134245605468751,"[0.029881599426269532, 0.02961894416809082, 0.029163040161132813, 0.029067264556884766, 0.028934623718261717, 0.028903423309326173, 
0.028970783233642577, 0.028920032501220702, 0.028895231246948243, 0.028848127365112306, 0.029108224868774416, 0.029357088088989257, 0.0290416316986084, 0.029075456619262696, 0.028929855346679686, 0.02875366401672363, 0.028874208450317383, 0.028949472427368166, 0.02893529510498047, 0.028912511825561524, 0.028862464904785157, 0.028906496047973632, 0.02899046325683594, 0.028972063064575195, 0.028998239517211914, 0.02914067268371582, 0.02897990417480469, 0.029129728317260743, 0.02913382339477539, 0.029099679946899413, 0.02894883155822754, 0.029048351287841796, 0.02890595245361328, 0.029070463180541992, 0.028910463333129882, 0.02889904022216797, 0.02887299156188965, 0.028994592666625976, 0.02883683204650879, 0.0289434871673584, 0.0291910400390625, 0.02899660873413086, 0.029683935165405274, 0.02902889633178711, 0.02908185577392578, 0.029046783447265623, 0.029401023864746093, 0.02945849609375, 0.02899135971069336, 0.02897430419921875, 0.028849023818969727, 0.028860448837280273, 0.028850175857543944, 0.028887039184570314, 0.028794879913330077, 0.028987295150756837, 0.028885087966918944, 0.028882944107055664, 0.028878303527832033, 0.029157375335693358, 0.029600191116333007, 0.029636320114135743, 0.029606271743774414, 0.02998080062866211, 0.02951932716369629, 0.02926748847961426, 0.029428735733032226, 0.029128704071044922, 0.029319072723388673, 0.029057119369506838, 0.029265920639038087, 0.02879283142089844, 0.02904047966003418, 0.028951711654663086, 0.029320192337036134, 0.0290119686126709, 0.029648895263671874, 0.029017311096191406, 0.029143199920654297, 0.028895456314086913, 0.028938655853271485, 0.028839935302734376, 0.029190143585205077, 0.02936627197265625, 0.029509632110595704, 0.02943180847167969, 0.029546239852905273, 0.02951398468017578, 0.02962544059753418, 0.029682592391967775, 0.02948281669616699, 0.02950163269042969, 0.029560831069946288, 0.0299899845123291, 0.029729696273803712, 0.02950102424621582, 0.029599584579467774, 0.029329984664916993, 0.029490367889404297, 0.02937939262390137, 0.029616128921508788, 0.02939904022216797, 0.029638656616210936, 0.029403135299682616, 0.029670495986938477, 0.029338399887084962, 0.029367551803588868, 0.029427679061889648, 0.029813663482666015, 0.029437952041625977, 0.029895967483520507, 0.029359935760498047, 0.0294748477935791, 0.029158271789550782, 0.029198175430297853, 0.029232959747314453, 0.02954857635498047, 0.030172512054443358, 0.03141526412963867, 0.029382591247558595, 0.029267232894897462, 0.029168415069580077, 0.029288448333740235, 0.02914303970336914, 0.029261823654174804, 0.029163520812988283, 0.03021366310119629, 0.029829599380493163, 0.029708288192749024, 0.02925923156738281, 0.02929088020324707, 0.029615808486938476, 0.029513887405395508, 0.029457952499389647, 0.029205280303955077, 0.029304832458496095, 0.02926915168762207, 0.029696863174438478, 0.02941542434692383, 0.029624319076538085, 0.029913087844848633, 0.02955264091491699, 0.029703584671020508, 0.02969046401977539, 0.02953830337524414, 0.029492288589477538, 0.02925200080871582, 0.02937606430053711, 0.029516767501831055, 0.029814783096313476, 0.029693952560424806, 0.029877504348754882, 0.029906944274902345, 0.02986470413208008, 0.029689247131347657, 0.029698144912719725, 0.029530624389648437, 0.029509632110595704, 0.029330751419067384, 0.02942608070373535, 0.029077056884765626, 0.02907200050354004, 0.02914886474609375, 0.029816448211669924, 0.029181791305541993, 0.029178815841674803, 0.028983295440673826, 0.029187231063842772, 0.029807455062866212, 0.030068735122680663, 
0.029122560501098634, 0.029620223999023438, 0.029153247833251954, 0.029779296875, 0.03011849594116211, 0.029694047927856446, 0.02971238327026367, 0.029615808486938476, 0.02945465660095215, 0.029749248504638674, 0.02972876739501953, 0.029718143463134766, 0.030136703491210937, 0.032372417449951174, 0.029878591537475584, 0.02953011131286621, 0.029155328750610353, 0.0293143367767334, 0.02924412727355957, 0.029981695175170898, 0.029518848419189454, 0.029174816131591796, 0.029034847259521483, 0.02892451286315918, 0.029091840744018556, 0.028886911392211913, 0.028971168518066408, 0.028802112579345705, 0.02891257667541504, 0.02872662353515625, 0.02902697563171387, 0.02873139190673828, 0.028964864730834962, 0.028807167053222657, 0.029390623092651367, 0.02896099281311035, 0.028833791732788085, 0.028892799377441405, 0.029329504013061523, 0.029242847442626955, 0.029124927520751954, 0.029190656661987304, 0.029378559112548826, 0.029190143585205077, 0.02936832046508789, 0.029485055923461914, 0.029722623825073242, 0.029618175506591796, 0.029964223861694336, 0.03068934440612793, 0.029757375717163085, 0.029335615158081054, 0.02928963279724121, 0.02964271926879883, 0.029426559448242188, 0.029708288192749024, 0.029515039443969725, 0.02924617576599121, 0.02919593620300293, 0.02938252830505371, 0.02950396728515625, 0.029422943115234374, 0.02936899185180664, 0.02927408027648926, 0.029620256423950195, 0.029120479583740234, 0.029222623825073242, 0.029347423553466798, 0.02929737663269043, 0.029069311141967775, 0.02906243133544922, 0.02895302391052246, 0.028983232498168945, 0.028809568405151368, 0.028929920196533204, 0.028899423599243163, 0.029099456787109373, 0.028916032791137695, 0.029147424697875977, 0.029042688369750977, 0.02920857620239258, 0.029255680084228516, 0.02991472053527832, 0.029235584259033203, 0.029681663513183593, 0.032481281280517575, 0.02920038414001465, 0.029087135314941406, 0.029091903686523438, 0.02911020851135254, 0.028957632064819334, 0.029066719055175782, 0.02918931198120117, 0.029682687759399414, 0.02931056022644043, 0.029289920806884764, 0.029460575103759764, 0.029528959274291992, 0.029609983444213867, 0.02957030487060547, 0.029383424758911134, 0.02923520088195801, 0.0293023681640625, 0.029615711212158204, 0.029610559463500975, 0.02969772720336914, 0.029696575164794924, 0.029726720809936522, 0.02958457565307617, 0.029743936538696288, 0.029517824172973633, 0.029701311111450194, 0.029341535568237306, 0.029506528854370117, 0.028915712356567383, 0.03224694442749024, 0.0290098876953125, 0.029055871963500977, 0.029046783447265623, 0.02917580795288086, 0.02895052719116211, 0.02913689613342285, 0.02917318344116211, 0.029344127655029296, 0.029122304916381837, 0.0293687686920166, 0.029411327362060546, 0.029464576721191408, 0.029493247985839844, 0.029790048599243165, 0.029455615997314454, 0.02929961585998535, 0.029210399627685547, 0.029540576934814454, 0.029059072494506837, 0.029327360153198243, 0.02914838409423828, 0.02919628715515137, 0.029145631790161133, 0.029230623245239257, 0.029059776306152342, 0.029128095626831055, 0.029522560119628907, 0.029122560501098634, 0.028990528106689454, 0.030030656814575195, 0.029372415542602538, 0.02897715187072754, 0.028983295440673826, 0.029009088516235352, 0.029397823333740233, 0.029248735427856446, 0.029240095138549804, 0.02917580795288086, 0.029306880950927733, 0.029121728897094728, 0.02926265525817871, 0.02936422348022461, 0.03140764808654785, 0.029757919311523436, 0.02952979278564453, 0.029278303146362306, 0.02965433692932129, 0.0295699520111084, 
0.029646751403808593, 0.029562496185302736, 0.029556896209716795, 0.02923756790161133, 0.02929254341125488, 0.02921174430847168, 0.02934982490539551, 0.02919251251220703, 0.028989471435546876, 0.028990079879760742, 0.02931711959838867, 0.02962380790710449, 0.03012646484375, 0.02970841598510742, 0.02958131217956543, 0.029626176834106444, 0.02978160095214844, 0.02945289611816406, 0.0295546875, 0.029519872665405275, 0.029661184310913087, 0.029634559631347656, 0.02982022476196289, 0.029727231979370116, 0.029693151473999025, 0.029647167205810548, 0.029858463287353514, 0.029910240173339844, 0.029944351196289062, 0.02997052764892578, 0.02984351921081543, 0.029628511428833007, 0.02974515151977539, 0.030519296646118164, 0.030103008270263673, 0.029878271102905272, 0.0297938232421875, 0.02965376091003418, 0.029796607971191408, 0.02949734306335449, 0.029498464584350587, 0.02920745658874512, 0.029199487686157228, 0.02896143913269043, 0.029861663818359373, 0.0294017276763916, 0.028915712356567383, 0.029005727767944335, 0.028893280029296874, 0.029068288803100587, 0.02894745635986328, 0.029044351577758788, 0.029114751815795897, 0.029388639450073244, 0.029225120544433592, 0.029255519866943358, 0.02958051109313965, 0.029262624740600585, 0.029108383178710937, 0.029243392944335936, 0.028921184539794923, 0.029109952926635742, 0.02904707145690918, 0.029233407974243165, 0.029139392852783202, 0.029445152282714843, 0.029468992233276366, 0.02928236770629883, 0.028892799377441405, 0.029113311767578125, 0.0289849910736084, 0.0289836483001709, 0.02874163246154785, 0.029087648391723633, 0.028833663940429688, 0.02919036865234375, 0.02899558448791504, 0.029034496307373047, 0.028999679565429686, 0.02912665557861328, 0.02925763130187988, 0.029233247756958007, 0.029222911834716796, 0.029392127990722657, 0.029073312759399415, 0.029066080093383788, 0.028911615371704103, 0.028962560653686523, 0.02896691131591797, 0.029051136016845704, 0.029183263778686522, 0.029245983123779296, 0.029326976776123045, 0.02937094306945801, 0.029304832458496095, 0.02919424057006836, 0.028991487503051756, 0.029916255950927735, 0.02905900764465332, 0.02917875289916992, 0.02911852836608887, 0.03012816047668457, 0.028868064880371094, 0.0291190071105957, 0.02927110481262207, 0.029464832305908205, 0.029278911590576173, 0.03003830337524414, 0.029491327285766603, 0.029396991729736328, 0.029261823654174804, 0.029327360153198243, 0.029449504852294923, 0.029329408645629884, 0.029398752212524415, 0.02921779251098633, 0.029939647674560546, 0.029269184112548828, 0.029551488876342773, 0.029669376373291017, 0.029370111465454103, 0.02940883255004883, 0.029481664657592773, 0.029581375122070312, 0.029523872375488282, 0.029440256118774415, 0.029857568740844727, 0.03000934410095215, 0.02960095977783203, 0.0292109432220459, 0.02900223922729492, 0.028872127532958983, 0.02934432029724121, 0.029018112182617187, 0.029236223220825194, 0.029221887588500976, 0.029156383514404298, 0.02900432014465332, 0.029661312103271484, 0.0289401912689209, 0.029341119766235352, 0.02880988883972168, 0.028850496292114256, 0.028771360397338866, 0.02902524757385254, 0.028876800537109375, 0.03140809631347656, 0.028914976119995116, 0.028850944519042968, 0.02876620864868164, 0.02895871925354004, 0.02886649513244629, 0.02900982475280762, 0.028977312088012696, 0.02936627197265625, 0.029095935821533202, 0.0291059513092041, 0.02889686393737793, 0.029147775650024414, 0.029105375289916992, 0.029330175399780275, 0.029085727691650392, 0.029908992767333983, 0.029429119110107423, 0.029337631225585938, 
0.029131359100341796, 0.02923107147216797, 0.02910972785949707, 0.029196832656860353, 0.02887887954711914, 0.030092191696166993, 0.029625663757324217, 0.02936467170715332, 0.029535839080810547, 0.029455007553100584, 0.02959769630432129, 0.029511615753173827, 0.02977987289428711, 0.029628576278686522, 0.02969536018371582, 0.02949184036254883, 0.029715776443481445, 0.029583072662353514, 0.030097759246826172, 0.02964339256286621, 0.029654624938964844, 0.029554912567138672, 0.029735103607177734, 0.03012403106689453, 0.029560831069946288, 0.029421567916870117, 0.03185183906555176, 0.030444095611572266, 0.029569055557250975, 0.02914041519165039, 0.029382720947265625, 0.029014623641967774, 0.029333311080932616, 0.0294169921875, 0.029301408767700196, 0.029159263610839845, 0.02930607986450195, 0.029108415603637694, 0.029258495330810548, 0.029244415283203123, 0.029166015625, 0.02931123161315918, 0.02929897689819336, 0.02916969680786133, 0.029421567916870117, 0.02935398483276367, 0.029308704376220702, 0.02922064018249512, 0.029229503631591797, 0.029195743560791014, 0.02946451187133789, 0.0295053768157959, 0.030706432342529295, 0.029685152053833007, 0.029583967208862305, 0.029313024520874024, 0.029418624877929688, 0.029147104263305666, 0.029442975997924805, 0.02918809509277344, 0.02920403289794922, 0.02917830467224121, 0.029198335647583007, 0.029003679275512697, 0.029112415313720705, 0.029287424087524414, 0.030271776199340822, 0.029743040084838867, 0.030091264724731445, 0.029640512466430666, 0.02954662322998047, 0.02949724769592285, 0.02972051239013672, 0.029634176254272462, 0.029620832443237304, 0.02971379280090332, 0.030816896438598633, 0.030748672485351562, 0.029726720809936522, 0.02995110321044922, 0.029808704376220702, 0.030386560440063475, 0.029825023651123047, 0.029872287750244142, 0.029628704071044922, 0.029713632583618164, 0.029725471496582032, 0.029778976440429688, 0.02966147232055664, 0.029747903823852537, 0.029515264511108398, 0.030722560882568358, 0.030390272140502928, 0.030029439926147462, 0.02972096061706543, 0.029672447204589843, 0.029420127868652345, 0.02959401512145996, 0.02956867218017578, 0.029841760635375976, 0.030611455917358397, 0.03118284797668457, 0.029718271255493166, 0.029823232650756835, 0.029599231719970705, 0.02988083267211914, 0.029603904724121093, 0.02976255989074707, 0.02970275115966797, 0.029820415496826173, 0.02963462448120117, 0.02958393669128418, 0.02936636734008789, 0.02956300735473633, 0.029413087844848633, 0.029681055068969727, 0.029444128036499023, 0.029508127212524413, 0.029263391494750976, 0.029502239227294922, 0.029351936340332032, 0.02967737579345703, 0.029332767486572264, 0.02951046371459961, 0.02935171127319336, 0.02958367919921875, 0.029620223999023438, 0.02986137580871582, 0.02965862464904785, 0.029637184143066406, 0.029490943908691405]",tokens/s,34.00916933825169,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1302.87616,1127.153664,0.0,731.906048,703.86944,s,1,8.206771484375,8.206771484375,0.0,8.206771484375,8.206771484375,8.206771484375,8.206771484375,[8.206771484375],,kWh,3.93551164999811e-05,4.328630238657786e-06,1.4016677879979955e-05,5.7700424618618844e-05,,MB,1343.090688,1406.07488,0.0,998.244352,942.608384,s,10,1.392733917236328,0.1392733917236328,0.0007123218924973523,0.13926077270507814,0.14019971618652344,0.14021211700439454,0.14022203765869143,"[0.13982981872558595, 0.13877853393554687, 0.13936058044433594, 0.13832496643066405, 0.14019696044921875, 0.13818496704101563, 0.13876640319824218, 0.1391609649658203, 0.1399062042236328, 0.14022451782226564]",tokens/s,1838.11133506387,kWh,4.299435067341441e-06,4.7415109055924025e-07,2.863846245323007e-06,7.637432403223689e-06,tokens/kWh,33519118.26963533,MB,1371.758592,1416.56064,0.0,1006.63296,942.610944,s,10,14.617361083984376,1.4617361083984375,0.012876126630085818,1.4593582763671873,1.4756720092773439,1.4820754455566407,1.4871981945800783,"[1.4884788818359376, 1.441701416015625, 1.45390087890625, 1.4595833740234374, 1.4591331787109374, 1.452447998046875, 1.460952392578125, 1.473926025390625, 1.4529879150390625, 1.4742490234375]",tokens/s,43.099434732461006,kWh,4.2311381469324745e-05,4.66651367157255e-06,1.775817025228097e-05,6.473606539317828e-05,tokens/kWh,973182.407941629,,s,630,14.615057729721066,0.023198504332890585,0.0010747253268728483,0.0230548152923584,0.02352419910430908,0.023748496055603027,0.024971472358703634,"[0.02363670349121094, 0.0233123836517334, 0.023070783615112306, 0.023119487762451173, 0.023009599685668944, 0.023181312561035155, 0.023079999923706053, 0.02294166374206543, 0.04850377655029297, 0.02305174446105957, 0.02288483238220215, 0.022913087844848634, 0.022966272354125978, 0.023103296279907228, 0.02301971244812012, 0.02309119987487793, 0.023581727981567383, 0.023249504089355468, 0.023692928314208984, 0.023134975433349608, 0.0231112003326416, 0.023187360763549804, 0.023136831283569335, 0.023205888748168944, 0.023328800201416016, 0.023369056701660156, 0.023322271347045898, 0.02337481689453125, 0.023478271484375, 
0.02348441505432129, 0.023517343521118166, 0.0235313606262207, 0.02349398422241211, 0.023423648834228514, 0.023410688400268553, 0.023393375396728516, 0.023516128540039063, 0.023551488876342775, 0.023534175872802734, 0.02339619255065918, 0.023410688400268553, 0.023279712677001952, 0.02335436820983887, 0.023339008331298827, 0.023380895614624024, 0.02327884864807129, 0.02335001564025879, 0.0233504638671875, 0.02338899230957031, 0.023314079284667968, 0.02327587127685547, 0.023146495819091797, 0.023029760360717775, 0.02293350410461426, 0.022916095733642578, 0.022803455352783202, 0.02289779281616211, 0.022886335372924806, 0.022965183258056642, 0.02289004707336426, 0.02288889694213867, 0.022990848541259764, 0.02289446449279785, 0.023520448684692382, 0.023073600769042968, 0.023182687759399415, 0.02286204719543457, 0.022997440338134764, 0.022939647674560547, 0.022837247848510742, 0.02286172866821289, 0.022797887802124023, 0.0227476806640625, 0.02266921615600586, 0.022767711639404296, 0.022735967636108398, 0.023396255493164063, 0.022614240646362305, 0.022860031127929687, 0.022872608184814455, 0.02274508857727051, 0.022898687362670898, 0.02269388771057129, 0.022777856826782225, 0.02291302490234375, 0.022872064590454103, 0.022743040084838868, 0.022645984649658203, 0.02284623908996582, 0.023092639923095702, 0.02277571105957031, 0.022710975646972657, 0.02292230415344238, 0.022946048736572265, 0.023136959075927735, 0.022756576538085938, 0.023046207427978516, 0.022895328521728514, 0.022865184783935545, 0.02280112075805664, 0.022918880462646483, 0.02282035255432129, 0.022752031326293946, 0.022839296340942384, 0.022834943771362304, 0.022888320922851563, 0.02285945510864258, 0.022909631729125978, 0.02285977554321289, 0.02312940788269043, 0.02301817512512207, 0.022873407363891603, 0.022870719909667967, 0.022822912216186524, 0.02285158348083496, 0.022884319305419922, 0.02287548828125, 0.022862367630004883, 0.02291859245300293, 0.022814815521240234, 0.022911392211914062, 0.022786272048950194, 0.02291097640991211, 0.02277891159057617, 0.022881248474121093, 0.02280243110656738, 0.023179264068603517, 0.023015071868896484, 0.022899040222167967, 0.022889503479003905, 0.022811616897583008, 0.022904224395751953, 0.02289254379272461, 0.02276620864868164, 0.022914016723632812, 0.022791263580322265, 0.02299075126647949, 0.02286796760559082, 0.022906015396118164, 0.022878400802612303, 0.02286467170715332, 0.022899871826171876, 0.022936288833618163, 0.023248512268066405, 0.02297894477844238, 0.022999040603637694, 0.022929407119750975, 0.023276992797851562, 0.023089727401733397, 0.022995071411132814, 0.022982528686523437, 0.022958335876464845, 0.022886016845703124, 0.022932960510253907, 0.02291801643371582, 0.023029535293579102, 0.023067840576171873, 0.022944576263427736, 0.022887744903564454, 0.02305913543701172, 0.0228121280670166, 0.022968864440917967, 0.022959295272827147, 0.023060287475585937, 0.02282352066040039, 0.023030176162719726, 0.022961631774902343, 0.023009824752807616, 0.02294806480407715, 0.022850656509399415, 0.02323891258239746, 0.02304979133605957, 0.0229116153717041, 0.022918527603149413, 0.02454207992553711, 0.025317375183105468, 0.02311577606201172, 0.023189504623413085, 0.022869983673095704, 0.02304115104675293, 0.023408607482910158, 0.02292627143859863, 0.02300444793701172, 0.025187040328979494, 0.02320128059387207, 0.022974111557006835, 0.022891359329223634, 0.02293286323547363, 0.02295257568359375, 0.02390572738647461, 0.02316739273071289, 0.023077024459838866, 0.02301568031311035, 
0.02295577621459961, 0.02301468849182129, 0.023483104705810547, 0.022945791244506835, 0.02288640022277832, 0.022960128784179686, 0.025794240951538087, 0.023046623229980467, 0.02288800048828125, 0.02298918342590332, 0.02287401580810547, 0.02284873580932617, 0.02287081527709961, 0.022790143966674805, 0.022798336029052735, 0.022747135162353514, 0.022794240951538085, 0.02285772705078125, 0.022857599258422852, 0.022771232604980467, 0.022852191925048827, 0.022856767654418946, 0.0227706241607666, 0.022806079864501953, 0.02310188865661621, 0.023027711868286133, 0.0231014404296875, 0.02320115280151367, 0.023295679092407227, 0.02322528076171875, 0.024016895294189454, 0.02401424026489258, 0.02336582374572754, 0.02340496063232422, 0.023275199890136718, 0.023271615982055665, 0.023824512481689455, 0.02330419158935547, 0.023225759506225584, 0.023167520523071288, 0.023006271362304688, 0.023070816040039063, 0.0230449275970459, 0.023038047790527344, 0.023090463638305664, 0.02303657531738281, 0.022930559158325196, 0.02304300880432129, 0.02291302490234375, 0.02302764892578125, 0.023717952728271485, 0.023127487182617187, 0.023319103240966796, 0.023170463562011717, 0.023155296325683594, 0.023005184173583985, 0.023150592803955077, 0.023452735900878905, 0.023597375869750976, 0.024172128677368163, 0.023325056076049806, 0.024043071746826173, 0.023056896209716796, 0.02304198455810547, 0.02300851249694824, 0.02280729675292969, 0.022961151123046874, 0.022916095733642578, 0.022941919326782228, 0.022753055572509766, 0.022847488403320314, 0.022847488403320314, 0.022784000396728517, 0.022863775253295898, 0.02299679946899414, 0.02294416046142578, 0.023084640502929688, 0.022982944488525392, 0.02287001609802246, 0.022831104278564454, 0.022937599182128905, 0.02303385543823242, 0.02335273551940918, 0.023762752532958984, 0.02518671989440918, 0.023163135528564454, 0.023136383056640626, 0.02292736053466797, 0.022958208084106445, 0.023627647399902342, 0.02512227249145508, 0.023079072952270508, 0.023058816909790038, 0.022996992111206056, 0.02291916847229004, 0.022833152770996092, 0.022865087509155273, 0.022893375396728515, 0.02285772705078125, 0.022867136001586914, 0.022956863403320312, 0.023945215225219727, 0.024352767944335937, 0.02296944046020508, 0.023247776031494142, 0.023001087188720702, 0.02298454475402832, 0.023093408584594726, 0.022962175369262695, 0.022964223861694336, 0.022851232528686524, 0.02336944007873535, 0.02301804733276367, 0.02321824073791504, 0.02288640022277832, 0.022977823257446288, 0.023026399612426758, 0.023010623931884765, 0.023160736083984376, 0.02290505599975586, 0.02312169647216797, 0.023230432510375976, 0.023426143646240235, 0.022989055633544923, 0.02294646453857422, 0.02285935974121094, 0.02303545570373535, 0.022934560775756837, 0.022888256072998048, 0.022836767196655273, 0.022837728500366212, 0.02279347229003906, 0.022925567626953126, 0.022860288619995117, 0.022937599182128905, 0.022792127609252928, 0.023054079055786134, 0.022919488906860352, 0.022908607482910157, 0.022825279235839845, 0.023033504486083985, 0.022808927536010742, 0.023377344131469725, 0.023071296691894533, 0.023055551528930664, 0.022936159133911133, 0.022789567947387696, 0.02287696075439453, 0.022868223190307617, 0.022971744537353515, 0.022808031082153322, 0.022881216049194335, 0.0228351993560791, 0.02288844871520996, 0.022792192459106447, 0.02304777526855469, 0.02293507194519043, 0.022923744201660157, 0.022888864517211914, 0.02302329635620117, 0.022992607116699218, 0.022864448547363282, 0.022928991317749024, 0.02289299201965332, 
0.022803808212280275, 0.022939712524414062, 0.022991455078125, 0.02309119987487793, 0.023033824920654297, 0.02301139259338379, 0.023295967102050782, 0.02307891273498535, 0.02302566337585449, 0.023126016616821288, 0.023045343399047853, 0.02351798439025879, 0.023557439804077148, 0.02349305534362793, 0.023537919998168944, 0.023452831268310548, 0.02347520065307617, 0.023521120071411133, 0.02354742431640625, 0.023775711059570312, 0.023623327255249023, 0.024602272033691405, 0.02423638343811035, 0.02342246437072754, 0.023298559188842775, 0.023205888748168944, 0.023231584548950194, 0.02315727996826172, 0.02322470474243164, 0.023162879943847657, 0.022929407119750975, 0.022894271850585936, 0.022782272338867187, 0.02282048034667969, 0.0228089599609375, 0.022819839477539062, 0.02277174377441406, 0.02274403190612793, 0.022925216674804686, 0.022997087478637695, 0.02288198471069336, 0.022884767532348634, 0.022903839111328126, 0.023134944915771484, 0.022976415634155273, 0.02291472053527832, 0.022959808349609374, 0.02293142318725586, 0.02297542381286621, 0.023090240478515624, 0.023148511886596678, 0.023038496017456056, 0.02293564796447754, 0.023219615936279296, 0.023286272048950195, 0.023151039123535156, 0.02313577651977539, 0.023190176010131836, 0.023365440368652343, 0.023171072006225587, 0.023037343978881835, 0.022952543258666993, 0.023114944458007814, 0.02303468894958496, 0.022992895126342772, 0.023044095993041993, 0.023227424621582032, 0.023094240188598632, 0.022984128952026367, 0.023058176040649414, 0.023105823516845703, 0.023269920349121093, 0.02329100799560547, 0.02349350357055664, 0.02363520050048828, 0.023523616790771484, 0.023548063278198243, 0.023605567932128906, 0.023406848907470704, 0.023518495559692383, 0.023424800872802735, 0.02352943992614746, 0.0237238712310791, 0.023731071472167967, 0.024048959732055664, 0.02337775993347168, 0.02346281623840332, 0.02342911911010742, 0.023513088226318358, 0.023330816268920897, 0.023471935272216797, 0.023332927703857424, 0.023444927215576172, 0.023372703552246094, 0.02349443244934082, 0.02346188735961914, 0.023627328872680663, 0.023643680572509766, 0.02364022445678711, 0.023479040145874024, 0.023497983932495116, 0.02366326332092285, 0.02378883171081543, 0.02348217582702637, 0.023369983673095705, 0.023345920562744142, 0.02356950378417969, 0.023409568786621093, 0.023353343963623048, 0.023811744689941405, 0.02333139228820801, 0.023190336227416994, 0.023231456756591797, 0.023391807556152344, 0.02336332893371582, 0.02338476753234863, 0.023195648193359376, 0.023289056777954103, 0.023480640411376954, 0.023343616485595704, 0.02350486373901367, 0.023307392120361328, 0.023331552505493163, 0.024008575439453124, 0.023488191604614257, 0.023230144500732422, 0.023358367919921876, 0.02330419158935547, 0.02336067199707031, 0.023184223175048826, 0.023084928512573242, 0.023023008346557617, 0.02288092803955078, 0.02299235153198242, 0.022891008377075195, 0.022881536483764647, 0.023142528533935548, 0.02305932807922363, 0.023315391540527343, 0.022950143814086915, 0.02342793655395508, 0.023104448318481446, 0.02330508804321289, 0.022975744247436522, 0.02314521598815918, 0.02356003189086914, 0.02525609588623047, 0.023888927459716797, 0.02334409523010254, 0.023220191955566405, 0.02300441551208496, 0.022983455657958986, 0.023157920837402344, 0.022805408477783205, 0.02285971260070801, 0.022877599716186522, 0.0227989444732666, 0.022812671661376953, 0.022889984130859374, 0.0227740478515625, 0.022851808547973633, 0.022847711563110353, 0.022927135467529298, 0.02284339141845703, 
0.022826271057128908, 0.022884416580200195, 0.022821279525756837, 0.022774015426635742, 0.02287615966796875, 0.022975584030151368, 0.02291935920715332, 0.022989536285400392, 0.023025535583496095, 0.023122047424316405, 0.023257087707519532, 0.023080511093139647, 0.023457216262817382, 0.023617887496948244, 0.023017728805541992, 0.023253408432006836, 0.022930816650390626, 0.023232959747314454, 0.023957696914672852, 0.022982656478881838, 0.022998207092285155, 0.023019840240478515, 0.02302195167541504, 0.02291315269470215, 0.02314646339416504, 0.022958112716674806, 0.02297235107421875, 0.022829120635986327, 0.023100608825683593, 0.02288912010192871, 0.023010623931884765, 0.02362051200866699, 0.023019039154052734, 0.02293596839904785, 0.022982624053955077, 0.022972448348999024, 0.023035903930664063, 0.022960031509399414, 0.022927423477172852, 0.023130144119262695, 0.02314035224914551, 0.023234560012817384, 0.02325503921508789, 0.023321887969970704, 0.023200223922729492, 0.02324710464477539, 0.02442073631286621, 0.023675039291381837, 0.02340425682067871, 0.023546207427978517, 0.023469472885131838, 0.02328835105895996, 0.023420671463012695, 0.023372032165527343, 0.02339344024658203, 0.02316889572143555, 0.023120864868164063, 0.023152639389038086, 0.023998367309570313, 0.02312406349182129, 0.023179264068603517, 0.023138303756713868, 0.023242624282836914, 0.023146623611450194, 0.023225439071655272, 0.023247520446777345, 0.02325872039794922, 0.02329052734375, 0.023293407440185546, 0.023148927688598633, 0.02300739288330078, 0.023188831329345704, 0.023147167205810545, 0.023384063720703126, 0.02414182472229004, 0.0240762882232666, 0.023477632522583006, 0.023470592498779298, 0.023296127319335936, 0.023416831970214845, 0.023220096588134766, 0.023221792221069334, 0.02320240020751953, 0.02330419158935547, 0.02316867256164551, 0.023357791900634764, 0.023451648712158202, 0.023386112213134767, 0.023135648727416993, 0.02309087944030762, 0.023210912704467773, 0.023326719284057617, 0.023719072341918945, 0.023495359420776366, 0.023568544387817383, 0.02347337532043457, 0.02351798439025879, 0.024325408935546876, 0.02379225540161133, 0.023479455947875975, 0.023317440032958985, 0.023459583282470702, 0.023400703430175782, 0.023431167602539063, 0.02332876777648926, 0.023370752334594725, 0.023161855697631836, 0.02336329650878906, 0.023400672912597655]",tokens/s,43.10622726579019,,, 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1845.264384,2861.498368,0.0,2466.250752,2401.696256,s,1,9.3064697265625,9.3064697265625,0.0,9.3064697265625,9.3064697265625,9.3064697265625,9.3064697265625,[9.3064697265625],,kWh,6.920842063327655e-05,7.62660962710615e-06,2.5094464520009074e-05,0.00010192949478039177,,MB,1639.661568,3087.990784,0.0,2680.160256,2582.173696,s,10,5.461781799316406,0.5461781799316406,0.002053754413714127,0.5455082397460937,0.548137109375,0.5496025207519531,0.5507748498535157,"[0.5510679321289063, 0.544267578125, 0.54442578125, 0.5445333251953125, 0.5478114624023438, 0.5469923095703125, 0.5443823852539063, 0.5457959594726562, 0.5472845458984374, 0.5452205200195313]",tokens/s,468.711511016498,kWh,1.6342655354862e-05,1.8015006276936102e-06,1.0818573469665674e-05,2.8962729452221288e-05,tokens/kWh,8838945.943348104,MB,1639.661568,3090.087936,0.0,2680.160256,2582.176256,s,10,15.202343749999999,1.5202343749999998,0.008446818794560296,1.5178092041015625,1.5296287109375,1.5350193725585939,1.5393319018554688,"[1.5284307861328126, 1.5095377197265625, 1.51311474609375, 1.521512451171875, 1.5233001708984375, 1.51756640625, 1.5404100341796876, 1.5158154296875, 1.51460400390625, 1.518052001953125]",tokens/s,41.440978467547154,kWh,4.3751790035144206e-05,4.8266266312004895e-06,2.3022925825737693e-05,7.160134249208238e-05,tokens/kWh,879871.7706580221,,s,630,15.200313385009773,0.024127481563507566,0.00042550805216144027,0.024015567779541017,0.02454982967376709,0.02477015514373779,0.025282242221832275,"[0.02515177536010742, 0.02418659210205078, 0.0241232967376709, 0.024244319915771483, 0.024126752853393555, 0.02398886489868164, 0.024005792617797853, 0.02413814353942871, 0.024048160552978516, 0.02402444839477539, 0.024124031066894532, 0.024070144653320313, 0.024057119369506837, 0.024205375671386718, 0.023984607696533204, 0.023998655319213868, 0.023975231170654296, 0.023982112884521484, 0.02395952033996582, 0.023952064514160157, 0.023975711822509765, 0.024025312423706056, 0.02411929512023926, 0.024197120666503907, 0.024344575881958007, 0.024393728256225586, 0.024444927215576173, 0.024451072692871095, 0.024434688568115235, 0.024427967071533205, 0.024650304794311524, 0.02460198402404785, 0.024660383224487305, 0.024381248474121094, 0.024301376342773438, 0.024125471115112304, 0.024123071670532226, 0.024127872467041015, 0.024166208267211914, 0.024955583572387696, 0.024201215744018553, 0.02417411231994629, 0.024117727279663086, 0.02409062385559082, 0.024473600387573242, 0.02579631996154785, 0.025287967681884765, 0.024222240447998047, 0.024191455841064455, 0.024169984817504882, 0.02427724838256836, 0.024950944900512695, 0.024669599533081055, 0.02446406364440918, 0.024188928604125977, 0.023963647842407225, 0.02394460868835449, 0.02395372772216797, 0.024072479248046875, 0.023948959350585938, 0.02390265655517578, 0.023964704513549803, 0.02393587112426758, 0.025022464752197264, 0.024383487701416014, 0.024134912490844727, 0.024048383712768556, 0.023867168426513673, 0.023865568161010743, 0.02390425682067871, 0.024207328796386717, 0.023920671463012695, 0.023932928085327147, 0.023889759063720702, 0.023875743865966796, 0.023821504592895507, 0.02380803108215332, 0.023849760055541992, 0.02384886360168457, 0.02378329658508301, 0.024049888610839842, 0.02384828758239746, 0.02392540740966797, 0.023901824951171876, 0.02374220848083496, 0.023847551345825196, 0.023816192626953125, 0.023767040252685546, 0.023837696075439452, 0.02386227226257324, 0.023778783798217774, 0.023888416290283203, 
0.023887264251708985, 0.023954015731811523, 0.02406399917602539, 0.023828479766845705, 0.023922016143798828, 0.023868064880371093, 0.023901472091674803, 0.023834623336791993, 0.023919328689575196, 0.02393926429748535, 0.023914239883422853, 0.023930944442749024, 0.023994367599487306, 0.024022815704345703, 0.0239815673828125, 0.023909088134765624, 0.023934207916259765, 0.02390912055969238, 0.023949312210083007, 0.023982080459594726, 0.02423526382446289, 0.023995136260986327, 0.024016895294189454, 0.02400985527038574, 0.024099103927612303, 0.02404582405090332, 0.023992671966552734, 0.024068191528320314, 0.02403721618652344, 0.024014911651611327, 0.023895072937011718, 0.023910911560058593, 0.02397795104980469, 0.02397235107421875, 0.025036800384521486, 0.024236032485961914, 0.024029151916503906, 0.023980064392089842, 0.023962976455688477, 0.024015520095825197, 0.024012479782104492, 0.024004928588867186, 0.023990272521972656, 0.023971839904785155, 0.02391244888305664, 0.02388787269592285, 0.023777055740356445, 0.02388400077819824, 0.02393497657775879, 0.02390732765197754, 0.02394153594970703, 0.023818656921386717, 0.023896255493164063, 0.02386124801635742, 0.02381337547302246, 0.023912576675415038, 0.023932832717895508, 0.023908639907836916, 0.023769088745117187, 0.023869152069091796, 0.023892831802368165, 0.023918464660644533, 0.023897855758666993, 0.02402124786376953, 0.023870752334594725, 0.023866048812866213, 0.023966880798339845, 0.02393907165527344, 0.023961503982543944, 0.02392697525024414, 0.02387171173095703, 0.024017471313476563, 0.02405171203613281, 0.02409219169616699, 0.024381568908691406, 0.02411087989807129, 0.02405958366394043, 0.02459123229980469, 0.02418057632446289, 0.023957344055175783, 0.0242238712310791, 0.023959264755249024, 0.023957984924316406, 0.024258560180664062, 0.024065280914306642, 0.02394598388671875, 0.02390220832824707, 0.024432031631469727, 0.02386390495300293, 0.023985727310180664, 0.02395180892944336, 0.0240762882232666, 0.023999616622924803, 0.024478591918945313, 0.02395136070251465, 0.023988224029541014, 0.024025087356567384, 0.02523574447631836, 0.024180736541748047, 0.024270048141479494, 0.024027711868286134, 0.024013248443603516, 0.023899616241455077, 0.023982175827026365, 0.0239550724029541, 0.023888479232788085, 0.023863296508789062, 0.023875072479248048, 0.023798271179199217, 0.023946943283081053, 0.02394144058227539, 0.023910400390625, 0.02384396743774414, 0.023865440368652343, 0.02390505599975586, 0.023767040252685546, 0.023971839904785155, 0.02389401626586914, 0.02388902473449707, 0.0239270076751709, 0.023982751846313478, 0.0241727352142334, 0.023971647262573243, 0.023996448516845702, 0.02393235206604004, 0.025938623428344725, 0.03129737663269043, 0.024203264236450195, 0.0240797119140625, 0.024040096282958983, 0.02405171203613281, 0.024016895294189454, 0.024041471481323243, 0.02432204818725586, 0.02409881591796875, 0.02388172721862793, 0.02395039939880371, 0.02413043212890625, 0.024162368774414064, 0.024020288467407228, 0.024008384704589845, 0.023972864151000976, 0.024771839141845702, 0.024029951095581054, 0.024035232543945313, 0.02398627281188965, 0.024049184799194337, 0.02389616012573242, 0.02400422477722168, 0.02392755126953125, 0.02388787269592285, 0.023879680633544922, 0.023819711685180663, 0.023806432723999023, 0.023890016555786132, 0.023881824493408203, 0.023822240829467774, 0.02384486389160156, 0.023875583648681642, 0.023781343460083006, 0.024661951065063477, 0.02415523147583008, 0.023929664611816406, 0.023981760025024414, 
0.023880159378051758, 0.02388172721862793, 0.023914239883422853, 0.023894271850585937, 0.025006080627441408, 0.025540607452392578, 0.024194623947143553, 0.024285631179809572, 0.024288448333740234, 0.024421184539794923, 0.024290464401245118, 0.024243040084838866, 0.024319999694824217, 0.024401920318603516, 0.024444223403930664, 0.024357376098632814, 0.024331743240356446, 0.024316448211669922, 0.024284351348876954, 0.02413590431213379, 0.024032032012939453, 0.02403705596923828, 0.02401545524597168, 0.024106719970703124, 0.0242992000579834, 0.02433875274658203, 0.024200864791870117, 0.024187231063842775, 0.02405097579956055, 0.024062623977661134, 0.024021055221557615, 0.02398988723754883, 0.02405526351928711, 0.02404630470275879, 0.024106496810913085, 0.024039327621459963, 0.024050464630126955, 0.024444032669067382, 0.024861312866210936, 0.024549055099487304, 0.024170495986938476, 0.024344768524169922, 0.02402137565612793, 0.024030847549438475, 0.023945791244506836, 0.02398188781738281, 0.02409881591796875, 0.023868511199951172, 0.023910879135131836, 0.023968191146850587, 0.023910175323486327, 0.0238635196685791, 0.023843008041381834, 0.023881248474121094, 0.02388969612121582, 0.02396342468261719, 0.023967679977416993, 0.024373823165893555, 0.024400096893310547, 0.025037599563598634, 0.02434252738952637, 0.024104543685913086, 0.02399273681640625, 0.023805952072143553, 0.023795711517333985, 0.023838720321655273, 0.0238450870513916, 0.02375235176086426, 0.023771263122558593, 0.023748607635498048, 0.023893056869506837, 0.023845823287963867, 0.02390425682067871, 0.023867391586303712, 0.023910400390625, 0.023887519836425782, 0.023904672622680666, 0.02374550437927246, 0.023865728378295897, 0.0238372802734375, 0.023820287704467775, 0.023797760009765623, 0.023871488571166992, 0.023776512145996093, 0.02387830352783203, 0.02386089515686035, 0.02386966323852539, 0.02387990379333496, 0.023883775711059572, 0.023811296463012697, 0.02408732795715332, 0.02489753532409668, 0.024792224884033202, 0.02400489616394043, 0.024012928009033204, 0.0238353271484375, 0.024009504318237306, 0.024314847946166993, 0.024082111358642577, 0.023961151123046875, 0.024070911407470703, 0.024004608154296874, 0.024139776229858398, 0.023994367599487306, 0.02401033592224121, 0.024017311096191405, 0.02453651237487793, 0.024072416305541994, 0.02426915168762207, 0.025136415481567382, 0.024829856872558592, 0.024108928680419923, 0.024215583801269532, 0.02411123275756836, 0.024134431838989258, 0.02395683288574219, 0.024076959609985352, 0.023994367599487306, 0.024573951721191405, 0.023928672790527343, 0.024082592010498047, 0.02595840072631836, 0.025231231689453126, 0.02427587127685547, 0.024047264099121092, 0.023974239349365236, 0.02423583984375, 0.024006752014160155, 0.024269920349121094, 0.024208383560180666, 0.02425177574157715, 0.024348608016967775, 0.024535743713378907, 0.024481119155883788, 0.024506208419799804, 0.02461574363708496, 0.024510719299316405, 0.024512256622314453, 0.024389184951782227, 0.024426944732666017, 0.02427494430541992, 0.02423753547668457, 0.024297727584838866, 0.02437353515625, 0.02457804870605469, 0.024576000213623047, 0.024567808151245117, 0.024627199172973634, 0.024646751403808592, 0.024564544677734376, 0.024727039337158203, 0.02465564727783203, 0.02464851188659668, 0.02467430305480957, 0.024573951721191405, 0.02463942337036133, 0.024691808700561525, 0.025084896087646483, 0.024813568115234375, 0.0248623046875, 0.024753984451293946, 0.02459231948852539, 0.024691167831420897, 0.02471340751647949, 
0.024805343627929688, 0.02457792091369629, 0.024616191864013672, 0.024382080078125, 0.024395103454589843, 0.024213855743408202, 0.024283744812011718, 0.024172544479370117, 0.024020992279052734, 0.024066047668457033, 0.024190975189208985, 0.024426496505737305, 0.024451040267944337, 0.02479865646362305, 0.024723455429077147, 0.024154720306396486, 0.02404915237426758, 0.023955968856811522, 0.024006015777587892, 0.02395724868774414, 0.02426969528198242, 0.025402496337890625, 0.024168703079223634, 0.024023456573486326, 0.023957727432250976, 0.02384931182861328, 0.0239835205078125, 0.024041631698608398, 0.024195167541503908, 0.023952991485595702, 0.02410655975341797, 0.023951583862304688, 0.02404416084289551, 0.02392268753051758, 0.02404153633117676, 0.024074176788330078, 0.024033279418945314, 0.023920064926147462, 0.024110912322998047, 0.02396236801147461, 0.024135679244995118, 0.024161663055419922, 0.02421824073791504, 0.024194656372070314, 0.024410175323486327, 0.024400224685668947, 0.024221696853637696, 0.024352767944335937, 0.02407164764404297, 0.024017055511474608, 0.024070016860961913, 0.023910560607910157, 0.02393516731262207, 0.024032512664794923, 0.024064287185668946, 0.023923328399658203, 0.024276992797851563, 0.024056831359863282, 0.024015615463256836, 0.024041887283325195, 0.024042720794677733, 0.02396633529663086, 0.02392185592651367, 0.023935712814331055, 0.02396771240234375, 0.02388800048828125, 0.023958816528320312, 0.024183519363403322, 0.023971839904785155, 0.023919872283935547, 0.02403753662109375, 0.023839328765869142, 0.023860544204711915, 0.023945920944213866, 0.024556800842285155, 0.024009471893310548, 0.023821727752685547, 0.02391481590270996, 0.023910688400268554, 0.023889759063720702, 0.02394268798828125, 0.02393948745727539, 0.024004831314086914, 0.02393017578125, 0.025045440673828124, 0.024164352416992187, 0.023965055465698243, 0.023824544906616212, 0.02384492874145508, 0.023867807388305663, 0.023881759643554688, 0.023895360946655272, 0.023855775833129884, 0.023842111587524414, 0.023841407775878905, 0.023791679382324217, 0.023770496368408202, 0.023788192749023437, 0.023853023529052733, 0.023788576126098634, 0.024034271240234373, 0.02399967956542969, 0.024130559921264647, 0.024102720260620117, 0.023945215225219727, 0.02400636863708496, 0.023875360488891603, 0.023956064224243165, 0.024002464294433593, 0.023928831100463867, 0.02430156707763672, 0.023930303573608397, 0.023829055786132813, 0.02386944007873535, 0.023868640899658202, 0.02385590362548828, 0.0238919677734375, 0.02386124801635742, 0.0238590087890625, 0.02391878318786621, 0.023826271057128905, 0.02403139114379883, 0.02386124801635742, 0.02385478401184082, 0.023795488357543946, 0.023883552551269532, 0.023878400802612304, 0.023971839904785155, 0.023998016357421874, 0.024240575790405273, 0.024137727737426756, 0.0241582088470459, 0.02418611145019531, 0.024285503387451172, 0.024474048614501955, 0.024768096923828125, 0.024366624832153322, 0.024578336715698243, 0.024262752532958985, 0.024297824859619142, 0.02407030487060547, 0.024270847320556642, 0.024152063369750978, 0.024129535675048826, 0.024141216278076173, 0.024322080612182616, 0.02425814437866211, 0.025225215911865235, 0.02427891159057617, 0.02406208038330078, 0.024195072174072265, 0.024039424896240235, 0.024052864074707032, 0.02407084846496582, 0.02406991958618164, 0.02401321601867676, 0.023971839904785155, 0.024002559661865236, 0.024012384414672853, 0.024060159683227538, 0.02419318389892578, 0.024059551239013672, 0.024103263854980468, 0.02405580711364746, 
0.02406809616088867, 0.02408166313171387, 0.02398409652709961, 0.024009599685668945, 0.024022815704345703, 0.02404774475097656, 0.02395955276489258, 0.02400611114501953, 0.024010976791381835, 0.023981632232666014, 0.023974655151367187, 0.025268224716186522, 0.024215551376342775, 0.024002559661865236, 0.024027135848999022, 0.02394726371765137, 0.02400230407714844, 0.024015104293823242, 0.024199167251586915, 0.0241213436126709, 0.024051456451416015, 0.024036895751953124, 0.023890655517578126, 0.023997440338134765, 0.024024063110351563, 0.023977632522583007, 0.023982431411743162, 0.023926528930664062, 0.024018943786621092, 0.023971391677856446, 0.024080287933349608, 0.024869279861450197, 0.0242159366607666, 0.02393440055847168, 0.023945791244506836, 0.02486409568786621, 0.024090400695800783, 0.0239968318939209, 0.023941600799560547, 0.0239554557800293, 0.023963552474975586, 0.023971744537353516, 0.02395359992980957, 0.02393087959289551, 0.02397123146057129, 0.023879711151123046]",tokens/s,41.446513900252405,,, 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,837.541888,558.825472,0.0,163.577856,152.009216,s,1,7.91282421875,7.91282421875,0.0,7.91282421875,7.91282421875,7.91282421875,7.91282421875,[7.91282421875],,kWh,2.1531786091691175e-05,2.3678079476961558e-06,7.5216726840021675e-06,3.14212667233895e-05,,MB,1184.538624,609.15712,0.0,201.326592,184.525824,s,25,0.26236425590515133,0.010494570236206056,0.00017778441085430742,0.010455743789672851,0.010575654602050781,0.010639872360229492,0.011143333168029785,"[0.011298399925231933, 0.010406208038330079, 0.010385151863098144, 0.010464159965515137, 0.010456607818603516, 0.010431903839111328, 0.010458687782287597, 0.010485952377319335, 0.010497440338134765, 0.010553824424743652, 0.010455743789672851, 0.010590208053588868, 0.010537983894348145, 0.010449248313903809, 0.01038265609741211, 0.01041494369506836, 0.010396063804626465, 0.010374976158142089, 0.010462847709655762, 0.010541376113891601, 0.010652288436889649, 0.010414463996887207, 0.01037007999420166, 0.010439807891845704, 0.010443231582641602]",tokens/s,24393.566791025434,kWh,3.431983375147018e-07,3.783589778108301e-08,2.2630583452407013e-07,6.07340069819855e-07,tokens/kWh,421510143.52788043,MB,1218.269184,609.15712,0.0,201.326592,184.528384,s,25,10.12959649658203,0.40518385986328126,0.004919799178749007,0.40542120361328127,0.40999371948242186,0.4109174865722656,0.4166025048828125,"[0.4047490234375, 0.39681671142578123, 0.4053542175292969, 0.4055596923828125, 0.4014530944824219, 0.4110566711425781, 0.4010058898925781, 0.4103607482910156, 0.4067662048339844, 0.40542120361328127, 0.4089087219238281, 0.4082795104980469, 0.40944317626953125, 0.40081253051757815, 0.407805908203125, 0.39951266479492187, 0.3957163696289063, 0.4001940307617188, 0.41835382080078126, 0.4073577880859375, 0.4045588073730469, 0.3997763061523438, 0.40915234375, 
0.40476751708984376, 0.4064135437011719]",tokens/s,155.48496927112967,kWh,1.1434372355484986e-05,1.2610269749405768e-06,4.364320060197409e-06,1.705971939062297e-05,tokens/kWh,3692909.5114324405,,s,1575,10.117378850460058,0.0064237326034667,0.00023948435789487624,0.006399040222167969,0.006530944156646728,0.006600761699676514,0.007333048276901245,"[0.006305280208587646, 0.0065354561805725095, 0.006521312236785889, 0.006501344203948975, 0.006511392116546631, 0.006486015796661377, 0.006481696128845215, 0.006534880161285401, 0.006525055885314941, 0.006526336193084716, 0.0065465598106384274, 0.006597856044769287, 0.006517407894134522, 0.006493696212768555, 0.00643123197555542, 0.006483615875244141, 0.006457759857177735, 0.006574368000030518, 0.006440608024597168, 0.00637065601348877, 0.006427296161651611, 0.006379519939422608, 0.006360608100891113, 0.00641209602355957, 0.006372064113616943, 0.006414432048797608, 0.006438464164733887, 0.006332543849945069, 0.006367392063140869, 0.00636518383026123, 0.006395808219909668, 0.006373472213745117, 0.006362720012664795, 0.006395999908447266, 0.006375775814056396, 0.006434207916259766, 0.006370175838470459, 0.006403840065002441, 0.00637667179107666, 0.006382304191589356, 0.006423679828643799, 0.006372223854064942, 0.006340288162231446, 0.006373407840728759, 0.007237919807434082, 0.006307839870452881, 0.006292768001556396, 0.0062507839202880855, 0.006257279872894287, 0.006257823944091797, 0.006307680130004883, 0.006287551879882812, 0.006292096138000488, 0.006313983917236328, 0.006268896102905273, 0.006352287769317627, 0.0063218560218811035, 0.0063879361152648926, 0.00639628791809082, 0.006392159938812256, 0.00649948787689209, 0.006372191905975342, 0.006391808032989502, 0.005998879909515381, 0.006312960147857666, 0.0062576642036437985, 0.00629750394821167, 0.006269023895263672, 0.006330592155456543, 0.0063357119560241695, 0.006334047794342041, 0.0063497920036315915, 0.00633241605758667, 0.006305856227874756, 0.006287295818328858, 0.006290592193603515, 0.0062841281890869145, 0.006221151828765869, 0.006253536224365234, 0.006227263927459717, 0.006281599998474121, 0.006229055881500244, 0.006246912002563477, 0.006296031951904297, 0.006256608009338379, 0.0066007041931152345, 0.006301375865936279, 0.006256159782409668, 0.006244128227233887, 0.006228127956390381, 0.006237023830413818, 0.006292992115020752, 0.006322976112365723, 0.0062353920936584475, 0.006261216163635254, 0.006264832019805908, 0.0062873601913452145, 0.006300992012023926, 0.006269728183746338, 0.006344607830047607, 0.006375423908233643, 0.006268928050994873, 0.00623638391494751, 0.006295296192169189, 0.006239520072937012, 0.006198016166687012, 0.006273344039916993, 0.006276927947998047, 0.006487872123718262, 0.0062362241744995115, 0.006253888130187988, 0.00623686408996582, 0.006352511882781983, 0.006300000190734864, 0.0062746238708496094, 0.006266848087310791, 0.006320576190948487, 0.006326335906982422, 0.006445184230804444, 0.006537087917327881, 0.006338431835174561, 0.006286655902862549, 0.006255551815032959, 0.006297696113586426, 0.0062708802223205564, 0.006265984058380127, 0.006023168087005615, 0.006303743839263916, 0.006244031906127929, 0.00687340784072876, 0.006317984104156494, 0.006270336151123047, 0.0063170561790466305, 0.006307551860809326, 0.0063482880592346195, 0.00627180814743042, 0.006311615943908691, 0.006292992115020752, 0.006304255962371826, 0.006307424068450928, 0.006318016052246094, 0.0068793601989746095, 0.0064617919921875, 0.006436863899230957, 0.006463615894317627, 
0.006422656059265137, 0.006534207820892334, 0.006388031959533692, 0.006361663818359375, 0.006393631935119629, 0.006320159912109375, 0.006387712001800537, 0.006362880229949951, 0.0063480958938598635, 0.006427487850189209, 0.006330463886260986, 0.006366367816925049, 0.006438752174377441, 0.006466559886932373, 0.006415679931640625, 0.0063731842041015625, 0.00646611213684082, 0.006359360218048096, 0.00635920000076294, 0.006408160209655762, 0.006342559814453125, 0.0064297599792480465, 0.006490784168243408, 0.006420735836029053, 0.006446656227111817, 0.00642409610748291, 0.0063743038177490234, 0.006381343841552734, 0.006484479904174805, 0.006397151947021484, 0.006428607940673828, 0.00639958381652832, 0.00643779182434082, 0.006924352169036865, 0.006443295955657959, 0.006440767765045166, 0.006375328063964843, 0.006415359973907471, 0.006376448154449463, 0.007439775943756104, 0.0064704642295837405, 0.006706975936889649, 0.006422495841979981, 0.0063918399810791015, 0.006064991950988769, 0.006389696121215821, 0.006326335906982422, 0.0063610877990722655, 0.006311552047729492, 0.0063942399024963376, 0.006354944229125976, 0.0063119039535522465, 0.007235616207122803, 0.006341663837432862, 0.006306272029876709, 0.006356800079345703, 0.006322336196899414, 0.006351359844207764, 0.006332704067230225, 0.006354688167572021, 0.006342656135559082, 0.006329951763153076, 0.00633516788482666, 0.006583424091339112, 0.006382175922393799, 0.0064245758056640625, 0.006468671798706055, 0.006483007907867431, 0.006469503879547119, 0.0064412798881530765, 0.00643884801864624, 0.006432064056396484, 0.006513088226318359, 0.006451295852661133, 0.006424479961395264, 0.006455296039581298, 0.006445055961608887, 0.006479328155517578, 0.006472224235534668, 0.0064430079460144046, 0.0064471039772033695, 0.006416384220123291, 0.006450623989105225, 0.006442560195922852, 0.006429728031158447, 0.006445024013519287, 0.0064430079460144046, 0.006401055812835693, 0.006417376041412353, 0.0064471039772033695, 0.006521024227142334, 0.006475584030151367, 0.0064143362045288085, 0.006536831855773926, 0.006467936038970948, 0.006513728141784668, 0.006461503982543945, 0.00649513578414917, 0.006438144207000732, 0.006433568000793457, 0.0064587841033935545, 0.0064635519981384275, 0.006426144123077393, 0.006439328193664551, 0.00641648006439209, 0.006402368068695068, 0.006420544147491455, 0.006106048107147217, 0.006385439872741699, 0.006408095836639404, 0.00637724781036377, 0.006385727882385254, 0.006426784038543701, 0.006569183826446533, 0.006396031856536865, 0.006418496131896973, 0.006429279804229736, 0.0063772802352905274, 0.006410560131072998, 0.006352767944335937, 0.00640121603012085, 0.006396192073822022, 0.006349343776702881, 0.006376480102539063, 0.006353888034820556, 0.0065989761352539065, 0.006379199981689453, 0.006359039783477783, 0.0063569917678833006, 0.00638486385345459, 0.006453055858612061, 0.006418560028076172, 0.006411104202270508, 0.0064143362045288085, 0.006361216068267822, 0.006377312183380127, 0.006397984027862549, 0.00633241605758667, 0.006313471794128418, 0.006300511837005615, 0.0063463678359985355, 0.00629804801940918, 0.006374623775482178, 0.006369247913360596, 0.006348415851593018, 0.006350719928741455, 0.006346687793731689, 0.006374495983123779, 0.006340479850769043, 0.006324543952941895, 0.0063422398567199705, 0.0063339838981628415, 0.006414912223815918, 0.006334752082824707, 0.006327616214752197, 0.006293087959289551, 0.006302080154418945, 0.0063240318298339844, 0.0063220481872558595, 0.006345056056976319, 
0.0063564801216125484, 0.006383520126342773, 0.006355967998504639, 0.006313983917236328, 0.00635475206375122, 0.0063203201293945315, 0.006292895793914795, 0.006303616046905517, 0.00632528018951416, 0.006364128112792969, 0.006125696182250976, 0.006363135814666748, 0.0063201279640197755, 0.006372960090637207, 0.006312352180480957, 0.006339968204498291, 0.006368959903717041, 0.00669536018371582, 0.006439424037933349, 0.006405151844024658, 0.006427487850189209, 0.006493599891662598, 0.0065474557876586915, 0.006584479808807373, 0.006571616172790527, 0.006564832210540771, 0.0072353601455688475, 0.007833824157714844, 0.0075443840026855466, 0.007920063972473145, 0.006651904106140137, 0.006569983959197998, 0.006600895881652832, 0.006577792167663574, 0.006838655948638916, 0.00661411190032959, 0.006529248237609863, 0.006517248153686523, 0.006485951900482178, 0.006420544147491455, 0.0064633598327636715, 0.006430912017822266, 0.006431807994842529, 0.00639408016204834, 0.007348896026611328, 0.006415840148925781, 0.006425248146057129, 0.006344575881958008, 0.006391744136810303, 0.0063591041564941405, 0.006378592014312744, 0.00639628791809082, 0.006359583854675293, 0.006395904064178467, 0.006371327877044678, 0.006374527931213379, 0.0063515200614929195, 0.006344064235687256, 0.006370143890380859, 0.006370880126953125, 0.0063455038070678715, 0.006339392185211182, 0.006356128215789795, 0.0063721280097961425, 0.006340640068054199, 0.006337408065795898, 0.006360095977783203, 0.006355936050415039, 0.00638976001739502, 0.006323552131652832, 0.006372000217437744, 0.006342656135559082, 0.006352128028869629, 0.006159071922302246, 0.006537312030792236, 0.006510496139526367, 0.006452672004699707, 0.006472352027893066, 0.006419904232025146, 0.006364863872528076, 0.006342495918273926, 0.0063681921958923336, 0.006338560104370118, 0.006307616233825684, 0.00632041597366333, 0.006389472007751465, 0.006322400093078613, 0.00633241605758667, 0.006291647911071777, 0.006373023986816407, 0.006324384212493896, 0.006336832046508789, 0.006313663959503174, 0.006311520099639893, 0.0063162240982055666, 0.006342271804809571, 0.006287168025970459, 0.006298399925231933, 0.006278783798217774, 0.006293407917022705, 0.006283616065979004, 0.006325664043426514, 0.006316991806030273, 0.006323200225830078, 0.006367839813232422, 0.006336639881134034, 0.006514272212982178, 0.006329152107238769, 0.006358687877655029, 0.006366496086120606, 0.006318111896514892, 0.006341311931610107, 0.006303743839263916, 0.006344704151153564, 0.006308896064758301, 0.006288352012634277, 0.006334496021270752, 0.00630508804321289, 0.006611616134643555, 0.006368480205535889, 0.006305791854858398, 0.006330848217010498, 0.006347263813018799, 0.0063240318298339844, 0.00627836799621582, 0.006355743885040283, 0.006352735996246338, 0.006379039764404297, 0.0064139838218688966, 0.006375840187072754, 0.006418015956878662, 0.006454239845275879, 0.0064102401733398436, 0.006488351821899414, 0.0063935680389404295, 0.006430751800537109, 0.00617196798324585, 0.006466239929199219, 0.006444767951965332, 0.006418655872344971, 0.006453311920166015, 0.006490111827850342, 0.006491136074066162, 0.006468192100524902, 0.006441023826599121, 0.006479904174804687, 0.006461472034454346, 0.006422848224639893, 0.006473504066467285, 0.006455488204956054, 0.006448416233062744, 0.006453055858612061, 0.006448287963867188, 0.0064962878227233884, 0.006464928150177002, 0.00654585599899292, 0.00651251220703125, 0.0064778242111206055, 0.0070388798713684085, 0.006533215999603272, 0.006616384029388428, 
0.006566624164581299, 0.006537439823150634, 0.0065780158042907716, 0.006536223888397217, 0.006520768165588379, 0.006529983997344971, 0.006526976108551025, 0.006475168228149414, 0.006475967884063721, 0.0064757437705993655, 0.006416831970214844, 0.0064899840354919434, 0.006459392070770263, 0.006406271934509277, 0.006399744033813477, 0.006417984008789062, 0.006436927795410156, 0.00646998405456543, 0.006421792030334473, 0.006381824016571045, 0.006408768177032471, 0.006606592178344727, 0.0064947838783264164, 0.006913919925689697, 0.0065474557876586915, 0.006494207859039307, 0.006528639793395996, 0.0065289921760559085, 0.006539680004119873, 0.006474944114685058, 0.00641926383972168, 0.0063751678466796875, 0.0064956479072570805, 0.006479872226715088, 0.006486879825592041, 0.0064791679382324215, 0.007381760120391846, 0.006494143962860108, 0.0061975998878479, 0.006475967884063721, 0.0064766077995300295, 0.006500895977020264, 0.006492767810821533, 0.006649119853973389, 0.006498784065246582, 0.006487040042877197, 0.006455967903137207, 0.006463039875030518, 0.006449952125549317, 0.0064204797744750975, 0.006454495906829834, 0.006445856094360352, 0.006408415794372559, 0.006444831848144531, 0.006395391941070557, 0.00644927978515625, 0.0064347200393676755, 0.006389567852020264, 0.006436607837677002, 0.006402976036071777, 0.006422048091888428, 0.006447616100311279, 0.006422751903533935, 0.006493951797485352, 0.006491360187530518, 0.006484767913818359, 0.006470655918121338, 0.006462463855743408, 0.006450240135192871, 0.006425536155700683, 0.00643071985244751, 0.006518784046173095, 0.006450592041015625, 0.006466400146484375, 0.006505824089050293, 0.0065335359573364254, 0.006474783897399902, 0.006454112052917481, 0.006408160209655762, 0.006415999889373779, 0.0063903040885925294, 0.006411871910095215, 0.006399648189544678, 0.006400767803192139, 0.0064280638694763186, 0.0064496641159057615, 0.006446591854095459, 0.006435679912567139, 0.006463232040405273, 0.00645743989944458, 0.0064429759979248045, 0.006442944049835205, 0.006453120231628418, 0.006459807872772217, 0.006460800170898438, 0.006441311836242676, 0.006520832061767578, 0.006454592227935791, 0.006388415813446045, 0.006430592060089111, 0.006439104080200196, 0.006159840106964111, 0.006457952022552491, 0.0063961601257324216, 0.006381599903106689, 0.006352735996246338, 0.0063949117660522465, 0.006427487850189209, 0.006367231845855713, 0.006380703926086426, 0.006363999843597412, 0.006383264064788818, 0.006433375835418701, 0.006419392108917237, 0.006650335788726806, 0.006618879795074463, 0.00650710391998291, 0.006475776195526123, 0.006450399875640869, 0.006422304153442383, 0.006445280075073242, 0.0064048957824707035, 0.006389887809753418, 0.006410111904144287, 0.006516863822937011, 0.006469759941101074, 0.006416224002838135, 0.006433951854705811, 0.006415103912353516, 0.006465536117553711, 0.006405344009399414, 0.0064354238510131835, 0.006391295909881592, 0.006383967876434326, 0.006420127868652344, 0.006445759773254394, 0.00643891191482544, 0.006412288188934326, 0.0064245758056640625, 0.006430016040802002, 0.006410943984985352, 0.006400032043457031, 0.0064141440391540525, 0.006350912094116211, 0.006379615783691406, 0.006402048110961914, 0.00638976001739502, 0.006440864086151123, 0.0063714241981506345, 0.006625408172607422, 0.006430751800537109, 0.006403584003448487, 0.00644547176361084, 0.006409823894500732, 0.006363615989685058, 0.006475647926330566, 0.006390079975128173, 0.006401631832122803, 0.006508800029754638, 0.006373504161834717, 0.006438432216644287, 
0.006506688117980957, 0.006504447937011719, 0.006475456237792969, 0.006160384178161621, 0.006466815948486328, 0.006494624137878418, 0.006488224029541015, 0.00645907211303711, 0.0065090560913085935, 0.006462783813476563, 0.006459296226501465, 0.006526048183441162, 0.006377344131469727, 0.006416192054748535, 0.006402048110961914, 0.0064215359687805175, 0.006573247909545899, 0.0065000319480896, 0.006475488185882569, 0.006447679996490478, 0.006430528163909912, 0.006475776195526123, 0.006510591983795166, 0.006457344055175781, 0.006500448226928711, 0.007337215900421142, 0.0064784960746765135, 0.006529151916503906, 0.0064503679275512696, 0.006394688129425049, 0.006407296180725098, 0.006417151927947998, 0.006456895828247071, 0.00646608018875122, 0.006448416233062744, 0.00647811222076416, 0.006443359851837158, 0.0064839677810668945, 0.006463488101959228, 0.006455296039581298, 0.006519904136657715, 0.006471583843231201, 0.006489088058471679, 0.006536543846130371, 0.006500895977020264, 0.006551680088043213, 0.006472928047180176, 0.006460192203521728, 0.006487775802612305, 0.006459392070770263, 0.006465824127197266, 0.006456448078155518, 0.006458047866821289, 0.006453440189361572, 0.00645743989944458, 0.006436960220336914, 0.006452223777770996, 0.006430560111999511, 0.0065270719528198245, 0.006626175880432129, 0.006442463874816894, 0.006500192165374756, 0.0064899840354919434, 0.006553728103637696, 0.006513343811035156, 0.0064430079460144046, 0.0061296639442443845, 0.006388031959533692, 0.006413568019866944, 0.006488192081451416, 0.006426943778991699, 0.006395008087158203, 0.006395071983337402, 0.006448512077331543, 0.0064997758865356445, 0.006460288047790527, 0.006528223991394043, 0.00657692813873291, 0.006512576103210449, 0.00653715181350708, 0.006510719776153564, 0.006481887817382813, 0.006469664096832276, 0.0064297599792480465, 0.0064419198036193845, 0.0065922880172729495, 0.0064019842147827145, 0.0064041919708251955, 0.006348991870880127, 0.0064093761444091795, 0.006423232078552246, 0.006441120147705078, 0.006506400108337402, 0.006432928085327148, 0.006509984016418457, 0.006504896163940429, 0.006482336044311523, 0.006481632232666016, 0.006458528041839599, 0.006497087955474853, 0.006465248107910156, 0.00645904016494751, 0.006499040126800537, 0.00648796796798706, 0.006413856029510498, 0.006461919784545899, 0.006422304153442383, 0.006504479885101318, 0.0065170879364013674, 0.006464384078979492, 0.006515456199645996, 0.00647382402420044, 0.006445184230804444, 0.0064711041450500485, 0.006425055980682373, 0.006484288215637207, 0.006457119941711426, 0.006468768119812012, 0.006488959789276123, 0.006467455863952636, 0.006487936019897461, 0.006465536117553711, 0.006465760231018067, 0.006516736030578613, 0.006580575942993164, 0.006573728084564209, 0.0066068158149719235, 0.006618144035339356, 0.006601247787475586, 0.00634281587600708, 0.006684671878814697, 0.006606912136077881, 0.006594079971313477, 0.006547872066497803, 0.006559455871582031, 0.006539552211761474, 0.006536960124969482, 0.006541056156158447, 0.006566207885742187, 0.00654969596862793, 0.0065147199630737306, 0.00647983980178833, 0.00653107213973999, 0.006520832061767578, 0.006526976108551025, 0.0065177597999572755, 0.006508639812469482, 0.0064910402297973634, 0.0064471039772033695, 0.006446815967559814, 0.006479648113250733, 0.006461023807525635, 0.0064910402297973634, 0.0065075201988220215, 0.006441023826599121, 0.006491199970245361, 0.006464928150177002, 0.006457824230194092, 0.006465760231018067, 0.0064878082275390625, 0.0066943359375, 
0.006535999774932862, 0.006735551834106446, 0.006506591796875, 0.006518879890441895, 0.006473631858825684, 0.006479872226715088, 0.006460544109344483, 0.00644595193862915, 0.006444672107696533, 0.00640883207321167, 0.006432511806488037, 0.006426239967346191, 0.006395679950714111, 0.006432447910308838, 0.006412288188934326, 0.006909952163696289, 0.006778048038482666, 0.006442527770996093, 0.006437056064605713, 0.006441088199615479, 0.0064039678573608394, 0.0064011201858520505, 0.006403007984161377, 0.006444863796234131, 0.006414624214172363, 0.006379392147064209, 0.006371200084686279, 0.00636736011505127, 0.006405663967132568, 0.00638105583190918, 0.0063787841796875, 0.006033120155334473, 0.0063712639808654785, 0.006404448032379151, 0.006399328231811523, 0.0063515200614929195, 0.0063504638671875, 0.006379903793334961, 0.0063582401275634765, 0.006367680072784424, 0.006381279945373535, 0.006354623794555664, 0.006394815921783447, 0.006367455959320068, 0.0063649601936340335, 0.006354944229125976, 0.006420703887939453, 0.006397727966308594, 0.0064204797744750975, 0.006408095836639404, 0.006522304058074952, 0.006431072235107422, 0.006407519817352295, 0.00641267204284668, 0.0063695359230041505, 0.006625951766967774, 0.006377151966094971, 0.0064430079460144046, 0.006364255905151367, 0.006386591911315918, 0.006416160106658936, 0.0063463358879089354, 0.00639244794845581, 0.006379519939422608, 0.006397535800933838, 0.006359583854675293, 0.006356895923614502, 0.006379583835601807, 0.006369184017181397, 0.0063508481979370115, 0.006352896213531494, 0.006346752166748047, 0.006363135814666748, 0.006365439891815186, 0.006329984188079834, 0.006416512012481689, 0.00632422399520874, 0.006318079948425293, 0.006240255832672119, 0.006270976066589356, 0.006287583827972412, 0.006268703937530517, 0.006281216144561768, 0.006291711807250976, 0.006305535793304443, 0.006275072097778321, 0.006311935901641846, 0.006301663875579834, 0.006268735885620117, 0.006309311866760254, 0.006286176204681396, 0.006320064067840576, 0.006245791912078857, 0.006310495853424072, 0.005974016189575195, 0.006301023960113525, 0.0062984957695007325, 0.006343647956848144, 0.006298431873321533, 0.006313151836395263, 0.00629641580581665, 0.006295135974884033, 0.00635532808303833, 0.006268928050994873, 0.006336512088775635, 0.006318079948425293, 0.0063281598091125485, 0.007385280132293701, 0.006432096004486084, 0.0065030078887939454, 0.00859875202178955, 0.007103424072265625, 0.0066128640174865725, 0.006550559997558594, 0.006575232028961182, 0.0065288000106811525, 0.006492224216461182, 0.0065413122177124024, 0.006522047996520996, 0.006488895893096923, 0.006543360233306885, 0.006502336025238037, 0.006504511833190918, 0.006496255874633789, 0.006449408054351806, 0.006450240135192871, 0.00644323205947876, 0.006459871768951416, 0.006448192119598389, 0.006433728218078613, 0.006494495868682861, 0.006483359813690186, 0.006414463996887207, 0.0064791679382324215, 0.0064551677703857425, 0.006398975849151611, 0.006408415794372559, 0.006373151779174805, 0.006422751903533935, 0.00637500810623169, 0.006377600193023682, 0.006422016143798828, 0.006359551906585693, 0.006404287815093994, 0.006365056037902832, 0.006345823764801025, 0.006382495880126953, 0.006372384071350098, 0.006408991813659668, 0.006332831859588623, 0.006388927936553955, 0.006343264102935791, 0.006336512088775635, 0.006368576049804687, 0.006378176212310791, 0.006321695804595948, 0.006332191944122314, 0.006023200035095215, 0.006301663875579834, 0.0063143038749694826, 0.006301375865936279, 
0.006299295902252197, 0.006291808128356934, 0.006313920021057129, 0.006322400093078613, 0.006256480216979981, 0.0063610877990722655, 0.007538496017456054, 0.006288896083831787, 0.006355296134948731, 0.006356639862060547, 0.006307839870452881, 0.006399903774261475, 0.006302720069885254, 0.006323967933654785, 0.006299071788787842, 0.006304448127746582, 0.006307744026184082, 0.0063201279640197755, 0.006313248157501221, 0.006248767852783203, 0.006324831962585449, 0.006326079845428467, 0.006291584014892578, 0.006307551860809326, 0.006330527782440185, 0.006329376220703125, 0.0062969279289245604, 0.006397568225860596, 0.006391808032989502, 0.006369279861450195, 0.006369088172912598, 0.006328512191772461, 0.006387584209442139, 0.006354047775268555, 0.006305791854858398, 0.0063170561790466305, 0.006305280208587646, 0.006351327896118164, 0.006332448005676269, 0.0063285760879516605, 0.006341856002807617, 0.006296160221099853, 0.006299583911895752, 0.0062993597984313966, 0.00632041597366333, 0.006300960063934326, 0.006284095764160156, 0.006547359943389892, 0.006260735988616943, 0.0062791681289672855, 0.006242368221282959, 0.006320064067840576, 0.006275072097778321, 0.006301695823669433, 0.006295551776885986, 0.006258272171020508, 0.006308256149291992, 0.006338560104370118, 0.006289408206939697, 0.00602569580078125, 0.006383264064788818, 0.006302048206329345, 0.006322175979614258, 0.006309120178222656, 0.006292319774627685, 0.006264351844787598, 0.00637721586227417, 0.0063496317863464356, 0.006341824054718018, 0.0062798080444335935, 0.006359039783477783, 0.006270976066589356, 0.006291776180267334, 0.006252223968505859, 0.006322175979614258, 0.006256703853607178, 0.006233183860778808, 0.00626361608505249, 0.006246560096740723, 0.006250463962554932, 0.006209440231323242, 0.006268928050994873, 0.006205440044403076, 0.006309792041778565, 0.00623583984375, 0.006209248065948486, 0.0061914238929748535, 0.006216063976287842, 0.0061922879219055175, 0.006204480171203613, 0.0062154560089111325, 0.006187071800231933, 0.0063446397781372075, 0.006205408096313476, 0.0062280001640319825, 0.0062341117858886715, 0.006227968215942382, 0.006219007968902588, 0.0061979198455810545, 0.006243743896484375, 0.006242720127105713, 0.006249023914337158, 0.006237247943878174, 0.006224544048309326, 0.006422368049621582, 0.006281375885009766, 0.006285120010375977, 0.00619539213180542, 0.006339744091033935, 0.006269536018371582, 0.006205664157867431, 0.006245471954345703, 0.006235072135925293, 0.006264832019805908, 0.006250624179840088, 0.006330239772796631, 0.0062849922180175785, 0.00625871992111206, 0.006947103977203369, 0.006329823970794677, 0.006337215900421142, 0.006279007911682129, 0.0059985918998718265, 0.0062707200050354005, 0.006320735931396484, 0.0062709121704101565, 0.006238080024719238, 0.006258495807647705, 0.006262080192565918, 0.006256671905517578, 0.006230368137359619, 0.006277184009552002, 0.006270271778106689, 0.006275072097778321, 0.006337056159973145, 0.006304192066192627, 0.006294847965240479, 0.006281919956207275, 0.006291456222534179, 0.006312096118927002, 0.006330207824707031, 0.006389791965484619, 0.006354015827178955, 0.0063434882164001465, 0.0065567679405212406, 0.006362080097198486, 0.006709248065948486, 0.006362688064575195, 0.006371327877044678, 0.006754271984100342, 0.006320352077484131, 0.006333759784698486, 0.0063006081581115725, 0.006453536033630371, 0.006436063766479492, 0.00640390396118164, 0.006370240211486816, 0.006321919918060303, 0.006346752166748047, 0.006254591941833496, 0.0062197761535644535, 
0.007120384216308594, 0.0062984638214111325, 0.0062986879348754885, 0.006265567779541016, 0.006235136032104492, 0.006281631946563721, 0.006228447914123535, 0.0062845759391784665, 0.006284287929534912, 0.006303264141082764, 0.006301887989044189, 0.006301695823669433, 0.006313695907592774, 0.006294015884399414, 0.006297376155853272, 0.0062863359451293946, 0.0063211522102355954, 0.006362207889556885, 0.006443935871124268, 0.006464799880981445, 0.006457471847534179, 0.006417344093322754, 0.006401919841766358, 0.006396031856536865, 0.006118400096893311, 0.006386847972869873, 0.006331200122833252, 0.006283360004425049, 0.0063056640625, 0.006331840038299561, 0.006343552112579346, 0.006315775871276855, 0.00632422399520874, 0.006326272010803223, 0.006305247783660889, 0.006291679859161377, 0.006299967765808105, 0.006326272010803223, 0.006313983917236328, 0.006498303890228272, 0.006844480037689209, 0.006610623836517334, 0.006901247978210449, 0.00668342399597168, 0.0068198080062866215, 0.006694911956787109, 0.012613632202148438, 0.008443903923034669, 0.006449151992797852, 0.006561791896820069, 0.006459392070770263, 0.0064495038986206055, 0.006485568046569824, 0.006559648036956787, 0.006512832164764404, 0.006506080150604248, 0.006547071933746338, 0.00655785608291626, 0.006623936176300049, 0.006490047931671143, 0.006477407932281494, 0.006474112033843994, 0.006506527900695801, 0.006449151992797852, 0.006428927898406983, 0.006482687950134278, 0.006481120109558106, 0.006489952087402343, 0.006676415920257569, 0.006551328182220459, 0.006580448150634766, 0.006612736225128174, 0.006614304065704345, 0.0067266240119934085, 0.006524384021759033, 0.006562655925750733, 0.006530752182006836, 0.006530208110809326, 0.006502848148345947, 0.0065886077880859375, 0.0066152639389038086, 0.006531424045562744, 0.006591775894165039, 0.006694464206695556, 0.006584159851074219, 0.006548448085784912, 0.006526624202728272, 0.006287263870239258, 0.0065717120170593266, 0.006576735973358155, 0.0065569281578063965, 0.006519360065460205, 0.006576128005981445, 0.00656828784942627, 0.006518432140350342, 0.006523935794830322, 0.006458335876464843, 0.006498367786407471, 0.006470623970031738, 0.006461599826812744, 0.006510464191436768, 0.006472703933715821, 0.0064347200393676755, 0.0064553279876708985, 0.006440671920776367, 0.006504320144653321, 0.006482336044311523, 0.006434815883636475, 0.006471680164337158, 0.006516736030578613, 0.006501984119415283, 0.006635424137115479, 0.006795775890350342, 0.006483232021331787, 0.006500576019287109, 0.006500480175018311, 0.006547008037567139, 0.006550335884094238, 0.006496384143829346, 0.00648528003692627, 0.006460288047790527, 0.006435872077941895, 0.006529727935791016, 0.0064646081924438475, 0.006449600219726562, 0.006425055980682373, 0.006391808032989502, 0.0064204797744750975, 0.00636518383026123, 0.006405312061309815, 0.006407167911529541, 0.006328127861022949, 0.006362368106842041, 0.0063450241088867185, 0.006398591995239258, 0.006352799892425537, 0.006364480018615722, 0.006381696224212647, 0.006383103847503662, 0.006479231834411621, 0.006377056121826172, 0.006335616111755371, 0.006343679904937744, 0.006367104053497314, 0.006408192157745361, 0.006404255867004395, 0.0063814082145690915, 0.0063836159706115725, 0.006409664154052735, 0.006486815929412842, 0.006401760101318359, 0.006476319789886474, 0.00643071985244751, 0.006457344055175781, 0.00639081621170044, 0.006364128112792969, 0.006375423908233643, 0.0063949117660522465, 0.006341856002807617, 0.006352287769317627, 0.006324575901031494, 
0.006371520042419434, 0.006350560188293457, 0.006338272094726562, 0.006360511779785156, 0.00635590410232544, 0.006334464073181153, 0.006338751792907714, 0.0063731842041015625, 0.006368896007537842, 0.006362783908843994, 0.006326943874359131, 0.0063324799537658695, 0.006395808219909668, 0.006373472213745117, 0.006313375949859619, 0.006377823829650879, 0.006305535793304443, 0.006324704170227051, 0.0062930240631103515, 0.006303775787353516, 0.006324096202850342, 0.006323935985565185, 0.006369631767272949, 0.007159391880035401, 0.0064460477828979495, 0.006471839904785157, 0.0064061121940612795, 0.006362912178039551, 0.006775904178619385, 0.0068858561515808105, 0.006566559791564942, 0.006413568019866944, 0.006502175807952881, 0.006503551959991455, 0.00648364782333374, 0.006452320098876953, 0.006463808059692383, 0.0063985600471496585, 0.006436704158782959, 0.006441120147705078, 0.00637337589263916, 0.006391808032989502, 0.006306943893432618, 0.0063640961647033695, 0.006449088096618653, 0.006348864078521728, 0.006356927871704101, 0.006368512153625488, 0.006512479782104492, 0.006404255867004395, 0.0063424639701843265, 0.006460319995880127, 0.00601475191116333, 0.006338111877441406, 0.00629807996749878, 0.0063116798400878905, 0.006285823822021484, 0.0064061121940612795, 0.006402048110961914, 0.006383935928344727, 0.0063909759521484375, 0.006344992160797119, 0.006525152206420899, 0.006363135814666748, 0.006338016033172607, 0.006351391792297364, 0.006356575965881348, 0.006408127784729004, 0.006316512107849121, 0.006342751979827881, 0.006288608074188232, 0.0063034558296203615, 0.006326591968536377, 0.006314655780792236, 0.00636518383026123, 0.006340608119964599, 0.006295296192169189, 0.006283391952514648, 0.0063489279747009275, 0.006275072097778321, 0.006319392204284668, 0.006258975982666015, 0.00629094409942627, 0.006255519866943359, 0.006293536186218262, 0.00626204776763916, 0.006314015865325927, 0.006310463905334473, 0.006323775768280029, 0.00630947208404541, 0.006353312015533448, 0.006488639831542969, 0.006272863864898681, 0.007192736148834229, 0.0068297600746154786, 0.006313759803771973, 0.0063433279991149906, 0.006295328140258789, 0.006260096073150635, 0.0062873921394348145, 0.006314432144165039, 0.0063647360801696775, 0.006257343769073487, 0.006305791854858398, 0.0063134078979492185, 0.006263519763946533, 0.006299488067626953, 0.0062873601913452145, 0.006292992115020752, 0.006273536205291748, 0.006297887802124024, 0.006350304126739502, 0.006265408039093018, 0.006290112018585205, 0.006255616188049316, 0.005999839782714843, 0.006224768161773682, 0.006280576229095459, 0.006279647827148438, 0.00630185604095459, 0.006262784004211426, 0.006307839870452881, 0.006299071788787842, 0.0062674560546875, 0.006453279972076416, 0.006313951969146728, 0.006615039825439453, 0.006309887886047363, 0.00632422399520874, 0.006344704151153564, 0.0063491201400756835, 0.0064652800559997555, 0.006313695907592774, 0.006367455959320068, 0.006408095836639404, 0.006348896026611328, 0.006459392070770263, 0.006391808032989502, 0.006375584125518799, 0.006360928058624268, 0.006385248184204101, 0.0063976640701293944, 0.007331583976745605, 0.006437920093536377, 0.0064629120826721195, 0.006449632167816162, 0.007112703800201416, 0.00771011209487915, 0.007934559822082519, 0.007710720062255859, 0.007201056003570557, 0.00653004789352417, 0.006484576225280762, 0.0064943361282348636, 0.006465824127197266, 0.006491104125976563, 0.006438687801361084, 0.006390751838684082, 0.006378975868225098, 0.006361343860626221, 0.006418496131896973, 
0.006363359928131104, 0.006371327877044678, 0.006370719909667969, 0.00639353609085083, 0.006430880069732666, 0.0064122557640075685, 0.006392767906188965, 0.006404255867004395, 0.00637062406539917, 0.006404543876647949, 0.006389696121215821, 0.006399040222167969, 0.006388671875, 0.006371327877044678, 0.006412288188934326, 0.00639737606048584, 0.006384128093719482, 0.006056831836700439, 0.006418560028076172, 0.006722335815429688, 0.006388671875, 0.006404096126556396, 0.006418432235717773, 0.00636627197265625, 0.006362048149108887, 0.006344128131866455, 0.006389696121215821, 0.006373824119567871, 0.006353280067443848, 0.006440767765045166, 0.006364384174346924, 0.006384416103363037, 0.006369279861450195, 0.0063836159706115725, 0.0063991999626159665, 0.0064429759979248045, 0.006386335849761963, 0.006366879940032959, 0.006584640026092529, 0.006442944049835205, 0.0064076800346374516, 0.006448959827423096, 0.00647379207611084, 0.006433472156524658, 0.006434400081634522, 0.006417088031768799, 0.006453152179718018, 0.00643891191482544, 0.006492127895355224, 0.006591872215270996, 0.006481791973114014, 0.006474527835845947, 0.006478943824768066, 0.006419360160827637, 0.006555295944213867, 0.006470016002655029, 0.006420447826385498, 0.00645308780670166, 0.00643017578125, 0.006464000225067139, 0.0065248641967773436, 0.0063994240760803225, 0.006416575908660889, 0.0063760638236999515, 0.006395904064178467, 0.006320288181304931, 0.006348639965057373, 0.006376704216003418, 0.006370048046112061, 0.006416384220123291, 0.006392864227294922, 0.0063740482330322265, 0.006378880023956299, 0.006378015995025635, 0.006408607959747315, 0.006408192157745361, 0.0064124159812927245, 0.006387584209442139, 0.006369279861450195, 0.00643071985244751, 0.006132160186767578, 0.006469727993011475, 0.006444960117340088, 0.006387263774871826, 0.0063710718154907225, 0.006351039886474609, 0.006420447826385498, 0.006377151966094971, 0.0063517122268676755, 0.006393119812011718, 0.006384160041809082, 0.00641871976852417, 0.0063712320327758785, 0.0064204797744750975, 0.006399104118347168, 0.006382304191589356, 0.006391424179077149, 0.006347296237945557, 0.0064074559211730955, 0.006367680072784424, 0.006359327793121338, 0.006404096126556396, 0.006382880210876465, 0.006431136131286621, 0.0063919677734375, 0.006375584125518799, 0.006369279861450195, 0.006356800079345703, 0.006387712001800537, 0.006352128028869629, 0.006370528221130371, 0.006418144226074219, 0.006402048110961914, 0.006496352195739746, 0.006501952171325684, 0.006472032070159912, 0.006478047847747802, 0.0064694080352783205, 0.006448351860046387, 0.006511392116546631, 0.006448959827423096, 0.006452928066253662, 0.006455423831939697, 0.006470016002655029, 0.006510591983795166, 0.006535168170928955, 0.006516736030578613, 0.0064856958389282226, 0.006454751968383789, 0.006509407997131348, 0.006545407772064209, 0.00651910400390625, 0.006487743854522705, 0.00742300796508789, 0.006487296104431152, 0.006532608032226562, 0.006502431869506836, 0.006462656021118164, 0.006433792114257812, 0.006406144142150879, 0.00642252779006958, 0.006440256118774414, 0.006441664218902588]",tokens/s,155.67273137433045,,, 
4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1847.017472,3107.848192,0.0,2705.32608,2414.155264,s,1,8.847134765625,8.847134765625,0.0,8.847134765625,8.847134765625,8.847134765625,8.847134765625,[8.847134765625],,kWh,5.507035327500489e-05,6.056845403764191e-06,2.097473900197766e-05,8.210193768074675e-05,,MB,1911.758848,3135.111168,0.0,2717.908992,2121.090048,s,10,9.45817791748047,0.9458177917480469,0.0012122481178672446,0.9457148132324218,0.9474126342773438,0.9474803649902344,0.947534549560547,"[0.9442252197265625, 0.9439141235351562, 0.9452274169921875, 0.9450123291015625, 0.9463364868164063, 0.9454437255859375, 0.9459859008789062, 0.9470870361328125, 0.9473975830078125, 0.947548095703125]",tokens/s,270.6652404231733,kWh,2.773862027311087e-05,3.0590703508746054e-06,1.8309686364911057e-05,4.910737698889654e-05,tokens/kWh,5213066.0543706715,MB,1914.978304,3135.111168,0.0,2717.908992,2121.092608,s,10,19.97584033203125,1.997584033203125,0.015063302124616007,1.9939617919921875,2.019777917480469,2.020516033935547,2.0211065270996094,"[1.9823839111328125, 1.993352783203125, 1.9882220458984374, 1.9884158935546874, 1.9731546630859376, 1.99457080078125, 2.0196138916015625, 2.0086177978515627, 2.00625439453125, 2.021254150390625]",tokens/s,31.538097498196127,kWh,5.880711104439328e-05,6.486833712977465e-06,3.0938519700288915e-05,9.623246445765964e-05,tokens/kWh,654664.7262444239,,s,630,19.97378351783753,0.031704418282281786,0.0004619133069984286,0.031646896362304694,0.03211336822509765,0.0323892219543457,0.033755502243041996,"[0.03288108825683594, 0.0317042236328125, 0.031532896041870116, 0.031542272567749025, 0.03156793594360351, 0.03147974395751953, 0.03146342468261719, 0.031478912353515624, 0.03127187156677246, 0.03142195129394531, 0.031457408905029294, 0.031287584304809574, 0.03147983932495117, 0.03153071975708008, 0.03149648094177246, 0.031333887100219726, 0.031435264587402346, 0.0314870719909668, 0.03141222381591797, 0.031478687286376955, 0.031494144439697266, 0.03162521553039551, 0.031655712127685545, 0.03146115112304688, 0.03168300819396973, 0.03153686332702637, 0.03155612754821777, 0.03138739204406738, 0.031457279205322264, 0.03148595237731933, 0.03139897537231445, 0.031392704010009764, 0.031399711608886716, 0.03149663925170899, 0.03137337684631348, 0.03168019294738769, 0.031383583068847656, 0.03136511993408203, 0.031518688201904295, 0.0313809928894043, 0.03145372772216797, 0.031481151580810544, 0.03141087913513184, 0.032092063903808594, 0.031340639114379884, 0.031307680130004886, 0.03135462379455566, 0.031304031372070315, 0.031186111450195314, 0.03136537551879883, 0.03128998374938965, 0.03144470405578613, 0.03141228866577148, 0.031592607498168945, 0.03138467216491699, 0.03136400032043457, 0.031358272552490234, 0.03124265670776367, 0.03128144073486328, 0.03118489646911621, 0.031293279647827146, 0.03130931282043457, 0.031261184692382815, 
0.03287263870239258, 0.03191878318786621, 0.031678464889526366, 0.03175014305114746, 0.03149382400512695, 0.031714784622192384, 0.03177670478820801, 0.03136195182800293, 0.03137299156188965, 0.03179257583618164, 0.03135577583312988, 0.03418646240234375, 0.032154399871826174, 0.03167027282714844, 0.031488000869750975, 0.03171228790283203, 0.031712223052978515, 0.03134614372253418, 0.03127350425720215, 0.03145113563537598, 0.03137126350402832, 0.031241695404052736, 0.031238304138183594, 0.03128278350830078, 0.0314080638885498, 0.0314202880859375, 0.031886335372924804, 0.031438560485839845, 0.03156342315673828, 0.031713184356689454, 0.031447744369506835, 0.03159033584594727, 0.031518783569335934, 0.03131391906738281, 0.03124019241333008, 0.03153203201293946, 0.03137433624267578, 0.031287296295166016, 0.03141939163208008, 0.031214431762695314, 0.03125663948059082, 0.031606592178344724, 0.031545791625976566, 0.031796768188476564, 0.031514175415039064, 0.031575904846191404, 0.0314172477722168, 0.03186892890930176, 0.03163299179077148, 0.031326623916625974, 0.03146956825256347, 0.03152016067504883, 0.03211734390258789, 0.031422464370727536, 0.03141164779663086, 0.03169132804870606, 0.03158016014099121, 0.03377151870727539, 0.032110111236572265, 0.03170787239074707, 0.031551231384277345, 0.031356927871704104, 0.03130982398986817, 0.03280486297607422, 0.031715328216552735, 0.031630912780761716, 0.0313371524810791, 0.03141100883483887, 0.03140080070495605, 0.03133619117736816, 0.03126063919067383, 0.03140547180175781, 0.0313470401763916, 0.0313514232635498, 0.03129155158996582, 0.031243488311767577, 0.03130419158935547, 0.032282527923583985, 0.03135919952392578, 0.031340192794799805, 0.03137337684631348, 0.031559968948364256, 0.031369216918945314, 0.031682559967041016, 0.031708959579467774, 0.03176572799682617, 0.03175299263000488, 0.03154556846618652, 0.03158131217956543, 0.03144384002685547, 0.031611936569213864, 0.03166876792907715, 0.031516704559326175, 0.03154780769348144, 0.03138150405883789, 0.03154323196411133, 0.031741376876831054, 0.031457183837890625, 0.03189833641052246, 0.03169209671020508, 0.03172969627380371, 0.03151059150695801, 0.03168316841125488, 0.03182083129882812, 0.03159753608703613, 0.03149337577819824, 0.0316362247467041, 0.03160454368591309, 0.031530656814575196, 0.031504928588867186, 0.03146342468261719, 0.031687904357910156, 0.031605472564697264, 0.03148601531982422, 0.03146339225769043, 0.03142630386352539, 0.03153039932250976, 0.03131702423095703, 0.03136220741271973, 0.03132844734191895, 0.03132441520690918, 0.03163366317749024, 0.031442943572998046, 0.031573535919189454, 0.03173014450073242, 0.0318525447845459, 0.03282767868041992, 0.03158211135864258, 0.03155539131164551, 0.031640352249145506, 0.031556671142578124, 0.03158857536315918, 0.031778688430786135, 0.03178172874450683, 0.03168358421325684, 0.03171225547790527, 0.03148972892761231, 0.03135519981384277, 0.031280384063720704, 0.031378175735473635, 0.03131760025024414, 0.03128566360473633, 0.031331968307495114, 0.031244672775268555, 0.032522464752197264, 0.032012065887451174, 0.03196518325805664, 0.03186879920959473, 0.03167654418945313, 0.03159577560424805, 0.031568639755249026, 0.0317706241607666, 0.031543296813964845, 0.03133584022521973, 0.031412256240844726, 0.03140460777282715, 0.03134172821044922, 0.03138601684570313, 0.034530750274658205, 0.03187129592895508, 0.03137196731567383, 0.03132777595520019, 0.03127958488464355, 0.031291072845458984, 0.03144735908508301, 0.03139116859436035, 
0.031734336853027345, 0.03122585678100586, 0.03129091262817383, 0.03126320075988769, 0.031205375671386718, 0.03130572891235352, 0.031455232620239255, 0.03143680000305176, 0.03119260787963867, 0.031227712631225587, 0.031240863800048826, 0.03150803184509277, 0.03165567970275879, 0.031426847457885744, 0.03132038307189942, 0.031254528045654296, 0.031424320220947266, 0.03134876823425293, 0.03134502410888672, 0.031487871170043945, 0.03130326461791992, 0.031365535736083985, 0.03188121604919433, 0.0324483528137207, 0.03169539260864258, 0.0313753604888916, 0.031440895080566404, 0.031259872436523437, 0.03138985633850098, 0.03130380821228027, 0.03131808090209961, 0.031180639266967774, 0.031265375137329104, 0.031358591079711916, 0.03139955139160156, 0.03127497673034668, 0.03140483283996582, 0.03126681518554687, 0.031322111129760744, 0.03148812866210938, 0.03136675262451172, 0.031567808151245116, 0.03135231971740723, 0.03137027168273926, 0.031297311782836915, 0.03131395149230957, 0.03125590324401856, 0.03124496078491211, 0.03127705574035645, 0.03115238380432129, 0.03124336051940918, 0.03114988708496094, 0.03123081588745117, 0.031188352584838867, 0.031199871063232423, 0.031172607421875, 0.03143680000305176, 0.031221759796142577, 0.0313917121887207, 0.031292736053466795, 0.031222496032714844, 0.03126377677917481, 0.03119318389892578, 0.03125747108459473, 0.03130739212036133, 0.031185279846191405, 0.03125571250915527, 0.03130454444885254, 0.03133366394042969, 0.031240928649902345, 0.031180799484252928, 0.03137945556640625, 0.031211200714111327, 0.031577472686767576, 0.03136812782287598, 0.03130572891235352, 0.03120240020751953, 0.031210399627685546, 0.03129343986511231, 0.03117241668701172, 0.031380800247192385, 0.03130662345886231, 0.0313118724822998, 0.031153791427612303, 0.031188768386840822, 0.031234560012817384, 0.032786304473876954, 0.031734975814819336, 0.031529920578002926, 0.03288780975341797, 0.03424563217163086, 0.03149350357055664, 0.03154803276062012, 0.03138310432434082, 0.03149030494689942, 0.03127248001098633, 0.03140060806274414, 0.03146342468261719, 0.03152076721191406, 0.031815776824951174, 0.03172944068908692, 0.03175436782836914, 0.031600831985473636, 0.03170470428466797, 0.031662336349487306, 0.031323999404907224, 0.03194883155822754, 0.03144300842285156, 0.03140345573425293, 0.03149676895141602, 0.03176243209838867, 0.031529983520507815, 0.03208406448364258, 0.031810047149658204, 0.0316114559173584, 0.031530208587646484, 0.031633440017700194, 0.03164425659179688, 0.031558944702148435, 0.031800031661987305, 0.03151059150695801, 0.03191596794128418, 0.03495872116088867, 0.031611520767211916, 0.03165798377990723, 0.031352640151977536, 0.03145747184753418, 0.03134364891052246, 0.031335391998291016, 0.031254528045654296, 0.03139094352722168, 0.031242687225341795, 0.031236448287963868, 0.03120854377746582, 0.03146640014648437, 0.03199180793762207, 0.031903743743896484, 0.031426559448242186, 0.03160883140563965, 0.03123583984375, 0.03134899139404297, 0.031264768600463864, 0.03148185539245606, 0.031274688720703124, 0.0312609920501709, 0.031188480377197264, 0.031214080810546874, 0.03134464073181152, 0.0312828483581543, 0.03318342590332031, 0.031643135070800785, 0.0313055362701416, 0.03130816078186035, 0.03208396911621094, 0.03125516891479492, 0.03201603317260742, 0.03137884712219238, 0.03145209693908691, 0.031440895080566404, 0.03194416046142578, 0.031883808135986326, 0.03199590492248535, 0.03210553741455078, 0.03199276733398437, 0.032073440551757815, 0.031953184127807614, 
0.031739839553833006, 0.031751840591430665, 0.03168297576904297, 0.032024574279785153, 0.031803136825561525, 0.03389465713500977, 0.03221286392211914, 0.031907968521118164, 0.032048736572265625, 0.033137054443359376, 0.0320634880065918, 0.03198975944519043, 0.03212255859375, 0.03189792060852051, 0.03217542266845703, 0.03202467346191406, 0.03175484848022461, 0.032015487670898436, 0.03346303939819336, 0.03197350311279297, 0.031889055252075194, 0.032153182983398435, 0.03221376037597656, 0.03205104064941406, 0.032008289337158206, 0.03180550384521484, 0.03177267265319824, 0.031649791717529296, 0.031723392486572265, 0.03177193641662598, 0.03173222351074219, 0.031662431716918946, 0.03168838310241699, 0.031741727828979494, 0.03186454391479492, 0.032188735961914065, 0.03237507247924805, 0.032149185180664064, 0.03250435256958008, 0.03431628799438476, 0.032830463409423825, 0.03233484649658203, 0.03213955307006836, 0.032040672302246095, 0.032052734375, 0.032043521881103515, 0.033363967895507815, 0.032057632446289064, 0.03180515289306641, 0.03187507247924805, 0.03173344039916992, 0.031800863265991214, 0.032064289093017576, 0.03196723175048828, 0.03174604797363281, 0.0322754898071289, 0.03200908660888672, 0.03171952056884766, 0.031715328216552735, 0.031956415176391605, 0.03206918334960938, 0.03210092926025391, 0.03197996711730957, 0.03195084762573242, 0.03177078437805176, 0.0316495361328125, 0.03178505516052246, 0.0317071361541748, 0.03177881622314453, 0.031868896484375, 0.031722911834716795, 0.031725791931152346, 0.03188572883605957, 0.03178278350830078, 0.03196112060546875, 0.03205129623413086, 0.031831424713134764, 0.031817920684814455, 0.031881664276123045, 0.03203267288208008, 0.03188742446899414, 0.031827999114990235, 0.03194428825378418, 0.03174851226806641, 0.03170099258422852, 0.03168191909790039, 0.0316627197265625, 0.0318133430480957, 0.03181795120239258, 0.03181702423095703, 0.03174291229248047, 0.031700063705444335, 0.031664863586425784, 0.03162627220153809, 0.03166102409362793, 0.03160268783569336, 0.03171465682983399, 0.03172624015808106, 0.03171327972412109, 0.03192617607116699, 0.03231548690795898, 0.03233587265014649, 0.03194879913330078, 0.032007553100585937, 0.03224435043334961, 0.03198566436767578, 0.031783008575439455, 0.031643552780151366, 0.03170099258422852, 0.03300352096557617, 0.03189145660400391, 0.03179110336303711, 0.03172147178649903, 0.03162931251525879, 0.031731712341308595, 0.03202252960205078, 0.03199590492248535, 0.0319337272644043, 0.03170787239074707, 0.03178873634338379, 0.031705408096313475, 0.03186892890930176, 0.03180339241027832, 0.0317392635345459, 0.031736288070678714, 0.031606176376342776, 0.031683328628540036, 0.03170243263244629, 0.031739871978759764, 0.03165247917175293, 0.03177881622314453, 0.03178694343566894, 0.031866943359375, 0.031950368881225585, 0.03188147163391113, 0.031791328430175785, 0.031694623947143556, 0.03174831962585449, 0.03186460876464844, 0.03184457588195801, 0.03178275108337402, 0.03173391914367676, 0.03191593551635742, 0.03162326431274414, 0.031752191543579104, 0.031656991958618166, 0.031878400802612304, 0.031669567108154294, 0.03200191879272461, 0.03296646499633789, 0.03288956832885742, 0.031899648666381834, 0.03201004791259766, 0.03197539138793945, 0.03179747200012207, 0.03169273567199707, 0.03156179237365723, 0.031604736328125, 0.03157196807861328, 0.03159769630432129, 0.031765375137329105, 0.03182796859741211, 0.031684160232543945, 0.03197587203979492, 0.031831584930419925, 0.03190793609619141, 0.03171571159362793, 
0.03192534446716309, 0.03163382339477539, 0.03177302360534668, 0.03176464080810547, 0.03200614547729492, 0.03371628952026367, 0.03241984176635742, 0.03217407989501953, 0.03237004852294922, 0.03232998275756836, 0.03217036819458008, 0.03199180793762207, 0.03189760017395019, 0.031905792236328126, 0.03222937774658203, 0.031850496292114255, 0.03175152015686035, 0.03180201530456543, 0.0317071361541748, 0.032011455535888675, 0.03197625541687012, 0.03213260650634766, 0.03215407943725586, 0.03192160034179688, 0.03200995254516602, 0.03188585662841797, 0.03185494422912598, 0.03177375984191894, 0.031753152847290037, 0.03183001518249512, 0.03180544090270996, 0.03182387161254883, 0.03171891212463379, 0.031844032287597655, 0.03177555274963379, 0.03225395202636719, 0.03207939147949219, 0.03205168151855469, 0.03199795150756836, 0.032074752807617186, 0.03241471862792969, 0.032118911743164065, 0.03179830360412598, 0.032011104583740235, 0.031895423889160154, 0.032077953338623046, 0.03189248085021973, 0.03209318542480469, 0.03204915237426758, 0.03248854446411133, 0.032194561004638675, 0.03208489608764648, 0.03212464141845703, 0.032112926483154294, 0.03169273567199707, 0.03234944152832031, 0.03173868751525879, 0.03188870429992676, 0.03295654296875, 0.031836736679077146, 0.03210377502441406, 0.03265388870239258, 0.03240079879760742, 0.032176830291748046, 0.03201551818847656, 0.03224457550048828, 0.032645118713378905, 0.031940607070922854]",tokens/s,31.541345155632655,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in 
track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4758.032384,7476.215808,0.0,7073.693696,6385.996288,s,1,11.98059765625,11.98059765625,0.0,11.98059765625,11.98059765625,11.98059765625,11.98059765625,[11.98059765625],,kWh,0.00014289447340003485,1.5754843355513827e-05,6.213782748801244e-05,0.00022078714424356112,,MB,1778.880512,7495.090176,0.0,7077.888,5802.016768,s,10,36.32957592773438,3.6329575927734377,0.005335850459459854,3.6333765869140624,3.6386541748046874,3.6392095336914063,3.6396538208007816,"[3.620331787109375, 3.631783447265625, 3.62912890625, 3.63088330078125, 3.63184521484375, 3.634907958984375, 3.635334716796875, 3.63853076171875, 3.63706494140625, 3.639764892578125]",tokens/s,70.46600282624465,kWh,0.00010584486245958334,1.1674523645608644e-05,7.044991747099938e-05,0.00018796930357619135,tokens/kWh,1361924.5011259678,MB,1784.209408,7495.090176,0.0,7077.888,5803.198976,s,10,30.187338378906247,3.018733837890625,0.008122443903937482,3.0151019287109375,3.0341118408203127,3.034656872558594,3.0350928979492187,"[3.013308349609375, 3.014730712890625, 3.014213623046875, 3.01547314453125, 3.03399072265625, 3.017769775390625, 3.01127099609375, 3.035201904296875, 3.017119140625, 3.014260009765625]",tokens/s,20.86967695171893,kWh,7.040883002707991e-05,7.766876980792194e-06,4.654798168280128e-05,0.0001247236886906734,tokens/kWh,505116.55533413537,,s,630,30.18510116958617,0.047912858999343146,0.0005693884518473729,0.0477713451385498,0.04836174850463867,0.0488572940826416,0.05036586894989014,"[0.049430782318115235, 0.04839212799072266, 0.04817427062988281, 0.04778422546386719, 0.048087646484375, 0.048319808959960936, 0.04835391998291016, 0.048150592803955075, 0.04842287826538086, 0.04806864166259766, 0.04807827377319336, 0.048089664459228514, 0.04814451217651367, 0.04795996856689453, 0.048118881225585934, 0.04803647994995117, 0.04809344100952148, 0.04840447998046875, 0.04783705520629883, 0.04787827301025391, 0.0479109115600586, 0.04754022216796875, 0.04734342575073242, 0.04741782379150391, 0.047621505737304684, 0.04763881683349609, 0.04763616180419922, 0.04775155258178711, 0.047510753631591796, 0.04761075210571289, 0.04768143844604492, 0.04763033676147461, 0.04765081787109375, 0.04751545715332031, 0.04756089782714844, 0.04751318359375, 0.04751212692260742, 0.0475687370300293, 0.04756070327758789, 0.047570945739746094, 0.04749638366699219, 0.04780729675292969, 0.04788633728027344, 0.04777164840698242, 0.04782627105712891, 0.04773065567016602, 0.04775596618652344, 0.04794339370727539, 0.047838817596435546, 0.047660961151123046, 0.047844127655029295, 0.047666271209716796, 0.04763740921020508, 0.04773036956787109, 
0.04772431945800781, 0.0475940170288086, 0.047531391143798826, 0.04752153778076172, 0.04758969497680664, 0.04793385696411133, 0.04766553497314453, 0.047642398834228515, 0.04772576141357422, 0.04921139144897461, 0.04789452743530274, 0.05215641784667969, 0.047719711303710936, 0.047867904663085936, 0.04745872116088867, 0.04759523010253906, 0.04776816177368164, 0.04756063842773438, 0.04756195068359375, 0.04763734436035156, 0.048228351593017575, 0.04888576126098633, 0.04748400115966797, 0.047661983489990234, 0.04792729568481445, 0.047405055999755856, 0.04762214279174805, 0.047470592498779295, 0.04786150360107422, 0.04782080078125, 0.04769196701049805, 0.04784259033203125, 0.04820864105224609, 0.050183902740478514, 0.04823196792602539, 0.04793241500854492, 0.04780009460449219, 0.04775049591064453, 0.04780620956420899, 0.04777248001098633, 0.047532127380371096, 0.047800319671630856, 0.04931584167480469, 0.04784313583374023, 0.0482408332824707, 0.048173057556152345, 0.04746444702148438, 0.04738252639770508, 0.04740915298461914, 0.04742115020751953, 0.04750569534301758, 0.04740937423706055, 0.04747446441650391, 0.047851520538330077, 0.04771430587768555, 0.04745827102661133, 0.047349151611328126, 0.04732953643798828, 0.047395198822021485, 0.04743756866455078, 0.04745024108886719, 0.047432926177978514, 0.047942558288574216, 0.04773865509033203, 0.0475316162109375, 0.0475709114074707, 0.04792591857910156, 0.04749926376342774, 0.04758499145507813, 0.04741353607177735, 0.047482177734375, 0.04741523361206055, 0.04939199829101563, 0.048174369812011716, 0.048175392150878904, 0.04781071853637695, 0.04811190414428711, 0.04778815841674805, 0.047832958221435545, 0.04760780715942383, 0.0475722885131836, 0.047508384704589846, 0.047535903930664064, 0.04752931213378906, 0.04736598587036133, 0.04752624130249023, 0.047340000152587894, 0.047564289093017575, 0.04740377426147461, 0.04779801559448242, 0.04806444931030274, 0.049461311340332034, 0.0482529296875, 0.048228351593017575, 0.04789990234375, 0.04863257598876953, 0.0486767692565918, 0.04882441711425781, 0.04837376022338867, 0.048360641479492185, 0.048591678619384765, 0.04837171173095703, 0.047644672393798826, 0.04760543823242187, 0.047911231994628906, 0.04755152130126953, 0.047600608825683594, 0.04759756851196289, 0.04760166549682617, 0.04763375854492188, 0.04779484939575195, 0.047917057037353515, 0.047783935546875, 0.04754764938354492, 0.047732990264892576, 0.047583744049072264, 0.04795321655273437, 0.0479167366027832, 0.04760185623168945, 0.04748780822753906, 0.04752969741821289, 0.047531967163085935, 0.04767923355102539, 0.04775775909423828, 0.04812611389160156, 0.047648479461669925, 0.04756633758544922, 0.047639328002929686, 0.04767948913574219, 0.04742998504638672, 0.04734326553344727, 0.0475830078125, 0.047431297302246093, 0.04741532897949219, 0.04740131378173828, 0.049037311553955076, 0.047844863891601565, 0.04763289642333984, 0.04767948913574219, 0.04750950241088867, 0.04754806518554688, 0.04753216171264649, 0.04766332626342774, 0.04769996643066406, 0.04753763198852539, 0.04788860702514648, 0.04795177459716797, 0.04807721710205078, 0.047675392150878904, 0.04769161605834961, 0.0476673583984375, 0.04756870269775391, 0.04753654479980469, 0.047830814361572264, 0.04807593536376953, 0.04764275360107422, 0.0475709114074707, 0.047616767883300784, 0.04764614486694336, 0.04838864135742187, 0.047931167602539064, 0.04823270416259766, 0.047687519073486326, 0.047607521057128906, 0.04751814270019531, 0.04746547317504883, 0.04801228713989258, 0.04812595367431641, 
0.0485225601196289, 0.048014015197753904, 0.04810342407226562, 0.050718624114990236, 0.048338336944580076, 0.0477088623046875, 0.04788336181640625, 0.047622592926025394, 0.04752339172363281, 0.047661983489990234, 0.04768153762817383, 0.04779193496704102, 0.047693824768066405, 0.04758911895751953, 0.04777414321899414, 0.04770975875854492, 0.04755708694458008, 0.04756816101074219, 0.04758963012695312, 0.048313854217529296, 0.047930591583251955, 0.04813190460205078, 0.047715713500976566, 0.047696414947509765, 0.04788803100585937, 0.04786544036865235, 0.04768956756591797, 0.04840969467163086, 0.04794460678100586, 0.047513694763183595, 0.04946492767333984, 0.04793119812011719, 0.04812572860717773, 0.048796768188476565, 0.04869456100463867, 0.04921603012084961, 0.04843920135498047, 0.04824044799804687, 0.04827974319458008, 0.048207489013671875, 0.04810982513427734, 0.04815254211425781, 0.04878979110717773, 0.04927065658569336, 0.04821145629882812, 0.04830054473876953, 0.04798643112182617, 0.04807455825805664, 0.04798303985595703, 0.04816096115112305, 0.048482112884521485, 0.04820691299438477, 0.04793849563598633, 0.04811983871459961, 0.04815561676025391, 0.049789375305175784, 0.04888419342041016, 0.04799619293212891, 0.04780268859863281, 0.047969982147216796, 0.047901153564453125, 0.04784112167358399, 0.04771481704711914, 0.04786995315551758, 0.05044019317626953, 0.048130046844482424, 0.04795391845703125, 0.04779417419433594, 0.04799692916870117, 0.047761409759521485, 0.048049663543701174, 0.048062976837158204, 0.048285633087158206, 0.04791913604736328, 0.04796419143676758, 0.047865856170654295, 0.0479109115600586, 0.0479161262512207, 0.047769569396972654, 0.04751865768432617, 0.04806768035888672, 0.047604736328125, 0.047578655242919925, 0.04769551849365234, 0.04823113632202149, 0.0477564811706543, 0.04774166488647461, 0.04778313446044922, 0.04768438339233398, 0.04777104187011719, 0.04779897689819336, 0.04768108749389648, 0.047892929077148434, 0.049170337677001956, 0.047664703369140624, 0.04760412979125977, 0.047620319366455076, 0.04791196823120117, 0.04757104110717773, 0.047604511260986325, 0.04779622268676758, 0.0482426872253418, 0.04781260681152344, 0.04760575866699219, 0.047621246337890624, 0.047477985382080076, 0.047459999084472654, 0.047464286804199216, 0.047478336334228516, 0.047759231567382814, 0.048202465057373044, 0.04796006393432617, 0.048796958923339843, 0.04920585632324219, 0.04834099197387695, 0.048167102813720705, 0.047888320922851564, 0.04772844696044922, 0.047951904296875, 0.04780694580078125, 0.04784735870361328, 0.047810302734375, 0.04816918563842774, 0.04782262420654297, 0.04763852691650391, 0.05107097625732422, 0.04903046417236328, 0.04747052764892578, 0.04762060928344727, 0.04754179382324219, 0.047559391021728514, 0.04782080078125, 0.04759552001953125, 0.047874046325683595, 0.04770406341552735, 0.047641918182373046, 0.04752864074707031, 0.04753203201293945, 0.04757622528076172, 0.04758204650878906, 0.04763238525390625, 0.04756070327758789, 0.047695873260498046, 0.05060812759399414, 0.047958015441894535, 0.04797644805908203, 0.047505409240722656, 0.04742316818237305, 0.047451934814453124, 0.04758752059936523, 0.04747875213623047, 0.04744963073730469, 0.047562976837158204, 0.04758182525634766, 0.04815052795410156, 0.04757503890991211, 0.04988742446899414, 0.047908992767333985, 0.04762937545776367, 0.047696704864501956, 0.04765081787109375, 0.047713279724121094, 0.047510528564453126, 0.04773839950561523, 0.04748905563354492, 0.04754796981811524, 0.0475591049194336, 
0.04778553771972656, 0.047489086151123044, 0.047446849822998044, 0.047523456573486327, 0.04754470443725586, 0.047982593536376954, 0.047927520751953126, 0.04785334396362305, 0.04960790252685547, 0.048481056213378906, 0.047827999114990236, 0.04770284652709961, 0.04754060745239258, 0.047570720672607425, 0.047562751770019535, 0.04803379058837891, 0.04755187225341797, 0.04769171142578125, 0.04771680068969727, 0.04746265411376953, 0.047512992858886716, 0.04792127990722656, 0.047710113525390625, 0.047674976348876956, 0.04764889526367187, 0.047551326751708985, 0.04787142562866211, 0.047884864807128905, 0.04761804962158203, 0.04740035247802735, 0.04751971054077148, 0.047503040313720706, 0.047456478118896486, 0.04749590301513672, 0.04738457489013672, 0.04743929672241211, 0.047448638916015626, 0.04741852951049805, 0.04749311828613281, 0.047730751037597656, 0.04763011169433594, 0.047698944091796876, 0.04797235107421875, 0.047728641510009766, 0.04760095977783203, 0.04771705627441406, 0.04833651351928711, 0.04897385787963867, 0.0483304328918457, 0.04827340698242188, 0.0484788818359375, 0.04799488067626953, 0.04947868728637695, 0.04825596618652344, 0.04805945587158203, 0.0482334098815918, 0.04831027221679687, 0.04820377731323242, 0.048347137451171876, 0.04881203079223633, 0.048205825805664064, 0.04825088119506836, 0.0478474235534668, 0.048047775268554686, 0.04769753646850586, 0.04802809524536133, 0.04913180923461914, 0.04760985565185547, 0.04771635055541992, 0.04777983856201172, 0.04786934280395508, 0.0482064323425293, 0.04823807907104492, 0.047951839447021485, 0.04806460952758789, 0.047968704223632815, 0.048149887084960936, 0.05288204956054687, 0.048699390411376955, 0.04802969741821289, 0.048231552124023434, 0.04778278350830078, 0.04806598281860352, 0.04774764633178711, 0.048440895080566405, 0.04806291198730469, 0.04872550582885742, 0.04901862335205078, 0.04825983810424805, 0.048105472564697264, 0.04812799835205078, 0.04823654556274414, 0.04830822372436523, 0.04800467300415039, 0.048150753021240236, 0.04802377700805664, 0.04818739318847656, 0.047846881866455075, 0.04807324981689453, 0.04799897766113281, 0.04781260681152344, 0.047744991302490235, 0.04778144073486328, 0.04809980773925781, 0.04787734222412109, 0.04802163314819336, 0.04815017700195313, 0.04778895950317383, 0.04765052795410156, 0.04744348907470703, 0.047542911529541015, 0.04754774475097656, 0.04796876907348633, 0.04792291259765625, 0.04813840103149414, 0.049433151245117185, 0.04779241561889649, 0.04779622268676758, 0.04768534469604492, 0.04764281463623047, 0.0477608642578125, 0.047747711181640624, 0.04776761627197266, 0.04783612823486328, 0.04791097640991211, 0.048066879272460936, 0.04782140731811523, 0.04788633728027344, 0.04786995315551758, 0.04773593521118164, 0.04773545455932617, 0.04766876983642578, 0.047580097198486326, 0.04764031982421875, 0.04766310501098633, 0.049995040893554686, 0.04793404769897461, 0.047859840393066407, 0.05179904174804688, 0.04783740615844727, 0.047763294219970706, 0.04791801452636719, 0.04769792175292969, 0.04774086380004883, 0.04752329635620117, 0.04759312057495117, 0.047825695037841794, 0.04764892959594726, 0.048105472564697264, 0.04768767929077149, 0.047767551422119144, 0.0475871696472168, 0.04762547302246094, 0.04746441650390625, 0.04768601608276367, 0.047933792114257814, 0.04762646484375, 0.04863625717163086, 0.047584926605224606, 0.04750080108642578, 0.04754214477539063, 0.04764707183837891, 0.04791120147705078, 0.04760985565185547, 0.04766310501098633, 0.04757708740234375, 0.047695873260498046, 
0.047865856170654295, 0.04792729568481445, 0.047777759552001954, 0.047847488403320315, 0.04788630294799805, 0.04786544036865235, 0.0477597770690918, 0.04758076858520508, 0.04797491073608398, 0.047462303161621096, 0.047923198699951174, 0.04933222579956055, 0.04788131332397461, 0.047680416107177735, 0.047506591796875, 0.04745097732543945, 0.04748086547851563, 0.04745827102661133, 0.04746883010864258, 0.04747849655151367, 0.04747455978393555, 0.047332958221435545, 0.04738102340698242, 0.04773068618774414, 0.04754988861083984, 0.04770873641967773, 0.04763375854492188, 0.04760377502441406, 0.04790902328491211, 0.048465633392333986, 0.04772940826416015, 0.04839737701416016, 0.04846694564819336, 0.048320289611816405, 0.047667327880859374, 0.047449440002441404, 0.047513790130615234, 0.04757961654663086, 0.04802326583862305, 0.04748521423339844, 0.04749311828613281, 0.04739481735229492, 0.047422496795654294, 0.047526878356933595, 0.04759347152709961, 0.047476417541503904, 0.04760790252685547, 0.04743612670898437, 0.04753968048095703, 0.04748944091796875, 0.047685630798339845, 0.04749107360839844, 0.047579265594482424, 0.04763395309448242, 0.04793711853027344, 0.0484666862487793, 0.04852326583862305, 0.0482542724609375, 0.04824486541748047, 0.04796268844604492, 0.04783718490600586, 0.04799897766113281, 0.0480928955078125, 0.04825430297851562, 0.048290592193603515, 0.048193695068359375, 0.04834099197387695, 0.048629344940185545, 0.04814438247680664, 0.047949569702148434, 0.047960735321044924, 0.0482529296875, 0.048070655822753904, 0.04809036636352539]",tokens/s,20.871223735859914,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,878.833664,559.808512,0.0,157.2864,148.864512,s,1,7.62063232421875,7.62063232421875,0.0,7.62063232421875,7.62063232421875,7.62063232421875,7.62063232421875,[7.62063232421875],,kWh,1.5371693370851365e-05,1.688395507096318e-06,4.774170485999374e-06,2.1834259363947055e-05,,MB,1299.59936,641.59744,0.0,224.395264,194.897408,s,19,0.7708507843017578,0.04057109391061883,0.001271387445596619,0.04045417785644531,0.040684031677246095,0.04136956939697265,0.044873285217285154,"[0.04574921417236328, 0.039653343200683595, 0.04056719970703125, 0.040379169464111325, 0.04029385757446289, 0.039676033020019534, 0.04046384048461914, 0.03960969543457031, 0.040882942199707034, 0.040029823303222654, 0.04063430404663086, 0.03979692840576172, 0.04031631851196289, 0.04049740982055664, 0.04054604721069336, 0.04045417785644531, 0.040221473693847654, 0.040495647430419925, 0.040583358764648435]",tokens/s,6309.911203380102,kWh,1.3840884315956252e-06,1.526126706767298e-07,9.187790393933842e-07,2.455480141665739e-06,tokens/kWh,104256595.54564172,MB,1332.355072,656.277504,0.0,239.075328,194.899968,s,19,9.746062774658204,0.5129506723504318,0.0023063866152985375,0.5127373046875,0.5156885498046875,0.5160179443359375,0.5170093505859376,"[0.5156358642578125, 
0.5107625732421875, 0.5090270690917968, 0.5127373046875, 0.5172572021484375, 0.5131019897460938, 0.5141438598632813, 0.5158802490234375, 0.5125055541992187, 0.5126708374023438, 0.5093025207519531, 0.5096611633300782, 0.5118412780761719, 0.5131193237304688, 0.5108495788574219, 0.5144303588867187, 0.512284912109375, 0.5152105102539063, 0.515640625]",tokens/s,122.8188272204084,kWh,1.4641581597130323e-05,1.614748110550214e-06,5.964939917870148e-06,2.2221269625550693e-05,tokens/kWh,2835121.5327300956,,s,1197,9.736801027774789,0.008134336698224569,0.0001804442723444144,0.008088000297546387,0.008273561477661133,0.008379622459411621,0.008904083404541015,"[0.01046787166595459, 0.008371392250061034, 0.00839459228515625, 0.008408032417297363, 0.008359135627746582, 0.008264479637145996, 0.00820019245147705, 0.008132608413696289, 0.0081878080368042, 0.008171199798583984, 0.008317440032958985, 0.008271072387695312, 0.009152416229248048, 0.008245535850524902, 0.008212991714477539, 0.008118271827697754, 0.008132575988769531, 0.00806505584716797, 0.008094783782958984, 0.008057791709899903, 0.008085536003112794, 0.008060064315795899, 0.008043583869934081, 0.008031999588012696, 0.008089759826660155, 0.008019807815551758, 0.008097248077392578, 0.00806879997253418, 0.008078047752380371, 0.008062368392944335, 0.008056832313537597, 0.008034624099731446, 0.008073472023010253, 0.008046751976013183, 0.00803212833404541, 0.008040575981140137, 0.008064000129699708, 0.008068096160888672, 0.007995232105255127, 0.008022175788879395, 0.008120415687561035, 0.00800931167602539, 0.008012096405029296, 0.00803331184387207, 0.00809059238433838, 0.008087552070617676, 0.008023743629455566, 0.008046431541442871, 0.008066783905029297, 0.008061792373657227, 0.008193951606750488, 0.008074432373046876, 0.008057536125183106, 0.008081536293029785, 0.008064448356628418, 0.008253919601440429, 0.008298624038696288, 0.008237024307250977, 0.008146944046020508, 0.008158559799194337, 0.008116959571838379, 0.008091584205627441, 0.00811631965637207, 0.008112128257751466, 0.00832921600341797, 0.00820633602142334, 0.008177663803100586, 0.008101823806762696, 0.00810604763031006, 0.008099136352539062, 0.008342047691345215, 0.008127967834472656, 0.008049344062805176, 0.008057087898254394, 0.008066368103027344, 0.008022272109985351, 0.008093664169311524, 0.00812060832977295, 0.008083711624145508, 0.008220352172851563, 0.008034432411193847, 0.007987040042877197, 0.008015487670898437, 0.008017888069152832, 0.00800937557220459, 0.008006688117980956, 0.007999040126800538, 0.007994815826416016, 0.007985631942749023, 0.008038687705993652, 0.00800915241241455, 0.008024800300598144, 0.007982944011688232, 0.008026240348815918, 0.007990719795227051, 0.00803884792327881, 0.008175680160522461, 0.008079296112060546, 0.008089632034301757, 0.008144927978515624, 0.008265472412109376, 0.00822492790222168, 0.008165408134460449, 0.008132927894592286, 0.008126144409179687, 0.008085503578186035, 0.008154399871826172, 0.008155872344970703, 0.008230175971984863, 0.008182047843933105, 0.008171968460083008, 0.008126463890075684, 0.00836524772644043, 0.008117055892944336, 0.008177663803100586, 0.008062560081481934, 0.008031968116760255, 0.008083168029785157, 0.008092415809631348, 0.008057056427001953, 0.008095199584960938, 0.008045248031616211, 0.008083295822143555, 0.008058879852294922, 0.008022336006164551, 0.008015551567077638, 0.007784031867980957, 0.00803279972076416, 0.008045056343078612, 0.00803206443786621, 0.008056991577148438, 0.008054623603820801, 
0.008054783821105957, 0.008021696090698242, 0.008034624099731446, 0.008048224449157714, 0.008057279586791992, 0.008039423942565918, 0.008008511543273925, 0.008043744087219239, 0.00801916790008545, 0.008041567802429199, 0.008047231674194336, 0.007991327762603759, 0.008020992279052735, 0.008056127548217773, 0.008060256004333495, 0.008042336463928223, 0.008022432327270507, 0.008060992240905762, 0.008019968032836914, 0.008011839866638183, 0.008159423828125, 0.008070816040039062, 0.008063263893127441, 0.00817084789276123, 0.008171008110046387, 0.008151488304138184, 0.008095935821533203, 0.008141152381896973, 0.008118271827697754, 0.008069120407104492, 0.008128864288330078, 0.008135775566101074, 0.00820486354827881, 0.008205471992492675, 0.008108896255493164, 0.008117568016052247, 0.008053664207458497, 0.008007455825805664, 0.008030367851257323, 0.008105567932128906, 0.008032511711120606, 0.008044384002685546, 0.008026016235351563, 0.008210687637329102, 0.00807254409790039, 0.00801052761077881, 0.008038271903991699, 0.008028160095214844, 0.00803609561920166, 0.008016127586364746, 0.008026144027709962, 0.008081376075744629, 0.008075263977050781, 0.008013824462890624, 0.008157312393188476, 0.008568703651428223, 0.008095840454101562, 0.007896096229553222, 0.008096991539001465, 0.008088640213012695, 0.008049344062805176, 0.008284416198730469, 0.00803718376159668, 0.00799014377593994, 0.00798521614074707, 0.00802406406402588, 0.008069215774536133, 0.007998432159423828, 0.008029120445251465, 0.007995391845703125, 0.008011712074279784, 0.008053088188171386, 0.008082943916320801, 0.00808512020111084, 0.008172127723693847, 0.008058879852294922, 0.00814243221282959, 0.00807363224029541, 0.008130111694335938, 0.008186304092407226, 0.008334336280822753, 0.008229887962341309, 0.008193120002746582, 0.00822099208831787, 0.008122783660888672, 0.008128704071044922, 0.008120320320129394, 0.008198431968688965, 0.008257247924804688, 0.008208383560180664, 0.008506400108337402, 0.008116864204406738, 0.00813212776184082, 0.008088000297546387, 0.008030400276184083, 0.008041824340820312, 0.008306943893432617, 0.008714879989624023, 0.008081376075744629, 0.008185855865478516, 0.008067071914672852, 0.008050687789916992, 0.008112159729003907, 0.008040767669677734, 0.008066720008850097, 0.008077152252197265, 0.008142335891723633, 0.009194432258605957, 0.00842518424987793, 0.00800169563293457, 0.00803321647644043, 0.008004511833190918, 0.008050944328308106, 0.008017215728759765, 0.008020352363586426, 0.00806713581085205, 0.008062975883483887, 0.008003775596618651, 0.008042304039001465, 0.008031807899475097, 0.007862271785736084, 0.008177663803100586, 0.008220671653747558, 0.008072671890258789, 0.008051584243774414, 0.008113823890686035, 0.008085023880004883, 0.008301088333129883, 0.008243136405944824, 0.008228863716125488, 0.008332736015319824, 0.008149375915527344, 0.008149056434631348, 0.008126591682434082, 0.008148991584777832, 0.008260736465454102, 0.008330240249633789, 0.008840383529663086, 0.00866374397277832, 0.008162431716918946, 0.00812054443359375, 0.008089247703552247, 0.008100607872009277, 0.00809718418121338, 0.008107199668884277, 0.008021663665771485, 0.008068991661071776, 0.008226304054260255, 0.008030624389648437, 0.008054464340209962, 0.008041215896606444, 0.00806713581085205, 0.008109087944030762, 0.008092351913452148, 0.00811411190032959, 0.00821664047241211, 0.008079360008239746, 0.008044480323791504, 0.008276032447814942, 0.00827939224243164, 0.008049311637878418, 0.00808140754699707, 
0.008063136100769043, 0.008060768127441406, 0.00809596824645996, 0.008036160469055175, 0.008071136474609376, 0.008089599609375, 0.00818380832672119, 0.00929792022705078, 0.009668736457824708, 0.008324799537658691, 0.008101823806762696, 0.008083999633789063, 0.00809545612335205, 0.008122367858886719, 0.00810534381866455, 0.008209024429321289, 0.008223008155822754, 0.008415967941284179, 0.008294528007507324, 0.008163455963134766, 0.008153823852539063, 0.008181440353393554, 0.008338175773620605, 0.00818284797668457, 0.008217184066772461, 0.008108448028564454, 0.00811411190032959, 0.008095135688781738, 0.008116479873657226, 0.008070976257324218, 0.008188447952270507, 0.008087552070617676, 0.008087903976440429, 0.008046143531799317, 0.008163328170776368, 0.008082688331604004, 0.008125280380249024, 0.008112128257751466, 0.00821561622619629, 0.00806499195098877, 0.008075743675231934, 0.008015583992004395, 0.008063679695129394, 0.008050047874450683, 0.008427712440490722, 0.00805951976776123, 0.008098879814147949, 0.00808022403717041, 0.008048224449157714, 0.008245887756347657, 0.00802617645263672, 0.008015583992004395, 0.008038080215454102, 0.008014399528503418, 0.008039551734924317, 0.008026144027709962, 0.008027775764465332, 0.008058048248291016, 0.008050656318664551, 0.008082559585571289, 0.008050911903381348, 0.00806550407409668, 0.008275967597961426, 0.00810585594177246, 0.008099391937255859, 0.008196415901184083, 0.008267423629760743, 0.008225376129150391, 0.008183839797973633, 0.008134623527526856, 0.008151167869567872, 0.008193216323852538, 0.00816204833984375, 0.008248767852783202, 0.008389216423034668, 0.008309951782226562, 0.008187711715698242, 0.008176544189453124, 0.008451807975769042, 0.00823529624938965, 0.008056832313537597, 0.008087552070617676, 0.008080384254455567, 0.008149151802062988, 0.007879519939422607, 0.008087455749511719, 0.008065024375915527, 0.008079456329345704, 0.008031519889831543, 0.008043135643005371, 0.00808140754699707, 0.007997151851654052, 0.008050335884094238, 0.008010368347167968, 0.00799129581451416, 0.008112128257751466, 0.008019968032836914, 0.008056832313537597, 0.00803872013092041, 0.00807699203491211, 0.008056735992431641, 0.00803343963623047, 0.007998720169067382, 0.008055520057678223, 0.007999616146087647, 0.008090463638305664, 0.008083295822143555, 0.008048543930053711, 0.008038399696350097, 0.00804851245880127, 0.008079423904418945, 0.008071392059326173, 0.008196127891540528, 0.008703295707702636, 0.008280927658081055, 0.008872896194458009, 0.009118816375732423, 0.008296607971191406, 0.008235903739929199, 0.008258015632629394, 0.00838633632659912, 0.008543135643005371, 0.008254752159118653, 0.008207103729248048, 0.008315808296203613, 0.008188672065734863, 0.008126144409179687, 0.00813088035583496, 0.008112128257751466, 0.008144895553588867, 0.008261823654174804, 0.00814470386505127, 0.008105504035949706, 0.008119872093200683, 0.008117152214050292, 0.008204287528991699, 0.00810598373413086, 0.00808899211883545, 0.008061216354370118, 0.008126751899719238, 0.008172575950622558, 0.008104255676269531, 0.008074015617370605, 0.008117407798767089, 0.00815180778503418, 0.008064767837524415, 0.00805504035949707, 0.0078085122108459476, 0.00809830379486084, 0.008062975883483887, 0.008022015571594238, 0.008056832313537597, 0.008097760200500488, 0.00806710433959961, 0.008065024375915527, 0.008176671981811523, 0.008055295944213867, 0.00808188819885254, 0.008071423530578613, 0.008265472412109376, 0.00828940773010254, 0.008379424095153808, 0.008947551727294922, 
0.008630271911621093, 0.008400896072387695, 0.00910540771484375, 0.008270079612731934, 0.008393759727478028, 0.008329952239990234, 0.008245471954345704, 0.008230143547058106, 0.008164192199707031, 0.008134336471557617, 0.00810159969329834, 0.008091936111450196, 0.008598591804504395, 0.00810694408416748, 0.008076416015625, 0.008067008018493652, 0.008162240028381347, 0.008068639755249024, 0.00804911994934082, 0.008066656112670899, 0.008085247993469238, 0.008171263694763184, 0.00807209587097168, 0.008029888153076172, 0.008051168441772462, 0.00807049560546875, 0.008051199913024902, 0.008044320106506348, 0.008018431663513183, 0.008169183731079102, 0.008076607704162598, 0.008096447944641113, 0.008101023674011231, 0.00811404800415039, 0.008287199974060059, 0.008163328170776368, 0.008695808410644532, 0.008409055709838867, 0.007991327762603759, 0.008017888069152832, 0.008053183555603027, 0.008253024101257325, 0.008062975883483887, 0.008060031890869141, 0.008070143699645996, 0.008123519897460938, 0.008176383972167968, 0.008317952156066894, 0.00848374366760254, 0.008368224143981933, 0.008425472259521484, 0.008345600128173827, 0.008265727996826172, 0.008126463890075684, 0.008167424201965333, 0.008122367858886719, 0.008130816459655762, 0.008121888160705566, 0.008108256340026855, 0.008075263977050781, 0.008135775566101074, 0.00808847999572754, 0.008117695808410645, 0.008087167739868164, 0.008088288307189941, 0.008138976097106933, 0.008071200370788574, 0.008043968200683593, 0.008156928062438965, 0.008124832153320313, 0.008100223541259766, 0.008026047706604004, 0.007995456218719483, 0.008077183723449706, 0.00802995204925537, 0.007983200073242188, 0.008029536247253418, 0.008133567810058594, 0.008037728309631348, 0.008354463577270509, 0.00801587200164795, 0.008048640251159669, 0.008011775970458984, 0.008046591758728027, 0.007995520114898682, 0.007984831809997558, 0.007980224132537841, 0.00799232006072998, 0.008021087646484374, 0.008062944412231446, 0.00800864028930664, 0.008023232460021972, 0.008073568344116211, 0.008075648307800293, 0.008069184303283692, 0.008048831939697266, 0.0081112642288208, 0.008039135932922364, 0.00805679988861084, 0.008064448356628418, 0.008206015586853027, 0.00838918399810791, 0.008194111824035644, 0.00839244842529297, 0.008249855995178223, 0.008075263977050781, 0.008072799682617187, 0.008300959587097168, 0.008181535720825195, 0.008283552169799804, 0.007935232162475585, 0.008199071884155274, 0.008406559944152832, 0.008310303688049317, 0.008059679985046387, 0.008118271827697754, 0.008376319885253907, 0.008467967987060548, 0.008112383842468261, 0.008051199913024902, 0.00805247974395752, 0.00810086441040039, 0.008045568466186523, 0.008057087898254394, 0.00815283203125, 0.008089792251586915, 0.00810752010345459, 0.008046815872192383, 0.008046688079833985, 0.00804684829711914, 0.008040191650390625, 0.008025407791137695, 0.008147647857666016, 0.00801360034942627, 0.008024031639099122, 0.007975168228149414, 0.008052512168884278, 0.008024288177490234, 0.008022368431091309, 0.008109600067138671, 0.008028287887573243, 0.007994976043701172, 0.008029919624328613, 0.008042336463928223, 0.008063839912414552, 0.008032032012939453, 0.008101247787475585, 0.008048800468444825, 0.008218496322631837, 0.008182175636291504, 0.008104351997375489, 0.008091648101806641, 0.00820019245147705, 0.008138912200927734, 0.008148832321166992, 0.008216575622558593, 0.00827187156677246, 0.008267423629760743, 0.008272192001342774, 0.008212127685546876, 0.008145312309265136, 0.00817353630065918, 0.0082391357421875, 
0.008226783752441406, 0.008201855659484864, 0.008276032447814942, 0.008312352180480957, 0.008143199920654296, 0.00811235237121582, 0.008114399909973145, 0.008176992416381836, 0.008112159729003907, 0.008069567680358887, 0.007858176231384278, 0.008081695556640626, 0.008099231719970703, 0.008045151710510253, 0.008024831771850587, 0.008106176376342773, 0.008027263641357422, 0.008011360168457032, 0.008040512084960937, 0.008019968032836914, 0.008062080383300781, 0.008043328285217284, 0.008022080421447754, 0.008028191566467285, 0.008029536247253418, 0.008043456077575683, 0.008046272277832032, 0.008080991744995117, 0.007991712093353271, 0.007986400127410889, 0.008318943977355957, 0.008016160011291504, 0.008036895751953125, 0.008038399696350097, 0.008034303665161132, 0.008028063774108887, 0.008024160385131835, 0.008038399696350097, 0.008075615882873536, 0.008183520317077636, 0.008200127601623535, 0.00810598373413086, 0.008210432052612305, 0.008112288475036622, 0.00812332820892334, 0.008121248245239257, 0.008128512382507324, 0.00817465591430664, 0.008173983573913574, 0.008323391914367676, 0.008136927604675293, 0.008044863700866699, 0.00807699203491211, 0.008079360008239746, 0.008069120407104492, 0.008063296318054199, 0.008083135604858398, 0.00808895969390869, 0.00808131217956543, 0.008076000213623046, 0.00810700798034668, 0.008078335762023926, 0.008073216438293456, 0.008044544219970704, 0.008065216064453125, 0.00805078411102295, 0.008068832397460937, 0.008046815872192383, 0.008047840118408204, 0.008077055931091308, 0.008133503913879394, 0.008036224365234376, 0.00807862377166748, 0.007739071846008301, 0.008006464004516602, 0.008073216438293456, 0.008093088150024415, 0.008057248115539551, 0.008095040321350097, 0.008038944244384765, 0.008049344062805176, 0.008072863578796387, 0.008066271781921387, 0.008102975845336913, 0.008267487525939941, 0.008196224212646484, 0.00824505615234375, 0.008157247543334962, 0.00813270378112793, 0.008162816047668458, 0.008114591598510742, 0.008146944046020508, 0.008251392364501953, 0.008175616264343261, 0.00815891170501709, 0.00808080005645752, 0.008086432456970214, 0.008071167945861817, 0.00809779167175293, 0.00808140754699707, 0.008050687789916992, 0.008004704475402831, 0.008063520431518555, 0.00803433609008789, 0.008085663795471192, 0.008205696105957032, 0.008080191612243652, 0.008053088188171386, 0.008040032386779784, 0.008120736122131348, 0.008062623977661132, 0.008042495727539062, 0.008450048446655273, 0.008142848014831543, 0.008070336341857911, 0.008068191528320312, 0.008066911697387696, 0.008035231590270996, 0.008076288223266602, 0.008026399612426759, 0.008065823554992677, 0.008063679695129394, 0.0080830078125, 0.0080283203125, 0.008014335632324218, 0.007993343830108643, 0.008038399696350097, 0.008017024040222169, 0.00798406410217285, 0.008001472473144531, 0.007999231815338135, 0.00801846408843994, 0.007979040145874023, 0.008097727775573731, 0.00812617588043213, 0.008076767921447753, 0.00792572784423828, 0.008222592353820802, 0.008276351928710938, 0.008275967597961426, 0.008162752151489257, 0.008223296165466309, 0.008140800476074218, 0.008037440299987793, 0.008324031829833984, 0.008085408210754395, 0.008201824188232423, 0.008274751663208008, 0.008173248291015626, 0.008128512382507324, 0.008100159645080566, 0.0080863676071167, 0.008176480293273926, 0.008094816207885743, 0.008031200408935547, 0.008133824348449708, 0.00803932762145996, 0.008076704025268555, 0.008046527862548829, 0.00803996753692627, 0.008162528038024903, 0.008013407707214355, 0.008022175788879395, 
0.00800972843170166, 0.007995391845703125, 0.00802735996246338, 0.008063776016235352, 0.00800972843170166, 0.008054719924926759, 0.00809785556793213, 0.008066368103027344, 0.008138591766357423, 0.008088095664978028, 0.008053055763244629, 0.008038559913635253, 0.00833017635345459, 0.00806595230102539, 0.008067071914672852, 0.008085503578186035, 0.008039584159851074, 0.008004192352294923, 0.008132863998413085, 0.008445088386535644, 0.008018783569335938, 0.008020031929016113, 0.0080414400100708, 0.008051679611206055, 0.008068127632141113, 0.008039392471313477, 0.00800153636932373, 0.008017919540405273, 0.008087615966796875, 0.008068832397460937, 0.008136927604675293, 0.008052736282348634, 0.008087552070617676, 0.008175616264343261, 0.008789728164672852, 0.00836131191253662, 0.008080544471740723, 0.008373087882995605, 0.00850227165222168, 0.008341792106628419, 0.008217120170593261, 0.008157376289367677, 0.008093024253845215, 0.008104191780090333, 0.008121824264526368, 0.008122495651245117, 0.008063808441162109, 0.008146656036376954, 0.0080830717086792, 0.00811689567565918, 0.008066816329956055, 0.008083904266357422, 0.008060735702514648, 0.008115519523620605, 0.00806931209564209, 0.008051199913024902, 0.008056351661682129, 0.00806982421875, 0.00810371208190918, 0.008079360008239746, 0.008023488044738769, 0.008022144317626953, 0.008112544059753419, 0.008158944129943847, 0.008023679733276367, 0.007979072093963624, 0.008010368347167968, 0.007978432178497315, 0.00799555206298828, 0.00803433609008789, 0.008019840240478516, 0.008018655776977539, 0.00810108757019043, 0.00801193618774414, 0.008001312255859375, 0.008045184135437011, 0.008044544219970704, 0.008380415916442872, 0.008070303916931152, 0.0082806396484375, 0.00809603214263916, 0.00812003231048584, 0.00819593620300293, 0.008286656379699707, 0.008267744064331055, 0.008223072052001953, 0.008156864166259766, 0.008226495742797851, 0.00876364803314209, 0.008123935699462891, 0.008255999565124511, 0.008261119842529297, 0.008226655960083007, 0.008200736045837402, 0.008250911712646485, 0.008085439682006836, 0.008129216194152833, 0.008113439559936524, 0.008067584037780762, 0.007822336196899414, 0.008073344230651855, 0.008069120407104492, 0.008104991912841798, 0.008110176086425782, 0.008084416389465332, 0.008113984107971192, 0.008017663955688476, 0.008503583908081055, 0.00806704044342041, 0.0079967041015625, 0.008002400398254395, 0.007984640121459961, 0.0080282564163208, 0.008016160011291504, 0.008067423820495606, 0.008046591758728027, 0.008079071998596192, 0.008035648345947265, 0.008043264389038086, 0.008008735656738281, 0.008084320068359374, 0.007974847793579102, 0.008027968406677246, 0.007995872020721436, 0.008038175582885743, 0.008040448188781739, 0.008024288177490234, 0.00810582447052002, 0.00822265625, 0.008146944046020508, 0.008152480125427247, 0.008227423667907715, 0.00827187156677246, 0.00820041561126709, 0.008091487884521485, 0.008211968421936035, 0.008239551544189453, 0.008220671653747558, 0.008194047927856446, 0.008089599609375, 0.008110079765319824, 0.008120287895202636, 0.008081664085388183, 0.008085247993469238, 0.008071104049682616, 0.00801801586151123, 0.008072992324829102, 0.00804911994934082, 0.008107775688171387, 0.008058624267578125, 0.008058464050292969, 0.008026783943176269, 0.008052032470703125, 0.008082112312316895, 0.008775679588317872, 0.008090687751770019, 0.008006752014160156, 0.00817136001586914, 0.008237343788146972, 0.008089311599731445, 0.00800972843170166, 0.008142848014831543, 0.007853536128997803, 0.00955344009399414, 
0.008824895858764649, 0.008498111724853515, 0.008067328453063965, 0.008068223953247071, 0.008014431953430176, 0.008017312049865723, 0.008122847557067872, 0.008024224281311035, 0.008084639549255371, 0.008045408248901367, 0.00810422420501709, 0.00813862419128418, 0.008243040084838867, 0.008156991958618165, 0.008120415687561035, 0.008145248413085938, 0.008115967750549317, 0.008156255722045898, 0.0082542724609375, 0.008194144248962403, 0.008163328170776368, 0.008192288398742675, 0.008163392066955566, 0.008236703872680664, 0.008187135696411134, 0.008090368270874023, 0.00818380832672119, 0.008214559555053711, 0.008163264274597168, 0.008142975807189942, 0.008101984024047852, 0.00811945629119873, 0.008090271949768066, 0.008116191864013671, 0.008060256004333495, 0.008024767875671386, 0.008052639961242676, 0.008257408142089844, 0.00814025592803955, 0.008084223747253419, 0.008083616256713867, 0.008086976051330566, 0.008087967872619629, 0.008135775566101074, 0.008075776100158692, 0.008242688179016112, 0.008135583877563477, 0.008064512252807618, 0.00832265567779541, 0.008242239952087402, 0.008045472145080567, 0.008030943870544434, 0.008065247535705566, 0.008124416351318359, 0.008081631660461426, 0.008050463676452636, 0.008228863716125488, 0.00813814353942871, 0.008016703605651855, 0.008077088356018067, 0.00803651237487793, 0.007823455810546874, 0.008077119827270508, 0.008093600273132323, 0.008036319732666015, 0.008061887741088868, 0.008075072288513183, 0.00804099178314209, 0.008062623977661132, 0.008163328170776368, 0.008075263977050781, 0.008055104255676269, 0.008109760284423829, 0.008148991584777832, 0.00867296028137207, 0.008337568283081055, 0.00824675178527832, 0.008272768020629883, 0.008685152053833007, 0.008299039840698243, 0.008263263702392578, 0.008214240074157714, 0.008208767890930176, 0.008177472114562987, 0.00813599967956543, 0.008073344230651855, 0.008078080177307129, 0.008122048377990722, 0.009158975601196289, 0.008138239860534668, 0.008049152374267577, 0.008064064025878906, 0.0080250244140625, 0.008046591758728027, 0.008118240356445313, 0.008022047996520997, 0.00803872013092041, 0.008013407707214355, 0.007987167835235596, 0.008046048164367676, 0.008071840286254883, 0.008065024375915527, 0.008113759994506836, 0.008016096115112305, 0.00805497646331787, 0.00801916790008545, 0.008016672134399414, 0.008103872299194336, 0.008016032218933105, 0.008012960433959961, 0.008031999588012696, 0.008040800094604492, 0.008133088111877441, 0.008110431671142579, 0.0081079683303833, 0.008020959854125976, 0.00807583999633789, 0.008023839950561523, 0.008052576065063476, 0.008094528198242187, 0.008056768417358398, 0.00812217617034912, 0.008050880432128905, 0.008076704025268555, 0.008166272163391114, 0.00842563247680664, 0.008247136116027832, 0.008331263542175293, 0.008456192016601562, 0.00839475154876709, 0.008353055953979492, 0.00820041561126709, 0.008157183647155761, 0.008147456169128419, 0.008148703575134277, 0.008243328094482421, 0.008105183601379394, 0.008246047973632813, 0.00813276767730713, 0.00812063980102539, 0.008095359802246094, 0.008133695602416993, 0.008121696472167969, 0.008135456085205078, 0.00813276767730713, 0.008145248413085938, 0.008144512176513671, 0.0080697603225708, 0.008067071914672852, 0.008091423988342285, 0.008048064231872558, 0.008086175918579101, 0.008007935523986816, 0.008040448188781739, 0.008070655822753906, 0.008081055641174317, 0.00804742431640625, 0.008065312385559083, 0.008132351875305175, 0.008033823966979981, 0.008066720008850097, 0.008329536437988281, 0.008153599739074707, 
0.008074399948120117, 0.008071264266967774, 0.00808240032196045, 0.008064800262451172, 0.008046591758728027, 0.008103551864624024, 0.00824300765991211, 0.008420928001403809, 0.00830361557006836, 0.008224255561828613, 0.008151424407958985, 0.008116640090942383, 0.008117631912231445, 0.008151391983032226, 0.008233247756958008, 0.008253151893615723, 0.008232192039489746, 0.008190784454345703, 0.00890227222442627, 0.008128479957580566, 0.008085280418395997, 0.008088224411010743, 0.008099712371826173, 0.008175295829772949, 0.007994048118591309, 0.008415200233459473, 0.008363743782043458, 0.008193632125854493, 0.008114912033081055, 0.009499872207641601, 0.008565119743347168, 0.008092063903808594, 0.008083711624145508, 0.008064767837524415, 0.00806710433959961, 0.00809603214263916, 0.008066495895385743, 0.00802998447418213, 0.00809990406036377, 0.00821664047241211, 0.008094047546386719, 0.008058879852294922, 0.008054271697998047, 0.00806505584716797, 0.008073311805725097, 0.008034687995910644, 0.00801587200164795, 0.007995391845703125, 0.008017919540405273, 0.00800934410095215, 0.008036447525024413, 0.00813804817199707, 0.008104928016662597, 0.008040448188781739, 0.008048640251159669, 0.008051872253417969, 0.008036640167236328, 0.008090208053588867, 0.008099807739257812, 0.00816320037841797, 0.008292736053466797, 0.008143744468688964, 0.008189023971557617, 0.008228511810302734, 0.008293951988220215, 0.008236767768859864, 0.008137632369995117, 0.008120448112487792, 0.008194047927856446, 0.008291616439819335, 0.008234848022460937, 0.008178688049316407, 0.008113856315612794, 0.008089599609375, 0.008099360466003418, 0.008251872062683106, 0.008206624031066895, 0.008019871711730957, 0.00804640007019043, 0.00816972827911377, 0.00941427230834961, 0.008089152336120606, 0.008092032432556153, 0.008090944290161132, 0.008438752174377442, 0.008134559631347656, 0.008171520233154296]",tokens/s,122.93565377226932,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run 
self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 3 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 805, in _apply param_applied = fn(param) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.55 GiB is free. Process 222315 has 13.19 GiB memory in use. Of the allocated memory 13.09 GiB is allocated by PyTorch, and 880.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,954.49088,675.151872,0.0,272.62976,256.680448,s,1,8.5144033203125,8.5144033203125,0.0,8.5144033203125,8.5144033203125,8.5144033203125,8.5144033203125,[8.5144033203125],,kWh,2.6432266758358008e-05,2.9081216570203015e-06,8.73528476602603e-06,3.807567318140434e-05,,MB,1328.39424,744.357888,0.0,327.155712,300.077568,s,10,1.5136772918701171,0.15136772918701172,0.0009384427099555394,0.1511710433959961,0.15259154205322265,0.15291449203491211,0.15317285202026368,"[0.15323744201660155, 0.15209046936035156, 0.1512015380859375, 0.15148828125, 0.1511405487060547, 0.152519775390625, 0.1507091522216797, 0.15054301452636717, 0.15000146484375, 0.15074560546875]",tokens/s,1691.2455605627622,kWh,4.642426265277882e-06,5.119708138763072e-07,3.0732652452061317e-06,8.227662324360322e-06,tokens/kWh,31114548.690463338,MB,1360.879616,786.300928,0.0,369.098752,300.080128,s,10,11.373915649414062,1.1373915649414061,0.1602730559990387,1.1375762634277344,1.3071070068359374,1.3098237060546876,1.3119970654296875,"[1.3125404052734375, 1.2849775390625, 1.2826856689453126, 1.2995609130859376, 1.3065032958984375, 0.969165771484375, 0.9690479736328125, 0.9899205932617188, 0.9924668579101562, 0.967046630859375]",tokens/s,55.38989556621647,kWh,3.4917104482638874e-05,3.85070258397012e-06,1.3748914879195044e-05,5.2516721945804045e-05,tokens/kWh,1199617.905797975,,s,630,11.366465225219729,0.018042008293999566,0.002571660801775157,0.018842944145202635,0.02073211498260498,0.020833774375915527,0.02143222507476807,"[0.0202608642578125, 0.020559711456298826, 0.020783136367797852, 0.02168230438232422, 0.02071753692626953, 0.020667903900146483, 0.020899648666381835, 0.020859359741210937, 0.020848575592041015, 0.020764223098754885, 0.02089241600036621, 0.021027904510498047, 0.02060736083984375, 0.024156576156616212, 0.020619039535522462, 0.020664064407348633, 0.0205949764251709, 0.02064156723022461, 0.02021388816833496, 0.020033632278442383, 0.020035455703735352, 0.019917280197143554, 0.01986355209350586, 0.019945472717285157, 0.01983897590637207, 0.020307968139648438, 0.02074425506591797, 0.020801408767700195, 0.020724128723144532, 0.020795040130615235, 0.020854400634765624, 0.020732288360595704, 0.020606496810913085, 0.020754335403442382, 0.020628032684326173, 0.020617216110229493, 0.020788608551025392, 0.020844959259033204, 0.02075801658630371, 0.020896480560302733, 0.021776416778564452, 0.021808351516723633, 0.020900224685668944, 0.021109119415283203, 0.02068889617919922, 0.02066431999206543, 0.020823423385620116, 0.0207325439453125, 0.0206376953125, 0.020703231811523438, 0.020549983978271485, 0.02070207977294922, 0.021248224258422852, 0.02066899108886719, 0.02069011116027832, 0.02072662353515625, 0.020672767639160157, 
0.020868448257446288, 0.020710880279541016, 0.020713983535766603, 0.020865535736083983, 0.021472991943359374, 0.02405331230163574, 0.020150592803955078, 0.020437088012695313, 0.020326784133911133, 0.02025436782836914, 0.020164608001708984, 0.020357120513916017, 0.020270143508911133, 0.020405088424682617, 0.020346975326538085, 0.020239776611328125, 0.020310335159301758, 0.020289791107177733, 0.020337791442871095, 0.020224479675292967, 0.020141759872436524, 0.020318784713745118, 0.020296064376831055, 0.02029100799560547, 0.02064975929260254, 0.020266719818115234, 0.020717472076416017, 0.0204454402923584, 0.020259807586669922, 0.020373247146606446, 0.02044927978515625, 0.020409887313842773, 0.020371871948242186, 0.020408287048339843, 0.020471391677856447, 0.020511232376098632, 0.020674560546875, 0.020565696716308594, 0.020682111740112304, 0.0205610237121582, 0.020672319412231445, 0.020553056716918944, 0.02075331115722656, 0.020788095474243165, 0.020472063064575195, 0.020368127822875975, 0.020318399429321288, 0.020106943130493164, 0.020113311767578124, 0.02018854331970215, 0.020151231765747072, 0.02001443290710449, 0.020165056228637696, 0.020106527328491212, 0.020259552001953125, 0.020185087203979494, 0.020614751815795897, 0.020602880477905275, 0.02036537551879883, 0.020230079650878908, 0.02028976058959961, 0.02029897689819336, 0.020462560653686523, 0.020371456146240235, 0.020357120513916017, 0.020373088836669922, 0.02050284767150879, 0.0209715518951416, 0.02052297592163086, 0.01979167938232422, 0.019888320922851564, 0.019866815567016603, 0.019678016662597657, 0.019701759338378907, 0.019779327392578126, 0.019686752319335938, 0.01965795135498047, 0.01974239921569824, 0.01969152069091797, 0.01963235282897949, 0.019742496490478517, 0.01963417625427246, 0.019685216903686523, 0.019649023056030272, 0.02039344024658203, 0.020731903076171874, 0.020607295989990233, 0.020535167694091798, 0.020700159072875975, 0.02064691162109375, 0.02072719955444336, 0.02072844886779785, 0.02059791946411133, 0.02075935935974121, 0.020649984359741212, 0.02063155174255371, 0.02068435287475586, 0.02051958465576172, 0.02062905693054199, 0.02039811134338379, 0.02034092712402344, 0.02033443260192871, 0.02032655906677246, 0.020421951293945313, 0.02039673614501953, 0.020471807479858398, 0.020570112228393556, 0.020467424392700197, 0.02051919937133789, 0.020768768310546876, 0.020612863540649413, 0.020563968658447264, 0.02067430305480957, 0.020662912368774412, 0.020578176498413085, 0.02073788833618164, 0.020634912490844728, 0.020814720153808593, 0.020600831985473633, 0.020607999801635742, 0.020755456924438476, 0.020547584533691408, 0.020465599060058594, 0.02038380813598633, 0.02044540786743164, 0.02036841583251953, 0.02038153648376465, 0.02043791961669922, 0.020407712936401368, 0.02033929634094238, 0.020199424743652345, 0.02031001663208008, 0.02008687973022461, 0.020688480377197265, 0.02064787292480469, 0.020619680404663086, 0.020623552322387696, 0.02063532829284668, 0.020695232391357423, 0.020659839630126953, 0.020567455291748048, 0.02052195167541504, 0.020527103424072265, 0.020499711990356446, 0.020433664321899414, 0.02041651153564453, 0.020315488815307616, 0.02038140869140625, 0.020317119598388673, 0.020400480270385744, 0.02053494453430176, 0.020516031265258788, 0.020578943252563476, 0.020557600021362303, 0.020586912155151366, 0.020670656204223634, 0.020825824737548827, 0.020647424697875977, 0.020646432876586913, 0.02051411247253418, 0.020490591049194335, 0.020480415344238282, 0.02047132873535156, 0.020552160263061524, 
0.02089369583129883, 0.020654144287109374, 0.020602399826049805, 0.02059529685974121, 0.021129119873046876, 0.020745824813842774, 0.02072403144836426, 0.02067433547973633, 0.02062870407104492, 0.020642816543579103, 0.020824064254760744, 0.02061311912536621, 0.020611072540283205, 0.02061622428894043, 0.020685535430908203, 0.021018527984619142, 0.020726112365722655, 0.020560096740722657, 0.02081878471374512, 0.02073209571838379, 0.020577375411987304, 0.020577695846557616, 0.020561824798583983, 0.020574111938476563, 0.021332416534423828, 0.02067465591430664, 0.020704639434814452, 0.020546079635620117, 0.020543487548828124, 0.02055401611328125, 0.020506336212158204, 0.020281503677368164, 0.020770912170410157, 0.020723455429077147, 0.020618560791015626, 0.020468128204345702, 0.020748575210571288, 0.020584447860717774, 0.020545215606689454, 0.020914623260498047, 0.02079840087890625, 0.02086188888549805, 0.020827295303344727, 0.020959423065185546, 0.020736671447753905, 0.020572160720825194, 0.020688512802124023, 0.020616575241088866, 0.020612096786499022, 0.020628543853759767, 0.020585216522216798, 0.020772287368774414, 0.021090879440307617, 0.020876672744750975, 0.020716064453125, 0.020680288314819335, 0.020774911880493165, 0.02083404731750488, 0.020704479217529298, 0.020504287719726563, 0.020518911361694335, 0.020675968170166016, 0.02061392021179199, 0.020626848220825195, 0.02054105567932129, 0.020519168853759765, 0.020593215942382812, 0.020770719528198242, 0.020725088119506838, 0.020609792709350587, 0.020700607299804687, 0.02066694450378418, 0.0206561279296875, 0.020587871551513672, 0.020701696395874023, 0.02083344078063965, 0.02060825538635254, 0.02063545608520508, 0.020852607727050783, 0.020674016952514647, 0.02058415985107422, 0.02053321647644043, 0.020687456130981444, 0.020527423858642577, 0.020563552856445313, 0.020644256591796875, 0.020516864776611327, 0.020534656524658204, 0.020585088729858397, 0.02050662422180176, 0.020611072540283205, 0.020432287216186524, 0.020464223861694338, 0.024475648880004884, 0.015660256385803224, 0.01612851142883301, 0.017239839553833007, 0.01578444766998291, 0.015472415924072266, 0.015564319610595703, 0.01600924873352051, 0.015639103889465333, 0.016419008255004884, 0.015699968338012696, 0.015633631706237792, 0.016142400741577148, 0.01564540767669678, 0.015632608413696288, 0.015562527656555176, 0.01541107177734375, 0.01543718433380127, 0.015065855979919433, 0.015030271530151367, 0.015032447814941407, 0.015290143966674805, 0.015549887657165527, 0.015691712379455568, 0.01569660758972168, 0.015405055999755859, 0.01542505645751953, 0.015414048194885253, 0.015324864387512207, 0.01530675220489502, 0.015366399765014648, 0.015291584014892579, 0.015227423667907716, 0.015323103904724121, 0.015279520034790038, 0.015210207939147949, 0.01499846363067627, 0.015095359802246093, 0.015015423774719238, 0.015063008308410645, 0.015289312362670898, 0.014946240425109864, 0.015105343818664551, 0.015030783653259277, 0.014981280326843262, 0.015012351989746094, 0.01496662425994873, 0.015042304039001465, 0.014993408203125, 0.014942048072814941, 0.01502342414855957, 0.015004511833190917, 0.01499062442779541, 0.0149235200881958, 0.015782879829406737, 0.015031904220581054, 0.014989407539367675, 0.01503264045715332, 0.01519001579284668, 0.015220735549926758, 0.015267840385437012, 0.015538175582885743, 0.015511872291564942, 0.015521471977233886, 0.015288607597351074, 0.015582464218139648, 0.01565683174133301, 0.01577891159057617, 0.015462464332580566, 0.015448063850402831, 
0.015563808441162109, 0.015758015632629394, 0.015638527870178224, 0.015483200073242187, 0.015439295768737793, 0.015362303733825683, 0.015400511741638184, 0.015202783584594726, 0.015013855934143066, 0.014915583610534668, 0.015527935981750488, 0.014943327903747559, 0.014980287551879882, 0.01492092800140381, 0.01494035243988037, 0.01500217628479004, 0.015109984397888184, 0.01554047966003418, 0.01580406379699707, 0.01560166358947754, 0.015519743919372558, 0.015333375930786132, 0.015265791893005372, 0.015333696365356446, 0.015900447845458986, 0.015291872024536133, 0.015093184471130372, 0.015133536338806153, 0.015374336242675781, 0.015320832252502442, 0.01527849578857422, 0.01516470432281494, 0.015071616172790528, 0.015257120132446289, 0.015102656364440917, 0.015016063690185547, 0.01500102424621582, 0.015058624267578124, 0.015047136306762696, 0.01588601589202881, 0.015251456260681152, 0.015077887535095215, 0.01530288028717041, 0.015765503883361818, 0.01593907165527344, 0.015945247650146484, 0.015979488372802733, 0.015902463912963866, 0.015790687561035157, 0.01588924789428711, 0.015483712196350098, 0.015309920310974121, 0.015255583763122558, 0.015231103897094726, 0.015184639930725097, 0.015218463897705078, 0.015145183563232422, 0.015518367767333984, 0.01587388801574707, 0.015995072364807128, 0.015859968185424806, 0.01584332847595215, 0.015945728302001954, 0.015874048233032227, 0.015892160415649413, 0.015806079864501953, 0.01592390441894531, 0.015658432006835938, 0.01566198444366455, 0.01553769588470459, 0.015534367561340332, 0.015400351524353028, 0.015623999595642089, 0.015487615585327148, 0.015597567558288575, 0.015534079551696778, 0.015494303703308105, 0.01585763168334961, 0.01565078353881836, 0.015563679695129394, 0.016080896377563478, 0.015775744438171386, 0.016219135284423827, 0.01579923152923584, 0.01570207977294922, 0.015585280418395997, 0.015680800437927245, 0.01563955211639404, 0.015853504180908203, 0.01580624008178711, 0.015780032157897948, 0.01587347221374512, 0.01567568016052246, 0.015746432304382323, 0.0164932804107666, 0.015906047821044923, 0.015817472457885742, 0.015873567581176758, 0.01577635192871094, 0.015736703872680665, 0.015863807678222656, 0.01578598403930664, 0.01590627193450928, 0.015786527633666992, 0.016143423080444336, 0.015729472160339354, 0.01580793571472168, 0.01564358425140381, 0.015683135986328124, 0.015562944412231446, 0.01558732795715332, 0.015371487617492676, 0.015421631813049317, 0.015488736152648926, 0.015319104194641114, 0.01531987190246582, 0.015122431755065918, 0.015278079986572265, 0.015251392364501954, 0.015287391662597656, 0.015321920394897461, 0.01558505630493164, 0.015417407989501954, 0.01532528018951416, 0.015218208312988282, 0.015274368286132812, 0.015378303527832031, 0.015392416000366211, 0.0156812162399292, 0.01595852756500244, 0.015851807594299317, 0.015855936050415038, 0.015770848274230958, 0.01672617530822754, 0.018053535461425782, 0.01574185562133789, 0.015938655853271484, 0.016246688842773437, 0.015937536239624024, 0.016685056686401366, 0.01576959991455078, 0.015687520027160645, 0.015721856117248537, 0.01558403205871582, 0.01540214443206787, 0.015498080253601075, 0.01543513584136963, 0.015345664024353027, 0.01615443229675293, 0.01545689582824707, 0.015548607826232911, 0.01545740795135498, 0.015580032348632812, 0.015366175651550292, 0.015378399848937989, 0.015226367950439454, 0.015229439735412598, 0.015309823989868163, 0.015518112182617188, 0.015416959762573242, 0.015432671546936035, 0.01589408016204834, 0.017213760375976564, 
0.016165056228637696, 0.015640512466430664, 0.0158056001663208, 0.015808480262756348, 0.016083616256713867, 0.01578335952758789, 0.01571059226989746, 0.015687552452087403, 0.015831583976745604, 0.015611328125, 0.015511615753173828, 0.015735296249389647, 0.015970303535461427, 0.01600716781616211, 0.0159269437789917, 0.015879520416259764, 0.01577244758605957, 0.015642848014831543, 0.015671648025512696, 0.015605152130126953, 0.015325247764587402, 0.015693120002746582, 0.015589728355407714, 0.01566057586669922, 0.015604640007019043, 0.01582249641418457, 0.015524031639099121, 0.015859328269958495, 0.01569795227050781, 0.01556208038330078, 0.0154585599899292, 0.015338239669799804, 0.01520844841003418, 0.015173695564270019, 0.015200192451477051, 0.015265791893005372, 0.015267231941223144, 0.015291199684143067, 0.01550051212310791, 0.01612473678588867, 0.015535552024841309, 0.01534598445892334, 0.015313152313232423, 0.015177248001098632, 0.015335647583007813, 0.015539936065673828, 0.015173919677734375, 0.015167648315429688, 0.015257439613342284, 0.01530675220489502, 0.015247360229492187, 0.015202431678771973, 0.015310720443725586, 0.01518502426147461, 0.015271103858947754, 0.015164223670959473, 0.015172479629516601, 0.01517948818206787, 0.015218496322631836, 0.015335583686828613, 0.015266112327575684, 0.015222496032714843, 0.015277728080749512, 0.015163040161132813, 0.015184608459472657, 0.015107392311096191, 0.015180319786071777, 0.015174304008483887, 0.01526963233947754, 0.015171584129333495, 0.015539423942565918, 0.015391103744506836, 0.015343711853027344, 0.015071136474609375, 0.015331744194030762, 0.015279423713684083, 0.015311264038085937, 0.015257887840270996, 0.015302656173706054, 0.015257472038269044, 0.015262016296386719, 0.015124287605285645, 0.015350912094116211]",tokens/s,55.42620221123506,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,7303.335936,9457.041408,0.0,9061.793792,8463.626752,s,1,14.3428876953125,14.3428876953125,0.0,14.3428876953125,14.3428876953125,14.3428876953125,14.3428876953125,[14.3428876953125],,kWh,0.00021487623368749383,2.36943618241099e-05,9.731035562601797e-05,0.00033588095113762167,,MB,1703.931904,9469.62432,0.0,9061.793792,7991.22432,s,10,57.66108496093751,5.766108496093751,0.004118686907086784,5.766230224609375,5.77012763671875,5.7712974609375,5.7722333203125,"[5.75782275390625, 5.76150146484375, 5.76316064453125, 5.76605322265625, 5.76616748046875, 5.76629296875, 5.76811865234375, 5.7696328125, 5.76986767578125, 5.77246728515625]",tokens/s,44.39736091914107,kWh,0.00016823414614416645,1.8556737855335502e-05,0.00011176370052199957,0.00029855458452150154,tokens/kWh,857464.6422204352,MB,1710.641152,9469.62432,0.0,9061.793792,8265.583104,s,10,29.5501298828125,2.9550129882812497,0.008249503465217734,2.9533575439453124,2.9656091796875,2.965724755859375,2.965817216796875,"[2.946534912109375, 2.94372900390625, 2.94428515625, 2.952433349609375, 2.96433447265625, 2.952168701171875, 2.95428173828125, 2.96558349609375, 2.96584033203125, 2.960938720703125]",tokens/s,21.319703246598333,kWh,8.632863470291603e-05,9.522743306560679e-06,5.753785158580105e-05,0.00015338922959527777,tokens/kWh,410719.84106203186,,s,630,29.54630256652833,0.04689889296274336,0.00047923563625497655,0.04689219093322754,0.04741061515808105,0.0475846269607544,0.048471395149230954,"[0.04812003326416016, 0.046564510345458984, 0.046091262817382815, 0.04598806381225586, 0.045979038238525394, 0.04610086441040039, 0.04623769760131836, 0.046126625061035154, 0.04627487945556641, 0.0464119987487793, 0.04636870574951172, 0.04629491043090821, 0.04653004837036133, 0.04624604797363281, 0.0463037109375, 0.046439838409423825, 0.046236255645751956, 0.04641299057006836, 0.046534881591796876, 0.04644262313842774, 0.046895584106445315, 0.04693507385253906, 0.04678960037231445, 0.04663296127319336, 0.04636188888549805, 0.04636064147949219, 0.04637763214111328, 0.04654489517211914, 0.04665958404541016, 0.04658995056152344, 0.046532127380371095, 0.046698814392089845, 0.046695873260498046, 
0.04654572677612305, 0.04671990585327149, 0.04714707183837891, 0.04687548828125, 0.04673750305175781, 0.04682160186767578, 0.04728582382202148, 0.04730492782592773, 0.04722687911987305, 0.04717926406860352, 0.04693664169311523, 0.046825408935546875, 0.04672726440429688, 0.046781696319580075, 0.0467435188293457, 0.046682815551757816, 0.04675481414794922, 0.047612480163574215, 0.047161792755126955, 0.04750851058959961, 0.046975967407226565, 0.04699750518798828, 0.047193248748779296, 0.047197025299072264, 0.04832470321655274, 0.04708467102050781, 0.04720038223266602, 0.04728416061401367, 0.047280864715576174, 0.04733488082885742, 0.04825859069824219, 0.046731712341308594, 0.04619222259521484, 0.04598188781738281, 0.04601036834716797, 0.046211296081542966, 0.04611072158813476, 0.046046367645263674, 0.04602352142333985, 0.046219264984130856, 0.04647129440307617, 0.046382881164550784, 0.04619843292236328, 0.046271968841552734, 0.04626124954223633, 0.04636463928222656, 0.04648339080810547, 0.04655459213256836, 0.04655728149414062, 0.0468218879699707, 0.04706086349487305, 0.046960769653320314, 0.046833438873291014, 0.04676425552368164, 0.04658784103393555, 0.04655724716186523, 0.04634975814819336, 0.046292640686035155, 0.046274848937988285, 0.04633235168457031, 0.04642015838623047, 0.046405406951904295, 0.04632947158813477, 0.046516544342041014, 0.04664144134521484, 0.046680065155029295, 0.04659097671508789, 0.04692070388793945, 0.046868480682373044, 0.046908447265625, 0.046994110107421876, 0.04712502288818359, 0.04843084716796875, 0.047249408721923826, 0.04719113540649414, 0.047292896270751957, 0.04710780715942383, 0.04702076721191406, 0.04681932830810547, 0.046726367950439454, 0.04676268768310547, 0.04696428680419922, 0.0470164794921875, 0.046911617279052735, 0.046884193420410156, 0.04677072143554688, 0.04708784103393555, 0.04701136016845703, 0.047035808563232424, 0.047032833099365234, 0.04721903991699219, 0.04713881683349609, 0.04717363357543945, 0.048448993682861326, 0.04681372833251953, 0.04632985687255859, 0.04605155181884766, 0.04613507080078125, 0.04598691177368164, 0.04597993469238281, 0.04617635345458984, 0.04636092758178711, 0.04602873611450195, 0.046094593048095704, 0.046214176177978517, 0.04620540618896484, 0.046414337158203124, 0.04656332778930664, 0.04659423828125, 0.04630886459350586, 0.04629052734375, 0.04627920150756836, 0.046602432250976565, 0.046956703186035155, 0.04686627197265625, 0.046723072052001956, 0.046835712432861325, 0.04675174331665039, 0.04644019317626953, 0.04631577682495117, 0.04640563201904297, 0.04634771347045898, 0.04648739242553711, 0.04634492874145508, 0.04656860733032227, 0.04652732849121094, 0.046635009765625, 0.04697494506835938, 0.04677846527099609, 0.046747230529785154, 0.0466778564453125, 0.0465986557006836, 0.046884864807128904, 0.047101470947265626, 0.04703004837036133, 0.04692243194580078, 0.04698124694824219, 0.04709977722167969, 0.047099903106689454, 0.046852096557617184, 0.04713260650634766, 0.047089729309082035, 0.04699916839599609, 0.04694668960571289, 0.04708681488037109, 0.046955295562744144, 0.04676403045654297, 0.04717158508300781, 0.04718153762817383, 0.047229217529296874, 0.04706860733032227, 0.047003520965576175, 0.047384990692138675, 0.04756099319458008, 0.0473287353515625, 0.047217182159423825, 0.04809164810180664, 0.046617855072021486, 0.046058238983154295, 0.04595711898803711, 0.04606140899658203, 0.045977760314941406, 0.04595711898803711, 0.04585660934448242, 0.04624326324462891, 0.04619241714477539, 0.04619116973876953, 
0.04616230392456055, 0.04610793685913086, 0.04618723297119141, 0.04657875061035156, 0.04675171279907227, 0.04671401596069336, 0.046746688842773436, 0.04660019302368164, 0.04665420913696289, 0.046873950958251955, 0.047063713073730466, 0.04682057571411133, 0.04673193740844726, 0.046639232635498046, 0.04659142303466797, 0.04652492904663086, 0.046714847564697265, 0.0465280647277832, 0.0465843505859375, 0.04690668869018555, 0.0468007698059082, 0.04688924789428711, 0.04699107360839844, 0.04694208145141601, 0.046994369506835935, 0.04685996627807617, 0.047056385040283207, 0.04709177780151367, 0.04719488143920898, 0.04715267181396485, 0.04732566452026367, 0.04740425491333008, 0.04741340637207031, 0.047096256256103516, 0.04718963241577148, 0.04701590347290039, 0.04707183837890625, 0.046960639953613284, 0.04712457656860351, 0.04698233413696289, 0.04712499237060547, 0.04708988952636719, 0.04728543853759765, 0.047343807220458986, 0.04734336090087891, 0.047420478820800784, 0.04759142303466797, 0.047453727722167965, 0.04759958267211914, 0.047540542602539065, 0.0474002571105957, 0.04755321502685547, 0.048724063873291014, 0.04682227325439453, 0.046636959075927735, 0.046728862762451175, 0.046213569641113283, 0.04620080184936524, 0.046272705078125, 0.046229343414306644, 0.04642176055908203, 0.046558719635009765, 0.046414592742919925, 0.04636003112792969, 0.047006240844726564, 0.04674483108520508, 0.04697574234008789, 0.04689100646972656, 0.046952449798583984, 0.04684185409545898, 0.04677632141113281, 0.0469667854309082, 0.047221950531005856, 0.04711907196044922, 0.04715724945068359, 0.0469582405090332, 0.04715280151367188, 0.04689385604858398, 0.04697910308837891, 0.04666527938842773, 0.04676428985595703, 0.04676214218139649, 0.04694217681884766, 0.04680707168579101, 0.0466431999206543, 0.04682345581054687, 0.04698704147338867, 0.04714617538452148, 0.0470906867980957, 0.047230976104736325, 0.04712038421630859, 0.0470568962097168, 0.0473636474609375, 0.04744755172729492, 0.04756576156616211, 0.04741030502319336, 0.04722134399414062, 0.04740739059448242, 0.047405055999755856, 0.047669246673583986, 0.04712857437133789, 0.047257598876953126, 0.047144577026367186, 0.047333087921142575, 0.04726211166381836, 0.047143169403076175, 0.04737638473510742, 0.04762419128417969, 0.04753939056396484, 0.047360862731933594, 0.04735382461547852, 0.04731059265136719, 0.04736223983764649, 0.04748223876953125, 0.0475445442199707, 0.04853670501708984, 0.04701248168945313, 0.046185791015625, 0.04621027374267578, 0.04616486358642578, 0.046248798370361326, 0.046341312408447265, 0.046357311248779294, 0.04632342529296875, 0.04640377426147461, 0.04646102523803711, 0.04658499145507813, 0.04655331039428711, 0.04665817642211914, 0.046611457824707034, 0.04657664108276367, 0.046706687927246096, 0.04667801666259765, 0.046652992248535155, 0.04698361587524414, 0.04749107360839844, 0.04720841598510742, 0.047080543518066405, 0.046936286926269534, 0.04684483337402344, 0.046894401550292966, 0.04694188690185547, 0.04696271896362305, 0.0466885757446289, 0.04684233474731445, 0.04717772674560547, 0.04699135971069336, 0.046706687927246096, 0.04650188827514649, 0.04665139389038086, 0.04663641738891602, 0.04648819351196289, 0.046524417877197265, 0.04674764633178711, 0.047537311553955075, 0.04712326431274414, 0.047478816986083985, 0.04728416061401367, 0.04684601593017578, 0.047323135375976565, 0.046835712432861325, 0.04685593414306641, 0.046970497131347655, 0.04687731170654297, 0.0467949104309082, 0.046922752380371094, 0.04699836730957031, 
0.04702764892578125, 0.04691001510620117, 0.0468513298034668, 0.046727935791015626, 0.04718735885620117, 0.04701203155517578, 0.04694803237915039, 0.047104286193847655, 0.04710044860839844, 0.047230880737304685, 0.0473436164855957, 0.048180767059326175, 0.047288352966308594, 0.04623619079589844, 0.04670022583007812, 0.04611718368530274, 0.04605718231201172, 0.046182689666748045, 0.045983200073242185, 0.046325408935546875, 0.046159870147705076, 0.046182689666748045, 0.04687472152709961, 0.04628879928588867, 0.046136161804199216, 0.0461923828125, 0.04648067092895508, 0.0463326416015625, 0.046430206298828124, 0.04658380889892578, 0.04667334365844727, 0.04673593521118164, 0.04682547378540039, 0.04678678512573242, 0.04673680114746094, 0.04646105575561523, 0.04631299209594727, 0.04646166229248047, 0.046462303161621095, 0.0463919677734375, 0.04666777420043945, 0.04651200103759766, 0.04669862365722656, 0.04664934539794922, 0.046413822174072264, 0.04664115142822266, 0.046796672821044924, 0.046755455017089845, 0.04684592056274414, 0.047129119873046875, 0.04713452911376953, 0.047496734619140626, 0.04745891189575195, 0.04741535949707031, 0.047328575134277344, 0.047200958251953126, 0.04694156646728516, 0.047075969696044925, 0.04699523162841797, 0.04706035232543945, 0.04746326446533203, 0.04729811096191406, 0.04711673736572266, 0.0470252799987793, 0.04721779251098633, 0.04720544052124023, 0.047306785583496096, 0.04720220947265625, 0.04948384094238281, 0.04874310302734375, 0.0472597770690918, 0.047451007843017576, 0.04776028823852539, 0.047513694763183595, 0.04848054504394531, 0.0467861442565918, 0.04626454544067383, 0.04634223937988281, 0.04628595352172851, 0.046324542999267575, 0.04638127899169922, 0.04650908660888672, 0.04653955078125, 0.04675971221923828, 0.04687689590454101, 0.04659827041625977, 0.04665315246582031, 0.04696092987060547, 0.04662681579589844, 0.046583553314208985, 0.04687692642211914, 0.046886913299560545, 0.04662243270874023, 0.046975265502929686, 0.04703228759765625, 0.047285633087158205, 0.0469466552734375, 0.046811168670654296, 0.046644542694091795, 0.046762977600097656, 0.04671897506713867, 0.04665958404541016, 0.04730470275878906, 0.046982177734375, 0.04720640182495117, 0.04689599990844726, 0.047887489318847655, 0.04728316879272461, 0.04692377471923828, 0.046970977783203124, 0.04709120178222656, 0.04726416015625, 0.04719404983520508, 0.04802975845336914, 0.04727340698242188, 0.04769055938720703, 0.047324928283691406, 0.04739465713500977, 0.04714672088623047, 0.04736454391479492, 0.04728422546386719, 0.04704051208496094, 0.04717772674560547, 0.0472591667175293, 0.04723555374145508, 0.04725964736938477, 0.047365856170654294, 0.047153438568115234, 0.04736115264892578, 0.04742015838623047, 0.0473289909362793, 0.04731536102294922, 0.04739670562744141, 0.04727004623413086, 0.047685630798339845, 0.047628288269042966, 0.0475074577331543, 0.048653087615966796, 0.046886974334716794, 0.04667862319946289, 0.04627289581298828, 0.04627862548828125, 0.04643129730224609, 0.04657984161376953, 0.046465854644775394, 0.04653609466552734, 0.04644720077514648, 0.04671043014526367, 0.04664169692993164, 0.04664118576049805, 0.046626750946044924, 0.046567264556884765, 0.04695241546630859, 0.04703171157836914, 0.04704288101196289, 0.04691366577148438, 0.04694009780883789, 0.047867904663085936, 0.04702848052978516, 0.046870529174804686, 0.04710400009155274, 0.04673535919189453, 0.04680908966064453, 0.0467720947265625, 0.04682281494140625, 0.046970752716064455, 0.04705094528198242, 0.046758560180664065, 
0.04679884719848633, 0.04684799957275391, 0.047026176452636716, 0.04710995101928711, 0.046984512329101565, 0.04690995025634766, 0.047110111236572265, 0.046946529388427735, 0.04733910369873047, 0.04738227081298828, 0.047395679473876955, 0.04742694473266602, 0.04741353607177735, 0.04741360092163086, 0.04702207946777344, 0.04707084655761719, 0.04716787338256836, 0.04711734390258789, 0.047372768402099606, 0.04719209671020508, 0.04696931076049805, 0.04720025634765625, 0.04723507308959961, 0.047343902587890625, 0.047832992553710936, 0.0473741455078125, 0.047865375518798825, 0.04770588684082031, 0.04754713439941406, 0.04782688140869141, 0.04764672088623047, 0.047693824768066405, 0.04885590362548828, 0.04704585647583008, 0.046653247833251955, 0.04640252685546875, 0.04646841430664062, 0.04648006439208984, 0.046448638916015625, 0.04653875350952148, 0.04646092987060547, 0.04650137710571289, 0.046407455444335936, 0.046400062561035155, 0.046626976013183594, 0.04677836990356445, 0.046910911560058596, 0.04670659255981445, 0.04689337539672851, 0.04677785491943359, 0.046785377502441404, 0.04705887985229492, 0.04739692687988281, 0.04735795211791992, 0.04698521423339844, 0.04684524917602539, 0.04713062286376953, 0.046754142761230466, 0.04676406478881836, 0.046806785583496095, 0.04678915023803711, 0.04674256134033203, 0.046717952728271485, 0.046653438568115234, 0.04671680068969727, 0.046737537384033204, 0.04684492874145508, 0.04688115310668945, 0.04687936019897461, 0.046973953247070314, 0.04695462417602539, 0.046855072021484374, 0.0469257926940918, 0.04731891250610352, 0.047371681213378904, 0.047417278289794924, 0.04724531173706055, 0.04713347244262695, 0.04715660858154297, 0.04734835052490234, 0.047164737701416014, 0.04728070449829101, 0.04728435134887695, 0.04717772674560547, 0.047034366607666016, 0.04712963104248047, 0.04730569458007813, 0.047314239501953126, 0.04723987197875976, 0.047331329345703124, 0.0473436164855957, 0.04738662338256836, 0.04751564788818359, 0.04754022216796875, 0.04757632064819336]",tokens/s,21.322464920321327,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in 
run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4145.373184,5819.465728,0.0,5416.943616,4964.135424,s,1,10.955767578125,10.955767578125,0.0,10.955767578125,10.955767578125,10.955767578125,10.955767578125,[10.955767578125],,kWh,0.00011800446072913169,1.3009526382198153e-05,5.2163652841996644e-05,0.00018317763995332647,,MB,1424.990208,5853.02016,0.0,5435.817984,4562.7008,s,10,30.790093994140626,3.079009399414063,0.005732084637263689,3.07714697265625,3.086281884765625,3.086529736328125,3.086728017578125,"[3.06939111328125, 3.073928466796875, 3.0777529296875, 3.075769775390625, 3.074871337890625, 3.076541015625, 3.082999755859375, 3.086226806640625, 3.085835205078125, 3.086777587890625]",tokens/s,83.14362406581706,kWh,8.988687569749649e-05,9.9144799998973e-06,5.97359366776018e-05,0.00015953729237499557,tokens/kWh,1604640.4962061592,MB,1450.78272,5853.02016,0.0,5435.817984,4562.70336,s,10,28.087651367187497,2.80876513671875,0.37335336652681217,2.8037958984375,3.1879590576171877,3.2115250610351564,3.2303778637695313,"[3.18272216796875, 3.162566650390625, 3.164280517578125, 3.235091064453125, 3.163201904296875, 2.44177490234375, 2.434739501953125, 2.428841796875, 2.445025146484375, 2.42940771484375]",tokens/s,22.429785665026355,kWh,8.072805652625448e-05,8.904910431306665e-06,5.118226316799674e-05,0.00014081523012555788,tokens/kWh,447394.7877926703,,s,630,28.08534365463256,0.04457991056290884,0.0059767882948195015,0.04637816047668457,0.05064093551635742,0.05124616641998291,0.05402988220214844,"[0.05156476974487305, 0.05043996810913086, 0.050949886322021486, 0.05123712158203125, 0.05175868988037109, 0.050640705108642575, 0.05506313705444336, 0.05021491241455078, 0.05020889663696289, 0.05047286224365234, 0.050245216369628906, 0.05006988906860352, 0.050067455291748046, 0.05041337585449219, 0.050299072265625, 0.05015552139282226, 0.0510648307800293, 0.049979393005371096, 0.050034465789794924, 0.0500164794921875, 0.05021641540527344, 0.05032563018798828, 0.05038716888427734, 0.05059193420410156, 0.050119712829589845, 
0.05010089492797851, 0.05014371109008789, 0.050042049407958984, 0.05027907180786133, 0.05058268737792969, 0.05055347061157227, 0.050448257446289065, 0.050407169342041015, 0.050350399017333985, 0.050417953491210935, 0.05059993743896484, 0.05048844909667969, 0.05144073486328125, 0.05042502212524414, 0.050184417724609375, 0.0503422737121582, 0.05008793640136719, 0.04993014526367188, 0.05047830581665039, 0.05019468688964844, 0.04970969772338867, 0.050229248046875, 0.049870849609375, 0.050361568450927735, 0.05063091278076172, 0.05068854522705078, 0.05016175842285156, 0.05037599945068359, 0.050190944671630856, 0.05013679885864258, 0.05000016021728516, 0.050531841278076174, 0.05012329483032227, 0.05024313735961914, 0.05014384078979492, 0.05349766540527344, 0.05086207962036133, 0.05066880035400391, 0.05151894378662109, 0.05020284652709961, 0.049923423767089845, 0.049925086975097656, 0.04989446258544922, 0.04998035049438477, 0.04977827072143555, 0.050081790924072264, 0.05047740936279297, 0.050047039031982425, 0.0497520637512207, 0.05016953659057617, 0.05018246459960937, 0.050042304992675785, 0.05016409683227539, 0.04992182540893555, 0.04993865585327149, 0.050049182891845706, 0.050138912200927734, 0.04985673522949219, 0.049813247680664065, 0.04976899337768555, 0.04979072189331055, 0.050099998474121096, 0.05040150451660156, 0.05070230484008789, 0.05049568176269531, 0.05010979080200195, 0.0504898567199707, 0.050296863555908206, 0.0501063346862793, 0.05031283187866211, 0.053922176361083984, 0.05015081787109375, 0.050522113800048826, 0.050659488677978516, 0.05037635040283203, 0.0506366081237793, 0.050164703369140626, 0.04995072174072265, 0.049839649200439456, 0.05059836959838867, 0.05044790267944336, 0.050082271575927734, 0.04998912048339844, 0.049968929290771485, 0.04988956832885742, 0.04984451293945313, 0.04979727935791016, 0.0501288948059082, 0.04992214584350586, 0.050151424407958986, 0.049921279907226564, 0.04969539260864258, 0.05002239990234375, 0.050171489715576174, 0.049987201690673826, 0.050167774200439455, 0.05051065444946289, 0.04999750518798828, 0.05052633666992187, 0.049940673828125, 0.049926143646240234, 0.05179619216918945, 0.05049155044555664, 0.05022220611572266, 0.05021503829956055, 0.05025046539306641, 0.05014940643310547, 0.050449920654296876, 0.050196735382080075, 0.04993868637084961, 0.04985651016235351, 0.04978899383544922, 0.049782718658447266, 0.0501956787109375, 0.05059212875366211, 0.050259807586669925, 0.05028665542602539, 0.0500164794921875, 0.04997753524780273, 0.04986851119995117, 0.05019686508178711, 0.05043526458740234, 0.04993107223510742, 0.049893375396728515, 0.04983155059814453, 0.050022113800048826, 0.05341417694091797, 0.05070796966552735, 0.05008063888549805, 0.05013094329833984, 0.04996915054321289, 0.049884960174560546, 0.04969903945922852, 0.05002451324462891, 0.050104255676269534, 0.0528054084777832, 0.05016214370727539, 0.049769790649414065, 0.0497520637512207, 0.049852672576904296, 0.0497657585144043, 0.049883201599121095, 0.04986297607421875, 0.049996318817138674, 0.04995836639404297, 0.05000979232788086, 0.049819553375244144, 0.05036515045166016, 0.04990784072875976, 0.05003673553466797, 0.050255870819091795, 0.05008793640136719, 0.05030659103393555, 0.052308448791503905, 0.050716670989990234, 0.050010112762451174, 0.04985820770263672, 0.04983955383300781, 0.049934944152832034, 0.049790943145751956, 0.04985878372192383, 0.04992934417724609, 0.050135711669921874, 0.05039139175415039, 0.051890048980712894, 0.05035238265991211, 0.05022515106201172, 
0.04988854217529297, 0.04992073440551758, 0.050088001251220704, 0.052643775939941406, 0.05403647994995117, 0.05463216018676758, 0.054767902374267576, 0.053766143798828124, 0.054013729095458984, 0.05321136093139649, 0.053301025390625, 0.053026817321777345, 0.05319891357421875, 0.052881568908691404, 0.052944896697998046, 0.05250838470458984, 0.05019881439208984, 0.05019647979736328, 0.05019852828979492, 0.05007769775390625, 0.05000806427001953, 0.0501923828125, 0.0500469741821289, 0.05053414535522461, 0.050796798706054684, 0.056780574798583984, 0.05103152084350586, 0.05535001754760742, 0.05087443161010742, 0.05072889709472656, 0.050671104431152345, 0.05048345565795898, 0.050484703063964846, 0.050688800811767576, 0.05046265411376953, 0.05064300918579102, 0.05048099136352539, 0.05121449661254883, 0.050929214477539064, 0.050762176513671875, 0.050925567626953126, 0.05092739105224609, 0.050771774291992186, 0.05110179138183594, 0.05125356674194336, 0.051195903778076174, 0.05081292724609375, 0.05052620697021484, 0.050314304351806644, 0.050027454376220706, 0.051871742248535156, 0.05113651275634765, 0.05046441650390625, 0.050246273040771484, 0.05022515106201172, 0.05022012710571289, 0.050315841674804684, 0.05001017761230469, 0.05135564804077149, 0.05002444839477539, 0.05207660675048828, 0.050817024230957034, 0.050708030700683596, 0.050753982543945315, 0.050520065307617185, 0.050446334838867186, 0.050116031646728516, 0.04999020767211914, 0.04976639938354492, 0.04980329513549805, 0.05005923080444336, 0.05017599868774414, 0.05006950378417969, 0.0498809928894043, 0.049895038604736326, 0.0501743049621582, 0.050385089874267576, 0.05002796936035156, 0.05022364807128906, 0.05002374267578125, 0.049842208862304685, 0.04989513778686523, 0.04993526458740234, 0.05009612655639648, 0.05042524719238281, 0.05032815933227539, 0.04997347259521484, 0.049899009704589846, 0.0498928337097168, 0.0498853759765625, 0.04985651016235351, 0.04996944046020508, 0.05004528045654297, 0.04990367889404297, 0.049972446441650394, 0.05025046539306641, 0.050191551208496096, 0.050135265350341796, 0.04996156692504883, 0.0501288948059082, 0.05033932876586914, 0.05021235275268555, 0.05080883026123047, 0.05004556655883789, 0.05004326248168945, 0.050233345031738284, 0.04992825698852539, 0.05002799987792969, 0.04974435043334961, 0.05013734436035156, 0.049770240783691404, 0.04981472015380859, 0.04975484848022461, 0.05023535919189453, 0.0498034553527832, 0.04974585723876953, 0.04979897689819336, 0.05599776077270508, 0.050540702819824215, 0.05006819152832031, 0.04981481552124024, 0.04974256134033203, 0.04983385467529297, 0.04007699203491211, 0.03965481567382813, 0.038695838928222655, 0.03888947296142578, 0.0417196159362793, 0.03954297637939453, 0.03871744155883789, 0.038240383148193356, 0.03834406280517578, 0.04043183898925781, 0.03842486572265625, 0.038321727752685546, 0.03822227096557617, 0.03820943832397461, 0.039008350372314454, 0.0398328971862793, 0.03844985580444336, 0.03878297424316406, 0.03854716873168945, 0.03877507019042969, 0.03839945602416992, 0.039096927642822264, 0.038510528564453125, 0.03926220703125, 0.039163902282714845, 0.038485694885253906, 0.03845766448974609, 0.03835289764404297, 0.03837542343139649, 0.03834230422973633, 0.0384249267578125, 0.038547454833984376, 0.03870028686523438, 0.03843353652954101, 0.03815744018554688, 0.03857088088989258, 0.03848825454711914, 0.038833984375, 0.04034764862060547, 0.03878319931030273, 0.0386409912109375, 0.038537662506103514, 0.03863347244262695, 0.03839139175415039, 0.03863593673706055, 
0.038694911956787106, 0.038408191680908206, 0.038088542938232425, 0.03886608123779297, 0.03879183959960938, 0.03844976043701172, 0.03818675231933594, 0.038414337158203124, 0.03831526565551758, 0.03868035125732422, 0.03862422561645508, 0.03880064010620117, 0.03852979278564453, 0.03872556686401367, 0.03855558395385742, 0.03821327972412109, 0.0384881591796875, 0.039263999938964844, 0.040136737823486326, 0.038743137359619144, 0.03871360015869141, 0.03876326370239258, 0.03864547348022461, 0.03876448059082031, 0.038531295776367186, 0.03839590454101562, 0.0383787841796875, 0.03893936157226562, 0.03848601531982422, 0.038469440460205076, 0.03848211288452148, 0.03848806381225586, 0.03848332977294922, 0.038650497436523434, 0.03865599822998047, 0.03897971343994141, 0.03868201446533203, 0.038744544982910155, 0.03847792053222656, 0.03871120071411133, 0.03891795349121094, 0.03881593704223633, 0.038752254486083985, 0.038580223083496096, 0.03841606521606445, 0.0384126091003418, 0.03824435043334961, 0.04111769485473633, 0.038649406433105465, 0.03848646545410156, 0.03861913681030273, 0.03841823959350586, 0.03840956878662109, 0.03868348693847656, 0.03837747192382813, 0.038422527313232424, 0.03866755294799805, 0.038550304412841796, 0.03836716842651367, 0.03851468658447266, 0.03855708694458008, 0.03855795288085938, 0.03852896118164063, 0.03851510238647461, 0.038594558715820314, 0.038553600311279294, 0.03853718566894531, 0.03849942398071289, 0.03858118438720703, 0.03840108871459961, 0.039217823028564455, 0.038426910400390625, 0.038588417053222655, 0.038491615295410155, 0.038445953369140626, 0.039425121307373044, 0.0382305908203125, 0.03864345550537109, 0.038084865570068356, 0.038591552734375, 0.03830470275878906, 0.03978851318359375, 0.03864166259765625, 0.03846556854248047, 0.03852313613891602, 0.038338462829589845, 0.038545406341552735, 0.0382380485534668, 0.038241310119628905, 0.03830646514892578, 0.03889507293701172, 0.038472545623779296, 0.038305438995361325, 0.03817068862915039, 0.03842281723022461, 0.038332416534423826, 0.03827711868286133, 0.038430721282958984, 0.038182910919189454, 0.03845865631103516, 0.038290145874023435, 0.03840147018432617, 0.038343231201171876, 0.03839385604858398, 0.03836547088623047, 0.03833967971801758, 0.03845798492431641, 0.038340606689453126, 0.03856304168701172, 0.03855779266357422, 0.03858931350708008, 0.03843888092041016, 0.038442977905273436, 0.03822118377685547, 0.03830137634277344, 0.038242527008056644, 0.03850844955444336, 0.038274814605712894, 0.03869494247436524, 0.03973606491088867, 0.038995105743408205, 0.03866329574584961, 0.03863859176635742, 0.038343616485595707, 0.038465152740478514, 0.03906387329101563, 0.03876361465454101, 0.03865510559082031, 0.03852793502807617, 0.03839267349243164, 0.04114636611938476, 0.03862063980102539, 0.03888387298583985, 0.03853084945678711, 0.038510433197021486, 0.038371711730957034, 0.038529022216796875, 0.038489505767822264, 0.03839241409301758, 0.03856793594360351, 0.038348800659179685, 0.03857408142089844, 0.03833980941772461, 0.03826563262939453, 0.04021059036254883, 0.0391734733581543, 0.042865249633789064, 0.043060928344726565, 0.03910598373413086, 0.03853811264038086, 0.03852908706665039, 0.03845523071289063, 0.03831577682495117, 0.03833852767944336, 0.03835113525390625, 0.03857408142089844, 0.03837363052368164, 0.03832012939453125, 0.038459136962890626, 0.038266624450683594, 0.03843302536010742, 0.038365184783935545, 0.03844710540771484, 0.038434814453125, 0.03853311920166016, 0.03855769729614258, 0.038553600311279294, 
0.03834470367431641, 0.038506145477294924, 0.0394172477722168, 0.03852361679077149, 0.0382957763671875, 0.038703361511230466, 0.03875404739379883, 0.039174144744873046, 0.03870515060424805, 0.0385167350769043, 0.03846697616577149, 0.03864022445678711, 0.03861091232299805, 0.03915779113769531, 0.038860767364501954, 0.03854716873168945, 0.03838540649414063, 0.038133888244628905, 0.03843267059326172, 0.03873606491088867, 0.038768543243408206, 0.03891449737548828, 0.03879296112060547, 0.03902899169921875, 0.038389759063720705, 0.038497791290283204, 0.0385577278137207, 0.03861756896972656, 0.038711296081542966, 0.03859462356567383, 0.038631359100341794, 0.038637569427490234, 0.038397953033447264, 0.038717025756835936, 0.038914688110351564, 0.038626976013183593, 0.03898988723754883, 0.039734912872314454, 0.039626625061035155, 0.03848041534423828, 0.03997727966308594, 0.0389222412109375, 0.038676830291748045, 0.038495903015136716, 0.03871539306640625, 0.03884751892089844, 0.03843731307983399, 0.038320289611816404, 0.03824031829833984, 0.038289695739746096, 0.03947484970092773, 0.03885641479492188, 0.038814369201660155, 0.038422527313232424, 0.03829145431518555, 0.03873791885375977, 0.03845478439331055, 0.03854387283325195, 0.03837897491455078, 0.0382408332824707, 0.03836310577392578, 0.03886447906494141, 0.039612831115722655, 0.03868876647949219, 0.03889766311645508, 0.03902790451049805, 0.0385274543762207, 0.038632926940917967, 0.03834969711303711, 0.038604286193847655, 0.038459903717041014, 0.03855270385742188, 0.038382305145263675, 0.03850368118286133, 0.038437152862548826, 0.03818764877319336, 0.038242305755615234, 0.038544960021972656, 0.03829190444946289, 0.038520832061767575, 0.03821363067626953, 0.03831577682495117, 0.03816678237915039, 0.03854892730712891, 0.03833065414428711, 0.03825897598266602, 0.03869286346435547, 0.03813324737548828, 0.03880511856079102, 0.03875270462036133, 0.038951358795166015, 0.038323486328125, 0.038456031799316406, 0.03812931060791016, 0.03808905410766601, 0.03819664001464844, 0.03842057418823242, 0.038429183959960936, 0.039370750427246096, 0.038410175323486326, 0.038416446685791014, 0.038209537506103515, 0.03873788833618164]",tokens/s,22.43162867249032,,, 4bit-awq-exllama-v1-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1031.5776,965.67296,0.0,570.425344,525.840896,s,1,8.07862255859375,8.07862255859375,0.0,8.07862255859375,8.07862255859375,8.07862255859375,8.07862255859375,[8.07862255859375],,kWh,3.357288517912214e-05,3.6961730086164217e-06,1.2070287433996185e-05,4.933934562173475e-05,,MB,1262.501888,1005.518848,0.0,597.68832,584.940544,s,10,1.3383642425537108,0.13383642425537107,0.0006236488037619517,0.1337007522583008,0.13432077026367187,0.1349051055908203,0.13537257385253906,"[0.13548944091796875, 0.1332783966064453, 0.13394464111328125, 0.13326112365722656, 0.13419091796875, 0.1338299560546875, 0.1333603515625, 
0.13388128662109375, 0.13357154846191407, 0.13355657958984374]",tokens/s,1912.7827228223807,kWh,4.107531149247839e-06,4.5298668327437093e-07,2.7255191557223735e-06,7.2860369882445834e-06,tokens/kWh,35135698.65388204,MB,1296.502784,1020.198912,0.0,612.368384,597.290496,s,10,13.734800659179687,1.3734800659179687,0.01288839497495525,1.3706681518554689,1.3920146484375,1.3957022094726563,1.3986522583007812,"[1.37127294921875, 1.3993897705078124, 1.3911951904296875, 1.35689794921875, 1.380880126953125, 1.3673121337890626, 1.3628775634765624, 1.3700633544921874, 1.3601531982421875, 1.3747584228515626]",tokens/s,45.8688855872792,kWh,3.944550456283639e-05,4.350508324748603e-06,1.5790801212874922e-05,5.958681410045991e-05,tokens/kWh,1057280.892611336,,s,630,13.728747289657596,0.02179166236453586,0.0005926802038786021,0.021665040016174315,0.022311522483825685,0.02247623510360718,0.024017830162048347,"[0.021702592849731445, 0.021878816604614257, 0.02166275215148926, 0.021687616348266603, 0.021984960556030272, 0.021578912734985353, 0.02175062370300293, 0.02166771125793457, 0.02207084846496582, 0.022034048080444336, 0.022055200576782227, 0.022097984313964845, 0.021896095275878907, 0.021925920486450194, 0.02201795196533203, 0.021746719360351562, 0.021730016708374024, 0.021786880493164063, 0.0215467529296875, 0.02178236770629883, 0.021530176162719728, 0.021594623565673828, 0.021624927520751954, 0.02163523292541504, 0.021725120544433593, 0.021622079849243164, 0.021665760040283203, 0.02189276885986328, 0.021909887313842774, 0.021782495498657226, 0.021547008514404296, 0.021490304946899415, 0.021520479202270508, 0.02208332824707031, 0.021560543060302733, 0.021574783325195312, 0.021687423706054688, 0.02184671974182129, 0.021788671493530275, 0.02183788871765137, 0.02208064079284668, 0.022030336380004883, 0.021713184356689452, 0.021602848052978515, 0.02157369613647461, 0.021520320892333984, 0.021427520751953123, 0.02154719924926758, 0.021602176666259767, 0.021635040283203125, 0.021746496200561523, 0.021525823593139648, 0.021412384033203124, 0.021428192138671875, 0.021442592620849608, 0.021647520065307617, 0.021646400451660157, 0.02192799949645996, 0.021881568908691407, 0.02209791946411133, 0.022228191375732422, 0.022180959701538085, 0.022189504623413087, 0.02251740837097168, 0.02215872001647949, 0.02196086311340332, 0.02212713623046875, 0.02216771125793457, 0.022167680740356445, 0.022293792724609376, 0.022042783737182617, 0.022104639053344727, 0.021972991943359374, 0.022137088775634764, 0.022148992538452147, 0.022161279678344727, 0.022689727783203124, 0.022320287704467773, 0.02234432029724121, 0.02226848030090332, 0.02253385543823242, 0.02214303970336914, 0.02224051284790039, 0.022389440536499022, 0.02219798469543457, 0.02225724792480469, 0.022399904251098633, 0.022361024856567383, 0.022383583068847655, 0.022339456558227538, 0.022437887191772463, 0.022374048233032226, 0.022333791732788086, 0.022386304855346678, 0.022335872650146485, 0.022401023864746093, 0.022107295989990235, 0.02224166488647461, 0.02240086364746094, 0.02238640022277832, 0.022272672653198242, 0.022290111541748047, 0.02229110336303711, 0.02220863914489746, 0.022261056900024414, 0.022250207901000976, 0.022134880065917968, 0.022046367645263673, 0.022053056716918946, 0.022072160720825195, 0.022250560760498046, 0.022071327209472656, 0.022026111602783203, 0.022214656829833986, 0.0219869441986084, 0.021733728408813477, 0.021549087524414062, 0.021560928344726563, 0.021816831588745117, 0.021932064056396486, 0.022317024230957033, 0.022172319412231446, 
0.022048831939697266, 0.02206287956237793, 0.021989791870117188, 0.022829055786132812, 0.022126272201538087, 0.022487615585327147, 0.02252288055419922, 0.02257734489440918, 0.022480928421020507, 0.02247964859008789, 0.02232089614868164, 0.022429311752319336, 0.02242188835144043, 0.022504543304443358, 0.022623136520385743, 0.02257257652282715, 0.022532928466796876, 0.022433439254760743, 0.022069440841674805, 0.022092832565307616, 0.021948640823364257, 0.021979103088378905, 0.021967456817626952, 0.021934080123901366, 0.021824800491333007, 0.021609184265136718, 0.0214466552734375, 0.02166579246520996, 0.021884927749633788, 0.023183359146118163, 0.022454143524169922, 0.021768320083618165, 0.02170457649230957, 0.02168623924255371, 0.021787872314453127, 0.021869056701660155, 0.021882976531982422, 0.021916000366210938, 0.02190745544433594, 0.02315673637390137, 0.029048032760620117, 0.02218662452697754, 0.022224992752075196, 0.021939584732055664, 0.021932064056396486, 0.021545631408691406, 0.021518335342407227, 0.02147942352294922, 0.021503135681152342, 0.021646175384521484, 0.021743167877197267, 0.021652191162109376, 0.0216529598236084, 0.021575935363769533, 0.02153267288208008, 0.021415103912353517, 0.02144054412841797, 0.02143084716796875, 0.02143459129333496, 0.021527904510498047, 0.021469856262207033, 0.021635040283203125, 0.02154044723510742, 0.02142457580566406, 0.021702079772949218, 0.02213462448120117, 0.02201468849182129, 0.02181830406188965, 0.02204457664489746, 0.02193116760253906, 0.021977407455444336, 0.021991968154907227, 0.021759008407592772, 0.021577823638916017, 0.021551103591918946, 0.021502559661865234, 0.021528863906860353, 0.021522432327270507, 0.021646976470947266, 0.021635295867919922, 0.021679712295532227, 0.021643199920654298, 0.0224586238861084, 0.021559680938720703, 0.02149171257019043, 0.021429855346679686, 0.021495967864990233, 0.02136294364929199, 0.021389375686645506, 0.021343360900878905, 0.02148547172546387, 0.021523296356201174, 0.021720191955566407, 0.02193212890625, 0.021789535522460938, 0.021488832473754882, 0.021361024856567382, 0.02126892852783203, 0.021317632675170898, 0.021340160369873046, 0.02146086311340332, 0.021521888732910156, 0.021400224685668944, 0.02136819267272949, 0.021422719955444335, 0.02159984016418457, 0.021445215225219725, 0.021520191192626954, 0.02136172866821289, 0.021476255416870118, 0.02134223937988281, 0.02130534362792969, 0.02128873634338379, 0.021713119506835937, 0.021321727752685548, 0.021235712051391603, 0.021247648239135743, 0.021389375686645506, 0.021274528503417968, 0.021299264907836915, 0.021408063888549805, 0.021352287292480468, 0.0213504638671875, 0.021667903900146484, 0.02183359909057617, 0.021438623428344728, 0.021362688064575194, 0.021378175735473633, 0.021601152420043946, 0.021433504104614257, 0.0213175048828125, 0.021526016235351563, 0.021439104080200194, 0.0212620792388916, 0.021495519638061525, 0.021406240463256836, 0.02153990364074707, 0.022257919311523437, 0.026421279907226564, 0.026288799285888672, 0.022142431259155274, 0.02246441650390625, 0.022035104751586914, 0.021995840072631837, 0.021937503814697265, 0.023060800552368164, 0.021854015350341798, 0.02177043151855469, 0.021626880645751953, 0.021522432327270507, 0.02141961669921875, 0.021419519424438475, 0.02145372772216797, 0.02163692855834961, 0.021555391311645508, 0.021440383911132812, 0.021353887557983398, 0.02162761688232422, 0.02409062385559082, 0.022184032440185547, 0.021687904357910157, 0.021862207412719728, 0.02162067222595215, 0.02166431999206543, 
0.021594112396240234, 0.021573631286621094, 0.021780479431152345, 0.022183935165405275, 0.02387721633911133, 0.021660064697265623, 0.022101631164550783, 0.02465830421447754, 0.02158729553222656, 0.021487775802612304, 0.02139187240600586, 0.021417600631713867, 0.021430656433105467, 0.021319679260253906, 0.021555200576782226, 0.02135161590576172, 0.02143929672241211, 0.02133331108093262, 0.02139411163330078, 0.021384191513061524, 0.021436704635620116, 0.02138185691833496, 0.021420032501220702, 0.021888448715209962, 0.021533248901367187, 0.02147327995300293, 0.021407743453979493, 0.021549055099487305, 0.021398944854736326, 0.021373056411743165, 0.021515743255615234, 0.021507776260375977, 0.02141881561279297, 0.02145712089538574, 0.021477184295654296, 0.02146713638305664, 0.02146303939819336, 0.02192793655395508, 0.021510143280029297, 0.02147478485107422, 0.021602848052978515, 0.0215285758972168, 0.02148761558532715, 0.021372415542602538, 0.022249824523925783, 0.021416095733642577, 0.02141779136657715, 0.021572864532470704, 0.021654464721679687, 0.021766368865966796, 0.021690143585205077, 0.02177008056640625, 0.022055072784423826, 0.02206924819946289, 0.021929983139038087, 0.021942272186279296, 0.02198297691345215, 0.02184623908996582, 0.021725471496582032, 0.021806848526000976, 0.021556480407714844, 0.021584640502929686, 0.021392831802368163, 0.021752384185791014, 0.02187264060974121, 0.021738847732543944, 0.02169718360900879, 0.021997312545776367, 0.02132371139526367, 0.021420095443725588, 0.021418239593505858, 0.021415327072143556, 0.021463487625122072, 0.02142633628845215, 0.021366239547729492, 0.021563936233520507, 0.021381120681762695, 0.021409791946411134, 0.021653600692749023, 0.02192095947265625, 0.02189753532409668, 0.021774751663208008, 0.02178767967224121, 0.021828575134277342, 0.02196678352355957, 0.021846208572387695, 0.021886240005493163, 0.021848672866821288, 0.021755903244018555, 0.02210028839111328, 0.023135135650634766, 0.02203126335144043, 0.021719039916992186, 0.02205695915222168, 0.021729183197021485, 0.02155945587158203, 0.021429695129394532, 0.02132633590698242, 0.02134864044189453, 0.021337823867797853, 0.02127462387084961, 0.02138444709777832, 0.02133011245727539, 0.021354143142700194, 0.022588319778442383, 0.021823200225830078, 0.02189651107788086, 0.021855199813842773, 0.021624767303466796, 0.021817407608032226, 0.021544639587402343, 0.02153081512451172, 0.0216060791015625, 0.021567264556884767, 0.02148198318481445, 0.021662111282348632, 0.021562751770019532, 0.021561088562011718, 0.02154969596862793, 0.021431488037109377, 0.021582015991210936, 0.02154355239868164, 0.02146665573120117, 0.021345760345458983, 0.021371488571166993, 0.021223072052001954, 0.021174495697021484, 0.021523040771484377, 0.021548160552978514, 0.021545791625976564, 0.021733312606811522, 0.021442432403564454, 0.02148953628540039, 0.02141209602355957, 0.02160214424133301, 0.021381343841552734, 0.021405696868896484, 0.02137196731567383, 0.021410207748413086, 0.02113795280456543, 0.02137276840209961, 0.021481632232666015, 0.021962080001831055, 0.022200159072875977, 0.022096832275390624, 0.021919904708862306, 0.021890111923217773, 0.02185487937927246, 0.021747711181640626, 0.022179840087890625, 0.021781856536865235, 0.02171356773376465, 0.022060895919799806, 0.02214076805114746, 0.02211187171936035, 0.02206230354309082, 0.022309759140014648, 0.022353279113769532, 0.02225619125366211, 0.022468576431274413, 0.02231091117858887, 0.022223007202148436, 0.022251264572143555, 0.022472063064575196, 
0.02260479927062988, 0.022452064514160156, 0.022458240509033202, 0.022255104064941408, 0.022212287902832032, 0.022600255966186523, 0.022460800170898437, 0.022235231399536134, 0.022191904067993165, 0.021941984176635742, 0.02187708854675293, 0.0217640323638916, 0.021583520889282226, 0.021584096908569335, 0.021477504730224608, 0.021265632629394533, 0.021238304138183593, 0.021360895156860352, 0.021475360870361327, 0.021449823379516602, 0.0213239688873291, 0.021285568237304688, 0.02128611183166504, 0.02131817626953125, 0.021125375747680666, 0.021245023727416993, 0.021207967758178712, 0.021242944717407227, 0.02142464065551758, 0.021344703674316408, 0.02126848030090332, 0.021237024307250975, 0.021572032928466798, 0.0217458553314209, 0.021592159271240235, 0.02130454444885254, 0.021158239364624024, 0.021221824645996094, 0.021280096054077147, 0.021330591201782226, 0.021235712051391603, 0.021273664474487305, 0.021277631759643555, 0.021456127166748048, 0.02161347198486328, 0.021903263092041016, 0.021936063766479493, 0.02186240005493164, 0.02189004707336426, 0.02178755187988281, 0.02165155220031738, 0.021813247680664064, 0.02212563133239746, 0.021910207748413086, 0.02211043167114258, 0.02199692726135254, 0.021827583312988282, 0.02170911979675293, 0.021618879318237305, 0.021418367385864258, 0.02136819267272949, 0.02137766456604004, 0.021579776763916016, 0.021348352432250976, 0.02123097610473633, 0.02207916831970215, 0.021271488189697266, 0.021226879119873046, 0.021596799850463866, 0.021733184814453126, 0.02163020706176758, 0.021308416366577147, 0.021261407852172853, 0.02114406394958496, 0.02109270477294922, 0.021130399703979494, 0.021230432510375978, 0.021208127975463866, 0.021429183959960938, 0.021269535064697264, 0.02134048080444336, 0.021324447631835938, 0.021481472015380858, 0.02127257537841797, 0.02157360076904297, 0.021927967071533203, 0.022042623519897463, 0.02186467170715332, 0.021880064010620117, 0.021869087219238283, 0.021760000228881835, 0.02184339141845703, 0.022709856033325194, 0.02205820846557617, 0.02190140724182129, 0.021976383209228515, 0.022002016067504883, 0.022052831649780273, 0.02197711944580078, 0.021712896347045898, 0.021544960021972655, 0.021401599884033205, 0.021405183792114257, 0.021868703842163086, 0.02148796844482422, 0.02171801567077637, 0.021696575164794923, 0.02139846420288086, 0.02128233528137207, 0.021367200851440428, 0.021368703842163085, 0.02129142379760742, 0.02128463935852051, 0.02125791931152344, 0.021264543533325197, 0.021666271209716797, 0.021560415267944336, 0.021874912261962892, 0.02232694435119629, 0.023127647399902345, 0.022128639221191407, 0.022046560287475585, 0.021928640365600587, 0.021840896606445313, 0.02188582420349121, 0.0218951358795166, 0.021994911193847656, 0.02191231918334961, 0.021745471954345702, 0.021575872421264648, 0.02407526397705078, 0.025135135650634764, 0.021981632232666016, 0.02176630401611328, 0.021655071258544923, 0.021474111557006837, 0.02142620849609375, 0.021397504806518555, 0.021338111877441408, 0.02122127914428711, 0.021424224853515625, 0.02128281593322754, 0.02134556770324707, 0.021357280731201172, 0.02142207908630371, 0.02139116859436035, 0.02132601547241211, 0.02228953552246094, 0.021865215301513672, 0.021739648818969726, 0.021379423141479493, 0.02167363166809082, 0.021344255447387696, 0.021331647872924804, 0.021600576400756837, 0.021398815155029297, 0.021350303649902345, 0.021289535522460937, 0.02183737564086914, 0.0230447998046875, 0.021299104690551757, 0.02127676773071289, 0.021215200424194336, 0.02127177619934082, 
0.021342655181884766, 0.021270591735839842, 0.021283136367797852, 0.021348031997680664, 0.021360960006713867, 0.021540864944458008, 0.02153267288208008, 0.02186854362487793, 0.022068384170532227, 0.022172512054443358, 0.0221693115234375, 0.022091487884521484, 0.02220921516418457, 0.02226982307434082, 0.022742528915405274, 0.022331327438354493]",tokens/s,45.88911039790236,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 651, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 423, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 664, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Qwen/Qwen-7B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-7B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,2188.3904,2549.02272,0.0,2153.775104,2041.744384,s,1,11.085216796875,11.085216796875,0.0,11.085216796875,11.085216796875,11.085216796875,11.085216796875,[11.085216796875],,kWh,8.28118539208276e-05,9.12761928141536e-06,3.244780373600625e-05,0.00012438727693824922,,MB,2259.472384,2563.702784,0.0,2153.775104,1917.689856,s,10,0.986597511291504,0.0986597511291504,0.00023554764159677342,0.09864913940429687,0.09887231750488282,0.09902524719238281,0.09914759094238282,"[0.09917817687988281, 0.09829135894775391, 0.0986562271118164, 0.09849993896484376, 0.0986915512084961, 0.09854611206054688, 0.09864205169677734, 0.09843129730224609, 0.09882246398925781, 0.09883833312988281]",tokens/s,2594.7764622361915,kWh,2.9290764352499914e-06,3.2302500115680017e-07,1.9342209918200847e-06,5.186322428226876e-06,tokens/kWh,49360602.5353735,MB,2267.312128,2563.702784,0.0,2153.775104,2001.551872,s,10,14.554385620117186,1.4554385620117187,0.008469782365896452,1.4553158569335936,1.4653969116210939,1.466353546142578,1.4671188537597657,"[1.462813720703125, 1.449466796875, 1.4407430419921874, 1.4507158203125, 1.4492359619140625, 1.465184326171875, 1.44768603515625, 1.4599158935546874, 1.4613138427734376, 1.4673101806640625]",tokens/s,43.28592195119587,kWh,4.23991138060001e-05,4.6763035155543845e-06,2.2585743068579413e-05,6.966116039013388e-05,tokens/kWh,904377.699813951,,s,630,14.551532554626458,0.02309767072162931,0.0003988010129198367,0.02298471927642822,0.023503081703186036,0.02363767557144165,0.024713653583526615,"[0.024669599533081055, 0.023513696670532228, 0.02348236846923828, 0.022975807189941407, 0.022803136825561524, 0.022976383209228516, 0.0230380802154541, 0.02292531204223633, 0.022818944931030274, 0.02282304000854492, 0.02290457534790039, 0.022890752792358398, 0.022785791397094725, 0.023414783477783203, 0.02318934440612793, 0.023064735412597657, 0.023159839630126952, 0.022975616455078125, 0.0232077751159668, 0.022969343185424804, 0.02297894477844238, 0.02293414306640625, 0.02296623992919922, 0.022913055419921877, 0.0232857608795166, 0.02297881507873535, 0.02299673652648926, 0.02306662368774414, 0.022998079299926758, 0.02303420829772949, 0.02303411293029785, 0.02328428840637207, 0.02305820846557617, 0.022988288879394532, 0.023513151168823243, 0.023439680099487305, 0.023545984268188477, 0.023390207290649414, 0.023504896163940428, 0.023502687454223632, 0.023478431701660155, 0.023475296020507814, 0.023463872909545897, 0.023399295806884764, 0.023474239349365236, 0.0235948486328125, 0.023548095703125, 0.023554271697998046, 0.023514911651611327, 0.02332057571411133, 0.02326323127746582, 0.02335536003112793, 
0.023502880096435547, 0.02353561592102051, 0.02337123107910156, 0.023494495391845702, 0.023341760635375977, 0.023103456497192382, 0.023002912521362304, 0.023099647521972657, 0.02292736053466797, 0.02294169616699219, 0.022790143966674805, 0.02372403144836426, 0.023453983306884765, 0.023357152938842773, 0.0237589111328125, 0.02384889602661133, 0.02308095932006836, 0.023154687881469727, 0.022962175369262695, 0.022929216384887697, 0.022914783477783203, 0.02327731132507324, 0.022911584854125977, 0.02278812789916992, 0.02294758415222168, 0.022782304763793945, 0.02291529655456543, 0.022816543579101563, 0.022769664764404295, 0.022816768646240236, 0.022722143173217774, 0.022923871994018553, 0.022949760437011718, 0.023365568161010743, 0.02534809684753418, 0.022970367431640625, 0.023015424728393553, 0.02304819107055664, 0.0229171199798584, 0.02293270492553711, 0.023015520095825196, 0.022798559188842774, 0.022775423049926757, 0.023298912048339844, 0.02290281677246094, 0.022878175735473634, 0.022964223861694336, 0.02287753677368164, 0.02288912010192871, 0.022982656478881838, 0.02272870445251465, 0.022734848022460938, 0.022744352340698243, 0.02295078468322754, 0.02306550407409668, 0.02287868881225586, 0.02279583930969238, 0.02279884719848633, 0.0228110408782959, 0.022835071563720704, 0.022789472579956054, 0.02302345657348633, 0.022780864715576173, 0.022757375717163086, 0.023193248748779295, 0.022865503311157227, 0.022815168380737303, 0.022856000900268555, 0.0228853759765625, 0.022874975204467775, 0.0229005126953125, 0.022772096633911134, 0.022787071228027343, 0.02277897644042969, 0.02299283218383789, 0.022958751678466796, 0.022894880294799805, 0.022977888107299806, 0.022786176681518555, 0.02276937675476074, 0.022944416046142578, 0.022922752380371093, 0.02303657531738281, 0.02286422348022461, 0.022978208541870118, 0.022969823837280273, 0.022780448913574218, 0.022785120010375977, 0.022852512359619142, 0.02278972816467285, 0.02276188850402832, 0.022773727416992188, 0.022810239791870118, 0.02287161636352539, 0.022965087890625, 0.02287228775024414, 0.02281999969482422, 0.022839935302734374, 0.022759424209594727, 0.02284297561645508, 0.02277212715148926, 0.02274508857727051, 0.02273219108581543, 0.0227325439453125, 0.02297260856628418, 0.02287273597717285, 0.022812095642089844, 0.022935199737548827, 0.022836544036865233, 0.022953311920166017, 0.022786304473876952, 0.02279814338684082, 0.02275551986694336, 0.022792192459106447, 0.022768640518188478, 0.022810976028442384, 0.022802175521850584, 0.022767711639404296, 0.02283958435058594, 0.022782495498657226, 0.022820863723754883, 0.022878400802612303, 0.022896160125732423, 0.022871679306030273, 0.02288844871520996, 0.02289321517944336, 0.023443456649780273, 0.022962175369262695, 0.02293065643310547, 0.022952735900878905, 0.022816768646240236, 0.022869247436523438, 0.02275404739379883, 0.02322364807128906, 0.0228372802734375, 0.022768255233764648, 0.022773759841918945, 0.02352998352050781, 0.023880096435546876, 0.02355561637878418, 0.02357004737854004, 0.023380767822265624, 0.0233123836517334, 0.02387942314147949, 0.02342428779602051, 0.023253984451293945, 0.02332431983947754, 0.023031808853149413, 0.02291747283935547, 0.02322777557373047, 0.023298751831054686, 0.02313814353942871, 0.022982751846313477, 0.022915424346923827, 0.022918815612792968, 0.02281497573852539, 0.02270182418823242, 0.022812671661376953, 0.022779903411865234, 0.022887424468994142, 0.022940671920776368, 0.022999040603637694, 0.022969631195068358, 0.02336147117614746, 0.023334911346435547, 
0.023292703628540037, 0.02306252861022949, 0.022845439910888672, 0.02305059242248535, 0.022904287338256835, 0.022884544372558595, 0.02278940773010254, 0.022865631103515624, 0.022811647415161132, 0.022708223342895507, 0.022810943603515627, 0.022867168426513672, 0.022888927459716796, 0.02308032035827637, 0.023040639877319337, 0.022874111175537108, 0.02281785583496094, 0.022815679550170897, 0.02278972816467285, 0.022862239837646483, 0.022733951568603517, 0.022834047317504883, 0.022849536895751952, 0.022837247848510742, 0.02284752082824707, 0.022984672546386718, 0.022759424209594727, 0.022818080902099608, 0.022905567169189452, 0.02286591911315918, 0.02282598304748535, 0.022897663116455077, 0.023080671310424804, 0.02312835121154785, 0.02287788772583008, 0.02314854431152344, 0.023004959106445313, 0.022857376098632812, 0.023005760192871094, 0.023277536392211914, 0.022906911849975585, 0.0227061767578125, 0.02292915153503418, 0.02283750343322754, 0.02286083221435547, 0.023014368057250975, 0.022990591049194337, 0.02291708755493164, 0.022903072357177735, 0.022914560317993164, 0.023295679092407227, 0.02326406478881836, 0.0230665283203125, 0.02306649589538574, 0.02305023956298828, 0.023007007598876954, 0.02304863929748535, 0.022884191513061522, 0.02301763153076172, 0.022921215057373046, 0.023069791793823242, 0.023159135818481447, 0.022938175201416014, 0.02290892791748047, 0.023131776809692382, 0.022929792404174806, 0.023043359756469727, 0.023114463806152345, 0.022915071487426757, 0.02292736053466797, 0.022921440124511718, 0.02293494415283203, 0.023017183303833007, 0.02284137535095215, 0.022905471801757813, 0.02290892791748047, 0.022804479598999023, 0.02291878318786621, 0.02302137565612793, 0.022964799880981445, 0.02290505599975586, 0.022961952209472655, 0.0231014404296875, 0.022910463333129884, 0.022858591079711915, 0.02292291259765625, 0.02307276725769043, 0.022898687362670898, 0.023068351745605467, 0.023470399856567382, 0.02331648063659668, 0.02308915138244629, 0.02302128028869629, 0.023048416137695312, 0.02318751907348633, 0.02309119987487793, 0.02288163185119629, 0.02288912010192871, 0.023566463470458984, 0.02326515197753906, 0.023549312591552733, 0.023114368438720702, 0.02296348762512207, 0.023113536834716796, 0.023128992080688478, 0.023033695220947267, 0.023070655822753906, 0.02309552001953125, 0.023011327743530274, 0.022916255950927736, 0.022914047241210937, 0.023004383087158204, 0.022846080780029296, 0.022779424667358397, 0.022890911102294922, 0.02291859245300293, 0.02292799949645996, 0.02314841651916504, 0.023230144500732422, 0.023210399627685546, 0.023363744735717774, 0.02344086456298828, 0.02378531265258789, 0.023405248641967774, 0.023380895614624024, 0.02344393539428711, 0.023349472045898437, 0.02335804748535156, 0.02346566390991211, 0.023498687744140625, 0.023250112533569334, 0.023190528869628906, 0.023241727828979493, 0.023474592208862305, 0.023336767196655273, 0.02328780746459961, 0.023227039337158202, 0.022994720458984375, 0.023042463302612306, 0.023238048553466797, 0.023635616302490236, 0.022948736190795897, 0.022998912811279297, 0.023203071594238282, 0.023231103897094728, 0.023058015823364256, 0.023218528747558594, 0.023288000106811525, 0.023549951553344727, 0.0235248966217041, 0.023607072830200197, 0.02343017578125, 0.024481439590454103, 0.024731647491455077, 0.02346988868713379, 0.02312620735168457, 0.02305843162536621, 0.023068672180175782, 0.022967327117919923, 0.023030752182006835, 0.022812320709228517, 0.02376192092895508, 0.02334534454345703, 0.023184064865112305, 
0.023214176177978517, 0.02307072067260742, 0.023144447326660156, 0.022983808517456055, 0.02294028854370117, 0.022853631973266602, 0.022805791854858398, 0.022854272842407226, 0.022911327362060547, 0.022767328262329103, 0.022839231491088866, 0.02621670341491699, 0.025442399978637696, 0.022906879425048828, 0.023074047088623047, 0.022796831130981444, 0.022777856826782225, 0.022739168167114257, 0.02293484878540039, 0.02277609634399414, 0.02276393508911133, 0.022872064590454103, 0.022749183654785156, 0.022799808502197264, 0.02276380729675293, 0.02283344078063965, 0.022838432312011717, 0.02283942413330078, 0.02279292869567871, 0.02290278434753418, 0.022703519821166994, 0.02275299263000488, 0.022751359939575194, 0.022795007705688475, 0.022876096725463868, 0.022813791275024413, 0.022706207275390626, 0.02280463981628418, 0.022809375762939454, 0.022836896896362306, 0.023425376892089844, 0.022845439910888672, 0.022771711349487304, 0.02294169616699219, 0.022887807846069336, 0.02275529670715332, 0.022817440032958984, 0.022921152114868164, 0.022843135833740234, 0.022774080276489257, 0.023154783248901366, 0.022923168182373048, 0.022792192459106447, 0.022796287536621093, 0.022736864089965822, 0.022835231781005858, 0.022725984573364257, 0.02270070457458496, 0.022673408508300782, 0.022761280059814454, 0.02326291275024414, 0.023208288192749022, 0.023080896377563477, 0.02303388786315918, 0.022779552459716797, 0.022820383071899413, 0.022776512145996092, 0.02281484794616699, 0.022831104278564454, 0.022686784744262695, 0.022709087371826173, 0.022718559265136717, 0.022773632049560545, 0.022818944931030274, 0.02291231918334961, 0.02277609634399414, 0.022824960708618162, 0.022753087997436524, 0.022765888214111327, 0.022730783462524416, 0.022924991607666017, 0.022903104782104493, 0.02310371208190918, 0.023088384628295898, 0.023154560089111327, 0.023215007781982423, 0.023334911346435547, 0.023345151901245118, 0.023377920150756838, 0.023322399139404298, 0.023544031143188475, 0.023457311630249025, 0.02327190399169922, 0.02350284767150879, 0.023817472457885742, 0.02419728088378906, 0.023159679412841798, 0.023129823684692383, 0.023078176498413087, 0.023046239852905274, 0.023066591262817383, 0.0229403190612793, 0.022931488037109374, 0.02296009635925293, 0.023162879943847657, 0.022962175369262695, 0.023004575729370116, 0.022899423599243164, 0.022859104156494142, 0.02287606430053711, 0.02282521629333496, 0.023187040328979492, 0.023360288619995118, 0.026224639892578124, 0.023268447875976563, 0.023165056228637695, 0.023032127380371095, 0.0234256649017334, 0.023271360397338868, 0.023330720901489257, 0.023373823165893554, 0.023520288467407228, 0.02487571144104004, 0.026834943771362304, 0.023702848434448243, 0.02381279945373535, 0.02347007942199707, 0.02328166389465332, 0.023318527221679687, 0.023516672134399414, 0.023349760055541992, 0.023207935333251953, 0.023171072006225587, 0.023262399673461914, 0.023140512466430663, 0.02303027153015137, 0.02302921676635742, 0.02313852882385254, 0.023144927978515625, 0.02330611228942871, 0.0235536003112793, 0.023169567108154297, 0.023345184326171876, 0.023639360427856446, 0.02345439910888672, 0.023392255783081056, 0.023195775985717773, 0.023203712463378906, 0.02361078453063965, 0.02352547264099121, 0.023079391479492187, 0.0229847354888916, 0.02305023956298828, 0.023013023376464842, 0.022946144104003908, 0.023134208679199218, 0.022920192718505858, 0.02295091247558594, 0.023072639465332032, 0.022878335952758788, 0.023029760360717775, 0.022958080291748048, 0.022959808349609374, 
0.02288876724243164, 0.022956031799316406, 0.02286796760559082, 0.022808576583862306, 0.022902528762817384, 0.022980863571166993, 0.022966079711914063, 0.022866079330444336, 0.022929439544677733, 0.023347200393676756, 0.0229171199798584, 0.02284499168395996, 0.02284339141845703, 0.023011775970458986, 0.022984703063964843, 0.022788095474243163, 0.02281062316894531, 0.022838911056518554, 0.022898624420166016, 0.023003103256225586, 0.02334681510925293, 0.023245855331420897, 0.023164735794067384, 0.023825536727905272, 0.023835647583007814, 0.023535680770874024, 0.023566047668457032, 0.02328057670593262, 0.023022687911987305, 0.022999679565429688, 0.023077280044555663, 0.023358623504638673, 0.023290592193603514, 0.023424928665161132, 0.023364864349365234, 0.023138208389282225, 0.023036319732666014, 0.023387744903564454, 0.023235136032104493, 0.023058816909790038, 0.0230645751953125, 0.023033760070800782, 0.023034303665161134, 0.022984415054321288, 0.023264799118041992, 0.02334351921081543, 0.023467840194702147, 0.02334943962097168, 0.023750143051147463, 0.023142080307006836, 0.022999488830566406, 0.023001472473144532, 0.02302355194091797, 0.02317081642150879, 0.02292972755432129, 0.023203840255737306, 0.0230882568359375, 0.022962207794189452, 0.022915903091430663, 0.022923519134521484, 0.022900768280029297, 0.022916576385498048, 0.023097631454467773, 0.02306831932067871, 0.023032159805297853, 0.023111679077148437, 0.023072256088256835, 0.023107423782348632, 0.023089088439941407, 0.023089887619018555, 0.0229783992767334, 0.023135936737060547, 0.02328851127624512, 0.023602975845336913, 0.023662591934204103, 0.023756799697875978, 0.02369536018371582, 0.02365235137939453, 0.023603200912475586, 0.023635520935058593, 0.023615936279296874, 0.023591264724731446, 0.023873279571533203, 0.023758304595947265, 0.023789440155029297, 0.02373641586303711]",tokens/s,43.29440886277644,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) 
File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,3145.99424,4639.883264,0.0,4244.635648,4125.520384,s,1,11.7520712890625,11.7520712890625,0.0,11.7520712890625,11.7520712890625,11.7520712890625,11.7520712890625,[11.7520712890625],,kWh,0.00014004919984996983,1.5440516300272363e-05,5.297976460599818e-05,0.00020846948075624037,,MB,3185.455104,4813.94688,0.0,4404.0192,4310.797312,s,10,1.1998705825805664,0.11998705825805664,0.00018602052537982502,0.11992507171630859,0.12016988677978516,0.1202754379272461,0.12035987884521485,"[0.12038098907470703, 0.12014105224609375, 0.11976278686523438, 0.12014643096923829, 0.11981970977783203, 0.11997030639648437, 0.11983433532714843, 0.12008499145507813, 0.11987983703613281, 0.11985014343261718]",tokens/s,2133.5634335614745,kWh,3.577076461839147e-06,3.944667214168399e-07,2.3632674732686106e-06,6.3348106565245984e-06,tokens/kWh,40411626.15276123,MB,3193.921536,4816.044032,0.0,4406.116352,4310.799872,s,10,24.742931640625002,2.4742931640625003,0.013718663129249119,2.4744366455078124,2.4924291015625,2.495043408203125,2.4971348535156253,"[2.49765771484375, 
2.463353515625, 2.475403564453125, 2.451627685546875, 2.459360595703125, 2.4734697265625, 2.4754619140625, 2.49184814453125, 2.48601416015625, 2.468734619140625]",tokens/s,25.46181710196433,kWh,7.21368456039991e-05,7.956612219909271e-06,3.629331900752945e-05,0.00011638677683143779,tokens/kWh,541298.605521506,,s,630,24.74024644851682,0.039270232457963246,0.0006882776196660147,0.03920785522460937,0.039810585021972655,0.04008425540924072,0.041340187988281256,"[0.04134908676147461, 0.03932627105712891, 0.03967132949829102, 0.03985990524291992, 0.03962326431274414, 0.04112403106689453, 0.039345344543457034, 0.039279296875, 0.039335742950439456, 0.03915945434570312, 0.039241886138916014, 0.03938787078857422, 0.0393639030456543, 0.039313888549804686, 0.039327392578125, 0.03936399841308594, 0.03978335952758789, 0.039684097290039064, 0.03917824172973633, 0.03903219223022461, 0.03914355087280273, 0.03904790496826172, 0.03911420822143555, 0.03987420654296875, 0.039467681884765626, 0.03952025604248047, 0.03935577774047851, 0.03941849517822266, 0.03946086502075195, 0.039239200592041015, 0.03966204833984375, 0.03923484802246094, 0.03908809661865234, 0.03959414291381836, 0.047214942932128905, 0.03951795196533203, 0.039358272552490234, 0.03986092758178711, 0.03931260681152344, 0.03900495910644531, 0.039089344024658204, 0.03871334457397461, 0.03917907333374023, 0.04056268692016601, 0.039919200897216796, 0.041062976837158205, 0.039481407165527345, 0.03950774383544922, 0.04131840133666992, 0.039462913513183595, 0.04014688110351562, 0.03930527877807617, 0.03909632110595703, 0.03945676803588867, 0.03942195129394531, 0.03930492782592773, 0.0393935661315918, 0.03907920074462891, 0.03916419219970703, 0.03928067016601562, 0.039219615936279296, 0.03927859115600586, 0.039699615478515624, 0.03983657455444336, 0.03904307174682617, 0.03890995025634766, 0.03902668762207031, 0.03947865676879883, 0.040522144317626956, 0.03919279861450195, 0.03918588638305664, 0.0391923828125, 0.03957219314575195, 0.039288833618164064, 0.03920598220825195, 0.039025566101074216, 0.03917571258544922, 0.03917801666259765, 0.039072288513183596, 0.039220897674560544, 0.039010177612304686, 0.03896384048461914, 0.03891145706176758, 0.039043487548828124, 0.039026176452636716, 0.039434623718261716, 0.03989120101928711, 0.03944249725341797, 0.03893241500854492, 0.03923763275146484, 0.03898172760009765, 0.03903395080566406, 0.039093246459960936, 0.03908572769165039, 0.03881318283081055, 0.03879183959960938, 0.03866592025756836, 0.03891846466064453, 0.03999887847900391, 0.03904556655883789, 0.038551712036132814, 0.03845119857788086, 0.03920268630981445, 0.03899955368041992, 0.03890240097045899, 0.03877872085571289, 0.03876620864868164, 0.038455776214599606, 0.03851398468017578, 0.03846329498291016, 0.03863462448120117, 0.03848172760009766, 0.0390041618347168, 0.03900636672973633, 0.039198558807373045, 0.039131038665771486, 0.03933318328857422, 0.03920975875854492, 0.039433696746826175, 0.039249984741210935, 0.03890595245361328, 0.038965118408203124, 0.039112735748291015, 0.039526878356933595, 0.039245376586914064, 0.039131168365478516, 0.039691104888916015, 0.039272449493408204, 0.04541030502319336, 0.03937484741210937, 0.03950950241088867, 0.03945471954345703, 0.0392784309387207, 0.0396038703918457, 0.039397567749023435, 0.03933241653442383, 0.039495552062988284, 0.039475582122802735, 0.039360511779785154, 0.039282302856445316, 0.03952595138549805, 0.039140159606933594, 0.03994374465942383, 0.039206687927246096, 0.039209022521972656, 
0.039041633605957034, 0.04006707382202149, 0.03893209457397461, 0.03925030517578125, 0.03924374389648438, 0.03879446411132813, 0.038881473541259766, 0.03858457565307617, 0.03849679946899414, 0.03861078262329101, 0.03842867279052734, 0.0385167350769043, 0.03870457458496094, 0.03838009643554687, 0.03862262344360352, 0.03850915145874023, 0.03853311920166016, 0.03851366424560547, 0.03840703964233398, 0.03878022384643555, 0.03849708938598633, 0.03909427261352539, 0.039261760711669924, 0.03910700988769531, 0.03938304138183594, 0.03920399856567383, 0.03914223861694336, 0.039505470275878904, 0.039774654388427734, 0.0393175048828125, 0.03963289642333984, 0.03934822463989258, 0.039293087005615235, 0.03917302322387695, 0.03941676712036133, 0.03948339080810547, 0.039419902801513675, 0.039693729400634765, 0.0394134407043457, 0.03949456024169922, 0.039645313262939456, 0.03939862442016601, 0.0395940170288086, 0.039546592712402344, 0.04001500701904297, 0.039836414337158205, 0.039124385833740234, 0.038442943572998045, 0.03866396713256836, 0.040225761413574215, 0.03870105743408203, 0.03854655838012695, 0.038839038848876954, 0.03893670272827148, 0.03851011276245117, 0.03849795150756836, 0.0384101448059082, 0.03845177459716797, 0.038441150665283204, 0.03855542373657227, 0.03845158386230469, 0.03829708862304688, 0.03836703872680664, 0.03844476699829102, 0.038487007141113284, 0.03859436798095703, 0.03867052841186523, 0.04043299102783203, 0.039614879608154296, 0.039549182891845704, 0.03916755294799805, 0.03917830276489258, 0.03935475158691406, 0.039179840087890626, 0.039077632904052736, 0.03913132858276367, 0.0391992301940918, 0.03897958374023437, 0.03949897766113281, 0.03916851043701172, 0.03915804672241211, 0.03900435256958008, 0.0387786865234375, 0.03868876647949219, 0.03868876647949219, 0.038966911315917965, 0.038663646697998044, 0.03876752090454102, 0.03939110565185547, 0.03914275360107422, 0.03888185501098633, 0.03885395050048828, 0.039059776306152344, 0.03883411026000977, 0.03850102233886719, 0.03887859344482422, 0.038666622161865234, 0.03851440048217773, 0.03874256134033203, 0.038490207672119144, 0.03881564712524414, 0.03877062225341797, 0.03852000045776367, 0.038699905395507814, 0.03887923049926758, 0.0385794563293457, 0.03940972900390625, 0.03936665725708008, 0.03906710433959961, 0.03904975891113281, 0.03959807968139648, 0.03955260848999023, 0.039350688934326174, 0.039213054656982424, 0.039820735931396484, 0.03925049591064453, 0.039317054748535155, 0.039442558288574216, 0.03907110214233398, 0.0391212158203125, 0.03901033782958984, 0.03903753662109375, 0.03883212661743164, 0.03873779296875, 0.03899532699584961, 0.038834304809570314, 0.03863843154907227, 0.03903776168823242, 0.03879811096191406, 0.038998046875, 0.039227550506591796, 0.03921654510498047, 0.04144803237915039, 0.038956737518310545, 0.03899628829956055, 0.03876012802124024, 0.03850223922729492, 0.03845987319946289, 0.03855721664428711, 0.03838111877441406, 0.03839888000488281, 0.038508544921875, 0.03845667266845703, 0.041138206481933594, 0.03907244873046875, 0.038708927154541016, 0.03881600189208984, 0.03864985656738281, 0.03890745544433594, 0.03896771240234375, 0.03858243179321289, 0.03849407958984375, 0.04115817642211914, 0.03872169494628906, 0.03843683242797852, 0.03849660873413086, 0.038368896484375, 0.0386187858581543, 0.03859667205810547, 0.03860448074340821, 0.03848223876953125, 0.03895391845703125, 0.039704288482666016, 0.03907174301147461, 0.03928793716430664, 0.03901887893676758, 0.03924224090576172, 0.03911065673828125, 
0.03908758544921875, 0.03880809783935547, 0.03966595077514649, 0.03893494415283203, 0.03852492904663086, 0.03840358352661133, 0.03842508697509766, 0.03868467330932617, 0.03884848022460938, 0.039012382507324216, 0.03910246276855469, 0.039041023254394534, 0.03906268692016602, 0.03902483367919922, 0.03907180786132813, 0.039387744903564455, 0.03939731216430664, 0.03899808120727539, 0.03890585708618164, 0.03919820785522461, 0.0391992301940918, 0.038843711853027346, 0.038892223358154294, 0.038749313354492186, 0.03870809555053711, 0.038661312103271485, 0.038812000274658205, 0.038793472290039065, 0.039629024505615236, 0.0400849609375, 0.03909891128540039, 0.039032833099365234, 0.03902668762207031, 0.03973251342773437, 0.039277278900146484, 0.039198078155517575, 0.039293567657470704, 0.03915516662597656, 0.039332382202148436, 0.03959014511108398, 0.0396753921508789, 0.03968150329589844, 0.039446975708007814, 0.039083999633789064, 0.03919091033935547, 0.03918412780761719, 0.03893471908569336, 0.03896531295776367, 0.039049217224121094, 0.039016448974609375, 0.039190528869628906, 0.03946012878417969, 0.03944694519042969, 0.039629119873046875, 0.039394847869873045, 0.03953446578979492, 0.03957411193847656, 0.03942569732666015, 0.03968649673461914, 0.03944831848144531, 0.03938943862915039, 0.03933958435058594, 0.04362284851074219, 0.03954483032226563, 0.03944243240356445, 0.04016880035400391, 0.039348670959472656, 0.03923353576660156, 0.039300926208496095, 0.0392852783203125, 0.03922521591186524, 0.03951628875732422, 0.03936665725708008, 0.03937833786010742, 0.0392341423034668, 0.039394943237304685, 0.03970086288452149, 0.03967180633544922, 0.03934828948974609, 0.039312576293945314, 0.03913600158691406, 0.03929827117919922, 0.03882620620727539, 0.038986305236816406, 0.038803455352783206, 0.03857926559448242, 0.03850495910644531, 0.03877318572998047, 0.03856291198730469, 0.03852790451049805, 0.038573345184326174, 0.03888816070556641, 0.03899932861328125, 0.03910284805297851, 0.03941388702392578, 0.03931363296508789, 0.03980287933349609, 0.03911043167114258, 0.03920105743408203, 0.03930828857421875, 0.039113662719726563, 0.03892428970336914, 0.03893840026855469, 0.03915987014770508, 0.03896271896362305, 0.03897817611694336, 0.039230751037597655, 0.03930550384521484, 0.039449024200439456, 0.03946416091918945, 0.03971049499511719, 0.03945369720458984, 0.03952006530761719, 0.03932153701782227, 0.039551231384277345, 0.03940556716918946, 0.039387134552001955, 0.03965478515625, 0.03946355056762695, 0.03949977493286133, 0.03943388748168945, 0.039325504302978515, 0.039680545806884765, 0.03949929428100586, 0.03961471939086914, 0.0395, 0.03959603118896484, 0.04082876968383789, 0.03980204772949219, 0.03959894561767578, 0.03932156753540039, 0.03921920013427734, 0.03950592041015625, 0.039376895904541014, 0.03960319900512695, 0.039430721282958985, 0.0396923828125, 0.04012633514404297, 0.0394818229675293, 0.0395976333618164, 0.03931289672851562, 0.039010848999023434, 0.039489952087402344, 0.03988668823242188, 0.03945872116088867, 0.038946495056152344, 0.0390948486328125, 0.03925142288208008, 0.03919926452636719, 0.03928585433959961, 0.04003113555908203, 0.03934316635131836, 0.03969734573364258, 0.039773632049560546, 0.039634849548339846, 0.03948419189453125, 0.040287967681884765, 0.03998438262939453, 0.040465023040771486, 0.03992995071411133, 0.04022700881958008, 0.039895103454589846, 0.040083393096923825, 0.03973734283447266, 0.03988275146484375, 0.03961999893188477, 0.03933222579956055, 0.03924604797363281, 
0.039198719024658206, 0.03909427261352539, 0.039065601348876954, 0.040382240295410154, 0.03973756790161133, 0.03940358352661133, 0.03982262420654297, 0.04093964767456055, 0.03904771041870117, 0.03912704086303711, 0.03878911972045898, 0.03920099258422852, 0.040629150390625, 0.03907900619506836, 0.03906092834472656, 0.03892185592651367, 0.03923952102661133, 0.03951475143432617, 0.040087329864501954, 0.03931321716308594, 0.039315166473388674, 0.03920582580566406, 0.03905023956298828, 0.03998118209838867, 0.039470272064208986, 0.0390904655456543, 0.03932419204711914, 0.03901567840576172, 0.039160575866699215, 0.039534591674804685, 0.04044800186157226, 0.039487232208251954, 0.03938751983642578, 0.039882625579833984, 0.03957110214233398, 0.039412063598632814, 0.03908153533935547, 0.039124736785888674, 0.042922687530517575, 0.039774208068847655, 0.039569408416748046, 0.039518207550048826, 0.04056883239746094, 0.039567039489746096, 0.03917446517944336, 0.03937484741210937, 0.039397087097167965, 0.039772449493408205, 0.039425857543945314, 0.04220332717895508, 0.039395328521728515, 0.03929849624633789, 0.03913580703735352, 0.03916185760498047, 0.03991551971435547, 0.03898489761352539, 0.03862611389160156, 0.03849625778198242, 0.03853311920166016, 0.03874390411376953, 0.038395488739013675, 0.03866476821899414, 0.038799072265625, 0.03881331253051758, 0.03893683242797852, 0.03870352172851563, 0.03901232147216797, 0.03957148742675781, 0.03980297470092774, 0.03981916809082031, 0.03969555282592773, 0.03970038223266602, 0.039682304382324216, 0.03976668930053711, 0.039774112701416016, 0.03982121658325195, 0.03970307159423828, 0.03955062484741211, 0.03924991989135742, 0.039279678344726565, 0.039574462890625, 0.039019775390625, 0.03919948959350586, 0.03893388748168945, 0.0389536018371582, 0.039806976318359374, 0.039007614135742184, 0.03877260971069336, 0.03833318328857422, 0.03852188873291015, 0.03854025650024414, 0.03827206420898437, 0.03819974517822266, 0.03820499038696289, 0.0382020149230957, 0.03854159927368164, 0.038606464385986326, 0.0386297607421875, 0.03903263854980469, 0.038846656799316405, 0.0390467529296875, 0.03876214218139649, 0.03980963134765625, 0.039780513763427734, 0.03992575836181641, 0.03991551971435547, 0.03995238494873047, 0.039823265075683595, 0.039612510681152346, 0.03942195129394531, 0.03949337768554687, 0.03951241683959961, 0.03984502410888672, 0.03977471923828125, 0.039524608612060544, 0.03958972930908203, 0.03986652755737305, 0.039172096252441405, 0.03922739028930664, 0.03906569671630859, 0.039118465423583985, 0.039151073455810544, 0.03900262451171875, 0.03886111831665039, 0.038782527923583984, 0.03903084945678711, 0.03913683319091797, 0.03912073516845703, 0.03952297592163086, 0.03900406265258789, 0.038582687377929685, 0.03848102569580078, 0.03857497787475586, 0.03879116821289062, 0.039272449493408204, 0.039371807098388674, 0.040106655120849606, 0.041148414611816404, 0.039405887603759765, 0.03928604888916016, 0.03935836791992187, 0.039594432830810544, 0.039250049591064456, 0.039145729064941404, 0.03913264083862305, 0.039066303253173826, 0.039540576934814456, 0.03934822463989258, 0.03945033645629883]",tokens/s,25.464580610019265,,, 
4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 651, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 423, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 664, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Deci/DeciLM-7B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Deci/DeciLM-7B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in 
launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,2079.420416,2440.953856,0.0,2038.431744,1895.42912,s,1,9.404107421875,9.404107421875,0.0,9.404107421875,9.404107421875,9.404107421875,9.404107421875,[9.404107421875],,kWh,6.616547877915764e-05,7.291232553333861e-06,2.7718355507994064e-05,0.00010117506684048556,,MB,2170.748928,2457.731072,0.0,2040.528896,1788.34688,s,10,0.87230712890625,0.08723071289062499,0.00034458461402466113,0.0872159538269043,0.08761315689086914,0.08768603401184083,0.08774433570861817,"[0.08750233459472656, 0.0877589111328125, 0.08753465270996094, 0.08716793823242187, 0.08759696197509766, 0.08726396942138671, 0.08690828704833985, 0.08663132476806641, 0.08702291107177734, 0.08691983795166015]",tokens/s,2934.7461635558097,kWh,2.6031716906709063e-06,2.8708164947844127e-07,1.728927145185851e-06,4.619180485335199e-06,tokens/kWh,55421086.23223085,MB,2179.702784,2457.731072,0.0,2040.528896,1853.031936,s,10,14.895241699218749,1.4895241699218749,0.18316504213721582,1.5893568725585938,1.6576538330078125,1.6643720825195312,1.6697466821289062,"[1.655522216796875, 1.65103125, 1.6536485595703125, 1.67109033203125, 1.656160888671875, 1.5276824951171875, 1.270933349609375, 1.2681903076171874, 1.2722152099609374, 1.26876708984375]",tokens/s,42.29538618584775,kWh,4.828091793516253e-05,5.324705741087847e-06,2.3431315205211304e-05,7.703693888146167e-05,tokens/kWh,817789.5035125864,,s,630,14.88834210014342,0.02363228904784672,0.0030572147525366345,0.025939871788024902,0.02656772403717041,0.02666811056137085,0.027330661029815677,"[0.026607519149780275, 0.026389280319213868, 0.02617344093322754, 0.026273792266845702, 0.026191871643066408, 0.026578943252563478, 0.026072383880615235, 0.025921503067016603, 0.02599955177307129, 0.02594870376586914, 
0.02616524887084961, 0.026154111862182618, 0.026608352661132813, 0.026175647735595702, 0.026021408081054687, 0.025961151123046877, 0.026009088516235353, 0.026411296844482422, 0.027570016860961916, 0.02666102409362793, 0.02660147285461426, 0.026279199600219728, 0.02638307189941406, 0.026234399795532225, 0.026343904495239257, 0.026078208923339844, 0.026076160430908202, 0.025963840484619142, 0.02643737602233887, 0.026362815856933595, 0.026249216079711913, 0.02609766387939453, 0.02612224006652832, 0.02594646453857422, 0.025874080657958983, 0.025894912719726562, 0.026078432083129884, 0.02611075210571289, 0.026193920135498046, 0.02616012763977051, 0.025993791580200196, 0.02590764808654785, 0.025827327728271485, 0.025831424713134765, 0.025870208740234376, 0.02736345672607422, 0.02634752082824707, 0.02627993583679199, 0.02612838363647461, 0.026222591400146485, 0.02653388786315918, 0.026582048416137694, 0.02661196708679199, 0.026532575607299803, 0.02651136016845703, 0.02663599967956543, 0.026973695755004884, 0.026758975982666015, 0.026461151123046874, 0.02620345687866211, 0.026269535064697265, 0.02596950340270996, 0.026050559997558592, 0.02647859191894531, 0.02631679916381836, 0.02614067268371582, 0.025985023498535157, 0.026024991989135743, 0.02599171257019043, 0.02602956771850586, 0.026051519393920898, 0.02623219108581543, 0.026171167373657225, 0.026335424423217773, 0.02596246337890625, 0.025987775802612304, 0.026027616500854493, 0.026472864151000978, 0.026443168640136717, 0.026312959671020507, 0.026296672821044923, 0.026251264572143555, 0.026408960342407226, 0.026281984329223632, 0.02615500831604004, 0.026290176391601562, 0.026248960494995116, 0.02602828788757324, 0.026064895629882814, 0.02591049575805664, 0.026669824600219726, 0.026357791900634767, 0.026660831451416015, 0.02655779266357422, 0.026464960098266602, 0.026385536193847658, 0.026495872497558595, 0.02676736068725586, 0.026549440383911133, 0.026571584701538087, 0.026388479232788087, 0.026314271926879882, 0.026159584045410158, 0.02614633560180664, 0.026152767181396485, 0.026415775299072266, 0.02627084732055664, 0.026276512145996092, 0.026065120697021483, 0.026056480407714844, 0.025966527938842774, 0.026128671646118165, 0.02601126480102539, 0.026120479583740235, 0.02594416046142578, 0.025952255249023438, 0.02593971252441406, 0.025909503936767577, 0.025822240829467772, 0.02593891143798828, 0.026088607788085937, 0.026192447662353516, 0.025992895126342775, 0.026184127807617186, 0.026055967330932617, 0.025869184494018555, 0.027455455780029298, 0.027964031219482422, 0.026428672790527345, 0.026657344818115235, 0.026239168167114257, 0.026283008575439453, 0.02653900718688965, 0.026409984588623047, 0.026327552795410155, 0.02623744010925293, 0.02648819160461426, 0.02654844856262207, 0.02642576026916504, 0.026337024688720703, 0.026337535858154296, 0.026343423843383788, 0.02621824073791504, 0.026074527740478515, 0.02621526336669922, 0.026202112197875976, 0.02610495948791504, 0.026213247299194335, 0.026164735794067383, 0.026091487884521484, 0.026665504455566407, 0.026228736877441407, 0.02614240074157715, 0.02592086410522461, 0.02590985679626465, 0.026066656112670897, 0.025995935440063477, 0.025843360900878905, 0.026359392166137696, 0.02597964859008789, 0.025863487243652342, 0.02581164741516113, 0.025899007797241212, 0.02640208053588867, 0.02601238441467285, 0.02595568084716797, 0.025889087677001953, 0.02621241569519043, 0.026337055206298827, 0.026450719833374024, 0.02682646369934082, 0.026571935653686523, 0.026557279586791993, 0.02633839988708496, 
0.026588064193725586, 0.026619903564453123, 0.02614681625366211, 0.025987071990966795, 0.026023456573486328, 0.025821664810180663, 0.025810495376586914, 0.025857824325561524, 0.026043039321899414, 0.025923583984375, 0.025899007797241212, 0.025808799743652345, 0.026075231552124024, 0.025855552673339843, 0.02582943916320801, 0.026235328674316407, 0.026503135681152343, 0.026163200378417968, 0.026220256805419923, 0.02661609649658203, 0.026297887802124022, 0.026194400787353515, 0.02655411148071289, 0.026237184524536133, 0.02639695930480957, 0.026369760513305664, 0.02699673652648926, 0.02874367904663086, 0.026685440063476562, 0.0267325439453125, 0.026998783111572267, 0.026695680618286134, 0.02660147285461426, 0.02654412841796875, 0.028047359466552735, 0.026631967544555664, 0.026676671981811523, 0.02683776092529297, 0.026613792419433593, 0.026754783630371093, 0.026635616302490235, 0.02667206382751465, 0.026648576736450196, 0.02689001655578613, 0.027150367736816405, 0.026626239776611327, 0.026423295974731444, 0.02647039985656738, 0.02634262466430664, 0.026706720352172853, 0.02645167922973633, 0.026552064895629883, 0.026455968856811524, 0.026567295074462892, 0.026533279418945312, 0.026511968612670897, 0.026244352340698242, 0.026280704498291015, 0.026085216522216795, 0.026095775604248046, 0.025941408157348633, 0.026088031768798828, 0.025999359130859375, 0.02612633514404297, 0.025987071990966795, 0.02612224006652832, 0.02607923126220703, 0.02613862419128418, 0.02610380744934082, 0.02638217544555664, 0.02643984031677246, 0.02615407943725586, 0.026309152603149415, 0.02725036811828613, 0.0266492805480957, 0.02637414360046387, 0.026224639892578124, 0.026497024536132813, 0.026648927688598632, 0.026732255935668945, 0.026615743637084962, 0.02703785514831543, 0.027988224029541015, 0.027014848709106445, 0.02677996826171875, 0.02650931167602539, 0.02635113525390625, 0.02630089569091797, 0.026169343948364256, 0.02614271926879883, 0.025896223068237304, 0.026105663299560548, 0.025833791732788085, 0.025721439361572264, 0.025821184158325194, 0.025853952407836913, 0.0259051513671875, 0.02579017639160156, 0.02709328079223633, 0.027192703247070314, 0.026466848373413086, 0.02617148780822754, 0.02618320083618164, 0.025971168518066405, 0.026222591400146485, 0.026179584503173828, 0.026167232513427733, 0.02597599983215332, 0.025951135635375978, 0.026060447692871094, 0.026067071914672852, 0.026114240646362304, 0.02614681625366211, 0.026082752227783203, 0.0263272647857666, 0.026537984848022462, 0.02654038429260254, 0.02644742393493652, 0.026544319152832032, 0.026630399703979492, 0.026666015625, 0.02664137649536133, 0.02694044876098633, 0.026546335220336913, 0.0264682559967041, 0.026169599533081053, 0.025987136840820314, 0.026083967208862305, 0.025968288421630858, 0.02598464012145996, 0.02590176010131836, 0.026021888732910156, 0.02591948890686035, 0.02610380744934082, 0.026202112197875976, 0.026136512756347655, 0.025940031051635743, 0.025810943603515626, 0.025808895111083984, 0.02577359962463379, 0.025958879470825195, 0.02648534393310547, 0.026161151885986327, 0.02615910339355469, 0.025828895568847657, 0.025950687408447266, 0.02612838363647461, 0.026169248580932617, 0.026187871932983397, 0.02634956741333008, 0.026193920135498046, 0.026085311889648438, 0.02602176094055176, 0.026072736740112304, 0.026007808685302735, 0.026282272338867187, 0.02604854393005371, 0.025976064682006837, 0.02601628875732422, 0.026044063568115235, 0.02618217658996582, 0.026408960342407226, 0.026288063049316406, 0.026323007583618163, 
0.026488832473754883, 0.02647039985656738, 0.02633932876586914, 0.02645987129211426, 0.02661404800415039, 0.02649907112121582, 0.026290176391601562, 0.026207456588745116, 0.02613327980041504, 0.026144128799438476, 0.0262478084564209, 0.026604768753051757, 0.026421119689941406, 0.026626976013183593, 0.026422271728515623, 0.026250240325927734, 0.026216415405273436, 0.026361471176147462, 0.02217206382751465, 0.020330495834350586, 0.020248575210571287, 0.020262367248535158, 0.020379711151123046, 0.020109792709350587, 0.020183040618896485, 0.020291584014892578, 0.020402175903320312, 0.020525056838989256, 0.02043084716796875, 0.020355072021484375, 0.02024448013305664, 0.020361215591430663, 0.02106368064880371, 0.02041644859313965, 0.021594175338745115, 0.020420095443725587, 0.020246591567993164, 0.02044358444213867, 0.020150272369384766, 0.020071456909179688, 0.020726240158081055, 0.020407968521118165, 0.020347583770751954, 0.02003049659729004, 0.020030431747436524, 0.020109247207641602, 0.02065376091003418, 0.020394336700439452, 0.02015558433532715, 0.020269567489624024, 0.020214111328125, 0.020287488937377928, 0.020353023529052734, 0.02025881576538086, 0.020215776443481444, 0.020246559143066407, 0.02017673683166504, 0.02037161636352539, 0.020412416458129884, 0.020368736267089845, 0.020361888885498048, 0.020985855102539062, 0.02041372871398926, 0.02041468811035156, 0.02015897560119629, 0.020033536911010744, 0.020122623443603514, 0.020523296356201173, 0.020031200408935548, 0.01991756820678711, 0.019935487747192383, 0.019992576599121094, 0.020154367446899413, 0.020083808898925783, 0.02001807975769043, 0.019977760314941407, 0.019913183212280274, 0.01998774337768555, 0.019935968399047852, 0.01988400077819824, 0.020070432662963867, 0.019838655471801758, 0.01983929634094238, 0.019951616287231445, 0.020602880477905275, 0.019927040100097656, 0.019874879837036133, 0.01985807991027832, 0.02014031982421875, 0.019945472717285157, 0.019984384536743165, 0.01997750473022461, 0.019950464248657228, 0.01993302345275879, 0.020031488418579102, 0.02004582405090332, 0.02011136054992676, 0.020316160202026368, 0.020137983322143553, 0.02027519989013672, 0.020102432250976562, 0.020154655456542967, 0.020138431549072265, 0.020268800735473633, 0.020346719741821288, 0.020036287307739258, 0.02022400093078613, 0.0209039363861084, 0.02009702491760254, 0.019959808349609375, 0.020010719299316405, 0.019949087142944334, 0.01985568046569824, 0.01986400032043457, 0.019904512405395508, 0.01992198371887207, 0.01992390441894531, 0.02004582405090332, 0.020051967620849608, 0.019953056335449217, 0.01983856010437012, 0.019915775299072267, 0.020329856872558595, 0.019808895111083986, 0.01990809631347656, 0.019982847213745117, 0.01983011245727539, 0.019861536026000978, 0.01985804748535156, 0.02003763198852539, 0.02016819190979004, 0.02020403289794922, 0.020041311264038086, 0.02009129524230957, 0.020164287567138672, 0.020291648864746093, 0.02021116828918457, 0.020108064651489257, 0.020139135360717774, 0.01996460723876953, 0.019923135757446288, 0.020350976943969725, 0.020015104293823242, 0.020082687377929686, 0.019939231872558593, 0.01994697570800781, 0.019923583984375, 0.019904512405395508, 0.019955839157104492, 0.019965087890625, 0.020033920288085937, 0.020332895278930663, 0.02009075164794922, 0.020149919509887697, 0.020342336654663087, 0.020394912719726564, 0.02089574432373047, 0.020576255798339844, 0.02045350456237793, 0.020375423431396485, 0.020356735229492186, 0.020351360321044922, 0.020350944519042968, 0.020246559143066407, 
0.020207616806030275, 0.020137983322143553, 0.020456863403320313, 0.020224960327148437, 0.02019705581665039, 0.02001686477661133, 0.020087392807006835, 0.019947519302368166, 0.019955711364746095, 0.019969280242919923, 0.019941600799560547, 0.01989686393737793, 0.019967103958129884, 0.01985011291503906, 0.019838623046875, 0.01991235160827637, 0.019912992477416992, 0.019986848831176757, 0.020086399078369142, 0.020008575439453124, 0.020509439468383787, 0.020527103424072265, 0.019971712112426758, 0.023011711120605467, 0.020274751663208006, 0.020703039169311523, 0.02026348876953125, 0.020083904266357422, 0.0200828800201416, 0.020064960479736327, 0.01999168014526367, 0.020159360885620117, 0.020002048492431642, 0.020013568878173828, 0.020230239868164062, 0.020265119552612305, 0.02030182456970215, 0.02022761535644531, 0.020293439865112305, 0.020306272506713866, 0.020228416442871093, 0.02024393653869629, 0.020331039428710937, 0.02030182456970215, 0.020537343978881836, 0.02049228858947754, 0.020545536041259766, 0.020436992645263673, 0.02031820869445801, 0.020273151397705077, 0.02030182456970215, 0.02019327926635742, 0.020154016494750977, 0.020062559127807616, 0.019998720169067383, 0.019932416915893553, 0.019966272354125975, 0.019937728881835936, 0.019979776382446288, 0.019978208541870116, 0.019950111389160155, 0.019885503768920898, 0.019871519088745116, 0.019976640701293947, 0.01996188735961914, 0.020414047241210938, 0.020319007873535157, 0.02032611274719238, 0.020156160354614257, 0.02018943977355957, 0.02027497673034668, 0.02004195213317871, 0.020039680480957032, 0.020063295364379882, 0.020038591384887696, 0.019984384536743165, 0.02010918426513672, 0.01983683204650879, 0.019804159164428712, 0.019875648498535157, 0.019820959091186523, 0.019947519302368166, 0.019926464080810547, 0.02001263999938965, 0.020161184310913086, 0.02007276725769043, 0.02004172706604004, 0.020072416305541994, 0.020231935501098634, 0.020300064086914062, 0.020280736923217774, 0.02038422393798828, 0.02030758476257324, 0.020257280349731444, 0.02026838493347168, 0.02003215980529785, 0.019826688766479493, 0.020316160202026368, 0.02031820869445801, 0.019893760681152343, 0.019819007873535157, 0.019802112579345704, 0.01980620765686035, 0.02070265579223633, 0.01984364891052246, 0.01997369575500488, 0.0198721923828125, 0.01984511947631836, 0.019945472717285157, 0.01998953628540039, 0.01989731216430664, 0.019817888259887697, 0.020066240310668945, 0.01991107177734375, 0.019867679595947266, 0.01989187240600586, 0.01980473518371582, 0.019910655975341796, 0.019858911514282228, 0.021108863830566406, 0.01986729621887207, 0.019820287704467775, 0.023010303497314453, 0.021034400939941408, 0.020033119201660156, 0.02008166313171387, 0.020049119949340822, 0.020312864303588866]",tokens/s,42.31498683751568,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1884.52864,3105.75104,0.0,2703.228928,2640.03584,s,1,10.1569033203125,10.1569033203125,0.0,10.1569033203125,10.1569033203125,10.1569033203125,10.1569033203125,[10.1569033203125],,kWh,7.282366370418458e-05,8.025725611110727e-06,2.8268911503981364e-05,0.00010911830081927667,,MB,2023.38304,3932.028928,0.0,3512.7296,3035.408384,s,10,3.8691545104980465,0.38691545104980474,0.003637641688783701,0.38701538085937504,0.39206519775390625,0.3925721343994141,0.3929776837158203,"[0.38622512817382815, 0.39195254516601563, 0.3891755065917969, 0.3880172119140625, 0.38507891845703124, 0.3813725891113281, 0.38332559204101563, 0.38780563354492187, 0.3930790710449219, 0.383122314453125]",tokens/s,661.6432590257219,kWh,1.6694580696762556e-05,1.8409611421984087e-06,6.956039515445791e-06,2.5491581354406757e-05,tokens/kWh,10042531.157281265,MB,2033.463296,3936.223232,0.0,3514.826752,3035.410944,s,10,24.498244628906253,2.449824462890625,0.013429086684834014,2.4488443603515626,2.465947924804688,2.4710298706054687,2.4750954272460937,"[2.4390087890625, 2.453070556640625, 2.456275634765625, 2.464818603515625, 2.47611181640625, 2.456173828125, 
2.43289453125, 2.4446181640625, 2.43183642578125, 2.443436279296875]",tokens/s,25.716128218290514,kWh,7.50368852978225e-05,8.276974104079337e-06,3.5673411254752735e-05,0.00011898727065665455,tokens/kWh,529468.4015552434,,s,630,24.496440612792973,0.038883239067925346,0.0004833938857627543,0.03880998420715332,0.03933204536437988,0.03947745399475098,0.04063166599273682,"[0.038983680725097655, 0.038698974609375, 0.038641342163085936, 0.0384249267578125, 0.03854950332641602, 0.038529022216796875, 0.038504447937011715, 0.03871744155883789, 0.038430721282958984, 0.03842047882080078, 0.03862732696533203, 0.03865753555297852, 0.03838617706298828, 0.038940673828125, 0.039747264862060545, 0.03860012817382812, 0.03922419357299805, 0.038698974609375, 0.03851472091674805, 0.03852864074707031, 0.038430721282958984, 0.038496639251708986, 0.038430721282958984, 0.03852288055419922, 0.03857408142089844, 0.03844095993041992, 0.03858227157592774, 0.038540767669677733, 0.03881740951538086, 0.03863216018676758, 0.03851203155517578, 0.03851958465576172, 0.03852457427978516, 0.03853123092651367, 0.03853126525878906, 0.03856758499145508, 0.038696575164794925, 0.03874070358276367, 0.038543296813964845, 0.038664257049560544, 0.038365184783935545, 0.03846963119506836, 0.03861708831787109, 0.038655872344970706, 0.038547584533691406, 0.038809600830078124, 0.03896934509277344, 0.038825279235839845, 0.038853313446044924, 0.03876454544067383, 0.03911065673828125, 0.03887014389038086, 0.038842273712158204, 0.0388515510559082, 0.038941761016845704, 0.03890911865234375, 0.038924030303955075, 0.039142879486083984, 0.03925660705566406, 0.03895046234130859, 0.039005760192871095, 0.03888150405883789, 0.039147552490234376, 0.03921775817871094, 0.038991870880126955, 0.038817790985107424, 0.03887308883666992, 0.038913150787353516, 0.03867494583129883, 0.038719871520996096, 0.03872972869873047, 0.03899801635742187, 0.039002113342285157, 0.038815425872802733, 0.03899571228027344, 0.03891987228393555, 0.039201663970947265, 0.03961577606201172, 0.0394738883972168, 0.03884572982788086, 0.03880972671508789, 0.038650463104248044, 0.038711296081542966, 0.0387092170715332, 0.03865398406982422, 0.03887494277954102, 0.03893267059326172, 0.03882556915283203, 0.03902096176147461, 0.038703102111816406, 0.0390959358215332, 0.03863587188720703, 0.03870483016967773, 0.038741600036621096, 0.03865676879882812, 0.03911884689331055, 0.039257598876953126, 0.03898374557495117, 0.03888787078857422, 0.038785022735595705, 0.03905238342285156, 0.03904758453369141, 0.03910895919799805, 0.03890966415405273, 0.0390733757019043, 0.03909891128540039, 0.03995065689086914, 0.03937279891967774, 0.039620574951171876, 0.03917398452758789, 0.039151809692382813, 0.03923763275146484, 0.03879731369018555, 0.038959102630615236, 0.03882377624511719, 0.038758560180664064, 0.03927964782714844, 0.038628318786621096, 0.03871113586425781, 0.03873603057861328, 0.03854111862182617, 0.03870124816894531, 0.03874534225463867, 0.03866860961914063, 0.03857984161376953, 0.038602783203125, 0.0400447998046875, 0.03911475372314453, 0.03901555252075195, 0.039347007751464845, 0.0392479362487793, 0.03923763275146484, 0.039018497467041016, 0.039190528869628906, 0.03926976013183594, 0.0390577278137207, 0.039184703826904296, 0.039036865234375, 0.039046497344970704, 0.0391421127319336, 0.039008255004882815, 0.03910198211669922, 0.038908382415771485, 0.03901353454589844, 0.039144287109375, 0.03935548782348633, 0.03934505462646484, 0.03899110412597656, 0.03934284973144531, 0.03895235061645508, 
0.03916041564941406, 0.039008255004882815, 0.03894185638427734, 0.038636383056640626, 0.03876249694824219, 0.03856291198730469, 0.038556575775146484, 0.03854950332641602, 0.0384716796875, 0.03859408187866211, 0.03875068664550781, 0.03877478408813476, 0.03867766571044922, 0.03862524795532227, 0.03861324691772461, 0.03918236923217774, 0.03874671936035156, 0.038834175109863284, 0.038787071228027346, 0.03875174331665039, 0.038990337371826174, 0.03911414337158203, 0.03902934265136719, 0.03889561462402344, 0.0388218879699707, 0.03906294250488281, 0.03963475036621094, 0.03898038482666016, 0.0387665901184082, 0.03938886260986328, 0.03884064102172852, 0.03891785430908203, 0.039104801177978515, 0.03893164825439453, 0.03885958480834961, 0.03889708709716797, 0.03892281723022461, 0.039002113342285157, 0.03883340835571289, 0.039495681762695314, 0.03938508987426758, 0.039790592193603515, 0.03947520065307617, 0.0389832649230957, 0.039336158752441404, 0.03931260681152344, 0.03927487945556641, 0.040524574279785154, 0.040030017852783206, 0.03955615997314453, 0.039367294311523436, 0.039129409790039066, 0.03917363357543945, 0.03915817642211914, 0.03920495986938476, 0.0390997428894043, 0.03969232177734375, 0.04103843307495117, 0.039311393737792966, 0.03888102340698242, 0.038658302307128904, 0.03868467330932617, 0.039000064849853515, 0.0389956169128418, 0.03897993469238281, 0.03888451385498047, 0.03878179168701172, 0.038742015838623044, 0.04072777557373047, 0.03918880081176758, 0.03887356948852539, 0.03913859176635742, 0.03925680160522461, 0.03903398513793945, 0.039013248443603515, 0.039083744049072264, 0.03889503860473633, 0.03897225570678711, 0.039134334564208985, 0.039242401123046874, 0.03917148971557617, 0.03893740844726563, 0.0388485107421875, 0.03892428970336914, 0.038787071228027346, 0.03872723388671875, 0.03846118545532227, 0.03853548812866211, 0.03873830413818359, 0.038711296081542966, 0.03896640014648438, 0.03871219253540039, 0.039501121520996094, 0.03913974380493164, 0.038932769775390626, 0.038669471740722654, 0.038764865875244144, 0.0388449592590332, 0.03873747253417969, 0.03858419036865234, 0.038622943878173825, 0.0388034896850586, 0.039542560577392576, 0.0392217903137207, 0.039308929443359376, 0.03910079956054688, 0.03907347106933594, 0.0389573745727539, 0.03915327835083008, 0.03897328186035156, 0.03897603225708008, 0.03890790557861328, 0.039204864501953124, 0.03910601425170898, 0.03881014251708984, 0.03878297424316406, 0.03918595123291015, 0.039426174163818356, 0.03926051330566406, 0.03934207916259766, 0.039190528869628906, 0.039411712646484375, 0.03933184051513672, 0.03933388900756836, 0.03951984024047851, 0.03964355087280273, 0.03935232162475586, 0.03926611328125, 0.03921673583984375, 0.03938569641113281, 0.038912063598632814, 0.03935635375976562, 0.04245043182373047, 0.03949148941040039, 0.0390904655456543, 0.039149406433105466, 0.03918691253662109, 0.039218303680419925, 0.03929792022705078, 0.03907583999633789, 0.03921100616455078, 0.03906150436401367, 0.03916956710815429, 0.03923606491088867, 0.03925193786621094, 0.0394587516784668, 0.03925411224365234, 0.03937279891967774, 0.04063641738891602, 0.03941785430908203, 0.039206302642822266, 0.039231231689453125, 0.039483936309814456, 0.03942758560180664, 0.03928556823730469, 0.03945062255859375, 0.03934822463989258, 0.039272319793701174, 0.039267807006835936, 0.039226016998291015, 0.039188480377197264, 0.03926220703125, 0.03921491241455078, 0.03900844955444336, 0.03877648162841797, 0.038997920989990234, 0.038833568572998044, 0.0387938232421875, 
0.03857612609863281, 0.03859414291381836, 0.03903734588623047, 0.0388587532043457, 0.038954238891601566, 0.03901721572875977, 0.0389939193725586, 0.03945644760131836, 0.038945087432861326, 0.03902848052978516, 0.03959423828125, 0.039593982696533206, 0.039452671051025394, 0.039479297637939455, 0.0393072624206543, 0.03934793472290039, 0.03918262481689453, 0.03916595077514649, 0.03912499237060547, 0.03908956909179687, 0.03910512161254883, 0.0387825927734375, 0.038648193359375, 0.03870521545410156, 0.03858015823364258, 0.03861708831787109, 0.03866624069213867, 0.03868672180175781, 0.039040767669677734, 0.038946144104003905, 0.038855422973632814, 0.03885481643676758, 0.03879935836791992, 0.04062003326416016, 0.03912073516845703, 0.03879967880249023, 0.038890625, 0.0390335693359375, 0.03875219345092774, 0.03873987197875976, 0.0388359375, 0.03883049774169922, 0.03877276611328125, 0.038825313568115236, 0.03884918212890625, 0.038795265197753906, 0.039272064208984374, 0.03892422485351563, 0.039393184661865234, 0.03936105728149414, 0.03885260772705078, 0.03873926544189453, 0.038806209564208986, 0.03867443084716797, 0.03910860824584961, 0.03877439880371094, 0.03880179214477539, 0.038782878875732424, 0.03905344009399414, 0.03886896133422851, 0.03893907165527344, 0.03870038223266602, 0.03901295852661133, 0.038742080688476566, 0.03865971374511719, 0.0386893424987793, 0.03847568130493164, 0.03852825546264648, 0.038806175231933596, 0.03856758499145508, 0.038973438262939454, 0.03860921478271485, 0.038535198211669924, 0.038391807556152346, 0.03845513534545898, 0.03873193740844726, 0.038306049346923825, 0.038870014190673825, 0.03884518432617187, 0.038555648803710936, 0.03855369567871094, 0.03831123352050781, 0.03869887924194336, 0.03847372817993164, 0.038793918609619144, 0.03845328140258789, 0.038472991943359375, 0.038349536895751955, 0.038755840301513675, 0.03854387283325195, 0.038526622772216794, 0.03850476837158203, 0.038354976654052735, 0.03846144104003906, 0.03845119857788086, 0.03833174514770508, 0.03882867050170898, 0.03838911819458008, 0.03831260681152344, 0.03843174362182617, 0.03877545547485352, 0.038610401153564455, 0.0385032958984375, 0.03837712097167969, 0.03875056076049805, 0.038408191680908206, 0.03989619064331055, 0.03840908813476562, 0.03840204620361328, 0.038389759063720705, 0.03837129592895508, 0.038289440155029296, 0.03832598495483398, 0.038322463989257816, 0.038309505462646484, 0.038393791198730466, 0.04024524688720703, 0.03926784133911133, 0.0387163200378418, 0.03907587051391601, 0.03835903930664063, 0.03847126388549805, 0.03866835021972656, 0.038886783599853515, 0.03856972885131836, 0.03846140670776367, 0.038383617401123046, 0.038421215057373045, 0.03849849700927734, 0.038520126342773436, 0.038553985595703125, 0.03856771087646484, 0.038596256256103516, 0.038890369415283205, 0.03880255889892578, 0.038701953887939455, 0.03868057632446289, 0.03848166275024414, 0.0387825927734375, 0.03905724716186523, 0.03874639892578125, 0.03881772613525391, 0.0390313606262207, 0.0388218879699707, 0.03894694519042969, 0.038816959381103515, 0.03912953567504883, 0.03896051025390625, 0.0388616943359375, 0.03871539306640625, 0.039347393035888675, 0.039155582427978515, 0.038908863067626955, 0.038735519409179686, 0.03906092834472656, 0.03905583953857422, 0.038747966766357424, 0.038672000885009765, 0.03923046493530274, 0.038827201843261716, 0.038987934112548826, 0.03906851196289062, 0.03888828659057617, 0.03897430419921875, 0.03869504165649414, 0.038621185302734375, 0.03880316925048828, 0.03911260986328125, 
0.03882022476196289, 0.038731456756591794, 0.03871366500854492, 0.03883414459228516, 0.039017982482910156, 0.03866883087158203, 0.038586624145507814, 0.03850617599487305, 0.03844646453857422, 0.0384785270690918, 0.03844710540771484, 0.03841843032836914, 0.03873177719116211, 0.039221248626708984, 0.03916377639770508, 0.03914355087280273, 0.03911699295043945, 0.038798686981201175, 0.03901987075805664, 0.038836448669433594, 0.03841708755493164, 0.03845500946044922, 0.03839369583129883, 0.038369438171386716, 0.038397953033447264, 0.03845110321044922, 0.0384450569152832, 0.038330432891845706, 0.03840208053588867, 0.04331520080566406, 0.038973472595214845, 0.0384469108581543, 0.03892444610595703, 0.03841228866577148, 0.03855526351928711, 0.038750015258789065, 0.038599231719970706, 0.038817790985107424, 0.03844710540771484, 0.03848191833496094, 0.03845676803588867, 0.03856969451904297, 0.038513504028320315, 0.03841843032836914, 0.03844684982299805, 0.03843916702270508, 0.03860233688354492, 0.03839836883544922, 0.03871945571899414, 0.03850387191772461, 0.0384251823425293, 0.03837500762939453, 0.038459808349609374, 0.03841971206665039, 0.038460159301757814, 0.03846553421020508, 0.038392864227294925, 0.0383927993774414, 0.03856496047973633, 0.03852931213378906, 0.03848255920410156, 0.038498046875, 0.03890201568603516, 0.038694911956787106, 0.03836016082763672, 0.038319007873535156, 0.03908403015136719, 0.038763904571533205, 0.03871603012084961, 0.0384532470703125, 0.038349056243896486, 0.03844684982299805, 0.0383583984375, 0.03844979095458984, 0.03841024017333984, 0.03833977508544922, 0.03858643341064453, 0.03834751892089844, 0.03836476898193359, 0.03852873611450195, 0.038410945892333986, 0.03915081787109375, 0.039029537200927736, 0.043560768127441404, 0.04143942260742187, 0.038934528350830076, 0.03860070419311523, 0.0388702392578125, 0.03855763244628906, 0.038585182189941405, 0.0386129264831543, 0.03856185531616211, 0.03852207946777344, 0.03851686477661133, 0.03843052673339844, 0.0385230712890625, 0.03861161422729492, 0.03847779083251953, 0.038653118133544925, 0.038464351654052736, 0.0385904655456543, 0.03852019119262695, 0.0386115837097168, 0.03857977676391602, 0.038451648712158205, 0.03860857772827148, 0.03860416030883789, 0.0385522575378418, 0.038944385528564454, 0.03868121719360352, 0.0386572151184082, 0.03861587142944336, 0.03856588745117188, 0.038676383972167966, 0.038451297760009766, 0.03848396682739258, 0.03844710540771484, 0.03849420928955078, 0.03857392120361328, 0.03839718246459961, 0.038478111267089846, 0.038453887939453125, 0.038596607208251955, 0.03851878356933594, 0.03845452880859375, 0.038445823669433596, 0.03850979232788086, 0.03856259155273437, 0.038596607208251955, 0.03859772872924805, 0.03849641418457031, 0.03880624008178711, 0.0388098258972168, 0.03880531311035156, 0.03898275375366211, 0.03889654541015625, 0.03875833511352539, 0.03868239974975586, 0.03950211334228516, 0.039032447814941404, 0.03872396850585937, 0.03869081497192383, 0.0388218879699707, 0.03937478256225586]",tokens/s,25.718022057089804,,, 4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, 
Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,3731.329024,5369.69216,0.0,4974.444544,4685.071872,s,1,11.1715478515625,11.1715478515625,0.0,11.1715478515625,11.1715478515625,11.1715478515625,11.1715478515625,[11.1715478515625],,kWh,0.00012451798164166424,1.3727608633347855e-05,5.261337542400088e-05,0.00019085896569901296,,MB,1988.276224,5403.246592,0.0,4993.318912,4233.624576,s,10,1.9220595092773436,0.19220595092773438,0.00023303240069167826,0.19226694488525392,0.19240954589843748,0.1924179229736328,0.19242462463378907,"[0.19225372314453126, 0.19157609558105468, 0.19242630004882813, 0.19224838256835938, 0.19240768432617186, 0.1921829376220703, 0.19234197998046876, 0.19228518676757814, 0.19205705261230469, 0.19228016662597655]",tokens/s,1331.9046510492847,kWh,5.68030962131394e-06,6.264281311712178e-07,3.7552861238846522e-06,1.006202387636981e-05,tokens/kWh,25442197.627974622,MB,1998.278656,5403.246592,0.0,4993.318912,4336.176128,s,10,21.814504150390626,2.1814504150390626,0.024596799761330265,2.18764794921875,2.2065200439453125,2.2078297241210936,2.208877468261719,"[2.176327392578125, 2.205752197265625, 2.209139404296875, 2.198985595703125, 2.167893310546875, 2.163714111328125, 2.136791748046875, 2.150702880859375, 2.20622900390625, 2.198968505859375]",tokens/s,28.87986798401369,kWh,6.44792890078529e-05,7.111966963577139e-06,3.8065941777515405e-05,0.00010965719774894542,tokens/kWh,574517.6905234739,,s,630,21.811262943267828,0.03462105229090131,0.0006365128838513587,0.034709230422973636,0.03513799514770508,0.035320075225830076,0.036336495742797854,"[0.034657791137695314, 0.03510147094726562, 0.035061153411865234, 0.034299678802490234, 0.03433555221557617, 0.0346662712097168, 0.0344005126953125, 0.03419084930419922, 0.034439136505126956, 0.03428403091430664, 0.03415033721923828, 0.034528865814208984, 0.03439379119873047, 0.03421878433227539, 0.03442496109008789, 0.03454553604125977, 0.0344917106628418, 0.03459756851196289, 0.03441196823120117, 0.034748992919921874, 0.03480924987792969, 0.03475219345092773, 0.03456911849975586, 0.03450259017944336, 0.03487468719482422, 0.0345747184753418, 0.03452876663208008, 0.03502735900878906, 0.034482177734375, 0.03420848083496094, 0.03463529586791992, 0.03438582229614258, 0.034328895568847655, 0.034375679016113284, 0.034293441772460936, 0.034229984283447264, 0.03427388763427734, 0.03424380874633789, 0.03431878280639648, 0.03425497436523438, 0.03456991958618164, 0.03460889434814453, 0.03442768096923828, 0.03477673721313477, 0.0345214729309082, 0.034662559509277345, 0.03448320007324219, 0.034349918365478516, 0.034522911071777344, 0.03487353515625, 0.034461441040039065, 0.034713855743408205, 0.03460931015014648, 0.034522815704345705, 0.03444483184814453, 0.03478144073486328, 0.03474166488647461, 0.03471868896484375, 0.034678943634033205, 0.03520259094238281, 0.034642238616943356, 0.034565567016601566, 0.03453580856323242, 0.035050815582275394, 0.0350728645324707, 0.03494390487670898, 0.034964000701904294, 0.03513148880004883, 0.03518086242675781, 0.03522355270385742, 0.03498521423339844, 0.035007232666015624, 0.03561872100830078, 0.03513148880004883, 0.03505692672729492, 0.03503081512451172, 0.03492345428466797, 0.034848159790039065, 0.034869537353515626, 0.03470070266723633, 0.03519811248779297, 0.03518848037719727, 0.03495935821533203, 0.03494876861572266, 0.035037281036376954, 
0.034828544616699215, 0.03522076797485352, 0.03496124649047851, 0.034853759765625, 0.035077278137207034, 0.035126113891601564, 0.03509036636352539, 0.03490208053588867, 0.035053184509277344, 0.0348983039855957, 0.03505718231201172, 0.035375457763671875, 0.03511439895629883, 0.03496137619018555, 0.03522051239013672, 0.035344318389892576, 0.035106494903564454, 0.03503318405151367, 0.035016384124755856, 0.034995967864990235, 0.03510476684570313, 0.03502342224121094, 0.03476591873168945, 0.034845375061035154, 0.035302879333496094, 0.03490291213989258, 0.035522430419921876, 0.0348671989440918, 0.034740222930908206, 0.03491356658935547, 0.03467142486572266, 0.03465107345581055, 0.034697952270507815, 0.03463808059692383, 0.03468921661376953, 0.03466400146484375, 0.03486044692993164, 0.035044193267822266, 0.034952545166015626, 0.0349967041015625, 0.03521865463256836, 0.03520716857910156, 0.035127296447753906, 0.03501055908203125, 0.03508224105834961, 0.03529913711547852, 0.03512118530273438, 0.035379711151123046, 0.03526006317138672, 0.03510441589355469, 0.03473648071289062, 0.034899585723876955, 0.03485561752319336, 0.034762081146240235, 0.034707809448242186, 0.03492195129394531, 0.03464041519165039, 0.03478291320800781, 0.034892097473144534, 0.034840286254882814, 0.034947006225585935, 0.03482803344726562, 0.03480201721191406, 0.035111167907714846, 0.03474985504150391, 0.03483286285400391, 0.03503936004638672, 0.03503308868408203, 0.03524726486206055, 0.03498223876953125, 0.03503279876708985, 0.03512400054931641, 0.034866943359375, 0.03495552062988281, 0.03515830230712891, 0.03488470458984375, 0.03538988876342773, 0.03513772964477539, 0.0352852783203125, 0.03497955322265625, 0.034920448303222655, 0.035052574157714844, 0.03509747314453125, 0.03486265563964844, 0.035844894409179685, 0.034995967864990235, 0.034816001892089846, 0.03489014434814453, 0.035090015411376956, 0.03492428970336914, 0.03510201644897461, 0.03503721618652344, 0.03536374282836914, 0.03494911956787109, 0.03491020965576172, 0.03502489471435547, 0.035334144592285156, 0.03510198211669922, 0.03685772705078125, 0.035224414825439455, 0.035092479705810545, 0.03523763275146485, 0.035066112518310544, 0.034991329193115234, 0.0350777587890625, 0.035180927276611325, 0.035018272399902343, 0.03497385787963867, 0.03511328125, 0.03501628875732422, 0.03501916885375977, 0.035151966094970705, 0.03522361755371094, 0.03485270309448242, 0.035435966491699215, 0.03516473770141602, 0.03478704071044922, 0.03489820861816406, 0.034903968811035156, 0.034994174957275394, 0.03496764755249023, 0.03487744140625, 0.03501260757446289, 0.034990367889404295, 0.03496540832519531, 0.03494236755371094, 0.034904449462890626, 0.03491206359863281, 0.035117279052734374, 0.035133377075195316, 0.03508595275878906, 0.034877120971679686, 0.03481472015380859, 0.03470678329467773, 0.03470998382568359, 0.03485244750976563, 0.03463024139404297, 0.034721694946289065, 0.03468854522705078, 0.034807777404785155, 0.03483299255371094, 0.03468288040161133, 0.03480902481079102, 0.03494380950927734, 0.03493465423583984, 0.03462892913818359, 0.03472681427001953, 0.03473516845703125, 0.03502985763549805, 0.034812030792236326, 0.03462684631347656, 0.0346855354309082, 0.034854911804199216, 0.034708606719970704, 0.03482304000854492, 0.03486848068237305, 0.034875553131103514, 0.03477155303955078, 0.034983615875244144, 0.035020896911621094, 0.03480352020263672, 0.03478160095214844, 0.03489382553100586, 0.03494460678100586, 0.03489628982543945, 0.03458857727050781, 0.03484630584716797, 
0.03506988906860352, 0.034912254333496096, 0.03480112075805664, 0.03493116760253906, 0.0348529281616211, 0.03486515045166016, 0.03518838500976563, 0.035359073638916015, 0.03637350463867187, 0.03528396987915039, 0.034968929290771486, 0.03520169448852539, 0.03498393630981445, 0.034890975952148434, 0.035608993530273435, 0.03492287826538086, 0.034887680053710936, 0.037615135192871095, 0.03499657440185547, 0.03495334243774414, 0.034568321228027346, 0.03449843215942383, 0.03452102279663086, 0.03442694473266601, 0.03426044845581055, 0.03410383987426758, 0.034149856567382814, 0.033927711486816406, 0.03390089416503906, 0.034018913269042966, 0.033905025482177734, 0.033994430541992186, 0.033877246856689455, 0.03415036773681641, 0.03413897705078125, 0.03401523208618164, 0.03393734359741211, 0.03383030319213867, 0.03384707260131836, 0.03395462417602539, 0.0340912971496582, 0.03401084899902344, 0.03382799911499024, 0.033829631805419924, 0.033783584594726565, 0.033818943023681644, 0.03375500869750977, 0.033901920318603514, 0.03382966232299805, 0.03383622360229492, 0.03379692840576172, 0.03393526458740234, 0.033833183288574216, 0.03386150360107422, 0.03395993423461914, 0.03385139083862305, 0.03380601501464844, 0.033976638793945316, 0.03413212966918945, 0.03431164932250977, 0.03432182312011719, 0.03422073745727539, 0.0342256965637207, 0.03465030288696289, 0.034551551818847656, 0.034345409393310544, 0.034385921478271485, 0.03475408172607422, 0.034484703063964846, 0.03437577438354492, 0.03438310241699219, 0.034687393188476565, 0.03460275268554688, 0.03478374481201172, 0.03779116821289062, 0.03495951843261719, 0.03472864151000977, 0.03481958389282227, 0.03483168029785156, 0.03502127838134766, 0.034920833587646485, 0.034848831176757813, 0.03551798248291015, 0.03491635131835937, 0.03466083145141602, 0.03464176177978515, 0.03455123138427734, 0.034400993347167966, 0.034557472229003905, 0.03437641525268555, 0.034153759002685545, 0.03399932861328125, 0.034007392883300784, 0.03410835266113281, 0.033907424926757815, 0.033931102752685544, 0.034285728454589846, 0.03390630340576172, 0.033834590911865234, 0.033880062103271484, 0.03375795364379883, 0.03400278472900391, 0.03398851013183594, 0.034248992919921874, 0.03426639938354492, 0.03424739074707031, 0.03408246231079102, 0.033997150421142576, 0.03403571319580078, 0.03413638305664062, 0.033936351776123044, 0.03403062438964844, 0.03393535995483398, 0.03382854461669922, 0.03379974365234375, 0.034107231140136716, 0.03377008056640625, 0.033955520629882815, 0.03390284729003906, 0.03381033706665039, 0.033839263916015626, 0.03381248092651367, 0.03393539047241211, 0.03403158569335937, 0.03391241455078125, 0.03452048110961914, 0.03415225601196289, 0.03381577682495117, 0.033892833709716796, 0.03376710510253906, 0.033872222900390624, 0.03379657745361328, 0.03380223846435547, 0.0337448959350586, 0.033920352935791015, 0.033802913665771483, 0.033860897064208986, 0.03370687866210938, 0.03379315185546875, 0.03367536163330078, 0.034025310516357425, 0.033710880279541014, 0.033897663116455076, 0.033735488891601564, 0.03384320068359375, 0.033716224670410154, 0.03394355010986328, 0.0337570571899414, 0.033775039672851566, 0.033726337432861325, 0.03386028671264649, 0.03377577590942383, 0.033991935729980466, 0.03378153610229492, 0.03394831848144531, 0.033871200561523436, 0.03384415817260742, 0.033742847442626955, 0.033783584594726565, 0.03364476776123047, 0.03378176116943359, 0.03367353439331055, 0.03386272048950195, 0.03390118408203125, 0.0339865608215332, 0.033832351684570314, 
0.03395955276489258, 0.033810497283935544, 0.03487017440795898, 0.03407014465332031, 0.03392755126953125, 0.03387731170654297, 0.033767681121826175, 0.03374111938476562, 0.03397644805908203, 0.03391392135620117, 0.033792961120605466, 0.033736671447753906, 0.03390985488891601, 0.03505456161499024, 0.03408924865722656, 0.03459449768066406, 0.0339060173034668, 0.0338438720703125, 0.033939456939697264, 0.03512351989746094, 0.03388716888427734, 0.03383987045288086, 0.03388620758056641, 0.03426019287109375, 0.03397481536865234, 0.033840320587158204, 0.03404019165039063, 0.03496966552734375, 0.03562060928344726, 0.03401507186889648, 0.033872608184814454, 0.033859329223632814, 0.03422854232788086, 0.0340398063659668, 0.03400908660888672, 0.03393929672241211, 0.033938785552978516, 0.0338430061340332, 0.03387289428710937, 0.03388729476928711, 0.03388102340698242, 0.03385945510864258, 0.0337204475402832, 0.033764671325683594, 0.033694400787353515, 0.03375718307495117, 0.03417411041259766, 0.03389321517944336, 0.03378598403930664, 0.03414412689208984, 0.033773822784423826, 0.033940895080566406, 0.03378006362915039, 0.03377129745483398, 0.03376316833496094, 0.033693790435791016, 0.03376380920410156, 0.034727489471435544, 0.03447529602050781, 0.033891521453857425, 0.03388095855712891, 0.03391766357421875, 0.03387615966796875, 0.0340766716003418, 0.03410943984985351, 0.03433478546142578, 0.03451017761230469, 0.0338166389465332, 0.03390963363647461, 0.033861343383789065, 0.034175167083740236, 0.034297119140625, 0.03443699264526367, 0.034328670501708985, 0.034880001068115236, 0.0343551025390625, 0.03444543838500977, 0.034609119415283204, 0.03460710525512695, 0.034590049743652346, 0.03458899307250977, 0.03451068878173828, 0.03438153457641602, 0.03435411071777344, 0.034305889129638674, 0.03471260833740234, 0.03517875289916992, 0.03493072128295899, 0.03492262268066406, 0.03503091049194336, 0.03478720092773437, 0.034756736755371095, 0.03490313720703125, 0.03503177642822266, 0.03497334289550781, 0.03487161636352539, 0.03480188751220703, 0.03479321670532227, 0.034928417205810545, 0.03593593597412109, 0.03483337783813477, 0.03501571273803711, 0.034429153442382815, 0.03569926452636719, 0.03606099319458008, 0.03472198486328125, 0.035871967315673825, 0.03470985412597656, 0.034966209411621096, 0.034823070526123046, 0.034859073638916015, 0.036114944458007815, 0.037464351654052735, 0.03471769714355469, 0.03478511810302734, 0.034621601104736326, 0.034653888702392575, 0.03624297714233399, 0.034826656341552735, 0.0345849609375, 0.03468288040161133, 0.03465539169311523, 0.03478524780273438, 0.035003326416015626, 0.03495135879516602, 0.034751873016357425, 0.034746753692626954, 0.034788734436035154, 0.0348084487915039, 0.034723838806152346, 0.034909278869628906, 0.03495935821533203, 0.03524291229248047, 0.03472326278686524, 0.03474256134033203, 0.03491167831420899, 0.03503327941894531, 0.035151840209960934, 0.034726593017578126, 0.03475580978393555, 0.03538000106811524, 0.034911552429199216, 0.03485766220092774, 0.034947071075439456, 0.03485935974121094, 0.03494198226928711, 0.03497564697265625, 0.03515055847167969, 0.034936832427978515, 0.03504537582397461, 0.03501398468017578, 0.03514038467407227, 0.034834110260009765, 0.03465804672241211, 0.03484415817260742, 0.035513278961181644, 0.034813438415527344, 0.03475711822509766, 0.03465625762939453, 0.034826526641845705, 0.03494063949584961, 0.03479100799560547, 0.034621856689453126, 0.0346255989074707, 0.034670528411865235, 0.034697216033935545, 0.03459408187866211, 
0.03464662551879883, 0.03471334457397461, 0.036245887756347656, 0.03476460647583008, 0.03477875137329101, 0.03465039825439453, 0.034834720611572265, 0.035259521484375, 0.03517734527587891, 0.034885631561279294, 0.03660377502441406, 0.042235328674316404, 0.0347283821105957, 0.03441196823120117, 0.03442361450195312, 0.03434086227416992, 0.035374752044677736, 0.03471571350097656, 0.03549196624755859, 0.03411574554443359, 0.03463372802734375, 0.03429171371459961, 0.034225505828857423, 0.033992862701416014, 0.03452915191650391, 0.03444755172729492, 0.03469337463378906, 0.034508800506591795, 0.03469126510620117, 0.03464022445678711, 0.03484169769287109, 0.034632160186767576, 0.03459414291381836, 0.034630401611328125, 0.03478307342529297, 0.03473177719116211, 0.034866657257080075, 0.03461215972900391, 0.03481372833251953, 0.03475254440307617, 0.03482156753540039, 0.03461628723144531, 0.03453449630737305, 0.03451996612548828, 0.034746143341064455]",tokens/s,28.884159603167465,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7166.230528,11446.124544,0.0,11043.602432,10644.85888,s,1,15.3945673828125,15.3945673828125,0.0,15.3945673828125,15.3945673828125,15.3945673828125,15.3945673828125,[15.3945673828125],,kWh,0.00024076620776249154,2.6550995485527102e-05,0.00010717841907598863,0.0003744956223240073,,MB,3125.383168,11464.998912,0.0,11047.796736,10193.762816,s,10,3.873770233154297,0.3873770233154297,0.0008017146505535117,0.38724441528320314,0.3881938079833984,0.3884663650512695,0.3886844107055664,"[0.3870435791015625, 0.3859915466308594, 0.38744525146484377, 0.386902099609375, 0.386473876953125, 0.3880574951171875, 0.38702590942382814, 0.38873892211914063, 0.38795831298828126, 0.38813323974609376]",tokens/s,660.8548896601612,kWh,1.1362643189422872e-05,1.2530906996021258e-06,7.572709049615256e-06,2.0188442938640255e-05,tokens/kWh,12680522.256128103,MB,3134.324736,11464.998912,0.0,11047.796736,10216.945152,s,10,35.00595971679687,3.5005959716796875,0.0070912096024240935,3.499769653320312,3.5076157958984373,3.510068273925781,3.512030256347656,"[3.506041259765625, 3.49793798828125, 3.49913525390625, 3.500029541015625, 3.485289306640625, 3.50707080078125, 3.503698974609375, 3.499509765625, 3.512520751953125, 3.49472607421875]",tokens/s,17.996935524601763,kWh,0.00010226989875307727,1.1279542750089423e-05,6.72559063687854e-05,0.00018080534787195206,tokens/kWh,348441.0209183478,,s,630,35.003313110351556,0.055560814460875484,0.0006759404369073328,0.055407808303833,0.05614957122802734,0.056722057914733885,0.058441114768981936,"[0.05706671905517578, 0.0566479377746582, 0.05593052673339844, 0.05570240020751953, 0.05591356658935547, 0.05569795227050781, 
0.05528403091430664, 0.0554455680847168, 0.05533919906616211, 0.05525596618652344, 0.05510371017456055, 0.05513593673706055, 0.05490739059448242, 0.055091552734375, 0.05493734359741211, 0.05606646347045898, 0.054945793151855465, 0.05669820785522461, 0.058350303649902346, 0.0555478401184082, 0.05563158416748047, 0.055535903930664064, 0.0555146255493164, 0.05538595199584961, 0.05529462432861328, 0.05573775863647461, 0.055712352752685546, 0.05524684906005859, 0.055076862335205076, 0.05508233642578125, 0.05516559982299805, 0.05502361679077149, 0.055041950225830076, 0.055523422241210936, 0.055400577545166016, 0.055637889862060544, 0.0550748176574707, 0.05554995346069336, 0.05557763290405274, 0.05606643295288086, 0.0556069450378418, 0.05536857604980469, 0.05535136032104492, 0.05676582336425781, 0.05606054306030273, 0.055623680114746096, 0.05573126220703125, 0.055118431091308595, 0.05511958312988281, 0.05534755325317383, 0.055705696105957034, 0.05822585678100586, 0.05546649551391602, 0.05535337448120117, 0.05545353698730469, 0.055890560150146484, 0.05584220886230469, 0.05619363021850586, 0.05620121765136719, 0.055439006805419924, 0.055688865661621095, 0.05526393508911133, 0.055629825592041014, 0.05660467147827149, 0.05637936019897461, 0.055859233856201174, 0.05538739013671875, 0.05564902496337891, 0.055431167602539064, 0.05577059173583984, 0.0553447036743164, 0.05579391860961914, 0.055210689544677734, 0.05537900924682617, 0.055024608612060544, 0.05820211029052735, 0.05573567962646484, 0.055767681121826174, 0.05603308868408203, 0.05536377716064453, 0.05552864074707031, 0.05528992080688477, 0.055398529052734374, 0.05756582260131836, 0.0554700813293457, 0.055926303863525394, 0.055638496398925784, 0.05551030349731445, 0.05549055862426758, 0.05518204879760742, 0.05496422576904297, 0.05525910568237305, 0.055543838500976564, 0.05542483139038086, 0.05528595352172851, 0.055209983825683595, 0.05556220626831055, 0.05566876983642578, 0.055314430236816405, 0.05519974517822265, 0.05594028854370117, 0.05563584136962891, 0.05578438568115234, 0.05580595016479492, 0.05549260711669922, 0.05509017562866211, 0.05520854568481445, 0.05526979064941406, 0.054968318939208984, 0.05501244735717774, 0.054876190185546875, 0.05519792175292969, 0.05496489715576172, 0.05505228805541992, 0.05615779113769531, 0.055204193115234376, 0.054858974456787106, 0.055806976318359375, 0.05531628799438477, 0.05502560043334961, 0.05494339370727539, 0.05490678405761719, 0.05492937469482422, 0.055403072357177736, 0.055744510650634765, 0.05569945526123047, 0.0567459831237793, 0.055259136199951174, 0.054963489532470704, 0.05601753616333008, 0.05546198272705078, 0.05546803283691406, 0.05507455825805664, 0.055177536010742184, 0.05496620941162109, 0.05487411117553711, 0.05535334396362305, 0.05546188735961914, 0.058049758911132815, 0.056232769012451174, 0.05560335922241211, 0.056409439086914065, 0.05535113525390625, 0.05605606460571289, 0.05596521759033203, 0.055554912567138674, 0.05561711883544922, 0.05529436874389648, 0.05515008163452149, 0.05508147048950195, 0.055449600219726565, 0.05581167984008789, 0.05564457702636719, 0.05511516952514649, 0.05483990478515625, 0.05543119812011719, 0.055682174682617186, 0.05514435195922852, 0.0553298225402832, 0.05528358459472656, 0.055445793151855466, 0.05583027267456055, 0.05551628875732422, 0.05508607864379883, 0.055209632873535155, 0.05592496109008789, 0.05604323196411133, 0.05559462356567383, 0.05559347152709961, 0.055476383209228514, 0.05528575897216797, 0.05511129760742187, 0.0549728012084961, 
0.055063934326171876, 0.05579840087890625, 0.055570430755615234, 0.055445182800292966, 0.055322208404541016, 0.05518819046020508, 0.055205249786376955, 0.05607078552246094, 0.055424606323242184, 0.05580636978149414, 0.056248321533203124, 0.05698355102539063, 0.055334209442138675, 0.0551605110168457, 0.055126750946044925, 0.05509664154052735, 0.056720897674560546, 0.055374526977539064, 0.055836929321289065, 0.054925537109375, 0.054728256225585935, 0.05529212951660156, 0.05496201705932617, 0.05519184112548828, 0.05515171051025391, 0.05484828948974609, 0.054708225250244144, 0.05506467056274414, 0.05489788818359375, 0.05487891387939453, 0.055787521362304686, 0.055408641815185546, 0.056180286407470706, 0.05564051055908203, 0.05531619262695313, 0.05535567855834961, 0.05585715103149414, 0.05696259307861328, 0.05593753433227539, 0.05577724838256836, 0.055982078552246094, 0.055412353515625, 0.05525747299194336, 0.054976062774658205, 0.055349281311035156, 0.055298465728759766, 0.05518950271606445, 0.05506646347045899, 0.05500534439086914, 0.05492326354980469, 0.05546188735961914, 0.05526528167724609, 0.05499401473999024, 0.05509417724609375, 0.055605247497558595, 0.05540249633789063, 0.055332862854003906, 0.055607040405273436, 0.055353408813476564, 0.05551683044433594, 0.05537401580810547, 0.055218528747558594, 0.055231807708740234, 0.055548480987548825, 0.055425086975097654, 0.05520374298095703, 0.05561964797973633, 0.05601219177246094, 0.059863422393798826, 0.05650201416015625, 0.05573689651489258, 0.055731231689453126, 0.05532547378540039, 0.055470401763916016, 0.055475902557373044, 0.05546780776977539, 0.05847820663452148, 0.055747360229492185, 0.055449600219726565, 0.05662332916259766, 0.05621475219726563, 0.05560940933227539, 0.054895008087158206, 0.05462844848632813, 0.055712799072265624, 0.05563027191162109, 0.055634464263916016, 0.05515856170654297, 0.05484956741333008, 0.05476512145996094, 0.05494847869873047, 0.054798336029052735, 0.05494169616699219, 0.05511756896972656, 0.05511193466186524, 0.055233631134033206, 0.0551693115234375, 0.055263870239257815, 0.055228416442871096, 0.05517926406860352, 0.055314430236816405, 0.055207263946533205, 0.05581206512451172, 0.055207744598388675, 0.05576179122924805, 0.05497232055664063, 0.05500688171386719, 0.05528416061401367, 0.05521104049682617, 0.055583518981933595, 0.0555145263671875, 0.05535996627807617, 0.055339134216308594, 0.05514873504638672, 0.05526732635498047, 0.055152641296386716, 0.05547622299194336, 0.05577318572998047, 0.05591980743408203, 0.055169857025146485, 0.055623519897460935, 0.055908512115478516, 0.056025089263916014, 0.05526095962524414, 0.05519177627563476, 0.05531167984008789, 0.05528646469116211, 0.055021568298339846, 0.054863872528076174, 0.05494195175170898, 0.05500083160400391, 0.05522127914428711, 0.05547123336791992, 0.055371807098388674, 0.05515145492553711, 0.05484864044189453, 0.05527657699584961, 0.05581286239624023, 0.05593299102783203, 0.05536153411865234, 0.05503171157836914, 0.05490441513061523, 0.0563721923828125, 0.05533033752441406, 0.05494012832641602, 0.056025089263916014, 0.055598369598388674, 0.05544319915771485, 0.056054302215576175, 0.05518297576904297, 0.056126270294189456, 0.055951358795166016, 0.055529247283935546, 0.055314430236816405, 0.05497468948364258, 0.055482368469238284, 0.055226367950439455, 0.0551014404296875, 0.05507609558105469, 0.05526796722412109, 0.055144577026367186, 0.055338977813720706, 0.05523625564575195, 0.05540697479248047, 0.05524070358276367, 0.054980609893798826, 
0.055046142578125, 0.05522572708129883, 0.05577587127685547, 0.05610086441040039, 0.058312767028808596, 0.055914432525634765, 0.05548633575439453, 0.05542281723022461, 0.055232158660888674, 0.055341697692871096, 0.055160831451416016, 0.05523984146118164, 0.05571670532226562, 0.05554950332641601, 0.05539680099487305, 0.05572963333129883, 0.05532521438598633, 0.05489788818359375, 0.054952671051025394, 0.055842334747314454, 0.0560041618347168, 0.05586368179321289, 0.05548857498168945, 0.05520383834838867, 0.055002750396728514, 0.05508188629150391, 0.05523865509033203, 0.05575884628295898, 0.055812095642089846, 0.05574409484863281, 0.05998793411254883, 0.05604323196411133, 0.05575350570678711, 0.05602860641479492, 0.05583865737915039, 0.05582700729370117, 0.055475582122802736, 0.05523260879516602, 0.05937180709838867, 0.056723007202148436, 0.05545209503173828, 0.055244800567626956, 0.059635711669921876, 0.056403968811035154, 0.056524768829345706, 0.055821918487548826, 0.055502655029296875, 0.05552396774291992, 0.055093246459960936, 0.05566873550415039, 0.05614988708496094, 0.05538966369628906, 0.05560745620727539, 0.05526505661010742, 0.055126625061035155, 0.055400577545166016, 0.05524057769775391, 0.055141918182373045, 0.054853313446044924, 0.05475526428222656, 0.057218017578125, 0.05689139175415039, 0.055756065368652345, 0.0550687370300293, 0.055083072662353516, 0.055179103851318356, 0.05512038421630859, 0.05531264114379883, 0.05634822463989258, 0.057859775543212894, 0.05616511917114258, 0.05584051132202148, 0.0553342399597168, 0.05532134246826172, 0.0564178237915039, 0.05514918518066406, 0.05542707061767578, 0.055499935150146486, 0.05533980941772461, 0.055729312896728514, 0.05564303970336914, 0.05530131149291992, 0.054958015441894534, 0.05491801452636719, 0.05496012878417969, 0.055646209716796874, 0.056627201080322265, 0.055954689025878905, 0.05524147033691406, 0.055260353088378907, 0.055148609161376955, 0.05549100875854492, 0.055484737396240234, 0.05509939193725586, 0.055067649841308595, 0.054998016357421874, 0.05514790344238281, 0.055502880096435545, 0.055315040588378904, 0.05520793533325195, 0.05488435363769531, 0.05502361679077149, 0.05592905426025391, 0.05538035202026367, 0.05531033706665039, 0.055261184692382816, 0.055160831451416016, 0.055954910278320315, 0.05534774398803711, 0.05539430236816406, 0.055349246978759765, 0.05520793533325195, 0.05517107009887695, 0.055054336547851565, 0.05551103973388672, 0.05678079986572265, 0.05630563354492187, 0.0561317138671875, 0.05532563018798828, 0.05582947158813477, 0.0559119987487793, 0.05561731338500977, 0.055314559936523434, 0.05574710464477539, 0.055547904968261716, 0.05558483123779297, 0.05542396926879883, 0.05575574493408203, 0.055623680114746096, 0.05614527893066406, 0.05574310302734375, 0.05516016006469727, 0.05520595169067383, 0.055763553619384766, 0.05576294326782227, 0.05519500732421875, 0.05566886520385742, 0.055132671356201174, 0.05591155242919922, 0.0558682861328125, 0.05544140625, 0.0554516487121582, 0.05508819198608399, 0.055296382904052734, 0.05507062530517578, 0.05647990417480469, 0.055034366607666016, 0.05494784164428711, 0.055259136199951174, 0.054848896026611325, 0.055109760284423825, 0.05580646514892578, 0.056840190887451174, 0.05558272171020508, 0.055662593841552734, 0.05563119888305664, 0.05515740966796875, 0.05535539245605469, 0.05522022247314453, 0.05527142333984375, 0.05626582336425781, 0.05565491104125977, 0.05517494583129883, 0.05524339294433594, 0.05584896087646484, 0.05678031921386719, 0.05609673690795899, 
0.05603372955322266, 0.05664339065551758, 0.0555926399230957, 0.05566857528686524, 0.055231201171875, 0.055349246978759765, 0.05542502212524414, 0.05680070495605469, 0.05515913772583008, 0.055427486419677735, 0.0549128303527832, 0.05485567855834961, 0.05494937515258789, 0.05540083312988281, 0.055842113494873044, 0.05489952087402344, 0.05489254379272461, 0.05522249603271484, 0.055232288360595704, 0.0551357421875, 0.054984607696533204, 0.05521612930297851, 0.057003711700439455, 0.05569424057006836, 0.056068286895751954, 0.05607814407348633, 0.05517311859130859, 0.05516313552856445, 0.055742176055908206, 0.05606403350830078, 0.05771891021728515, 0.058752895355224606, 0.05676851272583008, 0.05634048080444336, 0.055744510650634765, 0.05536767959594727, 0.05568307113647461, 0.0561231689453125, 0.05564982223510742, 0.05540217590332031, 0.05554691314697266, 0.05528284835815429, 0.05565087890625, 0.05581951904296875, 0.05503481674194336, 0.05595296096801758, 0.055906112670898435, 0.05580012893676758, 0.055675262451171874, 0.0561495361328125, 0.05605628967285156, 0.05566463851928711, 0.055556095123291016, 0.055258174896240235, 0.05528364944458008, 0.05577542495727539, 0.05532140731811523, 0.0556473274230957, 0.05547100830078125, 0.05520383834838867, 0.05789491271972656, 0.058880577087402346, 0.056213630676269534, 0.05619443130493164, 0.0554911994934082, 0.055312320709228514, 0.05562758255004883, 0.05529612731933594, 0.055279678344726565, 0.05556435012817383, 0.055801856994628904, 0.05547945785522461, 0.055470943450927734, 0.05499638366699219, 0.05520806503295898, 0.05496844863891601, 0.054780254364013674, 0.05528985595703125, 0.05589715194702148, 0.05701932907104492, 0.055478271484375, 0.05526732635498047, 0.05495603179931641, 0.0549354248046875, 0.05505651092529297, 0.0557088623046875, 0.055507774353027346, 0.05525222396850586, 0.055094017028808596, 0.05470207977294922, 0.055119873046875, 0.05482201766967774, 0.054932350158691405, 0.0548201904296875, 0.0547314224243164, 0.054988800048828126, 0.05505843353271484, 0.054863872528076174, 0.05551248168945312, 0.055734432220458985, 0.05572652816772461, 0.055235614776611326, 0.054970367431640625, 0.05554995346069336, 0.055710689544677734, 0.05543708801269531, 0.055835968017578126, 0.05574703979492188, 0.05623017501831055, 0.05530147171020508, 0.054913856506347655, 0.055422367095947264, 0.05523324966430664, 0.0549947509765625, 0.05554796981811523, 0.055012992858886715, 0.055644542694091796, 0.05546803283691406, 0.0551014404296875, 0.05513417434692383, 0.055167007446289065, 0.055531520843505856, 0.05592268753051758, 0.05832294464111328]",tokens/s,17.998296275951365,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in 
benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 111487 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,877.613056,615.448576,0.0,220.20096,205.438976,s,1,8.500099609375,8.500099609375,0.0,8.500099609375,8.500099609375,8.500099609375,8.500099609375,[8.500099609375],,kWh,2.8432755895839057e-05,3.1291399853608897e-06,8.50334013600268e-06,4.0065236017202625e-05,,MB,1265.762304,669.974528,0.0,260.046848,226.386944,s,17,0.21296153736114498,0.012527149256537942,0.0002239725954963185,0.01243660831451416,0.012815871810913086,0.012923238182067871,0.013149460105895997,"[0.01247321605682373, 0.012356096267700196, 0.012852543830871583, 0.012580448150634766, 0.012397248268127442, 0.013206015586853028, 0.012528351783752441, 0.012396703720092774, 0.012384832382202149, 0.012378560066223145, 0.012320768356323243, 0.012509632110595702, 0.012791423797607422, 0.01259267234802246, 0.01243660831451416, 0.012429247856140137, 0.012327168464660644]",tokens/s,20435.615059539035,kWh,3.6659520940118344e-07,4.0428975814141996e-08,2.1475729073366726e-07,6.217814759489927e-07,tokens/kWh,411720210.2383004,MB,1301.495808,684.654592,0.0,274.726912,226.389504,s,17,10.28993084716797,0.60529004983341,0.006259915020451108,0.6042811279296875,0.6105053955078125,0.6131617797851562,0.6216492016601562,"[0.6237710571289062, 0.60166943359375, 0.6040850830078125, 0.6031799926757813, 0.6103824462890625, 0.610502685546875, 0.6029651489257812, 0.6058961791992188, 0.5993203125, 0.5990332641601562, 0.6070938110351562, 0.6058821411132812, 0.606194580078125, 0.6105094604492187, 0.6042811279296875, 0.6007789306640625, 0.5943851928710937]",tokens/s,104.08233212711671,kWh,1.7470339347216228e-05,1.9266556545660242e-06,6.675967176089875e-06,2.6072962177872127e-05,tokens/kWh,2416296.2217414444,,s,1071,10.281280707359333,0.009599701874285074,0.0002051811333571774,0.009560256004333497,0.009805824279785156,0.009942975997924805,0.010313395404815674,"[0.009171008110046387, 0.009627840042114259, 0.009655872344970702, 0.009693375587463379, 0.009803775787353516, 0.009846783638000489, 0.009953280448913575, 0.009963104248046875, 0.010200960159301758, 0.010041824340820312, 0.009928768157958984, 0.009994239807128906, 0.009959199905395507, 0.010000032424926758, 0.01023977565765381, 0.010158592224121094, 0.010066207885742187, 0.01001471996307373, 0.010053119659423827, 0.01003980827331543, 0.00995952033996582, 0.009994144439697266, 0.009936960220336914, 0.010014656066894532, 0.010170080184936524, 0.010049823760986329, 0.010022496223449707, 0.010047679901123046, 0.010000800132751465, 0.009991999626159667, 0.009944128036499024, 0.009989055633544922, 0.009928704261779785, 0.009945152282714843, 0.009989888191223144, 0.009937088012695312, 0.00991436767578125, 0.009963520050048828, 0.009947168350219726, 0.010061440467834472, 0.010079999923706055, 
0.009996895790100097, 0.009924351692199707, 0.009973919868469239, 0.009924703598022461, 0.009928704261779785, 0.009844672203063965, 0.009871264457702637, 0.009721823692321777, 0.009646400451660157, 0.009768832206726074, 0.009744000434875489, 0.009773632049560547, 0.009792448043823242, 0.009788031578063966, 0.009751999855041504, 0.0097259521484375, 0.009770079612731934, 0.009736063957214356, 0.00963753604888916, 0.009565952301025391, 0.009583104133605956, 0.009512703895568847, 0.009132831573486329, 0.009488384246826171, 0.009463839530944824, 0.009680191993713378, 0.009472736358642578, 0.009467840194702148, 0.009449600219726562, 0.009438207626342773, 0.009354080200195312, 0.00944099235534668, 0.009469792366027832, 0.009453920364379883, 0.009395456314086914, 0.009422911643981933, 0.009462592124938964, 0.009458687782287598, 0.009466879844665528, 0.009483360290527343, 0.009440128326416015, 0.009436575889587403, 0.009460351943969726, 0.009475263595581054, 0.00959558391571045, 0.00969536018371582, 0.009668607711791993, 0.009870495796203613, 0.009701567649841309, 0.009704095840454102, 0.009498623847961426, 0.009460000038146973, 0.009428735733032226, 0.009385439872741699, 0.009327103614807129, 0.009369855880737304, 0.009355072021484376, 0.009361344337463379, 0.009365056037902832, 0.009372096061706543, 0.009408415794372559, 0.00946390438079834, 0.00951910400390625, 0.009660096168518066, 0.00961308765411377, 0.009667103767395019, 0.009676095962524413, 0.009593536376953125, 0.009637887954711915, 0.009713919639587402, 0.009682687759399414, 0.009721856117248535, 0.009625887870788575, 0.009573663711547852, 0.009586175918579102, 0.009679807662963866, 0.009644031524658203, 0.009625696182250976, 0.009793439865112304, 0.009809920310974121, 0.009721599578857421, 0.009717375755310058, 0.009685471534729003, 0.009660575866699219, 0.00963379192352295, 0.009323552131652832, 0.009723967552185058, 0.009725631713867188, 0.009746432304382324, 0.009736096382141114, 0.009617504119873046, 0.009623488426208497, 0.009539711952209472, 0.009451456069946289, 0.00952444839477539, 0.009515456199645996, 0.009551360130310058, 0.009421567916870117, 0.009523200035095216, 0.009402496337890625, 0.00951852798461914, 0.009396896362304688, 0.009446944236755371, 0.009421183586120605, 0.009555232048034669, 0.009522111892700195, 0.00954054355621338, 0.009506912231445312, 0.009521951675415039, 0.009457280158996582, 0.009498527526855469, 0.0095217924118042, 0.009507712364196777, 0.009554240226745605, 0.00945206356048584, 0.009523263931274414, 0.009390080451965332, 0.00941055965423584, 0.009467904090881347, 0.009729887962341309, 0.009640031814575196, 0.010800640106201171, 0.009769023895263672, 0.009482751846313477, 0.009523200035095216, 0.009584671974182129, 0.0095600004196167, 0.009488224029541015, 0.009507231712341309, 0.009494303703308105, 0.00944495964050293, 0.009464223861694336, 0.009465855598449707, 0.009479488372802734, 0.009427647590637207, 0.009532416343688965, 0.009415264129638673, 0.009462176322937011, 0.009967616081237793, 0.00963804817199707, 0.009981792449951172, 0.009711615562438965, 0.009736319541931153, 0.00965824031829834, 0.009650176048278808, 0.009789440155029297, 0.00976300811767578, 0.00973958396911621, 0.009398367881774903, 0.009805824279785156, 0.009750687599182128, 0.009750528335571289, 0.00970355224609375, 0.00965839958190918, 0.009682175636291503, 0.00963753604888916, 0.009605152130126953, 0.009683456420898438, 0.00967910385131836, 0.009699007987976075, 0.009595264434814453, 0.009670880317687989, 
0.0096212158203125, 0.009589887619018555, 0.00955072021484375, 0.009581791877746581, 0.009560864448547363, 0.00960870361328125, 0.009638400077819824, 0.009682656288146973, 0.0098306884765625, 0.009820159912109374, 0.0095862398147583, 0.009612031936645508, 0.009519840240478515, 0.009719008445739747, 0.010307040214538574, 0.00950819206237793, 0.009497407913208009, 0.009441408157348632, 0.009545120239257812, 0.009455583572387695, 0.00946457576751709, 0.009462944030761719, 0.009445695877075196, 0.009462176322937011, 0.00957148838043213, 0.009549759864807128, 0.00962172794342041, 0.009554304122924805, 0.009519200325012207, 0.009607647895812988, 0.009540575981140137, 0.009475040435791015, 0.009420607566833496, 0.009484383583068847, 0.00943446445465088, 0.00946233558654785, 0.009470080375671387, 0.009502592086791993, 0.009398271560668945, 0.009438719749450684, 0.009425408363342285, 0.009391743659973145, 0.009480256080627442, 0.009470175743103028, 0.009416159629821777, 0.009356096267700195, 0.009371359825134278, 0.00942255973815918, 0.009480575561523437, 0.00919539165496826, 0.009489055633544921, 0.009371808052062988, 0.009392127990722657, 0.009570143699645995, 0.00944099235534668, 0.009560256004333497, 0.009598464012145995, 0.00963046360015869, 0.009744319915771484, 0.00986950397491455, 0.009901951789855958, 0.009772255897521972, 0.009700127601623535, 0.009725824356079101, 0.00981824016571045, 0.009711615562438965, 0.009695199966430664, 0.009738112449645996, 0.009740544319152831, 0.009679776191711426, 0.009862272262573243, 0.009678303718566894, 0.00970748805999756, 0.009693535804748535, 0.00978166389465332, 0.00976863956451416, 0.00973414421081543, 0.009760767936706542, 0.009801888465881347, 0.009803296089172363, 0.009849120140075683, 0.009814271926879883, 0.009702752113342285, 0.00965062427520752, 0.00962764835357666, 0.009643872261047364, 0.009717920303344726, 0.009703424453735352, 0.009750656127929688, 0.009701248168945312, 0.00992198371887207, 0.009770848274230958, 0.009894240379333495, 0.009717791557312012, 0.01021561622619629, 0.009672127723693847, 0.009613632202148437, 0.009574879646301269, 0.009551712036132812, 0.009599072456359863, 0.009494560241699219, 0.009596799850463867, 0.009506912231445312, 0.009508447647094726, 0.009507231712341309, 0.009516480445861817, 0.009534015655517578, 0.009778592109680176, 0.00960377597808838, 0.009641504287719727, 0.009687423706054688, 0.009850879669189454, 0.009324543952941895, 0.00964515209197998, 0.009589632034301758, 0.00955504035949707, 0.009611840248107911, 0.00963212776184082, 0.009727999687194825, 0.00969711971282959, 0.009580703735351562, 0.009620863914489746, 0.00960307216644287, 0.009740960121154786, 0.009661664009094238, 0.009672575950622559, 0.009761664390563965, 0.009670399665832519, 0.009658623695373535, 0.009602432250976562, 0.009578623771667481, 0.009638400077819824, 0.009733375549316406, 0.00976089572906494, 0.009736831665039063, 0.009725664138793945, 0.009684927940368652, 0.00978774356842041, 0.009649920463562011, 0.009681023597717286, 0.009743743896484375, 0.009789343833923339, 0.00978121566772461, 0.009813119888305664, 0.009731840133666993, 0.009657855987548827, 0.009503199577331542, 0.009594911575317383, 0.009537311553955078, 0.009617631912231445, 0.009587008476257325, 0.00969491195678711, 0.00971571159362793, 0.009670207977294922, 0.009662431716918945, 0.009676575660705566, 0.0098056640625, 0.009737055778503418, 0.009699071884155273, 0.009733440399169922, 0.00975318431854248, 0.009720000267028808, 0.009915871620178223, 
0.009802592277526856, 0.010184032440185547, 0.00979196834564209, 0.009754655838012695, 0.00963276767730713, 0.009667584419250488, 0.00958182430267334, 0.00965494441986084, 0.009599072456359863, 0.009582688331604004, 0.009639840126037597, 0.00960921573638916, 0.00929792022705078, 0.009689087867736817, 0.009654080390930176, 0.009646271705627442, 0.009649503707885742, 0.009935263633728028, 0.009639231681823731, 0.009681056022644043, 0.009609248161315918, 0.009585408210754395, 0.009542783737182618, 0.009476991653442382, 0.009478143692016602, 0.009482272148132324, 0.00952950382232666, 0.009523008346557617, 0.009414655685424805, 0.009484224319458008, 0.00949459171295166, 0.009535488128662109, 0.009529343605041504, 0.009584832191467284, 0.009631039619445801, 0.009661984443664551, 0.009627967834472657, 0.009737055778503418, 0.009662336349487304, 0.009559328079223633, 0.009552000045776367, 0.009482591629028321, 0.009488032341003419, 0.009537823677062988, 0.009518336296081543, 0.009475071907043458, 0.009479680061340333, 0.009483039855957031, 0.009533151626586914, 0.00955907154083252, 0.009726943969726563, 0.00952905559539795, 0.009521439552307128, 0.009510687828063965, 0.009373920440673828, 0.009457152366638183, 0.009426560401916504, 0.009427840232849122, 0.009445376396179199, 0.009512479782104493, 0.009518912315368652, 0.009427616119384766, 0.00944057559967041, 0.009456000328063964, 0.009515328407287598, 0.010233759880065918, 0.00963980770111084, 0.009683168411254883, 0.009689087867736817, 0.009656319618225098, 0.009720864295959473, 0.009599967956542969, 0.00951103973388672, 0.009512928009033203, 0.009490336418151855, 0.009165056228637695, 0.009415936470031739, 0.00938265609741211, 0.009409536361694336, 0.00937241554260254, 0.00933027172088623, 0.009364447593688964, 0.009411935806274413, 0.009388383865356445, 0.009390080451965332, 0.009383487701416015, 0.009430815696716308, 0.00937564754486084, 0.009410304069519043, 0.009468928337097168, 0.009412863731384277, 0.00934876823425293, 0.009426464080810546, 0.009412384033203125, 0.009421183586120605, 0.009427103996276855, 0.009400896072387695, 0.009358112335205077, 0.009396479606628419, 0.009394944190979004, 0.009483776092529296, 0.009447392463684081, 0.009490079879760742, 0.00958240032196045, 0.009833120346069335, 0.009627776145935059, 0.00975481605529785, 0.011046208381652833, 0.01058067226409912, 0.00982425594329834, 0.009805567741394043, 0.009711039543151856, 0.009804512023925781, 0.009725664138793945, 0.009687423706054688, 0.00982534408569336, 0.009741215705871583, 0.009658656120300292, 0.009708576202392578, 0.009941823959350586, 0.00987331199645996, 0.009747455596923828, 0.00971059226989746, 0.009843008041381836, 0.009844256401062011, 0.00981827163696289, 0.009767135620117188, 0.009688447952270508, 0.009711135864257812, 0.009737183570861816, 0.009630847930908203, 0.009642784118652343, 0.009627264022827148, 0.009607040405273437, 0.009544192314147949, 0.009564160346984863, 0.00953916835784912, 0.009517760276794433, 0.009058624267578126, 0.00949232006072998, 0.0094782075881958, 0.009377568244934083, 0.009380895614624024, 0.009429984092712403, 0.009412832260131836, 0.009476896286010741, 0.009476608276367187, 0.009590592384338379, 0.009569055557250976, 0.009490400314331056, 0.009492480278015136, 0.009602848052978516, 0.009642399787902833, 0.009704416275024413, 0.009544672012329102, 0.009545087814331055, 0.009611712455749512, 0.009443327903747559, 0.011451680183410645, 0.010408672332763672, 0.009653280258178711, 0.00951961612701416, 0.009482560157775878, 
0.009478400230407715, 0.009477312088012696, 0.009429727554321289, 0.00946787166595459, 0.009475744247436524, 0.009490367889404296, 0.009535103797912598, 0.009470784187316894, 0.009422016143798829, 0.009470848083496094, 0.009419808387756348, 0.0094749755859375, 0.009498623847961426, 0.00941875171661377, 0.009441439628601073, 0.009448479652404786, 0.009462176322937011, 0.00950496006011963, 0.009538975715637207, 0.009491264343261719, 0.009500672340393066, 0.009441280364990234, 0.00942460823059082, 0.00943283176422119, 0.009398719787597657, 0.009399999618530273, 0.009406335830688476, 0.00933353614807129, 0.00936524772644043, 0.009364992141723634, 0.009334752082824707, 0.009383456230163574, 0.009349856376647949, 0.00933071994781494, 0.009394399642944336, 0.009390111923217773, 0.009396224021911622, 0.00944320011138916, 0.00910422420501709, 0.00940329647064209, 0.009640543937683106, 0.009717984199523926, 0.009580544471740723, 0.009473504066467285, 0.009511455535888672, 0.009469599723815917, 0.009451871871948242, 0.00944921588897705, 0.009576959609985352, 0.009397120475769042, 0.009390975952148438, 0.009457056045532226, 0.00946886444091797, 0.009419551849365234, 0.009382783889770508, 0.009373791694641113, 0.009432991981506348, 0.009418368339538574, 0.009483776092529296, 0.009499039649963379, 0.009457695960998535, 0.009476096153259277, 0.009492128372192384, 0.009571104049682618, 0.009510911941528321, 0.009511199951171875, 0.009462559700012207, 0.00970748805999756, 0.009495743751525879, 0.009486271858215331, 0.00948243236541748, 0.009498271942138672, 0.00945907211303711, 0.009527935981750488, 0.009486335754394531, 0.009486335754394531, 0.009474047660827637, 0.010303296089172364, 0.009507007598876953, 0.009545663833618163, 0.009566271781921386, 0.00951910400390625, 0.009592831611633301, 0.009515007972717286, 0.009516256332397461, 0.009462559700012207, 0.009488384246826171, 0.009514623641967774, 0.009496959686279297, 0.009467904090881347, 0.009456831932067871, 0.009416576385498046, 0.009629695892333985, 0.009510047912597656, 0.00952246379852295, 0.009447808265686036, 0.009457216262817383, 0.009454143524169923, 0.00950489616394043, 0.009461631774902344, 0.009506815910339356, 0.00906054401397705, 0.009447232246398925, 0.009499615669250488, 0.009506815910339356, 0.009447423934936524, 0.009434720039367676, 0.009488832473754883, 0.009455583572387695, 0.009481951713562012, 0.00935478401184082, 0.009437472343444823, 0.00941104030609131, 0.009596223831176759, 0.00964406394958496, 0.010400287628173828, 0.010328224182128906, 0.009663519859313964, 0.009513248443603515, 0.009525919914245605, 0.009545087814331055, 0.009527935981750488, 0.009539039611816406, 0.009586655616760253, 0.009589311599731445, 0.009667776107788085, 0.009604991912841797, 0.009605759620666504, 0.00966483211517334, 0.009691136360168457, 0.009743935585021972, 0.009705920219421387, 0.009610879898071288, 0.009609919548034668, 0.009643168449401855, 0.009634336471557618, 0.009661503791809082, 0.00980678367614746, 0.009740511894226074, 0.009710880279541015, 0.009898624420166015, 0.009799391746520995, 0.009852928161621094, 0.009749759674072265, 0.00981004810333252, 0.009923359870910645, 0.009751615524291992, 0.00973027229309082, 0.009787615776062011, 0.009729824066162109, 0.009724255561828613, 0.009680255889892578, 0.00961023998260498, 0.00962559986114502, 0.009615360260009765, 0.009559776306152344, 0.009611552238464355, 0.009512160301208496, 0.009536288261413573, 0.009527296066284179, 0.009443327903747559, 0.009508864402770996, 0.009500127792358398, 
0.009505311965942382, 0.009476703643798828, 0.009670399665832519, 0.009631744384765625, 0.009697279930114745, 0.00969859218597412, 0.009681632041931152, 0.00971951961517334, 0.0096014404296875, 0.00956332778930664, 0.009609184265136718, 0.009517760276794433, 0.00952950382232666, 0.009523008346557617, 0.009516192436218261, 0.009671392440795899, 0.009494720458984375, 0.00947993564605713, 0.009517312049865723, 0.009430944442749023, 0.009511008262634277, 0.009662431716918945, 0.00947612762451172, 0.009488320350646973, 0.009642144203186036, 0.009512864112854003, 0.009500672340393066, 0.009456704139709473, 0.009518143653869628, 0.009435232162475587, 0.009445152282714843, 0.00973414421081543, 0.009494784355163575, 0.009483488082885742, 0.009506367683410644, 0.009413663864135741, 0.00960095977783203, 0.009500415802001953, 0.009463775634765624, 0.009595168113708497, 0.009612544059753418, 0.009621312141418457, 0.009638431549072266, 0.009648192405700683, 0.009674592018127441, 0.009671199798583984, 0.009633631706237792, 0.009644160270690918, 0.009672191619873047, 0.009816384315490723, 0.009676095962524413, 0.00974118423461914, 0.009797216415405274, 0.009735615730285644, 0.009796575546264648, 0.009756735801696777, 0.009629023551940919, 0.009669216156005859, 0.009676223754882812, 0.009752832412719726, 0.00971951961517334, 0.009730112075805665, 0.00979203224182129, 0.00975603199005127, 0.009398431777954101, 0.009790783882141113, 0.009690143585205079, 0.009721759796142579, 0.009706912040710449, 0.009738559722900391, 0.009764512062072754, 0.009703807830810547, 0.00963584041595459, 0.009580256462097168, 0.009599264144897462, 0.00960307216644287, 0.009569536209106446, 0.009566783905029297, 0.00950496006011963, 0.009641759872436524, 0.009528927803039551, 0.0094684476852417, 0.009706656455993652, 0.009982912063598633, 0.009622655868530273, 0.009667136192321778, 0.009652159690856933, 0.009632191658020019, 0.00969529628753662, 0.009756544113159179, 0.009693183898925782, 0.009708864212036133, 0.00965401554107666, 0.009642208099365234, 0.00963798427581787, 0.009586560249328612, 0.009584799766540527, 0.00955452823638916, 0.009522527694702148, 0.00953321647644043, 0.009482368469238282, 0.009499263763427735, 0.009439359664916993, 0.009482239723205567, 0.009531519889831542, 0.00957049560546875, 0.009444543838500977, 0.009574912071228027, 0.009498623847961426, 0.009477215766906738, 0.009527199745178223, 0.009548704147338867, 0.00949187183380127, 0.009691712379455566, 0.009611200332641602, 0.009554112434387207, 0.00957049560546875, 0.009463616371154785, 0.00952889633178711, 0.009559712409973144, 0.009497376441955566, 0.00961945629119873, 0.009672736167907715, 0.009834176063537597, 0.009779040336608886, 0.009763263702392578, 0.009883232116699218, 0.009333087921142577, 0.00965766429901123, 0.009728704452514648, 0.00973583984375, 0.009791839599609374, 0.010076160430908204, 0.009791487693786622, 0.009736448287963868, 0.009621312141418457, 0.009738176345825196, 0.00990822410583496, 0.009815199851989746, 0.009700127601623535, 0.009820223808288574, 0.009844736099243164, 0.009727999687194825, 0.009672767639160156, 0.009756223678588867, 0.009771391868591309, 0.009772992134094239, 0.009874784469604492, 0.009904864311218262, 0.009795743942260743, 0.009840767860412597, 0.009761631965637207, 0.009865856170654297, 0.009761216163635254, 0.009797087669372559, 0.009746784210205079, 0.009711615562438965, 0.00971776008605957, 0.009672287940979005, 0.009761343955993653, 0.009731936454772949, 0.009594047546386719, 0.009575231552124024, 
0.009521056175231933, 0.00959273624420166, 0.009543264389038086, 0.009558815956115723, 0.009531455993652344, 0.009545472145080567, 0.009571423530578613, 0.009505696296691894, 0.009676256179809571, 0.009631808280944825, 0.00972060775756836, 0.009700480461120606, 0.009706048011779786, 0.009653599739074707, 0.009653120040893555, 0.009598112106323243, 0.009710207939147949, 0.009540736198425293, 0.009595775604248047, 0.009674752235412597, 0.009569279670715332, 0.00951097583770752, 0.009465056419372559, 0.009446720123291015, 0.009476511955261231, 0.009577983856201172, 0.009595392227172851, 0.009239904403686524, 0.009540255546569825, 0.009547200202941895, 0.009482815742492675, 0.00951427173614502, 0.009537728309631347, 0.009586496353149414, 0.009578495979309083, 0.009547967910766602, 0.009552160263061523, 0.009576383590698243, 0.009619839668273926, 0.009606911659240722, 0.009691328048706054, 0.009698719978332519, 0.009806336402893067, 0.00965340805053711, 0.009854144096374511, 0.009723744392395019, 0.009740351676940918, 0.009783072471618652, 0.00978313636779785, 0.009615584373474121, 0.009639936447143555, 0.009508864402770996, 0.009486335754394531, 0.009538592338562012, 0.010194111824035644, 0.009602784156799316, 0.010584416389465333, 0.009709280014038086, 0.010919103622436524, 0.009595775604248047, 0.009570240020751953, 0.009543680191040039, 0.009529343605041504, 0.009500672340393066, 0.009517215728759765, 0.009490592002868652, 0.00948192024230957, 0.009484416007995605, 0.009461600303649903, 0.009410592079162597, 0.009459712028503419, 0.009705216407775878, 0.009410431861877441, 0.009435296058654785, 0.009408672332763672, 0.009432576179504394, 0.009409279823303222, 0.009449440002441406, 0.009489343643188477, 0.009435263633728028, 0.009345088005065918, 0.009380576133728027, 0.009381855964660644, 0.00942905616760254, 0.009455360412597657, 0.009363679885864258, 0.009416704177856445, 0.009450943946838378, 0.009417344093322753, 0.009479583740234375, 0.009153792381286621, 0.009463871955871582, 0.009493247985839844, 0.009561823844909669, 0.009521439552307128, 0.009453568458557129, 0.009467904090881347, 0.009397503852844238, 0.009399040222167969, 0.009575679779052734, 0.00956492805480957, 0.01002905559539795, 0.010358943939208984, 0.009647968292236329, 0.009621631622314454, 0.009529215812683105, 0.01051206398010254, 0.00967523193359375, 0.00961315155029297, 0.009598976135253906, 0.00970956802368164, 0.009591072082519532, 0.009576064109802246, 0.009640031814575196, 0.009443327903747559, 0.009504799842834472, 0.009521120071411133, 0.009453568458557129, 0.009465855598449707, 0.009466143608093261, 0.009469663619995117, 0.0094269437789917, 0.009799679756164551, 0.009549823760986328, 0.009744511604309083, 0.00953331184387207, 0.009433279991149903, 0.009445183753967286, 0.009465408325195313, 0.009503007888793946, 0.009449631690979003, 0.009445376396179199, 0.009414655685424805, 0.009391679763793945, 0.00943558406829834, 0.00941055965423584, 0.009531392097473144, 0.009400256156921387, 0.009432703971862792, 0.009427007675170898, 0.009468255996704101, 0.009432255744934082, 0.009419615745544434, 0.009453120231628418, 0.009413056373596192, 0.009462016105651855, 0.009608960151672364, 0.009394207954406738, 0.009431008338928223, 0.009375231742858887, 0.009355584144592284, 0.009371904373168946, 0.009400159835815429, 0.009089088439941407, 0.00941868782043457, 0.00937990379333496, 0.009520511627197265, 0.009367551803588867, 0.009364255905151367, 0.009434975624084473, 0.00942080020904541, 0.00941055965423584, 
0.009392127990722657, 0.009465951919555664, 0.009379743576049804, 0.009383328437805176, 0.009396832466125488, 0.0094651517868042, 0.009476160049438477, 0.009392767906188966, 0.009380096435546876, 0.009408448219299316, 0.009378687858581543, 0.009448160171508789, 0.009445599555969238, 0.009660415649414063, 0.009490431785583496, 0.009516127586364746, 0.00942307186126709, 0.00946662425994873, 0.009393216133117676, 0.009413503646850585, 0.009426591873168945, 0.009359711647033692, 0.009447423934936524, 0.009539584159851074, 0.009402112007141113, 0.009417023658752442, 0.009500032424926757, 0.00936415958404541, 0.00945081615447998, 0.00944595241546631, 0.009459327697753907, 0.00941811180114746, 0.009458687782287598, 0.00949884796142578, 0.009457440376281738, 0.009502752304077149, 0.00949187183380127, 0.009359935760498046, 0.009407648086547852, 0.009374784469604493, 0.009389920234680176, 0.009488320350646973, 0.00941055965423584, 0.009414112091064453, 0.009381952285766602, 0.009384415626525878, 0.009381888389587402, 0.009561471939086914, 0.009379520416259765, 0.009413567543029785, 0.009455360412597657, 0.009412416458129883, 0.009456064224243164, 0.009404704093933106]",tokens/s,104.16990163816664,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1154.973696,1182.662656,0.0,780.140544,738.50112,s,1,8.7898916015625,8.7898916015625,0.0,8.7898916015625,8.7898916015625,8.7898916015625,8.7898916015625,[8.7898916015625],,kWh,3.179194462495616e-05,3.4995406943539667e-06,9.635007708008736e-06,4.4926493027318864e-05,,MB,1588.457472,1490.944,0.0,1073.741824,995.355648,s,10,0.2559623355865479,0.025596233558654785,0.00014572111156549938,0.02560910415649414,0.025685941696166992,0.025822266578674317,0.025931326484680176,"[0.025491424560546875, 0.025655647277832032, 0.025620479583740235, 0.0256396484375, 0.02546348762512207, 0.025396831512451173, 0.02551801681518555, 0.025598527908325196, 0.02595859146118164, 0.025619680404663087]",tokens/s,10001.471482644734,kWh,9.073257625907674e-07,1.0002028912212674e-07,5.588358507950333e-07,1.5661819025079275e-06,tokens/kWh,163454832.15587357,MB,1623.416832,1539.178496,0.0,1121.97632,995.358208,s,10,11.333508056640623,1.1333508056640622,0.004087949794408497,1.1326409301757812,1.138636828613281,1.1390207458496093,1.1393278796386719,"[1.137170166015625, 1.13261376953125, 1.1326009521484375, 1.1394046630859376, 1.1326680908203124, 1.1323131103515625, 1.1287606201171876, 1.1253172607421875, 1.138551513671875, 1.13410791015625]",tokens/s,55.58737831671326,kWh,4.131977156824092e-05,4.557201763101779e-06,1.7371261826603804e-05,6.32482351579465e-05,tokens/kWh,996075.223959584,,s,630,11.33036725616455,0.01798470993041992,0.00038515741050633114,0.01792252826690674,0.018155727767944337,0.018261038684844973,0.019342688789367676,"[0.017776447296142577, 0.01785260772705078, 0.017967103958129883, 0.017999872207641602, 0.01801215934753418, 
0.01788313674926758, 0.017954591751098634, 0.01805238342285156, 0.018027456283569335, 0.01794812774658203, 0.018341920852661134, 0.01929267120361328, 0.01804287910461426, 0.01795686340332031, 0.01846886444091797, 0.017975072860717773, 0.018257535934448243, 0.017838272094726562, 0.017871551513671875, 0.01821251106262207, 0.01820582389831543, 0.017806272506713867, 0.017963008880615236, 0.01818009567260742, 0.017941951751708984, 0.01797177505493164, 0.017903455734252928, 0.017862752914428712, 0.017952831268310546, 0.01817795181274414, 0.017987743377685547, 0.01793132781982422, 0.018024991989135743, 0.017862239837646485, 0.01805571174621582, 0.019036384582519533, 0.01932099151611328, 0.018263904571533204, 0.017987583160400392, 0.017981184005737304, 0.01794278335571289, 0.018112512588500978, 0.018053119659423827, 0.017872991561889647, 0.018040672302246093, 0.01817923164367676, 0.017955455780029297, 0.01794272041320801, 0.018051168441772462, 0.017963071823120118, 0.01792608070373535, 0.017913856506347657, 0.017928192138671875, 0.017806400299072267, 0.0177828483581543, 0.017959808349609373, 0.017836032867431642, 0.017833984375, 0.01784012794494629, 0.017968576431274415, 0.01797587203979492, 0.017907007217407227, 0.017916608810424804, 0.017899999618530272, 0.017955167770385742, 0.017904703140258788, 0.018039072036743164, 0.017988256454467774, 0.01796505546569824, 0.01797711944580078, 0.017936607360839844, 0.01783318328857422, 0.017904415130615234, 0.01812611198425293, 0.01791049575805664, 0.017924095153808595, 0.017876991271972655, 0.017999872207641602, 0.017868799209594728, 0.017920000076293945, 0.018001407623291017, 0.017863168716430664, 0.017855615615844728, 0.017931135177612304, 0.01800147247314453, 0.018008415222167968, 0.01846281623840332, 0.018089632034301757, 0.018024255752563476, 0.017930784225463868, 0.01823513603210449, 0.017946176528930664, 0.018157440185546873, 0.018135616302490234, 0.01793459129333496, 0.017946624755859376, 0.018112512588500978, 0.017987583160400392, 0.01794047927856445, 0.017976959228515624, 0.017820032119750976, 0.017833984375, 0.01798940849304199, 0.017932512283325194, 0.017920000076293945, 0.017897472381591797, 0.017792192459106446, 0.017914464950561523, 0.017847936630249025, 0.017846879959106447, 0.017864511489868163, 0.017850559234619142, 0.017883104324340822, 0.017936416625976562, 0.017829343795776366, 0.017953311920166016, 0.01798700714111328, 0.017899328231811524, 0.01781427192687988, 0.018239488601684572, 0.01799996757507324, 0.01821891212463379, 0.0179748477935791, 0.018245920181274414, 0.01809014320373535, 0.01812393569946289, 0.01782793617248535, 0.017954784393310545, 0.018192256927490234, 0.018038944244384767, 0.018030303955078125, 0.017878944396972657, 0.018053504943847658, 0.01781350326538086, 0.0179006404876709, 0.01780614471435547, 0.01785251235961914, 0.017920000076293945, 0.01861222457885742, 0.017912895202636718, 0.01792300796508789, 0.01796879959106445, 0.017924064636230468, 0.01805120086669922, 0.01812505531311035, 0.017967103958129883, 0.017901567459106444, 0.017613983154296874, 0.017777503967285155, 0.018001920700073244, 0.01803059196472168, 0.01786675262451172, 0.01803468894958496, 0.01801625633239746, 0.017876991271972655, 0.01775926399230957, 0.01778118324279785, 0.01806492805480957, 0.01779199981689453, 0.017835487365722658, 0.01786729621887207, 0.01781760025024414, 0.017925439834594728, 0.01782649612426758, 0.017876575469970703, 0.017938432693481447, 0.017801567077636717, 0.018039968490600584, 0.018019231796264648, 0.01844428825378418, 
0.018132640838623048, 0.017862272262573243, 0.017897600173950194, 0.018112672805786132, 0.018051519393920898, 0.017807231903076173, 0.0179932804107666, 0.01836908721923828, 0.018046335220336916, 0.018220991134643556, 0.018047679901123048, 0.018165760040283203, 0.017970304489135742, 0.01794099235534668, 0.01795929527282715, 0.017981536865234377, 0.018082880020141603, 0.01800489616394043, 0.017967039108276368, 0.017869216918945312, 0.018026975631713866, 0.018040256500244142, 0.017926015853881837, 0.018008672714233398, 0.018515487670898438, 0.018338272094726563, 0.017971200942993162, 0.018118656158447266, 0.018495487213134765, 0.018182144165039063, 0.018279712677001955, 0.018027231216430663, 0.017942527770996093, 0.017933439254760743, 0.01806015968322754, 0.018187904357910158, 0.018016639709472655, 0.017971200942993162, 0.018058656692504883, 0.01808790397644043, 0.018031232833862303, 0.018089088439941406, 0.017912704467773436, 0.017901567459106444, 0.018006111145019533, 0.01797929573059082, 0.018028192520141602, 0.018065439224243165, 0.017913440704345703, 0.017885631561279296, 0.018102943420410158, 0.017927808761596678, 0.017963008880615236, 0.017895008087158205, 0.01806787109375, 0.02174127960205078, 0.018247200012207032, 0.018143999099731446, 0.017968704223632812, 0.01803104019165039, 0.0180316162109375, 0.018189311981201172, 0.01800387191772461, 0.018152671813964842, 0.018053695678710936, 0.01796659278869629, 0.017823616027832032, 0.017982175827026367, 0.017957088470458984, 0.017903615951538086, 0.017856735229492188, 0.017960735321044922, 0.017845375061035156, 0.017798015594482422, 0.017917312622070313, 0.018002016067504883, 0.018219104766845705, 0.018123199462890625, 0.017807104110717772, 0.017838336944580077, 0.017884832382202148, 0.01783024024963379, 0.017913728713989257, 0.01798566436767578, 0.018036735534667968, 0.017898752212524415, 0.01787104034423828, 0.017854560852050783, 0.01790771293640137, 0.018190816879272462, 0.017960960388183594, 0.017984575271606445, 0.017918912887573243, 0.01784739112854004, 0.018040960311889648, 0.01805801582336426, 0.01803468894958496, 0.01802239990234375, 0.01784832000732422, 0.017924095153808595, 0.017862655639648437, 0.017918975830078124, 0.01815616035461426, 0.018175487518310548, 0.017983903884887697, 0.017885663986206054, 0.01801615905761719, 0.017905183792114258, 0.018301504135131836, 0.01805881690979004, 0.017945024490356447, 0.0180402889251709, 0.018136863708496095, 0.01788800048828125, 0.01800396728515625, 0.019195903778076173, 0.018155519485473632, 0.018124063491821288, 0.018033344268798827, 0.01797088050842285, 0.017782623291015626, 0.017859071731567384, 0.01781692886352539, 0.017980064392089844, 0.01794047927856445, 0.017879039764404296, 0.01778892707824707, 0.017909759521484374, 0.017958911895751953, 0.018231296539306642, 0.018026336669921875, 0.017949888229370117, 0.017817951202392577, 0.017879680633544923, 0.018350080490112306, 0.017758560180664063, 0.017884832382202148, 0.017887231826782226, 0.017870847702026366, 0.01780496025085449, 0.01776265525817871, 0.017827840805053712, 0.017701152801513673, 0.01774131202697754, 0.017895647048950195, 0.01776473617553711, 0.017760255813598632, 0.018071136474609374, 0.01785628890991211, 0.01789401626586914, 0.017616159439086915, 0.017799200057983397, 0.01774563217163086, 0.01770787239074707, 0.01783616065979004, 0.017887231826782226, 0.017692319869995116, 0.017965408325195314, 0.017769472122192383, 0.017714176177978515, 0.017754112243652344, 0.017735679626464843, 0.017786144256591797, 
0.017830623626708984, 0.01775542449951172, 0.017776447296142577, 0.017803232192993165, 0.017703807830810547, 0.017753728866577147, 0.017783231735229492, 0.017729536056518554, 0.017688575744628905, 0.01790492820739746, 0.017842912673950197, 0.017946399688720704, 0.017856735229492188, 0.018004287719726564, 0.017868480682373046, 0.017707008361816406, 0.01769503974914551, 0.01769599914550781, 0.017821792602539063, 0.017680736541748048, 0.017704959869384765, 0.017844224929809572, 0.017808639526367187, 0.017670368194580077, 0.017801759719848632, 0.01776608085632324, 0.018934080123901367, 0.02215116882324219, 0.021310943603515625, 0.01806800079345703, 0.018046207427978515, 0.01785113525390625, 0.017923871994018556, 0.01782806396484375, 0.017889280319213868, 0.017985151290893554, 0.018088319778442382, 0.01805516815185547, 0.0179814395904541, 0.017820768356323242, 0.017927072525024415, 0.01777004814147949, 0.017961408615112303, 0.017876991271972655, 0.017751071929931642, 0.017913856506347657, 0.017887231826782226, 0.018011295318603515, 0.018041568756103514, 0.017875072479248046, 0.017855712890625, 0.01786140823364258, 0.017913856506347657, 0.017762304306030274, 0.017868480682373046, 0.017842496871948242, 0.017686111450195312, 0.01797699165344238, 0.017941247940063475, 0.017836032867431642, 0.017815423965454102, 0.017928319931030272, 0.0177674560546875, 0.018015199661254883, 0.017993728637695314, 0.01785980796813965, 0.017658016204833985, 0.017935327529907227, 0.0178187198638916, 0.017963584899902345, 0.017954816818237306, 0.017993728637695314, 0.01780940818786621, 0.017864704132080078, 0.017878879547119142, 0.017712799072265625, 0.017825983047485353, 0.017762624740600585, 0.017964576721191405, 0.017854656219482422, 0.01815567970275879, 0.01791187286376953, 0.017859712600708007, 0.01794476890563965, 0.018987775802612305, 0.018010112762451173, 0.018075424194335936, 0.017860832214355468, 0.01792518424987793, 0.017820608139038085, 0.017909759521484374, 0.01806460762023926, 0.01782863998413086, 0.01782579231262207, 0.017864704132080078, 0.01798963165283203, 0.01782374382019043, 0.017770496368408203, 0.017956287384033202, 0.018037216186523437, 0.01791804885864258, 0.017780736923217775, 0.017876991271972655, 0.017801055908203123, 0.017766559600830078, 0.017893119812011717, 0.018024639129638673, 0.01810233688354492, 0.01781350326538086, 0.017960927963256837, 0.01795689582824707, 0.017916959762573244, 0.017746912002563477, 0.017730783462524415, 0.017754911422729492, 0.01779484748840332, 0.017913951873779296, 0.017751392364501954, 0.017938880920410155, 0.017886783599853514, 0.017741952896118164, 0.017867424011230468, 0.017919071197509767, 0.017797183990478516, 0.017738367080688478, 0.017880767822265626, 0.01780169677734375, 0.017791040420532228, 0.017827840805053712, 0.018005823135375975, 0.017899391174316406, 0.017913728713989257, 0.017949119567871093, 0.017966815948486328, 0.01785475158691406, 0.017829887390136717, 0.01866547203063965, 0.0179814395904541, 0.017890687942504882, 0.01780374336242676, 0.017731327056884766, 0.017740192413330077, 0.017731231689453127, 0.01780352020263672, 0.017862752914428712, 0.017876991271972655, 0.017829887390136717, 0.01783135986328125, 0.017816127777099608, 0.01777257537841797, 0.017797088623046874, 0.017880096435546874, 0.017795135498046875, 0.017752992630004884, 0.0178606071472168, 0.017944576263427735, 0.01781760025024414, 0.017724895477294923, 0.017621536254882813, 0.017764095306396485, 0.01775846481323242, 0.01785036849975586, 0.0178155517578125, 0.017887231826782226, 
0.017913791656494142, 0.017817663192749023, 0.017960960388183594, 0.017885183334350584, 0.018008064270019532, 0.017952768325805665, 0.017906784057617187, 0.018125247955322266, 0.019351551055908203, 0.01812819290161133, 0.018086591720581056, 0.017913856506347657, 0.017958911895751953, 0.0178606071472168, 0.01775763130187988, 0.017774784088134765, 0.017678720474243164, 0.017819648742675782, 0.01780873680114746, 0.01777116775512695, 0.017657855987548828, 0.01769094467163086, 0.017741504669189452, 0.020137983322143553, 0.018218591690063478, 0.01827471923828125, 0.018255872726440428, 0.01783718490600586, 0.017895807266235353, 0.018217376708984375, 0.018077600479125978, 0.017856704711914063, 0.01779622459411621, 0.017633216857910156, 0.01783635139465332, 0.017793664932250975, 0.017758207321166994, 0.01769862365722656, 0.017724767684936523, 0.017804128646850586, 0.01787017631530762, 0.01807427215576172, 0.018005088806152345, 0.017767328262329102, 0.01789132881164551, 0.017915552139282226, 0.01781907272338867, 0.017834112167358397, 0.017895200729370116, 0.017777183532714843, 0.017633760452270508, 0.017836032867431642, 0.01784832000732422, 0.017836032867431642, 0.018141183853149414, 0.01795680046081543, 0.018010175704956055, 0.01808793640136719, 0.02124595260620117, 0.0209421443939209, 0.01799193572998047, 0.018289087295532226, 0.018163711547851562, 0.01787494468688965, 0.017917951583862304, 0.018083839416503905, 0.017922048568725587, 0.01789695930480957, 0.018020383834838866, 0.01776265525817871, 0.018179935455322267, 0.017915584564208983, 0.017943519592285156, 0.017843488693237305, 0.01814159965515137, 0.017902015686035156, 0.017843456268310548, 0.017864992141723633, 0.017951072692871092, 0.01827020835876465, 0.017928192138671875, 0.019091455459594727, 0.019060640335083007, 0.01799337577819824, 0.018213312149047853, 0.01811827278137207, 0.017930335998535156, 0.017872255325317384, 0.017937248229980468, 0.017958272933959962, 0.01791043281555176, 0.0178176326751709, 0.017780736923217775, 0.017820991516113282, 0.01787516784667969, 0.017856992721557618, 0.017902944564819338, 0.017790719985961913, 0.01782467269897461, 0.017833984375, 0.01797222328186035, 0.01795097541809082, 0.017988351821899413, 0.017956832885742187, 0.01793948745727539, 0.017943328857421875, 0.017907903671264647, 0.018012191772460936, 0.01799772834777832, 0.017904800415039064, 0.017879999160766602, 0.017895263671875, 0.018043039321899414, 0.018058303833007813, 0.01792505645751953, 0.018229248046875, 0.018190336227416993, 0.01786835289001465, 0.017983488082885742, 0.018006048202514648, 0.01799193572998047, 0.018036895751953125, 0.017993728637695314, 0.017999872207641602, 0.01810588836669922, 0.017895904541015625, 0.0178768310546875, 0.017928352355957033, 0.018072608947753907, 0.01791689682006836, 0.018017824172973634, 0.017911712646484376, 0.017989791870117188]",tokens/s,55.60278724921594,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 651, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 423, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 664, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Qwen/Qwen-14B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-14B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1521.627136,1569.652736,0.0,1174.40512,1147.036672,s,1,9.174837890625,9.174837890625,0.0,9.174837890625,9.174837890625,9.174837890625,9.174837890625,[9.174837890625],,kWh,5.877842605833052e-05,6.457500539367533e-06,2.1065850186002127e-05,8.630177678370019e-05,,MB,1591.238656,1640.955904,0.0,1231.028224,1064.778752,s,10,0.5417076148986816,0.05417076148986817,0.0002991612835500622,0.054034416198730464,0.054629844665527345,0.05464852142333985,0.054663462829589844,"[0.05466719818115234, 0.05395267105102539, 0.054403553009033205, 0.05388374328613281, 0.05387664031982422, 0.05386707305908203, 0.0539851188659668, 0.054625694274902346, 0.05436220932006836, 0.05408371353149414]",tokens/s,4725.7965913564085,kWh,1.6224050773842514e-06,1.7891919610032442e-07,1.0721351169666835e-06,2.8734593904512597e-06,tokens/kWh,89091219.05488169,MB,1595.375616,1643.053056,0.0,1233.125376,1119.936512,s,10,11.516790893554688,1.1516790893554687,0.014175968572519957,1.157119384765625,1.1659436767578124,1.1674348510742187,1.168627790527344,"[1.156865966796875, 1.1615103759765626, 1.1206495361328126, 1.168926025390625, 1.13904345703125, 1.1395523681640625, 1.157372802734375, 1.147115234375, 1.160142822265625, 1.1656123046875]",tokens/s,54.70273844709435,kWh,3.264305194969919e-05,3.599656571623945e-06,1.5546405955633424e-05,5.1789114476956555e-05,tokens/kWh,1216471.8519763781,,s,630,11.513171941757191,0.0182748760980273,0.0003702732853264695,0.018321136474609374,0.018600220489501954,0.018759527015686034,0.019429015655517582,"[0.018496543884277343, 0.018222143173217773, 0.018337696075439454, 0.018274303436279296, 0.018227231979370116, 0.01834185600280762, 0.01832931137084961, 0.018333984375, 0.018300640106201173, 
0.01833919906616211, 0.0183855037689209, 0.01843846321105957, 0.018487167358398438, 0.018321792602539064, 0.018392383575439455, 0.018314720153808594, 0.018430368423461914, 0.018307647705078124, 0.018386943817138672, 0.01954636764526367, 0.018539424896240234, 0.018700159072875977, 0.01852720069885254, 0.018660928726196287, 0.018301376342773436, 0.018692096710205077, 0.018381023406982423, 0.01868281555175781, 0.018606815338134765, 0.018315168380737306, 0.018128351211547853, 0.01819753646850586, 0.018078752517700195, 0.01788163185119629, 0.017928159713745118, 0.01817807960510254, 0.019560543060302735, 0.017892608642578123, 0.018021120071411132, 0.017933727264404297, 0.017898143768310545, 0.01781711959838867, 0.01778086471557617, 0.017873247146606444, 0.018137407302856446, 0.018095808029174806, 0.01839286422729492, 0.01839344024658203, 0.018333568572998046, 0.018400543212890624, 0.018452287673950196, 0.01827097511291504, 0.018306432723999025, 0.018232128143310548, 0.018562047958374024, 0.018413728713989257, 0.01830790328979492, 0.01802649688720703, 0.018167808532714845, 0.019142112731933593, 0.019446304321289062, 0.018234367370605468, 0.01837059211730957, 0.01831907272338867, 0.01845155143737793, 0.018242752075195313, 0.0184006404876709, 0.0183110408782959, 0.018409631729125978, 0.018238752365112305, 0.018168832778930662, 0.018165504455566406, 0.018143455505371095, 0.018368383407592774, 0.018282655715942384, 0.01828000068664551, 0.018264480590820312, 0.018645023345947264, 0.018462047576904297, 0.01852073669433594, 0.018412864685058594, 0.018557632446289062, 0.01840742492675781, 0.01846272087097168, 0.01846272087097168, 0.01858470344543457, 0.018535295486450196, 0.018395135879516602, 0.018266111373901366, 0.018489343643188477, 0.019142656326293944, 0.018550783157348632, 0.01850534439086914, 0.01858188819885254, 0.01838243293762207, 0.018948511123657228, 0.018390975952148437, 0.018322591781616212, 0.01827699279785156, 0.018274816513061523, 0.018249216079711913, 0.0182410888671875, 0.01827449607849121, 0.018307392120361327, 0.018492704391479493, 0.01855788803100586, 0.018755584716796874, 0.018665504455566407, 0.018591487884521484, 0.018516191482543944, 0.01858870315551758, 0.018467807769775392, 0.01842585563659668, 0.018520063400268554, 0.018509824752807616, 0.018337791442871093, 0.018472032546997072, 0.01823766326904297, 0.01841632080078125, 0.01852579116821289, 0.018423999786376953, 0.018179359436035155, 0.018650047302246092, 0.01828643226623535, 0.018331327438354493, 0.01842598342895508, 0.018340639114379883, 0.01820035171508789, 0.018094175338745116, 0.018360063552856444, 0.018367647171020508, 0.018084863662719726, 0.017983423233032227, 0.017958080291748047, 0.017991743087768554, 0.017875232696533204, 0.017993631362915038, 0.017659839630126954, 0.017674943923950196, 0.017479871749877928, 0.017536832809448243, 0.017511808395385742, 0.017820287704467773, 0.017499263763427735, 0.01753379249572754, 0.01755753517150879, 0.017525760650634766, 0.017783775329589843, 0.017527040481567384, 0.01758595275878906, 0.01762303924560547, 0.01767737579345703, 0.017633472442626953, 0.01770572853088379, 0.01773583984375, 0.017748096466064452, 0.017774303436279296, 0.017727487564086913, 0.01803209686279297, 0.017801759719848632, 0.017958976745605468, 0.017848608016967773, 0.017886432647705078, 0.0178090877532959, 0.017840351104736328, 0.017855007171630858, 0.018051071166992186, 0.017905183792114258, 0.017913503646850584, 0.017873727798461914, 0.017768447875976562, 0.017580032348632812, 0.01759548759460449, 
0.01757702445983887, 0.01755945587158203, 0.01758201599121094, 0.017979040145874023, 0.017885536193847657, 0.017957887649536132, 0.017752128601074217, 0.017714111328125, 0.01759833526611328, 0.01760063934326172, 0.017564672470092774, 0.017586624145507813, 0.017543743133544922, 0.01784832000732422, 0.01761075210571289, 0.017762304306030274, 0.018194175720214843, 0.018288799285888672, 0.01822719955444336, 0.018292255401611328, 0.018169376373291017, 0.01840336036682129, 0.019065759658813478, 0.02022777557373047, 0.019386688232421876, 0.01820579147338867, 0.018299007415771486, 0.018180896759033203, 0.01822096061706543, 0.018063711166381835, 0.01829043197631836, 0.01984716796875, 0.019367231369018554, 0.018485248565673826, 0.01829782485961914, 0.018226015090942384, 0.018361215591430665, 0.018366464614868162, 0.018692096710205077, 0.018499584197998048, 0.01843404769897461, 0.01836851119995117, 0.018388992309570314, 0.01842585563659668, 0.01869808006286621, 0.01913667106628418, 0.01914291191101074, 0.018812671661376953, 0.01844223976135254, 0.01843600082397461, 0.018398847579956055, 0.01847542381286621, 0.018434112548828124, 0.018868223190307617, 0.01836851119995117, 0.018440191268920898, 0.018437183380126954, 0.01856812858581543, 0.01888876724243164, 0.01857734489440918, 0.01845622444152832, 0.018479455947875978, 0.018548063278198242, 0.018498207092285158, 0.018484800338745118, 0.01905504035949707, 0.01847091293334961, 0.01862575912475586, 0.018534496307373048, 0.018560800552368164, 0.018583456039428712, 0.01860009574890137, 0.018366592407226563, 0.018410207748413086, 0.01831430435180664, 0.01832032012939453, 0.01836444854736328, 0.018283679962158204, 0.018248512268066407, 0.01836412811279297, 0.01833366394042969, 0.018542112350463866, 0.01830771255493164, 0.018352703094482423, 0.01860748863220215, 0.0184400634765625, 0.018434560775756836, 0.018614143371582032, 0.018347808837890625, 0.01881532859802246, 0.018511871337890624, 0.018388992309570314, 0.018315263748168945, 0.01838489532470703, 0.018350080490112306, 0.01844633674621582, 0.018254880905151368, 0.018329856872558593, 0.018445024490356444, 0.018431232452392577, 0.01827619171142578, 0.01832048034667969, 0.01823315238952637, 0.018420896530151366, 0.018191200256347656, 0.018241695404052734, 0.018271839141845703, 0.01820025634765625, 0.018149471282958983, 0.018106847763061523, 0.01803468894958496, 0.018061664581298827, 0.017942176818847657, 0.017963008880615236, 0.017881088256835938, 0.01787494468688965, 0.01779097557067871, 0.01846643257141113, 0.01795110321044922, 0.01797887992858887, 0.0179368953704834, 0.017784927368164064, 0.017620416641235353, 0.01780169677734375, 0.017593696594238283, 0.017632095336914063, 0.01760598373413086, 0.017749568939208985, 0.017718175888061523, 0.017852224349975587, 0.017643007278442382, 0.01769696044921875, 0.01757222366333008, 0.017655935287475586, 0.01763942337036133, 0.017719295501708983, 0.017936384201049805, 0.017688575744628905, 0.01762918472290039, 0.017688575744628905, 0.017636800765991213, 0.017633855819702147, 0.01811075210571289, 0.018186336517333986, 0.018018463134765624, 0.017983808517456054, 0.017743871688842772, 0.017648960113525392, 0.017669855117797853, 0.01750239944458008, 0.01754185676574707, 0.017592031478881835, 0.01757798385620117, 0.01762665557861328, 0.017916736602783204, 0.017788511276245117, 0.017915712356567384, 0.017659584045410157, 0.017725343704223632, 0.01780633544921875, 0.017948448181152345, 0.018820671081542967, 0.01916729545593262, 0.0181824951171875, 0.018285919189453125, 
0.01803766441345215, 0.017979391098022462, 0.017935712814331053, 0.01807744026184082, 0.017956832885742187, 0.018332128524780274, 0.018667999267578124, 0.018171295166015625, 0.018061279296875, 0.018168447494506836, 0.018218048095703127, 0.01843827247619629, 0.018371391296386718, 0.018145280838012694, 0.01815337562561035, 0.018138879776000975, 0.018137439727783203, 0.01820876884460449, 0.018237056732177733, 0.018016895294189452, 0.017993471145629884, 0.018100223541259765, 0.018019872665405272, 0.01795145606994629, 0.018171648025512695, 0.01839030456542969, 0.018147199630737305, 0.01806764793395996, 0.017955167770385742, 0.01793846321105957, 0.01792848014831543, 0.01789132881164551, 0.017957952499389647, 0.018260927200317384, 0.018457632064819336, 0.01839833641052246, 0.01852400016784668, 0.018380800247192384, 0.018348031997680665, 0.018398656845092773, 0.018251039505004882, 0.01826479911804199, 0.018163551330566408, 0.018497695922851564, 0.018251775741577148, 0.018294111251831054, 0.018309791564941405, 0.018241535186767577, 0.018274303436279296, 0.018292800903320312, 0.01829190444946289, 0.01842252731323242, 0.018343967437744142, 0.018341119766235352, 0.018442975997924806, 0.018359584808349608, 0.018364160537719727, 0.018234336853027344, 0.018904895782470704, 0.018556447982788087, 0.018555551528930663, 0.018493280410766602, 0.01854275131225586, 0.01843836784362793, 0.018530080795288086, 0.018397184371948243, 0.018565120697021483, 0.01863884735107422, 0.01815337562561035, 0.018669664382934572, 0.018147104263305663, 0.01802262306213379, 0.018108415603637695, 0.018050464630126953, 0.018236000061035155, 0.018042367935180666, 0.018299392700195313, 0.018175039291381836, 0.01839529609680176, 0.01826211166381836, 0.01841632080078125, 0.018418720245361328, 0.018336864471435548, 0.018269887924194338, 0.018486719131469726, 0.018410240173339844, 0.018297119140625, 0.01836345672607422, 0.018343839645385742, 0.018323583602905272, 0.018411840438842773, 0.018329919815063475, 0.018305023193359374, 0.01840278434753418, 0.018389535903930665, 0.018337791442871093, 0.01839308738708496, 0.01874502372741699, 0.01837116813659668, 0.01843312072753906, 0.018551679611206056, 0.018544384002685547, 0.01828175926208496, 0.018601343154907225, 0.018679712295532228, 0.01848940849304199, 0.01838287925720215, 0.01839366340637207, 0.018312799453735353, 0.01843596839904785, 0.01926633644104004, 0.01834828758239746, 0.018445152282714844, 0.018559904098510743, 0.018511871337890624, 0.018502880096435546, 0.018432416915893556, 0.018174400329589845, 0.018030336380004883, 0.018011455535888673, 0.01810051155090332, 0.017983808517456054, 0.01877020835876465, 0.017947967529296876, 0.018074304580688476, 0.018083839416503905, 0.018044927597045898, 0.017809343338012696, 0.017829824447631835, 0.017886911392211914, 0.017740320205688477, 0.017788671493530275, 0.01773379135131836, 0.01764352035522461, 0.01761484718322754, 0.018059263229370116, 0.017761407852172853, 0.017931135177612304, 0.01799273681640625, 0.017998815536499024, 0.017924095153808595, 0.01909350395202637, 0.018093088150024413, 0.01804115104675293, 0.01800668716430664, 0.0179814395904541, 0.01803868865966797, 0.018141151428222656, 0.018350208282470703, 0.01818828773498535, 0.018349088668823243, 0.018006879806518553, 0.018018367767333985, 0.017964544296264647, 0.018038911819458006, 0.017934783935546875, 0.018345983505249023, 0.019271263122558592, 0.02040227127075195, 0.01827027130126953, 0.018086143493652344, 0.01784809684753418, 0.017931711196899413, 0.017865631103515626, 
0.01794758415222168, 0.0181214714050293, 0.018433792114257812, 0.018362367630004883, 0.018323455810546875, 0.018280448913574218, 0.018315391540527345, 0.018317184448242187, 0.018227264404296874, 0.018206783294677734, 0.0182205753326416, 0.01874185562133789, 0.0183191032409668, 0.018155519485473632, 0.01821286392211914, 0.018266111373901366, 0.018272096633911133, 0.018477439880371094, 0.018488351821899413, 0.018399999618530272, 0.018319360733032225, 0.01826153564453125, 0.018176576614379884, 0.018466720581054686, 0.018340160369873047, 0.018284351348876952, 0.01839414405822754, 0.018350208282470703, 0.018494207382202147, 0.01834185600280762, 0.018422847747802736, 0.01844758415222168, 0.018483936309814455, 0.018498559951782227, 0.018534400939941405, 0.018433088302612304, 0.018717567443847657, 0.018488927841186522, 0.018506336212158202, 0.01843596839904785, 0.01853660774230957, 0.018480480194091795, 0.018864639282226564, 0.018728384017944334, 0.018506303787231445, 0.018597888946533202, 0.01847500801086426, 0.018354047775268556, 0.01840959930419922, 0.018343936920166014, 0.018406911849975584, 0.01826255989074707, 0.018429920196533202, 0.01836025619506836, 0.0187761287689209, 0.018327552795410155, 0.018390399932861328, 0.01831999969482422, 0.019230335235595704, 0.01838323211669922, 0.01835215950012207, 0.018264192581176758, 0.018208223342895506, 0.018151039123535155, 0.018210624694824217, 0.01848179244995117, 0.018748735427856444, 0.018473344802856444, 0.018477376937866212, 0.018558879852294922, 0.018548831939697266, 0.018446271896362304, 0.018501632690429686, 0.01839017677307129, 0.018666400909423828, 0.01861631965637207, 0.018472959518432617, 0.018502784729003907, 0.018977535247802733, 0.019945247650146485, 0.018634592056274414, 0.018618560791015624, 0.018594432830810546, 0.01856070327758789, 0.018503040313720704, 0.01868579292297363, 0.01852672004699707, 0.018519872665405272, 0.018384767532348634, 0.01849318313598633, 0.01833996772766113, 0.018376735687255858, 0.018328224182128906, 0.018434080123901367, 0.018482208251953125, 0.01847747230529785, 0.018309247970581054, 0.01846931266784668, 0.018431999206542968, 0.018394975662231444, 0.018317567825317384, 0.018255231857299804, 0.018248191833496095, 0.018351648330688478, 0.018377216339111328, 0.018762752532958983, 0.01860416030883789, 0.018494592666625977, 0.018346944808959962, 0.01830940818786621, 0.018315391540527345, 0.018483776092529297, 0.018567007064819337, 0.0184967041015625, 0.018387775421142578, 0.018423807144165038, 0.018386943817138672, 0.01843152046203613, 0.01866908836364746, 0.01838073539733887, 0.018529279708862305, 0.01833193588256836, 0.018412544250488282, 0.01840787124633789, 0.018359615325927735, 0.01839571189880371, 0.018364160537719727, 0.01842857551574707]",tokens/s,54.719933237082024,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File 
""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 651, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 423, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 664, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Qwen/Qwen-72B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-72B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in 
_apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 96.12 MiB is free. Process 291711 has 14.64 GiB memory in use. Of the allocated memory 14.23 GiB is allocated by PyTorch, and 323.24 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4088, in from_pretrained hf_quantizer.postprocess_model(model) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model return 
self._process_model_after_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 460, in post_init_awq_exllama_modules model = exllamav2_post_init( File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 193, in exllamav2_post_init submodule.post_init(scratch_space=model.scratch_spaces[device]) File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 78, in post_init self.q_handle = exlv2_ext.make_q_matrix( RuntimeError: q_weight and gptq_scales have incompatible shapes " 4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1559.187456,1861.156864,0.0,1465.909248,1358.169088,s,1,9.04726953125,9.04726953125,0.0,9.04726953125,9.04726953125,9.04726953125,9.04726953125,[9.04726953125],,kWh,5.35661705249898e-05,5.901440951654836e-06,1.904251523400513e-05,7.851012671064976e-05,,MB,1643.769856,1882.128384,0.0,1472.200704,1356.544512,s,10,0.5091492156982422,0.050914921569824224,0.0001894923756759238,0.050911760330200194,0.05102181968688965,0.05121476535797119,0.051369121894836424,"[0.051407711029052734, 0.05073353576660156, 0.05079391860961914, 0.050764545440673825, 0.05096934509277344, 0.0509356803894043, 0.05097894287109375, 0.050953472137451175, 0.05072422409057617, 0.05088784027099609]",tokens/s,5027.9955680364565,kWh,1.540151951425279e-06,1.6985144640651348e-07,1.0198487690947573e-06,2.7298521669265497e-06,tokens/kWh,93777971.97282736,MB,1654.398976,1882.128384,0.0,1472.200704,1409.728,s,10,14.181490478515625,1.4181490478515626,0.011949045983867831,1.4157312622070313,1.4365609375,1.4378442504882814,1.4388709008789062,"[1.4074014892578126, 1.4060621337890624, 1.4138895263671876, 1.4052296142578125, 1.4362757568359374, 1.4266898193359374, 1.4391275634765626, 1.4070614013671876, 1.417572998046875, 1.42218017578125]",tokens/s,44.424103443458506,kWh,4.134897928982142e-05,4.56039416331762e-06,1.8563500230904383e-05,6.447287368404344e-05,tokens/kWh,977155.1413814527,,s,630,14.178197593688962,0.022505075545538038,0.00040603970772232217,0.022582143783569333,0.022888503074645995,0.02301265926361084,0.02379591606140137,"[0.022025472640991212, 0.024424575805664064, 0.022487903594970705, 0.021987327575683592, 0.02176527976989746, 0.021830495834350587, 0.021835775375366212, 0.021753856658935547, 0.02182476806640625, 0.021838592529296874, 0.022016000747680665, 0.02205286407470703, 0.021999488830566405, 0.021917823791503907, 0.02184339141845703, 0.02192348861694336, 0.02226883125305176, 0.02215705680847168, 0.02219647979736328, 0.022005760192871093, 0.02232499122619629, 0.022150783538818358, 0.02206300735473633, 
0.022018783569335936, 0.02191974449157715, 0.021850112915039063, 0.022033536911010742, 0.022864288330078125, 0.021902912139892577, 0.02205174446105957, 0.02206105613708496, 0.02249648094177246, 0.022553375244140625, 0.02262575912475586, 0.022878751754760743, 0.022747135162353514, 0.022773759841918945, 0.022775583267211914, 0.022773056030273436, 0.02280745506286621, 0.022767616271972657, 0.02275328063964844, 0.022804479598999023, 0.02289254379272461, 0.022929311752319336, 0.022975648880004883, 0.02287481689453125, 0.02300339126586914, 0.022779903411865234, 0.0225218563079834, 0.022543680191040038, 0.022575584411621094, 0.022440160751342773, 0.022427648544311524, 0.022368064880371095, 0.022573247909545898, 0.022187679290771485, 0.021956960678100587, 0.021991424560546875, 0.021909280776977538, 0.021983455657958985, 0.022040576934814454, 0.022013343811035157, 0.022180288314819337, 0.022029375076293944, 0.022129184722900392, 0.022351999282836914, 0.0223251838684082, 0.02210646438598633, 0.022005760192871093, 0.021977088928222657, 0.021941343307495118, 0.021882976531982422, 0.02197587203979492, 0.021921791076660157, 0.022042623519897463, 0.022045984268188476, 0.022045408248901367, 0.02203788757324219, 0.022080127716064452, 0.022013952255249023, 0.021942272186279296, 0.021981184005737304, 0.021897216796875, 0.022409215927124023, 0.023537664413452147, 0.022390272140502928, 0.022047231674194336, 0.02215116882324219, 0.022595552444458007, 0.022800416946411134, 0.02263859176635742, 0.022691776275634765, 0.023191616058349608, 0.022660991668701173, 0.0226243839263916, 0.02269958305358887, 0.022587007522583007, 0.02265990447998047, 0.022719648361206053, 0.022598112106323242, 0.02258367919921875, 0.022648832321166993, 0.022747135162353514, 0.022845439910888672, 0.022841344833374022, 0.022548128128051757, 0.02243414306640625, 0.022372352600097657, 0.02271027183532715, 0.02254643249511719, 0.02230271911621094, 0.022202367782592772, 0.022136831283569337, 0.022349279403686525, 0.022045215606689452, 0.02214271926879883, 0.022296831130981444, 0.02212633514404297, 0.02199782371520996, 0.02186147117614746, 0.02204742431640625, 0.021997791290283203, 0.022111295700073242, 0.02199443244934082, 0.021991680145263672, 0.02243027114868164, 0.022374399185180666, 0.022461856842041016, 0.022299232482910155, 0.022331167221069335, 0.02237017631530762, 0.02234511947631836, 0.02222585678100586, 0.02230790328979492, 0.02226652717590332, 0.022452512741088868, 0.0222740478515625, 0.02230271911621094, 0.022353919982910156, 0.02228144073486328, 0.02257695960998535, 0.022541120529174806, 0.02243190383911133, 0.02247043228149414, 0.022374624252319335, 0.022322528839111327, 0.0224586238861084, 0.02235219192504883, 0.022370208740234376, 0.02240121650695801, 0.02275071907043457, 0.02273126411437988, 0.02265497589111328, 0.02288844871520996, 0.02272051239013672, 0.022824960708618162, 0.022879392623901366, 0.0226549129486084, 0.02276972770690918, 0.022592351913452147, 0.02269491195678711, 0.022760448455810548, 0.02269718360900879, 0.022734880447387695, 0.02268441581726074, 0.022773439407348633, 0.022752960205078124, 0.022680000305175783, 0.022331584930419923, 0.02239897537231445, 0.02233344078063965, 0.022540288925170897, 0.022743040084838868, 0.022401023864746093, 0.02230886459350586, 0.022353567123413087, 0.022224767684936524, 0.02214681625366211, 0.02213711929321289, 0.02232979202270508, 0.02206924819946289, 0.022071296691894532, 0.022130687713623046, 0.022261760711669923, 0.022223871231079103, 0.02202012825012207, 
0.021980127334594726, 0.022016000747680665, 0.022521568298339845, 0.0226014404296875, 0.022661695480346678, 0.022699712753295898, 0.022622528076171874, 0.02255580711364746, 0.022566944122314452, 0.022553407669067382, 0.02259328079223633, 0.022585119247436523, 0.022950368881225584, 0.02253004837036133, 0.022605823516845702, 0.02267136001586914, 0.02269388771057129, 0.022617120742797852, 0.02236675262451172, 0.022175647735595702, 0.022188575744628906, 0.022128448486328126, 0.022143104553222655, 0.022090879440307618, 0.022109119415283204, 0.021996639251708985, 0.02191187286376953, 0.021719648361206056, 0.021909503936767577, 0.02170809555053711, 0.021684928894042967, 0.02166783905029297, 0.02184806442260742, 0.021761920928955077, 0.021956735610961915, 0.022029504776000977, 0.022192960739135743, 0.0221265926361084, 0.022183935165405275, 0.022234176635742186, 0.022211519241333008, 0.022177791595458983, 0.022597631454467772, 0.022261760711669923, 0.02209587287902832, 0.022064191818237305, 0.0220067195892334, 0.022370304107666016, 0.02206278419494629, 0.02201398468017578, 0.02213257598876953, 0.022106559753417968, 0.022130239486694337, 0.022097471237182618, 0.02212505531311035, 0.02219251251220703, 0.022603776931762694, 0.022711423873901366, 0.022727615356445314, 0.02267513656616211, 0.02264703941345215, 0.022657024383544923, 0.022636415481567383, 0.022685728073120116, 0.022685279846191408, 0.022894176483154297, 0.022700223922729492, 0.022828800201416016, 0.02282748794555664, 0.022845439910888672, 0.023136255264282226, 0.02306153678894043, 0.022729375839233398, 0.022667583465576173, 0.02257846450805664, 0.022614688873291017, 0.022614080429077147, 0.022498336791992188, 0.022250463485717773, 0.022112255096435548, 0.02211020851135254, 0.023009279251098632, 0.022681215286254882, 0.0228089599609375, 0.022718143463134766, 0.022681312561035158, 0.02267401695251465, 0.02265100860595703, 0.022617984771728515, 0.02268083190917969, 0.02249372863769531, 0.02262873649597168, 0.022667104721069337, 0.022677312850952147, 0.022606016159057617, 0.022679040908813477, 0.022497791290283203, 0.022652799606323243, 0.022810111999511717, 0.02309744071960449, 0.022707775115966798, 0.022531040191650392, 0.024663904190063476, 0.023474016189575196, 0.022782207489013672, 0.022845504760742187, 0.022794240951538085, 0.02269171142578125, 0.0227063045501709, 0.022633695602416994, 0.02263734436035156, 0.024266271591186522, 0.023129823684692383, 0.022687807083129882, 0.024545984268188478, 0.022742080688476562, 0.022741952896118165, 0.02274086380004883, 0.0225316162109375, 0.02318191909790039, 0.022849536895751952, 0.022609920501708985, 0.022771711349487304, 0.022652032852172852, 0.022637439727783204, 0.022767616271972657, 0.02250294494628906, 0.02233558464050293, 0.022548479080200197, 0.02246019172668457, 0.022376672744750976, 0.023231584548950194, 0.022500255584716796, 0.022660991668701173, 0.02270220756530762, 0.02299622344970703, 0.02256972885131836, 0.022374399185180666, 0.022278144836425783, 0.02215116882324219, 0.022278144836425783, 0.02255183982849121, 0.02218448066711426, 0.022164800643920898, 0.022283136367797853, 0.022401023864746093, 0.02247372817993164, 0.022764543533325195, 0.022657024383544923, 0.022560768127441407, 0.022612127304077148, 0.022603103637695313, 0.02255238342285156, 0.022575456619262694, 0.022579008102416993, 0.02257155227661133, 0.023085056304931642, 0.022783424377441405, 0.02270470428466797, 0.022714111328125, 0.022560672760009767, 0.02268172836303711, 0.022687679290771486, 0.02260201644897461, 
0.022664352416992186, 0.022698272705078126, 0.022561023712158203, 0.022602048873901368, 0.022591487884521484, 0.022684671401977538, 0.022704288482666014, 0.022637407302856447, 0.02270207977294922, 0.022606975555419923, 0.022790815353393553, 0.022673631668090822, 0.02267670440673828, 0.02262915229797363, 0.022773759841918945, 0.022906688690185546, 0.022951839447021484, 0.02275542449951172, 0.0229050235748291, 0.02272051239013672, 0.023015424728393553, 0.023259136199951173, 0.02276291275024414, 0.022679584503173828, 0.02269241523742676, 0.022626304626464845, 0.022560768127441407, 0.022639232635498045, 0.022566911697387695, 0.0226693115234375, 0.022580255508422853, 0.022606367111206054, 0.02264313507080078, 0.022689792633056642, 0.02265907287597656, 0.022773759841918945, 0.022673023223876952, 0.022634016036987305, 0.022575040817260743, 0.022557823181152344, 0.02268137550354004, 0.022603776931762694, 0.022556671142578123, 0.022484512329101564, 0.02264931106567383, 0.022716032028198243, 0.02269808006286621, 0.022745376586914064, 0.022648832321166993, 0.024188383102416992, 0.023607423782348633, 0.022935680389404297, 0.022765087127685546, 0.022803199768066405, 0.02371148872375488, 0.022826400756835938, 0.022664031982421874, 0.022700031280517577, 0.02264192008972168, 0.022629119873046874, 0.02262835121154785, 0.0226810245513916, 0.022730783462524416, 0.023067167282104492, 0.022788095474243163, 0.022662656784057617, 0.022745311737060545, 0.022775680541992187, 0.023114143371582033, 0.023830400466918946, 0.022810752868652345, 0.022872032165527342, 0.02282908821105957, 0.022794240951538085, 0.022996992111206056, 0.022949472427368164, 0.0231243839263916, 0.023142400741577147, 0.02348236846923828, 0.022992767333984376, 0.022977983474731446, 0.02319152069091797, 0.022915807723999024, 0.02281062316894531, 0.02315216064453125, 0.02276710319519043, 0.022629344940185547, 0.022706239700317384, 0.022527008056640624, 0.02256121635437012, 0.022552127838134765, 0.022764127731323244, 0.02269593620300293, 0.022751232147216797, 0.02279347229003906, 0.0227457275390625, 0.022622112274169923, 0.022910655975341795, 0.022888992309570314, 0.02272051239013672, 0.022915071487426757, 0.022863872528076173, 0.0230830078125, 0.022769535064697265, 0.022935680389404297, 0.022721824645996095, 0.022681760787963866, 0.02266579246520996, 0.02262015914916992, 0.022662784576416014, 0.02277174377441406, 0.02265532875061035, 0.022665216445922853, 0.022867456436157226, 0.02297702407836914, 0.02288640022277832, 0.022857183456420897, 0.02253385543823242, 0.022377151489257813, 0.022327423095703125, 0.022450176239013672, 0.02219340705871582, 0.02232729530334473, 0.02229324722290039, 0.02224742317199707, 0.021979040145874023, 0.0218768310546875, 0.021886016845703123, 0.021813823699951173, 0.021788543701171875, 0.02178713607788086, 0.0218603515625, 0.021909503936767577, 0.021893119812011717, 0.02186854362487793, 0.021900352478027345, 0.02193212890625, 0.021842784881591797, 0.021824607849121092, 0.0221246395111084, 0.021883487701416016, 0.02184828758239746, 0.022161407470703123, 0.021790719985961913, 0.021770015716552734, 0.022009311676025392, 0.021860639572143556, 0.021967103958129883, 0.021910783767700195, 0.02201193618774414, 0.021898176193237303, 0.021762048721313477, 0.02186854362487793, 0.021831487655639647, 0.02183635139465332, 0.02170460891723633, 0.021683744430541992, 0.021822015762329103, 0.02190540885925293, 0.021865983963012696, 0.021797376632690428, 0.02192793655395508, 0.02312396812438965, 0.02424831962585449, 0.021944128036499023, 
0.02227203178405762, 0.022223007202148436, 0.022149120330810547, 0.022326271057128907, 0.02234060859680176, 0.022577152252197266, 0.022237152099609376, 0.022161376953125, 0.02207084846496582, 0.022168064117431642, 0.02231091117858887, 0.022581247329711913, 0.022587072372436522, 0.0224051513671875, 0.02247612762451172, 0.022264768600463867, 0.02229452705383301, 0.022321151733398437, 0.022441247940063476, 0.022389408111572265, 0.022468671798706055, 0.022350912094116212, 0.022524768829345704, 0.022634592056274414, 0.02273689651489258, 0.02272172737121582, 0.02284422492980957, 0.022634368896484375, 0.022601568222045898, 0.022578752517700196, 0.022637279510498046, 0.022666303634643555, 0.022696895599365233, 0.022729888916015625, 0.022762336730957032, 0.02268876838684082, 0.022732831954956054, 0.022776800155639647, 0.022867168426513672, 0.02280268859863281, 0.022831647872924805, 0.02276051139831543, 0.022831520080566405, 0.02285196876525879, 0.02304630470275879, 0.022956031799316406, 0.02294281578063965, 0.022876127243041992, 0.02274604797363281, 0.022771711349487304, 0.022746496200561524, 0.02287968063354492, 0.02282111930847168, 0.022941024780273437, 0.02272764778137207, 0.022705631256103517, 0.022778400421142576, 0.022854976654052735, 0.022808671951293946, 0.022874719619750978, 0.022840799331665038, 0.022945632934570314, 0.022790847778320314, 0.022715423583984377, 0.022658016204833983, 0.022931455612182617, 0.023023616790771483, 0.02290483283996582, 0.022953344345092774, 0.022860416412353514, 0.02270412826538086, 0.02271023941040039, 0.022704160690307618, 0.022654880523681642, 0.022585439682006835, 0.022654783248901366, 0.02251740837097168, 0.022485536575317384, 0.02246451187133789, 0.022679391860961913, 0.022311071395874023, 0.022376447677612304, 0.02232867240905762, 0.022169376373291017, 0.022576000213623045, 0.022347776412963868, 0.02224947166442871, 0.022300416946411133, 0.022276351928710938, 0.02225152015686035, 0.022192127227783204, 0.02208358383178711, 0.0222740478515625, 0.022021215438842775, 0.021990304946899415, 0.021925888061523437, 0.021999616622924805, 0.022171648025512695, 0.022177791595458983, 0.02225971221923828, 0.022427648544311524, 0.022556543350219727, 0.0226243839263916, 0.02245996856689453, 0.022423999786376953, 0.022560768127441407, 0.022583040237426757, 0.02259174346923828, 0.02283513641357422, 0.02273695945739746, 0.022779903411865234, 0.02290483283996582, 0.022861568450927735, 0.022970624923706055]",tokens/s,44.43442093658133,,, 4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1490.198528,1861.156864,0.0,1465.909248,1358.169088,s,1,8.92091796875,8.92091796875,0.0,8.92091796875,8.92091796875,8.92091796875,8.92091796875,[8.92091796875],,kWh,5.367274289999386e-05,5.892446202189604e-06,1.9234459832012663e-05,7.879964893419612e-05,,MB,1562.64448,1882.128384,0.0,1472.200704,1356.544512,s,10,0.5081317710876465,0.05081317710876465,0.00015486380894732922,0.05077542304992676,0.05091472702026367,0.05105760421752929,0.0511719059753418,"[0.05120048141479492, 0.050786014556884765, 0.05086569595336914, 0.050716190338134765, 0.050882335662841796, 0.05088297653198242, 0.05076483154296875, 0.05066867065429687, 0.05075222396850586, 0.05061235046386719]",tokens/s,5038.063245918215,kWh,1.5367383819298004e-06,1.693980679567003e-07,1.0171937962105986e-06,2.7233302460970993e-06,tokens/kWh,94002554.5439752,MB,1566.969856,1882.128384,0.0,1472.200704,1409.728,s,10,13.755901000976564,1.3755901000976565,0.010149455197494195,1.3747086181640626,1.3863067016601562,1.3911563415527344,1.395036053466797,"[1.3666591796875, 1.3815029296875, 1.37841259765625, 1.3679105224609376, 1.38522900390625, 1.3802357177734375, 1.371004638671875, 1.369410888671875, 1.3960059814453125, 1.359529541015625]",tokens/s,45.79852675264781,kWh,3.9929619992651026e-05,4.403737437878737e-06,1.818709934498727e-05,6.252045677551703e-05,tokens/kWh,1007670.181076968,,s,630,13.75327059555053,0.021830588246905612,0.0005318301540078125,0.021719439506530762,0.02215329875946045,0.02244603204727173,0.023784326286315927,"[0.021613216400146483, 0.022085823059082032, 0.021815040588378905, 0.0216680965423584, 0.021667808532714845, 0.021643295288085937, 0.021800960540771484, 0.02176527976989746, 0.021879648208618162, 0.02168422317504883, 0.021696512222290038, 0.021541919708251953, 0.02150704002380371, 0.0214835205078125, 0.021432319641113282, 0.021542495727539062, 0.02154742431640625, 0.02147532844543457, 0.021661632537841796, 0.021585248947143556, 0.021725471496582032, 0.02163142395019531, 0.021708927154541015, 0.022439136505126953, 0.02157548713684082, 0.021566303253173828, 0.021622783660888673, 0.021726463317871095, 0.021621631622314452, 0.021550079345703126, 0.02178963279724121, 0.02153785514831543, 0.0215230712890625, 0.021641183853149414, 0.021709312438964845, 0.02167990493774414, 0.021585439682006834, 0.02156742477416992, 0.02157417678833008, 0.021571264266967774, 0.021595584869384767, 0.021650304794311525, 0.021582048416137697, 0.021617504119873048, 0.02160508728027344, 0.021489952087402345, 0.021641151428222656, 0.021610496520996093, 0.022153215408325197, 0.021655904769897462, 0.02179987144470215, 0.02160326385498047, 0.021557024002075195, 0.02169599914550781, 0.021551296234130858, 0.021716543197631836, 0.02183776092529297, 0.021844224929809572, 0.021924415588378907, 0.021893440246582033, 0.0219586238861084, 0.021964351654052736, 0.021996864318847655, 0.022323711395263672, 0.02274070358276367, 0.022243200302124025, 0.02209164810180664, 0.02187494468688965, 0.021698175430297853, 0.021667871475219726, 0.021665952682495118, 0.02226425552368164, 0.02184921646118164, 0.02217024040222168, 0.021674016952514648, 0.021690336227416993, 0.021659231185913085, 0.021671615600585937, 0.021672672271728515, 0.02161017608642578, 0.021604671478271484, 0.021631263732910157, 0.021666847229003906, 0.02176041603088379, 0.02166934394836426, 0.022010431289672852, 0.021982879638671876, 0.02301011276245117, 0.022120223999023438, 0.022070943832397463, 0.021960351943969728, 
0.022176095962524414, 0.022585920333862305, 0.02421084785461426, 0.02257139205932617, 0.022220256805419922, 0.022251840591430663, 0.022133983612060548, 0.022149663925170898, 0.021976959228515624, 0.021926496505737306, 0.021749151229858397, 0.021627487182617186, 0.02160256004333496, 0.021640159606933593, 0.021815616607666014, 0.02167228889465332, 0.02174086380004883, 0.02167046356201172, 0.02160870361328125, 0.021593856811523437, 0.02177168083190918, 0.021775232315063477, 0.02168396759033203, 0.021597984313964844, 0.02154364776611328, 0.021581279754638673, 0.021588512420654297, 0.021515039443969725, 0.02184009552001953, 0.02181769561767578, 0.02197542381286621, 0.021849376678466797, 0.02182406425476074, 0.021991519927978515, 0.02190332794189453, 0.022125984191894533, 0.021993728637695314, 0.02211199951171875, 0.021815200805664063, 0.021950368881225587, 0.021970752716064454, 0.022051807403564452, 0.022048191070556642, 0.026231039047241212, 0.022007904052734374, 0.02209814453125, 0.021773984909057617, 0.0218702392578125, 0.021920448303222657, 0.02166988754272461, 0.021588031768798827, 0.021597503662109375, 0.021589855194091796, 0.02164179229736328, 0.021725120544433593, 0.021469247817993163, 0.021561567306518554, 0.02152448081970215, 0.021526239395141603, 0.021641504287719725, 0.0216944637298584, 0.021901216506958008, 0.021958751678466795, 0.021798912048339843, 0.02168550491333008, 0.021549503326416017, 0.021582143783569336, 0.02153654479980469, 0.02173734474182129, 0.022026592254638672, 0.02167398452758789, 0.02195631980895996, 0.021469472885131836, 0.021575040817260742, 0.02172492790222168, 0.021758848190307618, 0.022183135986328126, 0.021911712646484376, 0.02179340744018555, 0.021844255447387696, 0.021860063552856444, 0.02195408058166504, 0.021957088470458984, 0.02191564750671387, 0.0219238395690918, 0.02270534324645996, 0.02182364845275879, 0.021823423385620117, 0.021830368041992187, 0.021620704650878907, 0.021544544219970704, 0.02228860855102539, 0.021835647583007812, 0.021688703536987305, 0.02172265625, 0.021676383972167968, 0.021616735458374024, 0.02152774429321289, 0.021600000381469725, 0.024475872039794924, 0.022559295654296874, 0.021643264770507813, 0.021704959869384765, 0.021891008377075194, 0.02201603126525879, 0.02172431945800781, 0.021658239364624025, 0.02158582305908203, 0.021520351409912108, 0.021495935440063476, 0.021544960021972655, 0.02152396774291992, 0.021488096237182615, 0.021628704071044922, 0.021528831481933595, 0.021700607299804688, 0.02187868881225586, 0.022249631881713867, 0.021604095458984375, 0.021485408782958983, 0.021625471115112305, 0.021522144317626953, 0.021549280166625977, 0.021425952911376955, 0.02150998306274414, 0.021403936386108397, 0.021442304611206053, 0.021451040267944334, 0.021468191146850585, 0.02156368064880371, 0.02147292709350586, 0.021468032836914064, 0.021497856140136717, 0.02162403106689453, 0.021523487091064452, 0.021996992111206055, 0.021713184356689452, 0.021624767303466796, 0.021512287139892578, 0.02149171257019043, 0.02143846321105957, 0.021521856307983398, 0.021496383666992188, 0.021536672592163086, 0.021415903091430665, 0.021479808807373046, 0.021474720001220703, 0.02148137664794922, 0.02147782325744629, 0.021579776763916016, 0.021741855621337892, 0.02163030433654785, 0.021844575881958008, 0.02188230323791504, 0.021813600540161134, 0.021933664321899415, 0.02193459129333496, 0.021980831146240234, 0.02209612846374512, 0.022370304107666016, 0.022097408294677736, 0.02198303985595703, 0.021686975479125976, 0.021526527404785157, 
0.021564640045166016, 0.021691167831420898, 0.021637439727783203, 0.02178220748901367, 0.021549055099487305, 0.021557247161865235, 0.021493631362915037, 0.02146112060546875, 0.021557247161865235, 0.02159401512145996, 0.02155897521972656, 0.02161324882507324, 0.02147260856628418, 0.021651840209960936, 0.022077856063842775, 0.022388320922851562, 0.022321151733398437, 0.022180192947387694, 0.022123680114746094, 0.022041088104248048, 0.02200713539123535, 0.022143808364868164, 0.022259552001953124, 0.021983392715454103, 0.02194144058227539, 0.02196950340270996, 0.021921855926513672, 0.022035743713378905, 0.021886848449707032, 0.021995552062988283, 0.021977920532226563, 0.02230409622192383, 0.02203865623474121, 0.021826271057128907, 0.02187241554260254, 0.021813056945800782, 0.021766368865966796, 0.02189913558959961, 0.02186662483215332, 0.021764095306396485, 0.021732864379882814, 0.02185206413269043, 0.02160700798034668, 0.021536031723022462, 0.02144112014770508, 0.021545087814331055, 0.02161664009094238, 0.02190336036682129, 0.021511743545532227, 0.02165315246582031, 0.02143017578125, 0.021822336196899415, 0.02184543991088867, 0.022589439392089843, 0.028974687576293946, 0.02359600067138672, 0.022169055938720702, 0.022321952819824218, 0.021992992401123047, 0.0220263671875, 0.022234304428100586, 0.022154048919677736, 0.02194950485229492, 0.021844928741455077, 0.021921760559082033, 0.02173161506652832, 0.02174950408935547, 0.02182310485839844, 0.021796512603759765, 0.02177507209777832, 0.02244963264465332, 0.023236352920532225, 0.02193014335632324, 0.021885568618774415, 0.021719039916992186, 0.021937631607055665, 0.021923519134521483, 0.02164956855773926, 0.021628671646118165, 0.021658048629760743, 0.021921440124511717, 0.022119264602661132, 0.021914848327636717, 0.02387228775024414, 0.023461984634399413, 0.021772192001342772, 0.021587968826293946, 0.021569696426391602, 0.021558143615722655, 0.021758752822875975, 0.021954559326171876, 0.021967039108276368, 0.02191702461242676, 0.021924287796020507, 0.021805280685424804, 0.021815296173095702, 0.02195894432067871, 0.02194985580444336, 0.021995872497558595, 0.021880800247192383, 0.02173686408996582, 0.021723743438720702, 0.021816448211669923, 0.021572479248046873, 0.021735328674316406, 0.021979232788085938, 0.021763519287109376, 0.02163564872741699, 0.021769792556762695, 0.021602272033691406, 0.021588447570800782, 0.021646751403808593, 0.021518943786621093, 0.02144211196899414, 0.021768640518188477, 0.021445632934570313, 0.02162816047668457, 0.022441631317138673, 0.021637216567993164, 0.021765600204467772, 0.02197590446472168, 0.022206047058105468, 0.021868640899658204, 0.02228022384643555, 0.022071264266967774, 0.022108160018920898, 0.02201190376281738, 0.022044927597045898, 0.022058752059936522, 0.021909343719482423, 0.021891231536865233, 0.021907360076904296, 0.02175766372680664, 0.021636831283569337, 0.021740192413330077, 0.021609920501708985, 0.021600927352905273, 0.02168003273010254, 0.02167807960510254, 0.021542335510253908, 0.021496192932128906, 0.021588415145874024, 0.021453632354736327, 0.02151315116882324, 0.02150102424621582, 0.02150217628479004, 0.02165020751953125, 0.021780384063720702, 0.021513599395751953, 0.02207551956176758, 0.021658111572265625, 0.021851648330688478, 0.02175859260559082, 0.02168614387512207, 0.021638431549072266, 0.021597055435180663, 0.021728864669799806, 0.02159663963317871, 0.0216114559173584, 0.02158064079284668, 0.021569856643676756, 0.021524160385131837, 0.021587488174438476, 0.021520864486694335, 
0.021665504455566406, 0.021573247909545897, 0.02161497688293457, 0.021612800598144532, 0.021686304092407228, 0.021808671951293945, 0.021967327117919922, 0.02243084716796875, 0.021975936889648436, 0.021985279083251954, 0.021966848373413086, 0.021966848373413086, 0.02192207908630371, 0.02213430404663086, 0.02191744041442871, 0.021874303817749022, 0.02187884712219238, 0.021584320068359374, 0.02156166458129883, 0.021610496520996093, 0.021774368286132814, 0.02172675132751465, 0.021722496032714842, 0.02150092887878418, 0.0216494083404541, 0.021575679779052736, 0.021706592559814452, 0.021603647232055663, 0.02152534484863281, 0.022009632110595704, 0.021544160842895507, 0.022028671264648438, 0.022018688201904297, 0.02199087905883789, 0.021679647445678712, 0.021588991165161133, 0.02162483215332031, 0.0215285758972168, 0.021408063888549805, 0.02150092887878418, 0.02159891128540039, 0.02167318344116211, 0.021501056671142577, 0.02150774383544922, 0.021736608505249024, 0.021848192214965822, 0.021660383224487306, 0.02177039909362793, 0.021797056198120116, 0.021664863586425782, 0.022116191864013673, 0.02167411231994629, 0.02159062385559082, 0.021942335128784178, 0.02220025634765625, 0.021827583312988282, 0.021858112335205078, 0.02177043151855469, 0.021740928649902343, 0.02170694351196289, 0.021629375457763673, 0.02145894432067871, 0.021498144149780272, 0.0215852165222168, 0.021821887969970703, 0.021650400161743164, 0.021556224822998047, 0.02163539123535156, 0.021694496154785158, 0.021444255828857423, 0.021553152084350585, 0.021626880645751953, 0.02152988815307617, 0.02194095993041992, 0.02212819290161133, 0.0219051513671875, 0.021817312240600586, 0.021780448913574217, 0.021777408599853516, 0.022009599685668946, 0.021946367263793946, 0.02184716796875, 0.02206399917602539, 0.02191564750671387, 0.021919904708862306, 0.022091487884521484, 0.022042911529541017, 0.022021184921264647, 0.022110687255859376, 0.022075231552124024, 0.022236831665039064, 0.02181999969482422, 0.021727615356445313, 0.021744895935058593, 0.022137088775634764, 0.021722848892211915, 0.021719839096069334, 0.02201919937133789, 0.021950464248657226, 0.021842655181884767, 0.02181052780151367, 0.021689439773559572, 0.021653215408325197, 0.02183782386779785, 0.021826719284057616, 0.021619903564453126, 0.021661088943481444, 0.02185775947570801, 0.021805791854858397, 0.022106176376342775, 0.022526208877563476, 0.02248678398132324, 0.022536191940307617, 0.022476383209228516, 0.022396575927734374, 0.022626047134399415, 0.0227476806640625, 0.027306463241577147, 0.02386124801635742, 0.022891712188720704, 0.022737024307250976, 0.02265590476989746, 0.022454048156738283, 0.022736799240112304, 0.02254857635498047, 0.02272051239013672, 0.02231705665588379, 0.02209369659423828, 0.02194633674621582, 0.022667423248291015, 0.02208483123779297, 0.022051071166992186, 0.02211689567565918, 0.022370464324951173, 0.021884607315063476, 0.021692928314208985, 0.021583360671997072, 0.02165155220031738, 0.021517440795898436, 0.021602399826049806, 0.02144883155822754, 0.02154159927368164, 0.021571584701538086, 0.021536415100097656, 0.021631519317626954, 0.02189228820800781, 0.02197104072570801, 0.021752351760864257, 0.02164169692993164, 0.021532032012939455, 0.021899904251098633, 0.021468896865844727, 0.021408159255981444, 0.021547008514404296, 0.02143164825439453, 0.021460927963256837, 0.0215150089263916, 0.02161033630371094, 0.021573631286621094, 0.021640832901000977, 0.02158131217956543, 0.02145564842224121, 0.021573728561401367, 0.021874176025390626, 
0.021486080169677735, 0.021531871795654297, 0.021501983642578125, 0.021409631729125977, 0.02139753532409668, 0.021481632232666015, 0.021409568786621095, 0.021506528854370117, 0.021636703491210937, 0.021386112213134766, 0.021499391555786132, 0.021395263671875, 0.021631359100341797, 0.021420095443725588, 0.021516544342041016, 0.021360639572143555, 0.021444095611572265, 0.021538528442382812, 0.02166793632507324, 0.022164159774780274, 0.02159395217895508, 0.021512351989746093, 0.021675167083740236, 0.02156220817565918, 0.021464191436767577, 0.021497856140136717, 0.0214168643951416, 0.021491680145263672, 0.02143436813354492, 0.02146303939819336, 0.02157535934448242, 0.021425920486450194, 0.021492128372192384, 0.021706880569458006, 0.021501983642578125, 0.02149184036254883, 0.021491584777832032, 0.02147123146057129, 0.02176153564453125, 0.021498176574707033, 0.02165551948547363, 0.021991584777832033, 0.021856319427490233, 0.02164531135559082, 0.021760255813598632, 0.022137920379638673, 0.02208633613586426]",tokens/s,45.80728602866419,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained 
return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 26899 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,6655.623168,9387.835392,0.0,8992.587776,8404.320768,s,1,14.1039892578125,14.1039892578125,0.0,14.1039892578125,14.1039892578125,14.1039892578125,14.1039892578125,[14.1039892578125],,kWh,0.00020940675901246473,2.3088364685130984e-05,9.230840717999733e-05,0.00032480353087759305,,MB,1649.729536,9402.515456,0.0,8992.587776,7880.40704,s,10,3.451050415039062,0.3451050415039062,0.0007356105670003265,0.34543060302734374,0.34567911071777346,0.34580835113525393,0.3459117434692383,"[0.34342340087890627, 0.34508074951171874, 0.3455641784667969, 0.34434658813476565, 0.3445519104003906, 0.3459375915527344, 0.345650390625, 0.3454937133789063, 0.34536749267578126, 0.3456343994140625]",tokens/s,741.8031300974268,kWh,1.0099989562068839e-05,1.113836428620258e-06,6.726509212620348e-06,1.7940335203309443e-05,tokens/kWh,14269521.561268032,MB,1654.054912,9402.515456,0.0,8992.587776,8123.606528,s,10,25.017800048828125,2.5017800048828125,0.004788870374114529,2.5015008544921873,2.5062755859375,2.50874619140625,2.51072267578125,"[2.491120361328125, 2.499575439453125, 2.50334716796875, 2.501220703125, 2.500439453125, 
2.501781005859375, 2.50078271484375, 2.50258984375, 2.5057265625, 2.511216796875]",tokens/s,25.182070316750742,kWh,7.290349514543089e-05,8.04132675155834e-06,4.832889594278162e-05,0.00012927371783977084,tokens/kWh,487338.0378684998,,s,630,25.014439548492433,0.03970545960078164,0.0003530860509353926,0.03967923164367676,0.04011471900939941,0.04025527458190918,0.04061889789581299,"[0.03986508941650391, 0.039329792022705076, 0.03907360076904297, 0.039225536346435545, 0.03890790557861328, 0.038934528350830076, 0.038956737518310545, 0.03900447845458985, 0.03896096038818359, 0.03898387145996094, 0.03897244644165039, 0.039081024169921874, 0.03904425430297852, 0.039357120513916016, 0.03925145721435547, 0.0391317138671875, 0.03911219024658203, 0.039131649017333986, 0.039114494323730466, 0.03923820877075195, 0.03917078399658203, 0.03921123123168945, 0.03916572952270508, 0.03923616027832031, 0.03933020782470703, 0.039411102294921875, 0.03943280029296875, 0.03953641510009766, 0.03940169525146484, 0.03929087829589844, 0.03934105682373047, 0.039314430236816404, 0.039375873565673826, 0.03936153411865234, 0.039409217834472654, 0.03942854309082031, 0.03946393585205078, 0.03972512054443359, 0.039560127258300784, 0.039617599487304686, 0.0397116813659668, 0.0399637451171875, 0.0400513916015625, 0.03991475296020508, 0.03992435073852539, 0.04002646255493164, 0.04001792144775391, 0.03998720169067383, 0.040048641204833986, 0.04004438400268555, 0.04009308624267578, 0.03997158432006836, 0.0401797103881836, 0.03999916839599609, 0.03994246292114258, 0.03991097640991211, 0.03992620849609375, 0.03992918395996094, 0.03995305633544922, 0.040033760070800783, 0.040174110412597656, 0.04012646484375, 0.04041030502319336, 0.03999948883056641, 0.03946614456176758, 0.03911151885986328, 0.039007553100585936, 0.03913593673706055, 0.03915273666381836, 0.03919760131835937, 0.03927859115600586, 0.03928255844116211, 0.03907740783691406, 0.03919113540649414, 0.039152767181396486, 0.03932659149169922, 0.039378944396972655, 0.039304351806640624, 0.03935062408447266, 0.03933849716186524, 0.039193599700927735, 0.03920588684082031, 0.039213054656982424, 0.039403518676757815, 0.03924991989135742, 0.03933747100830078, 0.03956911849975586, 0.03968428802490234, 0.03956329727172852, 0.0393647346496582, 0.03940396881103515, 0.03948748779296875, 0.03952844619750977, 0.03940383911132812, 0.03947017669677735, 0.03940428924560547, 0.03947708892822266, 0.03938508987426758, 0.03945993423461914, 0.03944300842285156, 0.03948281478881836, 0.039545761108398435, 0.039610366821289066, 0.039628288269042966, 0.03976787185668945, 0.03985670471191406, 0.04022284698486328, 0.03989299011230469, 0.0400211181640625, 0.04009471893310547, 0.04008127975463867, 0.040005630493164065, 0.0400076789855957, 0.04006707382202149, 0.04023910522460938, 0.04012851333618164, 0.04016537475585937, 0.04018758392333984, 0.0406572151184082, 0.040321025848388675, 0.04031462478637695, 0.04034326553344727, 0.040298080444335936, 0.040319934844970706, 0.04151091384887695, 0.04040281677246094, 0.040008705139160154, 0.03945369720458984, 0.03962252807617188, 0.03921728134155274, 0.03914547348022461, 0.039194561004638674, 0.03925158309936523, 0.039430591583251955, 0.03948339080810547, 0.03997081756591797, 0.03926630401611328, 0.03928268814086914, 0.03920832061767578, 0.039216991424560546, 0.03911964797973633, 0.03916185760498047, 0.039289886474609376, 0.03941270446777344, 0.039360511779785154, 0.03953452682495117, 0.03937411117553711, 0.039478240966796876, 0.039813983917236326, 
0.039555614471435546, 0.03965792083740234, 0.039651199340820314, 0.04068979263305664, 0.0402940788269043, 0.039524673461914066, 0.03947660827636719, 0.039524993896484374, 0.03966476821899414, 0.03958668899536133, 0.03966566467285156, 0.03989913558959961, 0.03978192138671875, 0.03968252944946289, 0.03963228988647461, 0.039583518981933595, 0.03966649627685547, 0.03982329559326172, 0.03976947021484375, 0.0396561279296875, 0.03966156768798828, 0.039640384674072264, 0.039736129760742187, 0.03984515380859375, 0.039922271728515625, 0.03989913558959961, 0.039888320922851564, 0.03985260772705078, 0.03988275146484375, 0.03995423889160156, 0.039876705169677736, 0.03974563217163086, 0.04027350234985352, 0.04225606536865235, 0.040241310119628906, 0.04014956665039063, 0.04039254379272461, 0.04028176116943359, 0.04017407989501953, 0.0402303352355957, 0.03999337768554687, 0.04015718460083008, 0.04039907073974609, 0.039617984771728516, 0.03936492919921875, 0.03950595092773437, 0.03957964706420898, 0.039485439300537106, 0.03954687881469727, 0.03948748779296875, 0.03951001739501953, 0.039425121307373044, 0.039430335998535154, 0.03964387130737305, 0.0395489273071289, 0.03950592041015625, 0.039347969055175784, 0.03934028625488281, 0.03957724761962891, 0.03954723358154297, 0.039483200073242186, 0.03957779312133789, 0.039585792541503906, 0.03964313507080078, 0.0396943359375, 0.03971481704711914, 0.039534591674804685, 0.03969036865234375, 0.03959590530395508, 0.039556320190429685, 0.03969331359863281, 0.03965315246582031, 0.039671134948730466, 0.039596702575683596, 0.039570430755615234, 0.0395777587890625, 0.039535457611083985, 0.03953049468994141, 0.039657470703125, 0.03998857498168945, 0.03979727935791016, 0.03983526229858399, 0.039819774627685545, 0.0398131217956543, 0.039972862243652346, 0.03994214248657227, 0.039731201171875, 0.03969023895263672, 0.039739391326904294, 0.03987865447998047, 0.03978649520874023, 0.040016990661621094, 0.03978742218017578, 0.03976396942138672, 0.03977190399169922, 0.039973121643066406, 0.03981926345825195, 0.039981056213378906, 0.03995024108886719, 0.039933055877685544, 0.039803871154785155, 0.03981244659423828, 0.03975987243652344, 0.040004543304443356, 0.03956265640258789, 0.039293567657470704, 0.03926371383666992, 0.03937331390380859, 0.039624126434326175, 0.03932396697998047, 0.039336193084716795, 0.039346176147460936, 0.0393955192565918, 0.03952211380004883, 0.03941580963134766, 0.03946905517578125, 0.03950371170043945, 0.03941584014892578, 0.039462879180908204, 0.03943171310424805, 0.03990591812133789, 0.03975328063964844, 0.03955551910400391, 0.039562305450439456, 0.03946499252319336, 0.0394986572265625, 0.03958169555664062, 0.039684097290039064, 0.039724224090576174, 0.039570240020751955, 0.03962220764160156, 0.03970502471923828, 0.039696449279785155, 0.03988054275512695, 0.0395838394165039, 0.039595169067382814, 0.0396030387878418, 0.03963289642333984, 0.0395489273071289, 0.03952640151977539, 0.03953609466552734, 0.039515968322753905, 0.03966230392456055, 0.03967180633544922, 0.03978630447387695, 0.039665088653564454, 0.03971750259399414, 0.03967808151245117, 0.03976192092895508, 0.03975167846679688, 0.03984998321533203, 0.039825408935546876, 0.03999641418457031, 0.04005376052856445, 0.040080638885498045, 0.04003097534179687, 0.0400074577331543, 0.03992803192138672, 0.03997919845581055, 0.04007097625732422, 0.04006054306030273, 0.03992816162109375, 0.04003334426879883, 0.04001276779174805, 0.040032257080078126, 0.04005068969726563, 0.03992374420166016, 
0.039288833618164064, 0.038968353271484374, 0.03905209732055664, 0.03937260818481445, 0.039351806640625, 0.03947004699707031, 0.039435550689697264, 0.0400263671875, 0.03942009735107422, 0.039358047485351565, 0.03929964828491211, 0.039428096771240234, 0.03948716735839844, 0.03940998458862305, 0.03934975814819336, 0.03935078430175781, 0.03931955337524414, 0.03933776092529297, 0.03936892700195312, 0.039415870666503906, 0.03961587142944336, 0.039694526672363284, 0.039734817504882815, 0.03976688003540039, 0.039790592193603515, 0.04046438217163086, 0.03976521682739258, 0.039747425079345707, 0.03976691055297851, 0.039737407684326174, 0.040170913696289064, 0.03981372833251953, 0.03970457458496094, 0.039585758209228515, 0.03966336059570313, 0.03959017562866211, 0.03971379089355469, 0.039508255004882815, 0.04063100814819336, 0.039839744567871094, 0.039897087097167966, 0.03998025512695313, 0.039852832794189455, 0.039812671661376954, 0.039850433349609374, 0.03992079925537109, 0.04005974578857422, 0.03995663833618164, 0.03988243103027344, 0.03986140823364258, 0.03980972671508789, 0.03991782379150391, 0.03982076644897461, 0.03975347137451172, 0.03975183868408203, 0.03978332901000976, 0.03989868927001953, 0.04011439895629883, 0.040048641204833986, 0.03988275146484375, 0.03993929672241211, 0.03996137619018555, 0.03987308883666992, 0.03935641479492188, 0.039239070892333985, 0.03930976104736328, 0.03931468963623047, 0.0392911376953125, 0.03920742416381836, 0.039094432830810544, 0.039188480377197264, 0.03963667297363281, 0.03966793441772461, 0.03946096038818359, 0.03939152145385742, 0.03944028854370117, 0.03941151809692383, 0.03934988784790039, 0.03936016082763672, 0.03945750427246094, 0.039567230224609375, 0.03967795181274414, 0.04005440139770508, 0.04031673431396485, 0.03964995193481445, 0.039862079620361326, 0.03958560180664063, 0.039667713165283204, 0.039726848602294924, 0.03967628860473633, 0.04004278564453125, 0.03970572662353516, 0.03962543869018555, 0.03985635375976562, 0.04058924865722656, 0.039946239471435545, 0.03979241561889649, 0.039799007415771484, 0.03963644790649414, 0.039803424835205076, 0.0401324462890625, 0.03968406295776367, 0.03969247817993164, 0.039671390533447266, 0.03955257415771484, 0.039648353576660154, 0.039651073455810544, 0.03965542221069336, 0.03963875198364258, 0.039680286407470705, 0.03972710418701172, 0.0398131217956543, 0.03981951904296875, 0.03994188690185547, 0.03978649520874023, 0.039874561309814455, 0.0399013442993164, 0.039868255615234376, 0.03986636734008789, 0.03983052825927735, 0.03976704025268555, 0.03993936157226562, 0.039813854217529296, 0.03995852661132813, 0.03995564651489258, 0.03999055862426758, 0.03950460815429688, 0.03931497573852539, 0.03923401641845703, 0.0394354248046875, 0.03926704025268555, 0.03933196640014648, 0.039706623077392575, 0.039425281524658205, 0.03917900848388672, 0.03934003067016602, 0.03925753784179688, 0.0393447036743164, 0.03954073715209961, 0.039462913513183595, 0.03946227264404297, 0.0396622085571289, 0.03968000030517578, 0.03936665725708008, 0.03940678405761719, 0.03952288055419922, 0.03945830535888672, 0.03951827239990234, 0.03958147048950195, 0.039478240966796876, 0.03966918563842774, 0.039682559967041016, 0.03963904190063477, 0.03960374450683594, 0.0399150390625, 0.039529407501220706, 0.039515617370605466, 0.03950428771972656, 0.03976764678955078, 0.0396824951171875, 0.039816608428955076, 0.039662273406982425, 0.039593505859375, 0.03971120071411133, 0.039702529907226565, 0.03975964736938477, 0.0400118408203125, 0.0399013442993164, 
0.03975289535522461, 0.03981600189208984, 0.039882110595703124, 0.03987519836425781, 0.03993190383911133, 0.03993740844726563, 0.040471168518066404, 0.039981056213378906, 0.040065025329589846, 0.03993360137939453, 0.040122207641601564, 0.04008396911621094, 0.040035774230957034, 0.04005535888671875, 0.04014080047607422, 0.0401715202331543, 0.040269054412841794, 0.0401761589050293, 0.040261856079101564, 0.04012406539916992, 0.04000156784057617, 0.03958227157592773, 0.039352127075195316, 0.03924009704589844, 0.03929679870605469, 0.03936870574951172, 0.03947897720336914, 0.03928649520874023, 0.03946966552734375, 0.03940095901489258, 0.0395863037109375, 0.039428096771240234, 0.03960422515869141, 0.03948748779296875, 0.03954483032226563, 0.03945033645629883, 0.039372768402099606, 0.03957382583618164, 0.03960627365112305, 0.03939123153686523, 0.03946700668334961, 0.04038860702514648, 0.03962060928344727, 0.039671710968017575, 0.03985795211791992, 0.039841697692871096, 0.03980534362792969, 0.03986636734008789, 0.03967705535888672, 0.039719806671142575, 0.0396124153137207, 0.039613792419433594, 0.03966124725341797, 0.03967667388916016, 0.0396638412475586, 0.0397209587097168, 0.039798782348632815, 0.03983103942871094, 0.03982704162597656, 0.039785377502441405, 0.039686145782470705, 0.039779937744140625, 0.03986588668823242, 0.039869312286376954, 0.03978361511230469, 0.04031366348266602, 0.0401715202331543, 0.03995782470703125, 0.04002067184448242, 0.040030113220214845, 0.03997670364379883, 0.040013599395751956, 0.04009632110595703, 0.0401899528503418, 0.0400912971496582, 0.0399958381652832, 0.04003801727294922, 0.04013808059692383, 0.04002912139892578, 0.04016361618041992, 0.04007814407348633, 0.04007414245605469, 0.04034764862060547, 0.0401448974609375, 0.03967795181274414, 0.039392833709716794, 0.039356864929199216, 0.03964518356323242, 0.03927654266357422, 0.039357696533203125, 0.03928128051757813, 0.039416961669921875, 0.03934105682373047, 0.03942195129394531, 0.03950796890258789, 0.03969164657592773, 0.03957004928588867, 0.039495681762695314, 0.03948300933837891, 0.03961183929443359, 0.03947171020507813, 0.03948704147338867, 0.03945462417602539, 0.03955785751342773, 0.03950403213500977, 0.03957964706420898, 0.0399441909790039, 0.04011164855957031, 0.04000201416015625, 0.03974870300292969, 0.03992623901367188, 0.03970256042480469, 0.039781822204589846, 0.04150780868530273, 0.039656768798828124, 0.03967846298217773, 0.039740959167480466, 0.03973782348632812, 0.03990518569946289, 0.0398359375, 0.0397209587097168, 0.03986953735351562, 0.039900062561035156, 0.03996057510375976, 0.039923713684082034, 0.03993945693969726, 0.040032001495361326, 0.039911712646484375, 0.040083168029785156, 0.0400495376586914, 0.04008265686035156, 0.040264480590820315, 0.040117599487304687, 0.040042686462402347, 0.04002825546264648, 0.04014688110351562, 0.040247230529785155, 0.04018227386474609, 0.04013875198364258, 0.04009574508666992, 0.040725887298583986, 0.040456832885742186, 0.040269824981689455, 0.040114177703857425, 0.04031206512451172, 0.040207103729248045]",tokens/s,25.185453337009456,,, 
4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3051.712512,3997.04064,0.0,3594.518528,3559.375872,s,1,11.1963916015625,11.1963916015625,0.0,11.1963916015625,11.1963916015625,11.1963916015625,11.1963916015625,[11.1963916015625],,kWh,0.00011142415573328133,1.2283616005395431e-05,4.5873092253956305e-05,0.00016958086399263307,,MB,3109.13024,4118.675456,0.0,3701.47328,3595.939328,s,10,1.4192767333984373,0.14192767333984374,0.0002801270902000654,0.14184268951416015,0.1420531707763672,0.14239255218505859,0.1426640573120117,"[0.14196339416503906, 0.14189324951171875, 0.14176844787597656, 0.1417612762451172, 0.14174826049804687, 0.14174703979492187, 0.14182528686523438, 0.14273193359375, 0.14197775268554688, 0.14186009216308593]",tokens/s,1803.7356209385025,kWh,4.203487464583304e-06,4.634308692121779e-07,2.788938739086065e-06,7.455857072881546e-06,tokens/kWh,34335422.1382708,MB,3122.569216,4118.675456,0.0,3701.47328,3595.941888,s,10,21.053625488281252,2.105362548828125,0.02105314392036901,2.1037702636718754,2.1287179931640625,2.1306723510742187,2.1322358374023436,"[2.127435546875, 2.107937255859375, 2.081617431640625, 2.08191357421875, 2.0762900390625, 2.127012451171875, 2.132626708984375, 2.12828369140625, 2.099603271484375, 2.090905517578125]",tokens/s,29.923587286696392,kWh,6.065145993916152e-05,6.689826502826763e-06,3.406325740931256e-05,0.00010140454385130085,tokens/kWh,621273.9351441973,,s,630,21.04976106643676,0.03341231915307424,0.0006254825786136484,0.03336494445800781,0.03400017776489257,0.03426003379821777,0.03534403148651123,"[0.033791999816894534, 0.03378585433959961, 0.0334354248046875, 0.03333763122558594, 0.033277889251708985, 0.03342079925537109, 0.03331532669067383, 0.03418236923217773, 0.03350214385986328, 0.033504192352294924, 0.03346847915649414, 0.03348976135253906, 0.03338230514526367, 0.03334563064575195, 0.033296382904052735, 0.034179073333740234, 0.03403571319580078, 0.033583393096923826, 0.033449695587158206, 0.03361382293701172, 0.0334700813293457, 0.03364400100708008, 0.03336214447021484, 0.03362271881103516, 0.03351267242431641, 0.03319887924194336, 0.03352576065063476, 0.0333436164855957, 0.03346828842163086, 0.03389235305786133, 0.033742847442626955, 0.033615550994873046, 0.033467838287353516, 0.03328623962402344, 0.03330537414550781, 0.03362563323974609, 0.033642974853515625, 0.033642494201660156, 0.03345011138916015, 0.03364646530151367, 0.03362368011474609, 0.033741184234619144, 0.0335810546875, 0.03376128005981445, 0.03351103973388672, 0.03431484985351563, 0.03395356750488281, 0.03419097518920899, 0.03389478302001953, 0.03355619049072266, 0.033591583251953126, 0.033576736450195314, 0.0335849609375, 0.03369116973876953, 0.03368844985961914, 0.0342852783203125, 0.03441872024536133, 0.03479568099975586, 0.034368896484375, 0.037870304107666015, 0.03524607849121094, 0.03400422286987305, 0.03394588851928711, 
0.03376947021484375, 0.034103294372558594, 0.034088958740234376, 0.0339046401977539, 0.033841152191162106, 0.03415814590454101, 0.0340382080078125, 0.034174079895019534, 0.03430281448364258, 0.03394348907470703, 0.0341750717163086, 0.03377507019042969, 0.03353987121582031, 0.0334568977355957, 0.03354009628295898, 0.03367446517944336, 0.03390915298461914, 0.03446563339233399, 0.033235488891601564, 0.03402041625976562, 0.033941566467285154, 0.03404684829711914, 0.033836288452148436, 0.03353868865966797, 0.033275230407714844, 0.03346448135375977, 0.03345471954345703, 0.032950271606445314, 0.032729087829589845, 0.033159168243408206, 0.03321651077270508, 0.03323494338989258, 0.03316121673583984, 0.033175552368164066, 0.0332470703125, 0.0331429443359375, 0.03318310546875, 0.03343040084838867, 0.03376287841796875, 0.03345177459716797, 0.0332476806640625, 0.033662559509277344, 0.03331840133666992, 0.033137569427490236, 0.033337345123291014, 0.03312639999389649, 0.03309699249267578, 0.033123039245605466, 0.0331707534790039, 0.033141441345214843, 0.03277529525756836, 0.03270329666137695, 0.03344732666015625, 0.033043102264404295, 0.03271664047241211, 0.0329648323059082, 0.033103679656982424, 0.033081153869628906, 0.03312876892089844, 0.03318924713134765, 0.033165054321289064, 0.033121150970458986, 0.03321855926513672, 0.03292611312866211, 0.03369599914550781, 0.03310208129882813, 0.03350255966186524, 0.03365135955810547, 0.03347353744506836, 0.033152000427246094, 0.03427471923828125, 0.03289286422729492, 0.032527008056640626, 0.03246694564819336, 0.03283180618286133, 0.032595649719238284, 0.03253247833251953, 0.03267379379272461, 0.03259312057495117, 0.03280089569091797, 0.032780960083007814, 0.032665439605712894, 0.0324343376159668, 0.03263692855834961, 0.032729087829589845, 0.032749153137207034, 0.033247455596923825, 0.033204639434814456, 0.03297052764892578, 0.032984352111816405, 0.03279945755004883, 0.032827392578125, 0.03278185653686523, 0.03316988754272461, 0.03325235366821289, 0.033999870300292966, 0.03347846221923828, 0.034166015625, 0.0334505615234375, 0.03363391876220703, 0.03343756866455078, 0.03299212646484375, 0.0328675537109375, 0.03279542541503906, 0.03292345428466797, 0.0327520637512207, 0.03268806457519531, 0.03277395248413086, 0.033492286682128905, 0.03415929412841797, 0.03306118392944336, 0.033048255920410156, 0.032927745819091796, 0.03293180847167969, 0.03291721725463867, 0.03298867034912109, 0.032951103210449216, 0.03313836669921875, 0.03264275360107422, 0.0328298225402832, 0.03271091079711914, 0.03274879837036133, 0.032592193603515625, 0.032849857330322266, 0.03281151962280274, 0.0335728645324707, 0.0331014404296875, 0.03329702377319336, 0.03279052734375, 0.03274137496948242, 0.03273318481445313, 0.03278438568115234, 0.032661502838134765, 0.03360563278198242, 0.03334143829345703, 0.03305017471313477, 0.03317958450317383, 0.033047039031982424, 0.03307110214233398, 0.037718017578125, 0.033040382385253905, 0.03301366424560547, 0.03292486572265625, 0.03293999862670898, 0.03296761703491211, 0.03286185455322266, 0.032788833618164065, 0.03304447937011719, 0.032677440643310546, 0.03330297470092773, 0.0328408317565918, 0.03311500930786133, 0.03374835205078125, 0.033173633575439454, 0.032925281524658206, 0.033102752685546875, 0.03306496047973633, 0.03322643280029297, 0.03313398361206055, 0.03302409744262695, 0.03290572738647461, 0.03292185592651367, 0.03282950210571289, 0.032728641510009766, 0.032672191619873045, 0.03269769668579101, 0.03262287902832031, 0.03265603256225586, 
0.03256851196289062, 0.03262345504760742, 0.03281404876708984, 0.03270115280151367, 0.032562847137451174, 0.03288678359985352, 0.03280144119262695, 0.032964286804199217, 0.03277180862426758, 0.03276031875610352, 0.03309545516967773, 0.03327494430541992, 0.03310259246826172, 0.0328787841796875, 0.03370975875854492, 0.03315500640869141, 0.03285548782348633, 0.033142974853515625, 0.03298524856567383, 0.03284643173217774, 0.03290678405761719, 0.03324480056762695, 0.03322982406616211, 0.03310099029541016, 0.032881664276123046, 0.03296851348876953, 0.03295638275146484, 0.03277417755126953, 0.03279667282104492, 0.03311804962158203, 0.033271968841552736, 0.03294003295898437, 0.0331776008605957, 0.0328776969909668, 0.03276480102539062, 0.032752960205078126, 0.032662208557128904, 0.032804607391357425, 0.03292390441894531, 0.032780288696289066, 0.033389984130859376, 0.03329289627075195, 0.03302604675292969, 0.033040382385253905, 0.03376319885253906, 0.03277427291870117, 0.0326379508972168, 0.03264995193481445, 0.03286454391479492, 0.03267193603515625, 0.03270364761352539, 0.03293788909912109, 0.032634849548339846, 0.032784416198730466, 0.03272079849243164, 0.032672607421875, 0.03280012893676758, 0.032655807495117185, 0.03256134414672852, 0.03279465484619141, 0.03281097412109375, 0.032671745300292966, 0.03287033462524414, 0.03266361618041992, 0.03241340637207031, 0.03270275115966797, 0.032545822143554684, 0.03264944076538086, 0.032946945190429684, 0.032769344329833985, 0.03288265609741211, 0.03360636901855469, 0.03339987182617187, 0.03357382583618164, 0.03334348678588867, 0.03300175857543945, 0.03290803146362305, 0.03269935989379883, 0.03265705490112305, 0.032766239166259765, 0.033476673126220706, 0.03356246566772461, 0.033904800415039064, 0.03371343994140625, 0.0336097297668457, 0.03380223846435547, 0.03381184005737305, 0.03372326278686524, 0.033965824127197265, 0.03384320068359375, 0.03382476806640625, 0.03380831909179687, 0.033595008850097655, 0.0335748176574707, 0.03346230316162109, 0.03369420623779297, 0.03367497634887695, 0.03415273666381836, 0.03377151870727539, 0.033646560668945315, 0.03359337615966797, 0.0337119026184082, 0.03360563278198242, 0.033372383117675784, 0.033468414306640625, 0.033535999298095705, 0.033777057647705076, 0.03391897583007813, 0.035324127197265624, 0.03458860778808594, 0.033900993347167965, 0.03366892623901367, 0.033672863006591794, 0.03381647872924805, 0.03396041488647461, 0.03380035018920898, 0.033716224670410154, 0.033619678497314454, 0.033674686431884766, 0.033643360137939456, 0.033602718353271485, 0.033675167083740236, 0.03365369415283203, 0.03373046493530273, 0.03387843322753906, 0.033977569580078124, 0.03381427383422852, 0.034548446655273436, 0.03393740844726562, 0.03377280044555664, 0.03377433776855469, 0.03360675048828125, 0.033560798645019534, 0.03348550415039062, 0.03351087951660156, 0.03340486526489258, 0.03355849456787109, 0.034269824981689456, 0.033746528625488284, 0.03363827133178711, 0.03355065536499023, 0.03376947021484375, 0.03362335968017578, 0.03349116897583008, 0.0335162239074707, 0.033560577392578124, 0.033658878326416015, 0.036792160034179684, 0.03415727996826172, 0.03382230377197266, 0.03363471984863281, 0.0335912971496582, 0.03400294494628906, 0.03401696014404297, 0.03367967987060547, 0.033691646575927735, 0.03387353515625, 0.03366937637329102, 0.033716350555419924, 0.03344512176513672, 0.03339750289916992, 0.03338582229614258, 0.03332163238525391, 0.034797569274902344, 0.03373606491088867, 0.03364668655395508, 0.03345462417602539, 
0.03347455978393555, 0.03334143829345703, 0.03347411346435547, 0.033513919830322265, 0.03348070526123047, 0.03336774444580078, 0.03383123016357422, 0.03358310317993164, 0.03367731094360352, 0.03352371215820313, 0.03335782241821289, 0.03377059173583984, 0.034790302276611326, 0.036227008819580075, 0.034008895874023434, 0.03489817428588867, 0.03423846435546875, 0.03386368179321289, 0.03376937484741211, 0.03419145584106445, 0.0335313606262207, 0.03369424057006836, 0.03351865768432617, 0.03333388900756836, 0.03357027053833008, 0.033534816741943356, 0.033644287109375, 0.03406393432617188, 0.03375088119506836, 0.03369456100463867, 0.03360758590698242, 0.033807647705078124, 0.03365513610839844, 0.03427990341186524, 0.03418035125732422, 0.034255615234375, 0.033880062103271484, 0.033716224670410154, 0.03378736114501953, 0.03364495849609375, 0.03339276885986328, 0.03382681655883789, 0.03360563278198242, 0.03414425659179687, 0.034075294494628906, 0.03371331024169922, 0.033668254852294924, 0.03376492691040039, 0.03374415969848633, 0.03355971145629883, 0.03347951889038086, 0.033850208282470706, 0.03934822463989258, 0.03420140838623047, 0.03425503921508789, 0.03384320068359375, 0.033616992950439455, 0.03360655975341797, 0.03361996841430664, 0.03360995101928711, 0.03389948654174805, 0.033618751525878905, 0.03355209732055664, 0.03368783950805664, 0.03348275375366211, 0.033421054840087894, 0.03361407852172851, 0.03352700805664063, 0.03430070495605469, 0.03450470352172851, 0.03434905624389648, 0.03411286544799805, 0.03375164794921875, 0.03365894317626953, 0.033413120269775394, 0.033570816040039066, 0.03333504104614258, 0.03330073547363281, 0.03325088119506836, 0.033479103088378905, 0.03429580688476563, 0.03361587142944336, 0.03354828643798828, 0.03331686401367188, 0.0336732177734375, 0.03359686279296875, 0.035103199005126956, 0.03515811157226562, 0.03364780807495117, 0.03336022567749024, 0.03379008102416992, 0.033634654998779295, 0.03355766296386719, 0.033648799896240235, 0.03376748657226562, 0.03344652938842774, 0.03341657638549805, 0.03317331314086914, 0.03327673721313477, 0.03378345489501953, 0.03384751892089844, 0.03348287963867187, 0.033081344604492184, 0.032817150115966795, 0.03265459060668945, 0.03331967926025391, 0.033425662994384764, 0.033716960906982424, 0.03360153579711914, 0.03347257614135742, 0.033568225860595706, 0.03381808090209961, 0.03374387359619141, 0.03397216033935547, 0.03382799911499024, 0.033548606872558596, 0.03426364898681641, 0.03342745590209961, 0.033799358367919925, 0.03373139190673828, 0.03341107177734375, 0.03324518585205078, 0.03357900619506836, 0.03353734588623047, 0.03343750381469727, 0.03350207901000977, 0.03324835205078125, 0.033688480377197266, 0.033236862182617184, 0.03327398300170899, 0.03325747299194336, 0.03311529541015625, 0.03307196807861328, 0.032935935974121096, 0.03297411346435547, 0.033360607147216795, 0.03306611251831055, 0.03311017608642578, 0.03307209777832031, 0.03294569778442383, 0.032970497131347656, 0.033059295654296876, 0.033107967376708985, 0.03328960037231445, 0.033270401000976564, 0.03311206436157227, 0.03330047988891602, 0.033942913055419924, 0.032887134552001956, 0.03284406280517578, 0.03314275360107422, 0.0328600959777832, 0.03323260879516601, 0.03315513610839844, 0.03299359893798828, 0.0330093765258789, 0.033306625366210936, 0.03364483261108398, 0.03343769454956055, 0.033186111450195316, 0.03315065765380859, 0.03307884979248047, 0.03315961456298828, 0.03330867385864258, 0.03327983856201172, 0.03342351913452148, 0.03307929611206055, 
0.03305283355712891, 0.03296249771118164, 0.03309577560424805, 0.03300230407714844, 0.03303753662109375, 0.03333814239501953, 0.03292924880981445, 0.03282163238525391, 0.03290476989746094, 0.03297248077392578, 0.032858081817626957, 0.032905185699462894, 0.03303087997436523, 0.033171710968017576, 0.034186336517333986, 0.03335465621948242, 0.03335168075561523, 0.033349822998046875, 0.03335558319091797, 0.03312371063232422, 0.03294476699829101, 0.03299737548828125, 0.03281305694580078, 0.03343155288696289, 0.03372224044799805, 0.033310081481933596, 0.03278092956542969, 0.032772159576416014, 0.03321247863769531, 0.03302195358276367, 0.03329219055175781, 0.033331295013427735, 0.03325132751464844, 0.033337345123291014, 0.03298303985595703, 0.033132545471191405, 0.033093471527099606, 0.03307740783691406, 0.033139904022216796, 0.03293881607055664, 0.03308863830566406, 0.032895870208740234, 0.03292063903808594, 0.033014785766601565, 0.032825153350830076, 0.032630878448486327, 0.032694305419921875, 0.03335887908935547, 0.03290803146362305, 0.032962398529052736, 0.03272713470458984, 0.03270870590209961, 0.03266307067871094, 0.03269683074951172, 0.032774303436279295, 0.0353521614074707, 0.03558646392822266, 0.033277759552001955, 0.033105823516845705, 0.03311996841430664, 0.033141311645507814, 0.033285919189453124, 0.03334143829345703, 0.033457889556884765, 0.03453593444824219]",tokens/s,29.929080810542626,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4256.03072,7121.79712,0.0,6719.275008,6274.515968,s,1,12.418529296875,12.418529296875,0.0,12.418529296875,12.418529296875,12.418529296875,12.418529296875,[12.418529296875],,kWh,0.00014137381606252954,1.5586915964423327e-05,6.030227046399661e-05,0.0002172630024909495,,MB,2286.686208,7140.671488,0.0,6723.469312,6008.827392,s,10,2.0947570648193357,0.20947570648193362,0.00043114630842088874,0.20952868652343748,0.20997152404785155,0.2100088638305664,0.21003873565673828,"[0.21004620361328125, 0.2088861083984375, 0.20972300720214843, 0.20870748901367187, 0.20996322631835937, 0.20981484985351562, 0.2092494659423828, 0.20933436584472656, 0.20928451538085938, 0.20974783325195312]",tokens/s,1222.0987545497496,kWh,6.184938646614392e-06,6.820623090058666e-07,4.099916474374285e-06,1.0966917429994544e-05,tokens/kWh,23342931.28713082,MB,2299.834368,7140.671488,0.0,6723.469312,6008.829952,s,10,21.820334472656253,2.182033447265625,0.006910743522811032,2.1827291259765627,2.1883765625000002,2.190209521484375,2.191675888671875,"[2.18339599609375, 2.176656982421875, 2.18766064453125, 2.165607666015625, 2.183771484375, 2.181748046875, 2.18796923828125, 2.182062255859375, 2.19204248046875, 
2.179419677734375]",tokens/s,28.872151377398584,kWh,5.7489244691305105e-05,6.340945834416019e-06,3.8199395142825623e-05,0.00010202958566854675,tokens/kWh,617467.9588003205,,s,630,21.815948173522937,0.03462848916432214,0.0005596024846131795,0.0345711669921875,0.03496627235412598,0.035160722923278806,0.03655111900329591,"[0.03513398361206055, 0.03470035171508789, 0.03508540725708008, 0.03518838500976563, 0.034627777099609375, 0.03475251388549805, 0.034582527160644534, 0.03458047866821289, 0.034709217071533204, 0.034644256591796874, 0.03452870559692383, 0.03468755340576172, 0.03449996948242187, 0.034507392883300785, 0.03456371307373047, 0.03466201782226563, 0.034595230102539065, 0.034446815490722656, 0.034552352905273434, 0.03454601669311524, 0.034461185455322264, 0.034447006225585934, 0.03486745452880859, 0.034507358551025394, 0.034524799346923825, 0.03515593719482422, 0.03434067153930664, 0.03406089782714844, 0.03408486557006836, 0.034121025085449216, 0.034307937622070316, 0.0343724479675293, 0.03439369583129883, 0.034449535369873045, 0.03457379150390625, 0.03448534393310547, 0.03500822448730469, 0.03460665512084961, 0.03474870300292969, 0.03451510238647461, 0.034697216033935545, 0.03481164932250977, 0.034602367401123046, 0.03480428695678711, 0.03472851181030273, 0.03469286346435547, 0.034756385803222656, 0.03463804626464844, 0.034648063659667966, 0.03457558441162109, 0.034686912536621095, 0.034799678802490235, 0.034585376739501954, 0.03495731353759766, 0.03530745697021485, 0.03471500778198242, 0.03456703948974609, 0.03482195281982422, 0.034582527160644534, 0.03476841735839844, 0.034683361053466796, 0.03532806396484375, 0.03457222366333008, 0.03501123046875, 0.034285343170166016, 0.03425689697265625, 0.03426857757568359, 0.034171489715576174, 0.034078720092773435, 0.03428761672973633, 0.034113536834716796, 0.033824417114257814, 0.03419113540649414, 0.03416121673583984, 0.034560001373291016, 0.03440639877319336, 0.03445888137817383, 0.034992897033691406, 0.035079296112060544, 0.03542047882080078, 0.034519615173339846, 0.03476838302612305, 0.034672382354736325, 0.03479536056518555, 0.03450732803344726, 0.03461510467529297, 0.03458015823364258, 0.03496169662475586, 0.0347367057800293, 0.03466854476928711, 0.03530342483520508, 0.034533119201660155, 0.03479276657104492, 0.03457529449462891, 0.035270687103271484, 0.03485279846191406, 0.034702945709228515, 0.03482259368896484, 0.03438796615600586, 0.034359294891357424, 0.0345684814453125, 0.036910816192626955, 0.03487744140625, 0.03477609634399414, 0.03475145721435547, 0.03498393630981445, 0.034533374786376955, 0.03476176071166992, 0.03457734298706055, 0.03426831817626953, 0.034595489501953125, 0.03421760177612305, 0.03425750350952148, 0.03421388626098633, 0.03425302505493164, 0.034144031524658204, 0.034239967346191405, 0.035359264373779294, 0.03407772827148438, 0.034365726470947267, 0.0340302734375, 0.034050048828125, 0.033716224670410154, 0.033701889038085936, 0.034159713745117185, 0.033888862609863284, 0.034854175567626954, 0.034851009368896485, 0.03482223892211914, 0.035066497802734374, 0.039920993804931644, 0.03516463851928711, 0.03507555389404297, 0.034516990661621096, 0.03448271942138672, 0.0344587516784668, 0.03448307037353516, 0.034600833892822265, 0.034599040985107424, 0.03522870254516602, 0.034648063659667966, 0.03464396667480469, 0.03450294494628906, 0.034652095794677734, 0.03448704147338867, 0.03464396667480469, 0.03461734390258789, 0.034408447265625, 0.03459868621826172, 0.034336990356445315, 0.03436521530151367, 0.03461891174316406, 
0.03443987274169922, 0.034334720611572264, 0.03432243347167969, 0.034252799987792966, 0.03470336151123047, 0.03663616180419922, 0.03439257431030274, 0.034410400390625, 0.034162784576416014, 0.03439616012573242, 0.03424492645263672, 0.03450764846801758, 0.03434726333618164, 0.03429852676391602, 0.034235935211181644, 0.034820480346679686, 0.0347955207824707, 0.03499852752685547, 0.037625598907470706, 0.03454771041870117, 0.03451619338989258, 0.034468639373779295, 0.03431939315795898, 0.03444790267944336, 0.03462393569946289, 0.03440812683105469, 0.03451894378662109, 0.0342691535949707, 0.03448870468139648, 0.03428768157958984, 0.034359294891357424, 0.03437932968139648, 0.0346802864074707, 0.034415454864501954, 0.0345560302734375, 0.03475462341308594, 0.03461449432373047, 0.034458206176757815, 0.034375679016113284, 0.03476275253295898, 0.034435073852539064, 0.034336769104003906, 0.03422003173828125, 0.03433651351928711, 0.03420086288452148, 0.03412067031860352, 0.03411558532714844, 0.034121726989746096, 0.033976318359375, 0.03419136047363281, 0.03411545562744141, 0.03407823944091797, 0.03388300704956055, 0.03389548873901367, 0.033839008331298825, 0.033934078216552734, 0.03408448028564453, 0.0345153923034668, 0.034240447998046875, 0.03423756790161133, 0.0345689582824707, 0.03462675094604492, 0.03447289657592773, 0.034748321533203126, 0.03458262252807617, 0.03459590530395508, 0.034509761810302735, 0.034531326293945314, 0.03433241653442383, 0.034492671966552736, 0.034457599639892575, 0.03432057571411133, 0.034619232177734376, 0.034506175994873045, 0.03447843170166016, 0.034506431579589845, 0.03459318542480469, 0.03462527847290039, 0.03469916915893555, 0.03457267379760742, 0.03440236663818359, 0.034342910766601564, 0.03452454376220703, 0.03431488037109375, 0.034293758392333985, 0.03444041442871094, 0.03461385726928711, 0.034740001678466796, 0.03432204818725586, 0.03445811080932617, 0.03448035049438476, 0.03458873748779297, 0.03428457641601562, 0.034493408203125, 0.03445455932617188, 0.03442172622680664, 0.03405619049072266, 0.03455590438842773, 0.033951713562011716, 0.034154529571533206, 0.03472841644287109, 0.034588001251220704, 0.03447820663452148, 0.03453596878051758, 0.03446988677978516, 0.03461119842529297, 0.03902185440063476, 0.034791614532470705, 0.034726207733154296, 0.03470153427124024, 0.03443916702270508, 0.03443283081054688, 0.034500255584716796, 0.03480009460449219, 0.03470956802368164, 0.03462144088745117, 0.0345272331237793, 0.0348504638671875, 0.034748767852783205, 0.03451839828491211, 0.03433359909057617, 0.03468806457519531, 0.03490403366088867, 0.03460371017456055, 0.03487855911254883, 0.03474729537963867, 0.03437539291381836, 0.03444559860229492, 0.03425244903564453, 0.034942337036132816, 0.034476383209228516, 0.03427347183227539, 0.03443740844726562, 0.03438918304443359, 0.03490505599975586, 0.034735198974609374, 0.03431622314453125, 0.03421654510498047, 0.03453699111938477, 0.0350994873046875, 0.03400908660888672, 0.033977569580078124, 0.034330944061279296, 0.03489168167114258, 0.03431190490722656, 0.03455014419555664, 0.034506622314453124, 0.03466915130615234, 0.034721790313720705, 0.03469302368164062, 0.03448662567138672, 0.03449161529541016, 0.03474236679077149, 0.034560447692871095, 0.03491430282592774, 0.034738113403320316, 0.03460723114013672, 0.034559616088867186, 0.03475283050537109, 0.03460095977783203, 0.03459008026123047, 0.034455711364746094, 0.03479011154174805, 0.03466233444213867, 0.03468076705932617, 0.03496614456176758, 0.03480575942993164, 
0.03451526260375976, 0.034371200561523436, 0.03435481643676758, 0.034406848907470707, 0.034532894134521486, 0.04070243072509765, 0.035364063262939456, 0.03451513671875, 0.0343087043762207, 0.03435724639892578, 0.03424822235107422, 0.03412851333618164, 0.03420348739624023, 0.03422412872314453, 0.034313377380371095, 0.03412464141845703, 0.03422822570800781, 0.034070526123046875, 0.0340398063659668, 0.034320606231689456, 0.03420883178710937, 0.034398849487304685, 0.03448227310180664, 0.034902015686035154, 0.034869407653808596, 0.034712543487548826, 0.03500857543945313, 0.03522233581542969, 0.034816097259521485, 0.03446774291992188, 0.034799617767333986, 0.034543617248535156, 0.034770145416259765, 0.03501136016845703, 0.03470876693725586, 0.034697952270507815, 0.034732032775878906, 0.03497987365722656, 0.03460707092285156, 0.03447132873535156, 0.03441836929321289, 0.03459574508666992, 0.034416286468505856, 0.03490031814575195, 0.0344986572265625, 0.03446364974975586, 0.03460710525512695, 0.03445113754272461, 0.034445632934570314, 0.034545505523681644, 0.034480289459228514, 0.034234272003173825, 0.034644065856933595, 0.0341872329711914, 0.03424873733520508, 0.034119678497314454, 0.034574337005615234, 0.03443833541870117, 0.034272064208984376, 0.034654624938964845, 0.035026943206787106, 0.0344238395690918, 0.03458351898193359, 0.03442892837524414, 0.03447510528564453, 0.034442142486572264, 0.03472700881958008, 0.034667423248291016, 0.034400032043457034, 0.03441686248779297, 0.03471916961669922, 0.034484798431396485, 0.03444940948486328, 0.034551807403564457, 0.03427971267700195, 0.03482998275756836, 0.034838176727294924, 0.03458892822265625, 0.03488524627685547, 0.03475715255737305, 0.03496345520019531, 0.03463091278076172, 0.03533488082885742, 0.034708545684814456, 0.034715999603271486, 0.03507263946533203, 0.03477913665771484, 0.03510646438598633, 0.034748321533203126, 0.03456380844116211, 0.03453206253051758, 0.03431967926025391, 0.03466447830200195, 0.03442310333251953, 0.03452735900878906, 0.03466057586669922, 0.03427328109741211, 0.03423027038574219, 0.03409823989868164, 0.034055103302001954, 0.034514015197753906, 0.03441961669921875, 0.03476889419555664, 0.03570275115966797, 0.03527475357055664, 0.034960670471191405, 0.03467132949829101, 0.03494854354858398, 0.03469577789306641, 0.034969600677490234, 0.03505766296386719, 0.03502899169921875, 0.034549758911132815, 0.03489382553100586, 0.034736064910888674, 0.03536105728149414, 0.03634291076660156, 0.03495110321044922, 0.03468745422363281, 0.03479280090332031, 0.03463670349121094, 0.034473407745361326, 0.03484726333618164, 0.034826305389404295, 0.034737953186035155, 0.03512137603759766, 0.03533824157714844, 0.03484896087646484, 0.03442464065551758, 0.03448147201538086, 0.0345074577331543, 0.03458662414550781, 0.03452428817749023, 0.03432495880126953, 0.03418921661376953, 0.03413423919677734, 0.034077217102050784, 0.03450444793701172, 0.0344637451171875, 0.0346333122253418, 0.03449452972412109, 0.03464748764038086, 0.03474444961547852, 0.0345340461730957, 0.03471782302856445, 0.03471331024169922, 0.03457257461547852, 0.03454771041870117, 0.03496742248535156, 0.034740352630615236, 0.03463372802734375, 0.03460300827026367, 0.03467673492431641, 0.03453747177124023, 0.03483852767944336, 0.03489177703857422, 0.03471491241455078, 0.034460384368896486, 0.03441241455078125, 0.03471782302856445, 0.03467984008789062, 0.03488662338256836, 0.03503513717651367, 0.03479580688476563, 0.03480553436279297, 0.034740318298339845, 0.034684768676757814, 
0.03479142379760742, 0.03493820953369141, 0.03436624145507813, 0.03454553604125977, 0.034647102355957034, 0.0343438720703125, 0.034233345031738284, 0.03445673751831055, 0.034148193359375, 0.034170879364013675, 0.03417702484130859, 0.03423027038574219, 0.03434905624389648, 0.03461084747314453, 0.03502524948120117, 0.03448627090454102, 0.03483017730712891, 0.03587324905395508, 0.03492476654052734, 0.035054752349853516, 0.03482463836669922, 0.035176673889160154, 0.03477932739257812, 0.0347589111328125, 0.034741153717041014, 0.03440470504760742, 0.03439462280273437, 0.03465609741210938, 0.03463388824462891, 0.03465216064453125, 0.034890750885009765, 0.03489894485473633, 0.034854911804199216, 0.0351959342956543, 0.03488252639770508, 0.03461238479614258, 0.03610243225097656, 0.040483390808105466, 0.03478732681274414, 0.03434086227416992, 0.034637760162353516, 0.034514976501464845, 0.03436089706420899, 0.03438463973999024, 0.03450646209716797, 0.034375679016113284, 0.034746368408203124, 0.03441664123535156, 0.03444736099243164, 0.034557121276855465, 0.03449446487426758, 0.034476863861083985, 0.034648063659667966, 0.0342786865234375, 0.03445558547973633, 0.034195232391357425, 0.03438025665283203, 0.03470918273925781, 0.03453785705566406, 0.034423168182373044, 0.03447987365722656, 0.034637760162353516, 0.034500926971435544, 0.03462348937988281, 0.03453747177124023, 0.03501055908203125, 0.03466156768798828, 0.03472390365600586, 0.0347962875366211, 0.035003681182861325, 0.034880416870117184, 0.034625057220458985, 0.034562335968017575, 0.03501055908203125, 0.0347149772644043, 0.03486102294921875, 0.03457011032104492, 0.03494780731201172, 0.034799713134765625, 0.03513043212890625, 0.034906208038330076, 0.03522310256958008, 0.034662689208984375, 0.03452905654907226, 0.03438835144042969, 0.03440435028076172, 0.03429513549804687, 0.034307998657226564, 0.03446988677978516, 0.034371936798095706, 0.03422249603271484, 0.03422412872314453, 0.034283519744873044, 0.03425484848022461, 0.03466239929199219, 0.03444649505615234, 0.034235233306884764, 0.03441571044921875, 0.034543712615966796, 0.03465216064453125, 0.03479020690917969, 0.034590721130371094, 0.035464607238769534, 0.035468158721923826, 0.03477222442626953, 0.034705886840820314, 0.03488972854614258, 0.03476889419555664, 0.03451289749145508, 0.03467203140258789, 0.034520801544189454, 0.03539427185058594, 0.03460931015014648, 0.03456931304931641, 0.03491420745849609, 0.03497014236450195, 0.03465407943725586, 0.034757217407226565, 0.03470950317382813, 0.03489487838745117, 0.034584800720214845, 0.03488217544555664, 0.034573665618896486, 0.03454851150512695, 0.03458867263793945, 0.0347770881652832, 0.034697216033935545, 0.03479142379760742, 0.034891616821289065, 0.03464726257324219, 0.034730239868164064, 0.034299617767333986, 0.03423244857788086, 0.034495136260986325, 0.0342938232421875, 0.03421401596069336, 0.03400201416015625, 0.034148353576660156, 0.03407583999633789, 0.034205310821533205, 0.03447407913208008, 0.034699230194091796, 0.034246528625488284, 0.034598079681396485]",tokens/s,28.877956391764958,,, 
4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,8205.709312,12277.710848,0.0,11882.463232,11315.947008,s,1,17.264017578125,17.264017578125,0.0,17.264017578125,17.264017578125,17.264017578125,17.264017578125,[17.264017578125],,kWh,0.0002955891702458151,3.259817562144908e-05,0.0001235167654800029,0.00045170411134726706,,MB,4005.691392,12294.488064,0.0,11884.560384,11070.308352,s,10,4.10347378540039,0.410347378540039,0.0023673147913827546,0.41062973022460936,0.41252118530273435,0.4131544372558594,0.4136610388183594,"[0.40438449096679685, 0.40964364624023436, 0.409791259765625, 0.4118450622558594, 0.40939697265625, 0.41238046264648437, 0.4106231994628906, 0.4137876892089844, 0.4106362609863281, 0.4109847412109375]",tokens/s,623.8616679136923,kWh,1.1888004602831642e-05,1.311049485645935e-06,7.907128547919307e-06,2.1106182636396885e-05,tokens/kWh,12129147.388241434,MB,4010.016768,12296.585216,0.0,11886.657536,11070.310912,s,10,26.82674755859375,2.6826747558593746,0.010927280065343218,2.6856224365234374,2.692194482421875,2.69243134765625,2.69262083984375,"[2.686230224609375, 2.684601318359375, 2.692668212890625, 2.692141845703125, 2.676589111328125, 2.67872265625, 2.689929443359375, 2.687415283203125, 2.6850146484375, 2.653434814453125]",tokens/s,23.484024614761182,kWh,7.839160972883474e-05,8.645532575390907e-06,5.198583603308493e-05,0.0001390229783373106,tokens/kWh,453162.49697329523,,s,630,26.82373398590088,0.042577355533176,0.0006445835464012684,0.04254539108276367,0.04294107666015625,0.043384997367858884,0.04486556758880615,"[0.043501632690429684, 0.04275251388549805, 0.04272435379028321, 0.04483391952514648, 0.04284735870361328, 0.04276211166381836, 0.042699840545654295, 0.04291078567504883, 0.042758495330810546, 0.04269456100463867, 0.04260704040527344, 0.04274591827392578, 0.04262297439575195, 0.04490230560302735, 0.04297292709350586, 0.04265609741210938, 0.04277372741699219, 0.04230633544921875, 0.042858592987060545, 0.04202249526977539, 0.042059391021728516, 0.04201542282104492, 0.04254924774169922, 0.04251456069946289, 0.04257984161376953, 0.042471038818359376, 0.04250252914428711, 0.04264495849609375, 0.04286313629150391, 0.04249107360839844, 0.04237750244140625, 0.04495772933959961, 0.04283801651000976, 0.042490367889404294, 0.042102783203125, 0.04202278518676758, 0.04231900787353516, 0.04197036743164063, 0.042092384338378905, 0.042482112884521486, 0.04273097610473633, 0.04228137588500976, 0.04199436950683594, 0.04249379348754883, 0.04229062271118164, 0.04207904052734375, 0.0421313591003418, 0.042012481689453124, 0.04207952117919922, 0.042036128997802735, 0.04192387390136719, 0.04188835144042969, 0.04202713775634766, 0.04229439926147461, 0.04497087860107422, 0.042567455291748046, 0.04249212646484375, 0.042893310546875, 0.0426618881225586, 0.042807296752929686, 0.042485759735107424, 0.04276736068725586, 0.04273180770874024, 0.043911167144775394, 
0.043012096405029294, 0.04281756973266602, 0.04317910385131836, 0.04297817611694336, 0.04275609588623047, 0.04257791900634766, 0.04256300735473633, 0.04261891174316406, 0.04271971130371094, 0.04254111862182617, 0.04229251098632812, 0.04233903884887695, 0.042670177459716796, 0.043022174835205075, 0.043068702697753904, 0.042918689727783205, 0.042966846466064454, 0.04287875366210937, 0.042690464019775394, 0.04270111846923828, 0.042686527252197265, 0.04294614410400391, 0.04288092803955078, 0.042809982299804684, 0.04272332763671875, 0.04267753601074219, 0.04266672134399414, 0.04242777633666992, 0.0423166389465332, 0.04217977523803711, 0.042594913482666016, 0.042491584777832034, 0.04250041580200195, 0.042536224365234375, 0.042465568542480465, 0.04303436660766601, 0.042547393798828125, 0.04246796798706055, 0.0426453742980957, 0.042573631286621096, 0.04243001556396484, 0.042346942901611326, 0.042324161529541014, 0.042223617553710936, 0.04207779312133789, 0.04221993637084961, 0.042143489837646486, 0.042047744750976564, 0.0421847038269043, 0.042477569580078124, 0.042575870513916016, 0.04244275283813476, 0.04239772796630859, 0.04232815933227539, 0.04229107284545899, 0.04245094299316406, 0.04273356628417969, 0.04266377639770508, 0.04258832168579101, 0.042815486907958986, 0.04264243316650391, 0.04250457763671875, 0.044290046691894534, 0.042940513610839844, 0.04259939193725586, 0.04258297729492187, 0.042437793731689454, 0.04241107177734375, 0.042444576263427736, 0.04249190521240234, 0.04271712112426758, 0.04315347290039063, 0.042297344207763675, 0.0425984001159668, 0.042412033081054686, 0.04280902481079102, 0.04364729690551758, 0.04273766326904297, 0.0427061767578125, 0.04295657730102539, 0.042742591857910156, 0.04269686508178711, 0.04276211166381836, 0.042649726867675784, 0.04252467346191406, 0.04232774353027344, 0.042232128143310545, 0.04251446533203125, 0.04235424041748047, 0.04212524795532226, 0.0421278076171875, 0.0424400634765625, 0.044878494262695315, 0.04339616012573242, 0.043242431640625, 0.04276838302612305, 0.04268646240234375, 0.042651519775390626, 0.04252419281005859, 0.042977886199951174, 0.04281913757324219, 0.042654144287109376, 0.04276838302612305, 0.04264316940307617, 0.042540863037109376, 0.04277478408813477, 0.04276860809326172, 0.04254515075683594, 0.04261273574829102, 0.04272332763671875, 0.04270467376708984, 0.04286076736450195, 0.04259193420410156, 0.042860702514648436, 0.04254521560668945, 0.04269884872436523, 0.0424238395690918, 0.042394081115722654, 0.04249391937255859, 0.043276321411132815, 0.04261177444458008, 0.04250236892700195, 0.042844894409179685, 0.04321206283569336, 0.04261552047729492, 0.0442716178894043, 0.042753631591796876, 0.042307998657226564, 0.04288723373413086, 0.042493343353271484, 0.042057857513427735, 0.042111392974853515, 0.041924606323242186, 0.041944671630859375, 0.04224665451049805, 0.042170272827148435, 0.04275404739379883, 0.042671104431152344, 0.04272854232788086, 0.042444225311279296, 0.0423419189453125, 0.04285488128662109, 0.04240547180175781, 0.04243775939941406, 0.04383513641357422, 0.04365311813354492, 0.04306707382202148, 0.04256784057617188, 0.042587871551513674, 0.04268076705932617, 0.042261566162109375, 0.042098945617675784, 0.042123966217041016, 0.042487808227539066, 0.04315107345581055, 0.04422275161743164, 0.04238131332397461, 0.04246323013305664, 0.04245276641845703, 0.04237305450439453, 0.04270713424682617, 0.05246985626220703, 0.042667072296142576, 0.04254816055297852, 0.04258531188964844, 0.042728225708007814, 0.04274176025390625, 
0.04255539321899414, 0.042385406494140625, 0.042288448333740236, 0.04254175949096679, 0.0425634880065918, 0.04234454345703125, 0.04244070434570312, 0.04224975967407227, 0.042141185760498044, 0.04230038452148437, 0.04200803375244141, 0.04290614318847656, 0.04225820922851563, 0.042126911163330075, 0.042500926971435544, 0.042528606414794924, 0.042842113494873046, 0.04270489501953125, 0.04256358337402344, 0.04256972885131836, 0.04237516784667969, 0.0435464973449707, 0.042721473693847656, 0.04249564743041992, 0.04232556915283203, 0.04222032165527344, 0.04193231964111328, 0.04200899124145508, 0.04176796722412109, 0.04195836639404297, 0.04318560028076172, 0.042439231872558596, 0.04254719924926758, 0.041895553588867186, 0.04224192047119141, 0.04200089645385742, 0.041780353546142575, 0.04175347137451172, 0.04171571350097656, 0.041662464141845705, 0.041769088745117186, 0.041713409423828125, 0.041941120147705076, 0.042000385284423826, 0.041924606323242186, 0.04181180953979492, 0.04174454498291016, 0.04216012954711914, 0.042708992004394535, 0.04246246337890625, 0.04246966552734375, 0.042621086120605466, 0.04356537628173828, 0.04259561538696289, 0.04235951995849609, 0.04242419052124023, 0.04246745681762695, 0.04254828643798828, 0.042372032165527346, 0.04234444808959961, 0.0422553596496582, 0.042719905853271484, 0.04274825668334961, 0.042805248260498044, 0.04289311981201172, 0.042778785705566404, 0.042829631805419925, 0.04307785415649414, 0.04285747146606445, 0.042843135833740234, 0.04312684631347656, 0.04303209686279297, 0.04341801452636719, 0.04295862579345703, 0.04287919998168945, 0.04274723052978516, 0.0424925765991211, 0.04352934265136719, 0.04282457733154297, 0.04281958389282227, 0.04254272079467773, 0.04259811019897461, 0.04266870498657226, 0.042627071380615236, 0.044226974487304685, 0.04269049453735352, 0.04276633453369141, 0.04273366546630859, 0.043165889739990235, 0.04268431854248047, 0.04268246459960937, 0.042544990539550784, 0.04251772689819336, 0.04237561416625977, 0.0419920654296875, 0.04256422424316406, 0.04281753540039063, 0.04244831848144531, 0.04254556655883789, 0.042393470764160154, 0.042436126708984376, 0.04258278274536133, 0.042401695251464845, 0.04241046524047851, 0.04235385513305664, 0.042400032043457034, 0.042436126708984376, 0.04209651184082031, 0.04199068832397461, 0.04229347229003906, 0.04203519821166992, 0.042266334533691406, 0.04194537734985351, 0.04257344055175781, 0.04290524673461914, 0.042672863006591795, 0.04278681564331055, 0.04276614379882813, 0.04267436981201172, 0.04274995040893555, 0.04251238250732422, 0.042362144470214844, 0.04242095947265625, 0.04254515075683594, 0.04252262496948242, 0.04220284652709961, 0.04222800064086914, 0.04226047897338867, 0.042315616607666015, 0.04255964660644531, 0.042469375610351565, 0.04426137542724609, 0.042641407012939454, 0.042412033081054686, 0.042007839202880856, 0.041931488037109374, 0.04187564849853516, 0.041993633270263675, 0.04226063919067383, 0.04234415817260742, 0.04256335830688476, 0.04277119827270508, 0.04261177444458008, 0.04246163177490234, 0.04250864028930664, 0.04255132675170899, 0.04290342330932617, 0.04420198440551758, 0.04258127975463867, 0.042500831604003905, 0.04232912063598633, 0.042627777099609375, 0.0428372802734375, 0.042369281768798825, 0.042479679107666014, 0.042355392456054686, 0.04247951889038086, 0.042446945190429686, 0.04247251129150391, 0.042750911712646486, 0.04312659072875977, 0.04262844848632812, 0.04272828674316406, 0.04259183883666992, 0.042918304443359374, 0.04272335815429688, 0.042614368438720705, 
0.042961246490478514, 0.042584095001220706, 0.04245913696289062, 0.04275209426879883, 0.04256377410888672, 0.04277148818969727, 0.04248441696166992, 0.04228083038330078, 0.04243878555297852, 0.042280960083007815, 0.04203891372680664, 0.042596416473388674, 0.04266630554199219, 0.04279001617431641, 0.04248681640625, 0.04258390426635742, 0.043319297790527345, 0.043439422607421875, 0.04283875274658203, 0.04262470245361328, 0.04286102294921875, 0.04260230255126953, 0.04247347259521484, 0.04258518218994141, 0.0425337905883789, 0.042581600189208986, 0.0430450553894043, 0.0464304313659668, 0.04254924774169922, 0.04254832077026367, 0.04247644805908203, 0.04269875335693359, 0.04263270568847656, 0.0423818244934082, 0.042524574279785156, 0.042584224700927736, 0.04246931076049805, 0.042657791137695314, 0.042313728332519535, 0.0421899528503418, 0.0424986572265625, 0.04264966583251953, 0.04258806228637695, 0.04410713577270508, 0.0425109748840332, 0.04253462219238281, 0.0425126724243164, 0.04274576187133789, 0.042840160369873044, 0.042813438415527344, 0.042622081756591795, 0.042621822357177735, 0.04263935852050781, 0.042657791137695314, 0.042659103393554686, 0.04278550338745117, 0.04265545654296875, 0.0426396484375, 0.042602497100830077, 0.04580966567993164, 0.04293545532226563, 0.04277948760986328, 0.042863807678222655, 0.042477569580078124, 0.04238828659057617, 0.04226662445068359, 0.04220060729980469, 0.04205615997314453, 0.04234588623046875, 0.04337315368652344, 0.042651134490966795, 0.04261119842529297, 0.04315545654296875, 0.042759166717529294, 0.04267708969116211, 0.04240809631347656, 0.04250419235229492, 0.04277174377441406, 0.04267273712158203, 0.04265382385253906, 0.04268387222290039, 0.04290204620361328, 0.04235887908935547, 0.04243638229370117, 0.04219302368164062, 0.04245836639404297, 0.04221807861328125, 0.04219510269165039, 0.0421847038269043, 0.042106399536132814, 0.042080734252929686, 0.0424648323059082, 0.04264595031738281, 0.04260182571411133, 0.04274607849121094, 0.04264595031738281, 0.042851329803466794, 0.04237318420410156, 0.042099391937255856, 0.04235494232177734, 0.042616832733154295, 0.042683712005615236, 0.042764991760253904, 0.042757793426513674, 0.04279331207275391, 0.042584064483642575, 0.04441497421264649, 0.042592254638671875, 0.042450241088867184, 0.04257452774047851, 0.04339468765258789, 0.04274367904663086, 0.04264988708496094, 0.04322531127929687, 0.04286777496337891, 0.04266870498657226, 0.04265158462524414, 0.04270700836181641, 0.04265577697753906, 0.04251382446289063, 0.04184764862060547, 0.04190003204345703, 0.04205491256713867, 0.04184262466430664, 0.042110912322998045, 0.042580863952636716, 0.04251561737060547, 0.042326881408691404, 0.04223081588745117, 0.042120159149169924, 0.04215193557739258, 0.042162174224853514, 0.04456447982788086, 0.0425799674987793, 0.042732864379882815, 0.04285305786132813, 0.04272681427001953, 0.042637855529785156, 0.04255340957641601, 0.042559486389160156, 0.04275814437866211, 0.042461185455322265, 0.042309406280517575, 0.042109153747558595, 0.042102497100830076, 0.042514144897460936, 0.04229561614990234, 0.0421844482421875, 0.042738174438476564, 0.042602497100830077, 0.04327443313598633, 0.042786624908447264, 0.04288857650756836, 0.04346944046020508, 0.04284963226318359, 0.042824512481689454, 0.04277027130126953, 0.04271059036254883, 0.04259657669067383, 0.04275001525878906, 0.04272348785400391, 0.042520416259765624, 0.04263033676147461, 0.04240083312988281, 0.042542144775390624, 0.04218352127075195, 0.042196990966796875, 
0.042539199829101565, 0.04279072189331055, 0.04378009414672852, 0.042456321716308594, 0.04241648101806641, 0.04246774291992188, 0.04230144119262695, 0.04222524642944336, 0.042685951232910156, 0.04234902572631836, 0.042090110778808594, 0.042259265899658206, 0.04195145416259766, 0.04196697616577148, 0.0415830078125, 0.04163174438476563, 0.04175667190551758, 0.04165631866455078, 0.041629344940185546, 0.04199590301513672, 0.042908000946044925, 0.04212569427490234, 0.04189593505859375, 0.04201251220703125, 0.042135711669921874, 0.04204262542724609, 0.041855743408203125, 0.041891136169433595, 0.04198393630981445, 0.04217948913574219, 0.042016254425048825, 0.04184099197387695, 0.04194713592529297, 0.04203699111938477, 0.04195353698730469, 0.042229888916015625, 0.04293824005126953, 0.042169921875, 0.04175676727294922, 0.04191635131835938, 0.04170163345336914, 0.041727649688720704, 0.04182419204711914, 0.041877567291259764, 0.042229984283447264, 0.042162464141845706, 0.04213145446777344, 0.042159393310546876, 0.04204822540283203, 0.041998336791992184, 0.04198604965209961, 0.041989311218261716, 0.041960254669189456, 0.04201062393188477, 0.04195062255859375, 0.043493824005126955, 0.042765857696533204, 0.042019454956054685, 0.041957023620605466, 0.04223420715332031, 0.04198809432983398, 0.04198809432983398, 0.041908222198486327, 0.04201027297973633, 0.042020671844482424]",tokens/s,23.48666298029727,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() 
File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 651, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 423, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 664, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Deci/DeciCoder-1b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Deci/DeciCoder-1b. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,8560.51712,12356.288512,0.0,11953.7664,11082.63424,s,1,16.086310546875,16.086310546875,0.0,16.086310546875,16.086310546875,16.086310546875,16.086310546875,[16.086310546875],,kWh,0.00025261549842083844,2.7841503910228292e-05,0.00011605509284401405,0.0003965120951750808,,MB,1683.001344,12387.745792,0.0,11970.543616,9918.62272,s,10,4.975213958740234,0.4975213958740234,0.0004058371021581669,0.4976215515136718,0.497937744140625,0.49798114013671874,0.49801585693359374,"[0.49711123657226564, 0.4975026245117187, 0.49777993774414064, 0.4973531494140625, 0.4966119689941406, 0.4980245361328125, 0.4977841796875, 0.49737774658203127, 0.4979281005859375, 0.497740478515625]",tokens/s,514.5507351503359,kWh,1.4532519381944238e-05,1.6027133487703805e-06,9.677097688762104e-06,2.5812330419476725e-05,tokens/kWh,9917740.701429844,MB,1692.930048,12387.745792,0.0,11970.543616,9918.821888,s,10,30.45577783203125,3.045577783203125,0.243100272136718,2.924031005859375,3.3695760009765627,3.370962805175781,3.372072248535156,"[3.372349609375, 3.369267822265625, 3.361681884765625, 3.217048828125, 2.820540283203125, 2.822995361328125, 2.82324169921875, 2.822062255859375, 2.821769775390625, 3.0248203125]",tokens/s,20.685730092810505,kWh,8.234598096430975e-05,9.082997103454249e-06,5.466545378523826e-05,0.00014609443185300224,tokens/kWh,431227.9338844997,,s,630,30.453447063446035,0.04833880486261277,0.004285709414559487,0.045212512969970704,0.053479880142211916,0.053730999946594235,0.054750658950805664,"[0.05370297622680664, 0.05387468719482422, 0.053894527435302736, 0.054368896484375, 0.053513504028320315, 0.05335302352905273, 0.05316761779785156, 0.05347804641723633, 0.05347532653808594, 0.05382553482055664, 0.05364451217651367, 0.05336870574951172, 0.05332880020141602, 0.05332787322998047, 0.05330944061279297, 0.05333401489257812, 0.05331763076782227, 0.05330944061279297, 0.053348350524902347, 0.05321113586425781, 0.05326233673095703, 0.05368582534790039, 0.05347372817993164, 0.05316185760498047, 0.05314982223510742, 0.05324505615234375, 0.05368307113647461, 0.05384313583374024, 0.05345772933959961, 0.05373452758789062, 0.06085827255249023, 0.05339644622802735, 0.05326438522338867, 0.05344585418701172, 0.053379680633544924, 0.05333830261230469, 0.053235294342041016, 0.054843807220458986, 0.05361423873901367, 0.053523902893066404, 0.0533298568725586, 0.05326742553710938, 0.053542911529541014, 0.053131072998046876, 0.05314579010009766, 0.05321503829956055, 0.0531388168334961, 0.05318534469604492, 0.053166080474853515, 0.053172222137451174, 0.053135009765625, 0.05320534515380859, 0.053532032012939455, 0.0530926399230957, 0.05336918258666992, 0.05321932983398438, 0.053149696350097655, 0.05345075225830078, 0.05333606338500976, 0.05320294570922852, 0.05307183837890625, 0.0531657600402832, 0.053141857147216795, 0.054757953643798825, 0.05331552124023437, 0.05311616134643555, 0.05332051086425781, 0.05322115325927734, 0.053286815643310545, 0.05447507095336914, 0.05333724975585938, 0.05320294570922852, 0.053281120300292965, 0.0534040641784668, 0.05344195175170898, 0.05335110473632813, 0.053228641510009764, 0.05338166427612305, 0.0532913932800293, 0.05306367874145508, 0.05360435104370117, 0.05319475173950195, 0.053305343627929686, 0.05328265762329101, 0.053245983123779296, 0.053397632598876955, 0.05354217529296875, 0.053398113250732425, 0.05330956649780273, 0.05375532913208008, 0.05328543853759766, 0.05324800109863281, 0.053561344146728515, 0.05357567977905273, 
0.053364734649658206, 0.05396684646606445, 0.0538028793334961, 0.05389104080200195, 0.053459102630615235, 0.05339670562744141, 0.05341999816894531, 0.05353055953979492, 0.053408641815185544, 0.05335244750976562, 0.053542911529541014, 0.05330124664306641, 0.053286815643310545, 0.053196128845214845, 0.05325696182250977, 0.05327052688598633, 0.053395454406738284, 0.05345894241333008, 0.053938175201416014, 0.053395454406738284, 0.05322751998901367, 0.05340979385375977, 0.05363481521606445, 0.054593727111816405, 0.053540927886962894, 0.05349785614013672, 0.053381118774414066, 0.053438465118408204, 0.05327667236328125, 0.053610305786132816, 0.05386259078979492, 0.05372668838500977, 0.054302814483642575, 0.053645313262939455, 0.05326028823852539, 0.05332787322998047, 0.05299363327026367, 0.053238014221191406, 0.054472862243652345, 0.053376224517822264, 0.05309110260009765, 0.053247425079345705, 0.05324448013305664, 0.05320294570922852, 0.053163745880126956, 0.053085601806640625, 0.05312108612060547, 0.05320172882080078, 0.05300783920288086, 0.05292496109008789, 0.05324288177490234, 0.053455329895019534, 0.05324006271362305, 0.0530926399230957, 0.052911872863769534, 0.0530700798034668, 0.053214881896972654, 0.05337446212768555, 0.053312351226806644, 0.05330739212036133, 0.05313932800292969, 0.05336896133422851, 0.053082111358642575, 0.05332144165039063, 0.053242046356201174, 0.05347132873535156, 0.05326835250854492, 0.0532911376953125, 0.053100543975830077, 0.05313711929321289, 0.05323174285888672, 0.05365980911254883, 0.05363449478149414, 0.053496383666992185, 0.05343231964111328, 0.05397865676879883, 0.053620223999023435, 0.05445856094360352, 0.05363382339477539, 0.05334425735473633, 0.05323980712890625, 0.05332140731811524, 0.05318415832519531, 0.053301311492919924, 0.0533202896118164, 0.053424095153808596, 0.05325827026367187, 0.053395454406738284, 0.05329862213134766, 0.05353324890136719, 0.05341593551635742, 0.05342108917236328, 0.053314529418945315, 0.05343436813354492, 0.05353667068481445, 0.053921630859375, 0.0552982406616211, 0.05318262481689453, 0.053098270416259766, 0.05316220855712891, 0.053340160369873046, 0.053694465637207034, 0.05412435150146484, 0.05332358551025391, 0.05410854339599609, 0.053231616973876954, 0.053350017547607424, 0.053198593139648434, 0.05315971374511719, 0.05305820846557617, 0.05312531280517578, 0.05315302276611328, 0.053254913330078125, 0.05315107345581055, 0.05309507369995117, 0.053154911041259766, 0.053160865783691405, 0.05326233673095703, 0.05308006286621094, 0.05311283111572265, 0.053397022247314456, 0.05321980667114258, 0.053189697265625, 0.05326124954223633, 0.05327462387084961, 0.053032958984375, 0.05309430313110351, 0.05303100967407227, 0.05311897659301758, 0.05311056137084961, 0.05308233642578125, 0.05313740921020508, 0.05309439849853516, 0.05303705596923828, 0.05309750366210937, 0.05489731216430664, 0.05375212860107422, 0.05339136123657227, 0.05326972961425781, 0.05350617599487305, 0.04703241729736328, 0.04515283203125, 0.04516659164428711, 0.04511948776245117, 0.04509286499023438, 0.04511743927001953, 0.04510083389282227, 0.04512995147705078, 0.04520044708251953, 0.0452262077331543, 0.045220577239990234, 0.04532633590698242, 0.045389759063720704, 0.04533459091186524, 0.04539148712158203, 0.04537587356567383, 0.04533657455444336, 0.04530585479736328, 0.044988414764404294, 0.04426137542724609, 0.04420569610595703, 0.04416083145141601, 0.04423123168945312, 0.04428595352172852, 0.04428323364257813, 0.04424975967407226, 0.044350975036621096, 
0.044380672454833986, 0.04434688186645508, 0.04427382278442383, 0.0442655029296875, 0.044371486663818356, 0.044386112213134765, 0.04435043334960938, 0.04453744125366211, 0.04450054550170898, 0.044454753875732424, 0.04448665618896484, 0.04458905410766602, 0.04458233642578125, 0.04459167861938477, 0.04476009750366211, 0.04473955154418945, 0.04472563171386719, 0.04467276763916016, 0.04461862564086914, 0.04470732879638672, 0.04476774215698242, 0.044734462738037106, 0.04469456100463867, 0.04471648025512695, 0.044693119049072264, 0.04472515106201172, 0.044781566619873044, 0.04484659194946289, 0.04489471817016601, 0.04484505462646484, 0.04482992172241211, 0.04484534454345703, 0.04527769470214844, 0.04500889587402344, 0.04497612762451172, 0.0450228157043457, 0.04511328125, 0.045126113891601566, 0.04507779312133789, 0.04507926559448242, 0.04511734390258789, 0.04515024185180664, 0.045121440887451174, 0.045160606384277345, 0.04512768173217773, 0.04521308898925781, 0.045214176177978516, 0.045266304016113285, 0.045204063415527344, 0.045228191375732425, 0.04520259094238281, 0.0452760009765625, 0.04524627304077149, 0.04536310577392578, 0.045666526794433594, 0.044334815979003905, 0.044104351043701175, 0.04407279968261719, 0.044091552734375, 0.0441343994140625, 0.0442081298828125, 0.044232704162597655, 0.04424867248535156, 0.04455859375, 0.044435615539550784, 0.044365825653076174, 0.04445174407958984, 0.0445437126159668, 0.04453740692138672, 0.044538688659667966, 0.04453376007080078, 0.04455014419555664, 0.04462387084960937, 0.04446144104003906, 0.04464003372192383, 0.04465955352783203, 0.04457606506347656, 0.04463071823120117, 0.04459724807739258, 0.044609535217285154, 0.0446110725402832, 0.044569087982177735, 0.04458905410766602, 0.04463980865478516, 0.044790176391601565, 0.04477302551269531, 0.04474892807006836, 0.04470604705810547, 0.04575436782836914, 0.04493107223510742, 0.044955646514892575, 0.04497612762451172, 0.04499446487426758, 0.044974174499511715, 0.04499456024169922, 0.04497398376464844, 0.04498416137695312, 0.04500012969970703, 0.04513206481933594, 0.04504345703125, 0.04515865707397461, 0.04509750366210938, 0.04505190277099609, 0.0449617919921875, 0.044990463256835936, 0.045129726409912106, 0.045125633239746096, 0.04513494491577148, 0.04515046310424805, 0.04517136001586914, 0.045228031158447264, 0.045279232025146485, 0.04530579376220703, 0.045281024932861326, 0.045316417694091796, 0.04542464065551758, 0.04541030502319336, 0.04507353591918945, 0.04443225479125976, 0.04406272125244141, 0.04413235092163086, 0.04416460800170898, 0.04430284881591797, 0.04423263931274414, 0.04428963088989258, 0.04433353424072266, 0.044335105895996096, 0.04434431838989258, 0.044352512359619144, 0.044572673797607425, 0.044537281036376955, 0.044466720581054685, 0.044512767791748044, 0.04436329650878906, 0.04457542419433594, 0.044566848754882815, 0.04445513534545899, 0.0444731216430664, 0.044510433197021484, 0.04458371353149414, 0.04458204650878906, 0.044584991455078125, 0.04496262359619141, 0.04475699234008789, 0.044647903442382814, 0.044648990631103516, 0.04473628616333008, 0.04472649765014648, 0.044746753692626956, 0.04476646423339844, 0.04489814376831055, 0.04484505462646484, 0.045720481872558595, 0.04485734558105469, 0.04483270263671875, 0.04492236709594727, 0.044949855804443356, 0.04504169464111328, 0.04495158386230469, 0.044912799835205075, 0.04489980697631836, 0.04497036743164062, 0.04510025787353516, 0.045114303588867186, 0.04496384048461914, 0.044956958770751954, 0.0450280647277832, 0.04511334228515625, 
0.045123424530029294, 0.04555996704101563, 0.04522598266601562, 0.04524579238891602, 0.045300384521484376, 0.04532147216796875, 0.045384063720703124, 0.04537913513183594, 0.04526572799682617, 0.0453807373046875, 0.04548659133911133, 0.04543267059326172, 0.04543414306640625, 0.04436041641235351, 0.04416921615600586, 0.044111873626708986, 0.044133758544921874, 0.04427990341186523, 0.044298240661621094, 0.04430259323120117, 0.04431020736694336, 0.04441763305664063, 0.04440883255004883, 0.04434739303588867, 0.04434329605102539, 0.044342689514160157, 0.04440851211547851, 0.04451420974731445, 0.0444453125, 0.04443379211425781, 0.04448665618896484, 0.0445458869934082, 0.04454006576538086, 0.04460134506225586, 0.04466483306884766, 0.04461772918701172, 0.044593055725097655, 0.0446649284362793, 0.04475904083251953, 0.04469456100463867, 0.04467299270629883, 0.04476416015625, 0.044781505584716795, 0.04484659194946289, 0.044691169738769534, 0.04468822479248047, 0.04475904083251953, 0.04482457733154297, 0.04487168121337891, 0.04485734558105469, 0.0450088005065918, 0.04496700668334961, 0.04497651290893555, 0.044873374938964844, 0.04496892929077148, 0.04500457763671875, 0.04504598236083984, 0.04507148742675781, 0.0450384635925293, 0.045162494659423826, 0.04515225601196289, 0.045080577850341794, 0.045041664123535156, 0.04519724655151367, 0.04524652862548828, 0.045244415283203124, 0.04523164749145508, 0.04525212860107422, 0.04521459197998047, 0.04525676727294922, 0.04530543899536133, 0.04541481781005859, 0.04540415954589844, 0.04534207916259766, 0.045342975616455075, 0.04520550537109375, 0.04427881622314453, 0.04422716903686524, 0.04414502334594726, 0.04417046356201172, 0.0442130241394043, 0.04428358459472656, 0.04432048034667969, 0.04430873489379883, 0.04432726287841797, 0.044319744110107424, 0.04430131149291992, 0.04448806381225586, 0.04452211380004883, 0.044494049072265625, 0.04446063995361328, 0.04446579360961914, 0.04459145736694336, 0.04459132766723633, 0.04455424118041992, 0.044521472930908204, 0.04452096176147461, 0.04459571075439453, 0.044619071960449216, 0.04465939331054687, 0.044677120208740234, 0.044783615112304685, 0.04472956848144531, 0.04471273422241211, 0.04473820877075195, 0.04465264129638672, 0.04475107192993164, 0.04475088119506836, 0.044751937866210935, 0.04469219207763672, 0.0450615348815918, 0.044876609802246094, 0.044916736602783204, 0.04494745635986328, 0.04494441604614258, 0.0449730224609375, 0.044955646514892575, 0.0450437126159668, 0.045172737121582034, 0.045125633239746096, 0.04501094436645508, 0.04500889587402344, 0.04504780960083008, 0.04511328125, 0.04508268737792969, 0.04510435104370117, 0.045067039489746094, 0.04503548812866211, 0.04514726257324219, 0.045231006622314454, 0.045176830291748044, 0.04510486221313476, 0.045211936950683596, 0.0452911376953125, 0.045358783721923826, 0.04536751937866211, 0.045399967193603515, 0.04534124755859375, 0.04517724609375, 0.04428764724731445, 0.04413679885864258, 0.04407295989990234, 0.04434534454345703, 0.04422383880615234, 0.044284576416015624, 0.044303359985351565, 0.044372608184814456, 0.04433343887329102, 0.04434921646118164, 0.04438003158569336, 0.04441110229492187, 0.044379936218261716, 0.04445574569702149, 0.04444166564941406, 0.04447075271606445, 0.044578113555908204, 0.04455699157714844, 0.04457164764404297, 0.04456268692016602, 0.04456934356689453, 0.0445849609375, 0.04461119842529297, 0.04461782455444336, 0.04465488052368164, 0.044668926239013675, 0.044572673797607425, 0.04476927947998047, 0.04482048034667969, 
0.04473651123046875, 0.04462387084960937, 0.04475699234008789, 0.04477337646484375, 0.04480819320678711, 0.044816383361816405, 0.044865535736083983, 0.04495286560058594, 0.04494351959228516, 0.05335302352905273, 0.05368832015991211, 0.05456486511230469, 0.05585715103149414, 0.0547327995300293, 0.05322547149658203, 0.05293017578125, 0.053104736328125, 0.053276958465576174, 0.053212257385253904, 0.056099742889404294, 0.053354496002197264, 0.05332083129882813, 0.053297889709472655, 0.05319196701049805, 0.053379329681396484, 0.053206687927246095, 0.05295955276489258, 0.053127838134765626, 0.05399961471557617, 0.053233665466308595, 0.053354496002197264, 0.05311459350585938, 0.05315203094482422]",tokens/s,20.687313284682414,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4929.241088,8001.61792,0.0,7606.370304,6988.678144,s,1,13.9430087890625,13.9430087890625,0.0,13.9430087890625,13.9430087890625,13.9430087890625,13.9430087890625,[13.9430087890625],,kWh,0.00018919454070411725,2.0861765625099773e-05,7.585894957595762e-05,0.00028591525590517463,,MB,2811.670528,8018.395136,0.0,7608.467456,6915.136,s,10,2.2374570465087893,0.22374570465087892,0.00037969995005639054,0.22365451049804688,0.22425113830566407,0.22430920104980467,0.2243556512451172,"[0.2242382354736328, 0.22316246032714843, 0.2236561279296875, 0.22365289306640626, 0.2232406463623047, 0.22382080078125, 0.22364569091796874, 0.22354595947265626, 0.22412696838378907, 0.2243672637939453]",tokens/s,1144.1560426800104,kWh,6.628699578146822e-06,7.308311389348398e-07,4.388984992666057e-06,1.174851570974772e-05,tokens/kWh,21789986.609763592,MB,2815.934464,8020.492288,0.0,7610.564608,6915.13856,s,10,21.531000488281247,2.153100048828125,0.007082121175120396,2.1534468994140625,2.160139599609375,2.16175107421875,2.16304025390625,"[2.163362548828125, 2.148183349609375, 2.159029052734375, 2.158015625, 2.152819580078125, 2.1382724609375, 2.146603515625, 2.150858642578125, 2.15407421875, 2.159781494140625]",tokens/s,29.260135883740855,kWh,6.264239718852457e-05,6.908405714485793e-06,4.09264957041343e-05,0.00011047729860714466,tokens/kWh,570252.9007703827,,s,630,21.528521755218502,0.03417225675431509,0.0004772735727576938,0.034099822998046876,0.034559771347045895,0.03492978286743164,0.036041361808776866,"[0.03500147247314453, 0.034740222930908206, 0.034495361328125, 0.03453286361694336, 0.034643905639648434, 0.03560095977783203, 0.0345456657409668, 0.0344535026550293, 0.03457843017578125, 0.03551027297973633, 0.03474227142333984, 0.034320384979248046, 0.03415225601196289, 0.03415059280395508, 0.034148353576660156, 0.0339021110534668, 0.03396860885620117, 0.034050048828125, 0.03395100784301758, 0.03398851013183594, 0.03423657608032227, 0.034116256713867185, 0.0344002571105957, 0.03446768188476562, 0.03504089736938477, 0.03408950424194336, 0.033906688690185545, 0.03392015838623047, 0.03406934356689453, 0.034244350433349606, 0.03400048065185547, 0.03381264114379883, 0.03404822540283203, 0.03400265502929688, 0.035770721435546875, 0.035071712493896484, 0.034488609313964844, 0.03390630340576172, 0.03434979248046875, 0.03430796813964844, 0.03406028747558594, 0.033996353149414064, 0.03391532897949219, 0.03397241592407227, 0.03383276748657227, 0.03497983932495117, 0.03401660919189453, 0.035269279479980466, 0.034062335968017575, 0.034078399658203126, 0.03434531021118164, 0.034009056091308595, 0.03379404830932617, 0.034915809631347654, 0.03383126449584961, 0.03385158538818359, 0.03407257461547852, 0.03450265502929688, 0.03432015991210938, 0.03445782470703125, 0.03434700775146484, 0.0343633918762207, 0.03436860656738281, 0.03498441696166992, 
0.034446815490722656, 0.034339359283447266, 0.034041568756103514, 0.03405033493041992, 0.0340503044128418, 0.03397135925292969, 0.034079326629638675, 0.03425894546508789, 0.03415859222412109, 0.03398451232910156, 0.03394057464599609, 0.034184062957763675, 0.03442416000366211, 0.03383980941772461, 0.034200801849365234, 0.03394220733642578, 0.03419548797607422, 0.03410131072998047, 0.03403504180908203, 0.0338348159790039, 0.03388297653198242, 0.03376038360595703, 0.033737598419189455, 0.033899936676025394, 0.03386355209350586, 0.03382041549682617, 0.03406124877929687, 0.03406646347045898, 0.034009056091308595, 0.034449440002441406, 0.03379548645019531, 0.033823326110839845, 0.03385283279418945, 0.033804702758789065, 0.03389894485473633, 0.03419520187377929, 0.034369537353515625, 0.0342072639465332, 0.03445830535888672, 0.03427920150756836, 0.03439616012573242, 0.03544876861572266, 0.034280574798583985, 0.034076736450195315, 0.03388703918457031, 0.034181182861328124, 0.034673694610595704, 0.0341401596069336, 0.03388924789428711, 0.033701854705810545, 0.03373468780517578, 0.033841152191162106, 0.03382672119140625, 0.03381462478637695, 0.034342910766601564, 0.034455486297607425, 0.03456764984130859, 0.033964160919189454, 0.034286048889160155, 0.03388396835327148, 0.03368569564819336, 0.03358678436279297, 0.034888031005859375, 0.03437097549438477, 0.03468540954589844, 0.03486646270751953, 0.03432944107055664, 0.03436524963378906, 0.03421817779541016, 0.03419955062866211, 0.034078720092773435, 0.034178272247314456, 0.034337566375732424, 0.034301952362060545, 0.034307167053222655, 0.0343515510559082, 0.03446646499633789, 0.03416044616699219, 0.03413593673706055, 0.03396006393432617, 0.034041664123535154, 0.03444140625, 0.03431945419311523, 0.03423529434204101, 0.03400089645385742, 0.0342279052734375, 0.034294078826904294, 0.03449168014526367, 0.03436003112792969, 0.034418689727783204, 0.034560001373291016, 0.034364639282226564, 0.0344420166015625, 0.03475046539306641, 0.034240222930908205, 0.03397046279907227, 0.034078720092773435, 0.034179073333740234, 0.03495935821533203, 0.03406438446044922, 0.033927230834960936, 0.033810367584228514, 0.033716129302978515, 0.03370947265625, 0.0339152946472168, 0.03394793701171875, 0.03406438446044922, 0.03421731185913086, 0.03440822219848633, 0.03414863967895508, 0.034769439697265626, 0.03423206329345703, 0.0341558723449707, 0.034216926574707034, 0.03425075149536133, 0.03415363311767578, 0.03407740783691406, 0.03414028930664063, 0.0340184326171875, 0.0340447998046875, 0.03424367904663086, 0.03376220703125, 0.03385958480834961, 0.03414425659179687, 0.03622092819213867, 0.03504364776611328, 0.03420003128051758, 0.033901023864746097, 0.033963489532470706, 0.033786048889160154, 0.03412572860717773, 0.03389039993286133, 0.03376537704467773, 0.033906688690185545, 0.03408710479736328, 0.03403142547607422, 0.03420159912109375, 0.03418316650390625, 0.03428700637817383, 0.03429846572875977, 0.03431366348266601, 0.03441926574707031, 0.034369537353515625, 0.03412582397460937, 0.034097152709960936, 0.03508582305908203, 0.034211582183837894, 0.03406105422973633, 0.03389235305786133, 0.033971904754638675, 0.0340136947631836, 0.03394950485229492, 0.03412947082519531, 0.03392345428466797, 0.033942913055419924, 0.03416339111328125, 0.03429584121704102, 0.03426095962524414, 0.03397836685180664, 0.03372032165527344, 0.03406412887573242, 0.03381260681152344, 0.03470963287353516, 0.03421388626098633, 0.03429315185546875, 0.03431484985351563, 0.036357246398925784, 
0.03464265441894531, 0.03457449722290039, 0.03446169662475586, 0.037515262603759765, 0.03416844940185547, 0.03435763168334961, 0.034086910247802735, 0.034427902221679685, 0.03390127944946289, 0.03387011337280273, 0.03370134353637695, 0.03377983856201172, 0.03403772735595703, 0.03399929428100586, 0.0341401596069336, 0.034236415863037106, 0.03421184158325195, 0.034402305603027344, 0.034146305084228515, 0.034375679016113284, 0.034342849731445316, 0.03502735900878906, 0.034437183380126954, 0.03430636978149414, 0.034014751434326175, 0.03372652816772461, 0.03375145721435547, 0.03371212768554688, 0.034014720916748044, 0.03373263931274414, 0.03448393630981445, 0.034369537353515625, 0.034187393188476564, 0.034374271392822266, 0.03439616012573242, 0.034288894653320315, 0.034366207122802736, 0.034313953399658204, 0.03460739135742188, 0.03433184051513672, 0.03528345489501953, 0.034472000122070315, 0.03425817489624024, 0.034025630950927734, 0.03423660659790039, 0.03455657577514649, 0.03448368072509766, 0.034719905853271484, 0.03409958267211914, 0.03410739135742188, 0.033906688690185545, 0.03398767852783203, 0.03391171264648438, 0.03385513687133789, 0.03404131317138672, 0.034356094360351565, 0.03441664123535156, 0.03420950317382813, 0.03394179153442383, 0.033972000122070314, 0.03383113479614258, 0.03378732681274414, 0.03394822311401367, 0.03376031875610352, 0.03403366470336914, 0.03424761581420899, 0.03373020935058594, 0.033882465362548825, 0.03378768157958984, 0.035227519989013675, 0.03499657440185547, 0.03404729461669922, 0.03385990524291992, 0.033806720733642576, 0.0338895378112793, 0.03372518539428711, 0.03401113510131836, 0.033810367584228514, 0.03377363204956055, 0.03367731094360352, 0.033691646575927735, 0.03363779067993164, 0.03615190505981445, 0.03398451232910156, 0.034047073364257815, 0.03386403274536133, 0.033813121795654294, 0.03367731094360352, 0.03372780990600586, 0.03376614379882813, 0.033724353790283206, 0.03383840179443359, 0.03355830383300781, 0.033632896423339845, 0.033794334411621094, 0.03381657409667969, 0.033642494201660156, 0.03395721435546875, 0.033712799072265626, 0.03359247970581054, 0.0340076789855957, 0.033587425231933594, 0.03365683364868164, 0.033742847442626955, 0.03360873413085937, 0.03368854522705078, 0.03375718307495117, 0.03377907180786133, 0.03376812744140625, 0.033796031951904296, 0.033775615692138675, 0.033898048400878907, 0.034046592712402346, 0.0339986572265625, 0.03414028930664063, 0.03432620620727539, 0.03448646545410156, 0.034252799987792966, 0.034432926177978516, 0.03434672164916992, 0.034344352722167966, 0.03414115142822265, 0.03386982345581055, 0.0343485107421875, 0.03441923141479492, 0.03411763381958008, 0.03445145416259766, 0.033889694213867186, 0.033899105072021485, 0.03377084732055664, 0.03390041732788086, 0.03396278381347656, 0.03388809585571289, 0.03457382583618164, 0.034226943969726566, 0.03392416000366211, 0.03410006332397461, 0.03378995132446289, 0.033807903289794924, 0.03390521621704102, 0.0339967041015625, 0.034453472137451174, 0.033823806762695315, 0.033627105712890626, 0.03379129409790039, 0.03394367980957031, 0.03382041549682617, 0.034459648132324217, 0.033964031219482424, 0.03420159912109375, 0.03379404830932617, 0.03373862457275391, 0.0337655029296875, 0.03371212768554688, 0.033661022186279296, 0.03368540954589844, 0.03369779205322266, 0.03369779205322266, 0.033739871978759765, 0.033719200134277344, 0.03377260971069336, 0.03382742309570313, 0.03375084686279297, 0.033985057830810544, 0.03391606521606445, 0.03368534469604492, 
0.03368447875976562, 0.033791999816894534, 0.03392102432250976, 0.03358924865722656, 0.033617919921875, 0.033888256072998044, 0.03405228805541992, 0.03435295867919922, 0.03421500778198242, 0.03433295822143555, 0.03432511901855469, 0.03437363052368164, 0.03494623947143555, 0.0342413444519043, 0.03468492889404297, 0.03434883117675781, 0.0341874885559082, 0.03403148651123047, 0.033939422607421874, 0.03407900619506836, 0.034126815795898435, 0.03393119812011719, 0.03466918563842773, 0.03381283187866211, 0.034407615661621094, 0.03397100830078125, 0.03384320068359375, 0.034430431365966796, 0.03387801742553711, 0.03420828628540039, 0.034415935516357424, 0.034226303100585935, 0.034326431274414065, 0.0342570571899414, 0.034301471710205075, 0.03410646438598633, 0.03435951995849609, 0.03425798416137695, 0.03417103958129883, 0.034165184020996095, 0.0341319694519043, 0.034255870819091795, 0.03446470260620117, 0.034250816345214846, 0.03496575927734375, 0.034304542541503905, 0.034285568237304685, 0.03435737609863281, 0.03418918228149414, 0.03443711853027344, 0.03451903915405274, 0.034179073333740234, 0.034113536834716796, 0.03403571319580078, 0.034381855010986326, 0.034057247161865235, 0.03394451141357422, 0.03422412872314453, 0.034127872467041014, 0.03398012924194336, 0.03413020706176758, 0.03384729766845703, 0.033772865295410154, 0.03394582366943359, 0.03379248046875, 0.03385343933105469, 0.03395555114746094, 0.034206016540527344, 0.03440841674804687, 0.034106433868408205, 0.03422281646728516, 0.03376969528198242, 0.033710079193115236, 0.03370393753051758, 0.033767425537109375, 0.03422959899902344, 0.03371484756469727, 0.0337523193359375, 0.033837825775146484, 0.034840576171875, 0.03376128005981445, 0.03383222579956055, 0.03380831909179687, 0.03373299026489258, 0.033970592498779296, 0.03371212768554688, 0.03381398391723633, 0.033843742370605466, 0.0337448959350586, 0.03371212768554688, 0.03369331359863281, 0.03369375991821289, 0.033742591857910155, 0.03384121704101563, 0.03403417587280273, 0.033705440521240235, 0.033753631591796875, 0.03378790283203125, 0.033797824859619144, 0.033754463195800784, 0.036950782775878904, 0.03401087951660156, 0.03388227081298828, 0.03456777572631836, 0.03432089614868164, 0.03899824142456055, 0.03446099090576172, 0.035622337341308596, 0.03448070526123047, 0.034195457458496094, 0.03408486557006836, 0.033836673736572266, 0.033685504913330076, 0.0337694091796875, 0.033640830993652344, 0.03368966293334961, 0.03451859283447266, 0.03430643081665039, 0.0343573112487793, 0.034176063537597653, 0.034185600280761716, 0.034255424499511716, 0.034148353576660156, 0.034318336486816405, 0.034264225006103516, 0.03430486297607422, 0.03420569610595703, 0.033982463836669925, 0.033882110595703126, 0.03400028610229492, 0.034049697875976566, 0.034415550231933593, 0.034369281768798825, 0.03366323089599609, 0.03423564910888672, 0.033739521026611326, 0.03404390335083008, 0.033783809661865234, 0.03368489456176758, 0.03406857681274414, 0.03441692733764649, 0.034260734558105466, 0.03516668701171875, 0.03430809783935547, 0.0343337287902832, 0.03453148651123047, 0.034324382781982424, 0.034781631469726564, 0.03451862335205078, 0.03420659255981445, 0.03448937606811524, 0.03391177749633789, 0.03383091354370117, 0.034113536834716796, 0.03407807922363281, 0.03400563049316406, 0.03426128005981445, 0.03448336029052734, 0.03394822311401367, 0.03389235305786133, 0.03379609680175781, 0.033879966735839845, 0.03381375885009766, 0.03389523315429688, 0.03450991821289062, 0.03428643035888672, 0.0347300796508789, 
0.03423139190673828, 0.03446467208862305, 0.0343633918762207, 0.03522608184814453, 0.034343006134033204, 0.03402751922607422, 0.033957023620605466, 0.03380828857421875, 0.03375545501708985, 0.034398849487304685, 0.03545439910888672, 0.03400761413574219, 0.03418931198120117, 0.03446755218505859, 0.03452342224121094, 0.034514942169189454, 0.035471359252929685, 0.034382911682128904, 0.036981697082519534, 0.034551807403564457, 0.034290878295898435, 0.03474214553833008, 0.03419750213623047, 0.034941215515136716, 0.03399481582641602, 0.03410800170898438, 0.03426115036010742, 0.03400688171386719, 0.03398025512695312, 0.033742401123046876, 0.03409196853637695, 0.0342457275390625, 0.03417555236816406, 0.03401232147216797, 0.034154975891113284, 0.034361217498779295, 0.03442124938964844, 0.0344637451171875, 0.034559745788574216, 0.03472000122070312, 0.034731552124023436, 0.03440035247802734, 0.03462115097045899, 0.034570911407470706, 0.03450806427001953, 0.0345115852355957, 0.034506752014160154, 0.034655681610107424, 0.03453190231323242, 0.03434451293945313, 0.03454000091552734, 0.033992992401123044, 0.03404915237426758, 0.033874496459960934, 0.03384729766845703, 0.03371417617797851, 0.03360492706298828, 0.03363827133178711, 0.03369043350219727, 0.0337367057800293, 0.03366041564941406, 0.03369830322265625, 0.03362406539916992, 0.03362198257446289, 0.033640480041503905, 0.03369779205322266]",tokens/s,29.263504812971576,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in 
track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,11038.990336,14897.053696,0.0,14501.80608,13634.065408,s,1,18.719181640625,18.719181640625,0.0,18.719181640625,18.719181640625,18.719181640625,18.719181640625,[18.719181640625],,kWh,0.00034198201161249244,3.7715848971386735e-05,0.00015437345683199516,0.0005340713174158743,,MB,2041.290752,14911.73376,0.0,14501.80608,12898.96192,s,10,6.8069766235351565,0.6806976623535157,0.00043245650314753796,0.6807008361816407,0.6812607788085937,0.6812755004882812,0.6812872778320312,"[0.679794677734375, 0.6805288696289062, 0.6803720703125, 0.6804213256835937, 0.6808062133789062, 0.680595458984375, 0.6810659790039062, 0.6808442993164062, 0.6812575073242187, 0.6812902221679688]",tokens/s,376.08473505679257,kWh,1.9883856241666405e-05,2.1928306243289064e-06,1.3160492009867131e-05,3.523717887586244e-05,tokens/kWh,7265053.791674584,MB,2047.729664,14911.73376,0.0,14501.80608,13241.259008,s,10,39.032643798828126,3.9032643798828124,0.004029642901741416,3.9040800781250002,3.9065517822265625,3.9080163696289065,3.909188039550781,"[3.9038291015625, 3.90353759765625, 3.9043310546875, 3.90595458984375, 3.897573974609375, 3.89501171875, 3.9050361328125, 3.90948095703125, 3.906226318359375, 3.901662353515625]",tokens/s,16.140336361712563,kWh,0.00011425802097208299,1.2603256586144216e-05,7.603860712713239e-05,0.00020289988468535962,tokens/kWh,310497.9586247434,,s,630,39.02842597198486,0.061949882495214076,0.0004613444707387764,0.06196040153503418,0.06255189361572266,0.06267083473205566,0.06304915794372558,"[0.06164121627807617, 0.061228641510009764, 0.06112857437133789, 0.061268447875976566, 0.06133059310913086, 0.061350814819335936, 0.06136422348022461, 0.06142566299438477, 0.06140927886962891, 0.06138265609741211, 0.06134783935546875, 0.061345470428466796, 0.0614749755859375, 0.061418750762939456, 0.06154089736938476, 0.06231897735595703, 0.06151168060302734, 0.06147686386108398, 0.06159564971923828, 0.06175948715209961, 0.06155878448486328, 0.06156492614746094, 0.06227763366699219, 0.06169116973876953, 0.061587936401367185, 0.062097663879394534, 0.062137535095214844, 0.061911712646484374, 0.06161219024658203, 0.0615997428894043, 0.06200320053100586, 0.06176563262939453, 0.061615966796875, 0.06203612899780273, 0.06177724838256836, 0.06220383834838867, 0.06191286468505859, 0.062277793884277344, 0.061907070159912106, 0.06220457458496094, 0.061908447265625, 0.062230751037597655, 0.062292289733886716, 0.062271488189697265, 0.06204415893554688, 0.06237900924682617, 0.062051326751708984, 0.06346137619018555, 0.06247407913208008, 0.06242319869995117, 0.06216268920898437, 0.0625624008178711, 0.06241043090820313, 0.06240668869018555, 0.06226374435424804, 0.06232876968383789, 0.06249440002441406, 0.06299481582641601, 0.06236774444580078, 0.06259846496582032, 0.06272684860229492, 0.06275481414794921, 0.06271590423583985, 0.06171187210083008, 0.061659168243408204, 0.06137699127197266, 0.06139875030517578, 0.061394527435302736, 0.0614714241027832, 0.06140240097045899, 0.06153472137451172, 0.061458656311035156, 0.06145228958129883, 0.06138665771484375, 0.061962337493896485, 0.06156902313232422, 0.06266239929199219, 0.06156723022460937, 0.06157497787475586, 0.06143561553955078, 0.061438430786132814, 0.06146047973632812, 0.061676673889160157, 0.06139788818359375, 0.061484416961669924, 0.06150377655029297, 0.061473121643066404, 0.06148201751708984, 0.06150179290771484, 0.061684223175048826, 0.061843582153320316, 0.06161203384399414, 0.062125343322753906, 0.06183737564086914, 
0.06204796981811524, 0.06195619201660156, 0.06192623901367188, 0.06198476791381836, 0.0620206413269043, 0.061856735229492185, 0.061882015228271484, 0.06214009475708008, 0.062115966796875, 0.062113311767578124, 0.06231552124023437, 0.062033920288085936, 0.062273536682128906, 0.06227558517456055, 0.06211993789672852, 0.06234726333618164, 0.06253104019165039, 0.06233961486816406, 0.062058494567871096, 0.06237593460083008, 0.062498622894287106, 0.06241660690307617, 0.062351295471191406, 0.06242899322509766, 0.06247625732421875, 0.06254463958740235, 0.06306611251831054, 0.06240777587890625, 0.06300764846801758, 0.06256553649902344, 0.06254678344726562, 0.06252339172363282, 0.06167955017089844, 0.0612589111328125, 0.06110275268554687, 0.0612022705078125, 0.06126217651367188, 0.06138019180297852, 0.06142323303222656, 0.061394878387451175, 0.06137737655639648, 0.06136217498779297, 0.06141484832763672, 0.06135804748535156, 0.061373023986816405, 0.06151987075805664, 0.061965503692626954, 0.061700927734375, 0.06151903915405273, 0.06150044631958008, 0.06198659133911133, 0.06177587127685547, 0.06135193634033203, 0.06161612701416016, 0.06193561553955078, 0.06171990585327149, 0.06161270523071289, 0.06199705505371094, 0.061712383270263675, 0.06179008102416992, 0.06375164794921875, 0.06231241607666015, 0.06183391952514648, 0.0625541114807129, 0.06218902587890625, 0.06183785629272461, 0.062252159118652346, 0.06281619262695312, 0.0618004150390625, 0.06212499237060547, 0.06205379104614258, 0.06216767883300781, 0.061768768310546875, 0.06212704086303711, 0.061975582122802735, 0.06207961654663086, 0.06178441619873047, 0.06226124954223633, 0.061871841430664064, 0.0621632308959961, 0.062250015258789065, 0.062072864532470705, 0.06227840042114258, 0.06210579299926758, 0.0620052490234375, 0.06248992156982422, 0.062427745819091794, 0.06239446258544922, 0.06258659362792969, 0.06242947387695313, 0.06259276962280273, 0.06262400054931641, 0.06254182434082031, 0.06344704055786132, 0.06260940933227539, 0.062093246459960935, 0.06120249557495117, 0.061050880432128904, 0.06111804962158203, 0.061499393463134766, 0.061515296936035156, 0.06122304153442383, 0.06132524871826172, 0.0614337272644043, 0.06138291168212891, 0.06142022323608398, 0.061513729095458984, 0.06145833587646484, 0.06189628982543945, 0.061727104187011717, 0.061610111236572264, 0.06158950424194336, 0.06209651184082031, 0.06155763244628906, 0.061663230895996096, 0.06200870513916015, 0.061704639434814454, 0.06162451171875, 0.06205644989013672, 0.061652992248535154, 0.06168166351318359, 0.062048030853271485, 0.061763809204101565, 0.06174310302734375, 0.062183006286621094, 0.06175507354736328, 0.06209798431396484, 0.06192697525024414, 0.062032127380371095, 0.061799999237060546, 0.062151199340820314, 0.06181033706665039, 0.06224137496948242, 0.0619417610168457, 0.06217046356201172, 0.061884769439697264, 0.062417217254638675, 0.06234931182861328, 0.062149887084960935, 0.062128894805908205, 0.06247174453735352, 0.06288550567626953, 0.06251334381103515, 0.06245235061645508, 0.06238611221313477, 0.06260079956054687, 0.0622305908203125, 0.06246441650390625, 0.06286540985107422, 0.06234112167358399, 0.062459903717041014, 0.062394367218017575, 0.06234726333618164, 0.06237593460083008, 0.06260531234741211, 0.06268518447875976, 0.06313907241821289, 0.0625792007446289, 0.062005760192871094, 0.06126998519897461, 0.06124137496948242, 0.061241344451904295, 0.06124534225463867, 0.06129878234863281, 0.06138470458984375, 0.06138675308227539, 0.06136217498779297, 0.06146630477905273, 
0.0613768310546875, 0.06132326507568359, 0.061341697692871094, 0.06136972808837891, 0.06123788833618164, 0.061439998626708986, 0.06135603332519531, 0.06132227325439453, 0.061328254699707034, 0.06140528106689453, 0.06166470336914062, 0.06177644729614258, 0.061628414154052735, 0.06161993789672852, 0.061534496307373045, 0.06219161605834961, 0.06172671890258789, 0.06164889526367188, 0.06191439819335937, 0.061700607299804686, 0.06169007873535156, 0.06221414566040039, 0.06174515151977539, 0.06221619033813477, 0.061722496032714846, 0.062093280792236326, 0.06171836853027344, 0.06187654495239258, 0.061900798797607424, 0.061810688018798826, 0.062058494567871096, 0.062189567565917966, 0.06227872085571289, 0.062316574096679685, 0.06190377426147461, 0.06230400085449219, 0.06201536178588867, 0.06210188674926758, 0.06249625778198242, 0.06273593521118163, 0.06235232162475586, 0.06217728042602539, 0.06221372985839844, 0.06230873489379883, 0.062133663177490236, 0.06238393783569336, 0.06233580780029297, 0.06259430313110352, 0.06213094329833985, 0.06245171356201172, 0.06241401672363281, 0.06291881561279297, 0.0626080322265625, 0.061744640350341794, 0.06199347305297852, 0.061109729766845707, 0.061319713592529294, 0.061337535858154296, 0.0614769287109375, 0.06126979064941406, 0.061319198608398434, 0.06138054275512695, 0.06140047836303711, 0.061502304077148434, 0.06133555221557617, 0.0613449592590332, 0.06130352020263672, 0.06129056167602539, 0.06148508834838867, 0.061396991729736325, 0.06141299057006836, 0.06132364654541016, 0.061247390747070314, 0.061306304931640625, 0.061319839477539065, 0.06142771148681641, 0.061402206420898435, 0.061393825531005856, 0.06164070510864258, 0.06184755325317383, 0.061841407775878904, 0.06160351943969727, 0.061937984466552735, 0.061712383270263675, 0.06173081588745117, 0.06210275268554687, 0.06199990463256836, 0.06192287826538086, 0.06177632141113281, 0.061834911346435546, 0.06197078323364258, 0.06187744140625, 0.061917057037353514, 0.061877185821533204, 0.062091262817382815, 0.061709758758544925, 0.06214713668823242, 0.06202163314819336, 0.062183425903320315, 0.06200668716430664, 0.06216960144042969, 0.06216508865356445, 0.06214441680908203, 0.062206047058105465, 0.06204156875610352, 0.06217782211303711, 0.06227558517456055, 0.062061920166015624, 0.06228854370117187, 0.06247219085693359, 0.06276710510253906, 0.06257664108276367, 0.06266675186157226, 0.06260326385498047, 0.06267494583129883, 0.06271385574340821, 0.06182291030883789, 0.06166944122314453, 0.06127542495727539, 0.06126793670654297, 0.06142950439453125, 0.06137942504882812, 0.061372577667236326, 0.061338817596435544, 0.06134460830688476, 0.06133308792114258, 0.06160588836669922, 0.061386783599853514, 0.06151129531860351, 0.0620079345703125, 0.06158755111694336, 0.06152185440063476, 0.0614986572265625, 0.0614879035949707, 0.06137187194824219, 0.061313568115234376, 0.06127123260498047, 0.06143033599853515, 0.06147097778320312, 0.06183321762084961, 0.061685760498046874, 0.061863296508789065, 0.06191904067993164, 0.06177260971069336, 0.061685760498046874, 0.06208230209350586, 0.06198300933837891, 0.062134143829345706, 0.06179900741577148, 0.062311710357666014, 0.06231523132324219, 0.06195404815673828, 0.062123809814453125, 0.061956161499023436, 0.06201769638061523, 0.06203596878051758, 0.062189567565917966, 0.062494720458984375, 0.062066078186035156, 0.062247520446777345, 0.06227763366699219, 0.062067966461181644, 0.06225382232666016, 0.06248038482666016, 0.06237519836425781, 0.062232929229736327, 0.06232307052612305, 
0.06251059341430663, 0.062400894165039064, 0.06280121612548828, 0.06253855895996094, 0.06307430267333984, 0.0626954231262207, 0.06264012908935547, 0.06260476684570312, 0.06265705490112304, 0.06273433685302734, 0.0628545265197754, 0.06284966278076172, 0.061902687072753905, 0.06136182403564453, 0.06126847839355469, 0.061245281219482424, 0.06136848068237305, 0.06156492614746094, 0.061358081817626954, 0.06145843124389649, 0.06131625747680664, 0.06138700866699219, 0.06147747039794922, 0.06194790267944336, 0.06158659362792969, 0.061528926849365236, 0.06151331329345703, 0.06317612838745117, 0.06178649520874024, 0.061612545013427736, 0.061575233459472654, 0.06204383850097656, 0.06169356918334961, 0.061973217010498044, 0.061832416534423826, 0.061819679260253904, 0.06217036819458008, 0.06205107116699219, 0.06205641555786133, 0.06187187194824219, 0.061873695373535154, 0.0618073616027832, 0.06218105697631836, 0.06195846557617188, 0.06227763366699219, 0.06221004867553711, 0.06188598251342774, 0.06194019317626953, 0.06204006576538086, 0.06224895858764649, 0.06193766403198242, 0.06234112167358399, 0.06207692718505859, 0.062332862854003905, 0.062324798583984375, 0.062132225036621094, 0.06211993789672852, 0.062443519592285154, 0.06242892837524414, 0.062207744598388674, 0.06227814483642578, 0.06260531234741211, 0.06248038482666016, 0.06268928146362304, 0.06250291061401367, 0.06233260726928711, 0.06222060775756836, 0.0625165138244629, 0.06264460754394531, 0.06264771270751954, 0.06268934249877929, 0.06267744064331054, 0.06265622329711915, 0.06269001770019532, 0.06264575958251953, 0.062007038116455075, 0.06132592010498047, 0.061243392944335937, 0.06121001434326172, 0.06126038360595703, 0.06147481536865235, 0.06141952133178711, 0.06134566497802734, 0.061429088592529296, 0.06138140869140625, 0.061400672912597654, 0.061426078796386716, 0.06160179138183594, 0.061781600952148436, 0.06168617630004883, 0.06151168060302734, 0.06164070510864258, 0.06188851165771484, 0.06171852874755859, 0.06154966354370117, 0.061658016204833986, 0.06186371231079101, 0.061591777801513675, 0.061687328338623046, 0.06200368118286133, 0.06178201675415039, 0.062459903717041014, 0.06192281723022461, 0.06195846557617188, 0.06178153610229492, 0.062175457000732424, 0.061980384826660156, 0.0620748176574707, 0.06201744079589844, 0.06210649490356445, 0.06201948928833008, 0.06217443084716797, 0.06236249542236328, 0.06194790267944336, 0.062306304931640626, 0.06215423965454102, 0.06207913589477539, 0.06234502410888672, 0.06249654388427734, 0.06210604858398437, 0.06235372924804688, 0.062429183959960936, 0.062494720458984375, 0.062470142364501956, 0.062266590118408204, 0.062322689056396485, 0.06249756622314453, 0.06245548629760742, 0.062324703216552736, 0.0622655029296875, 0.0625802879333496, 0.06268377685546875, 0.06259267044067383, 0.06265276718139648, 0.06257868957519531, 0.062484161376953125, 0.06261785507202149, 0.06246134567260742, 0.06177382278442383, 0.062104766845703124, 0.06108038330078125, 0.06122038269042969, 0.06127254486083984, 0.061415424346923826, 0.06133484649658203, 0.06130963134765625, 0.061396991729736325, 0.06135910415649414, 0.0613135986328125, 0.061393344879150394, 0.06142566299438477, 0.061548545837402345, 0.061529502868652344, 0.062147167205810545, 0.0616399040222168, 0.06136502456665039, 0.061456382751464846, 0.061841407775878904, 0.061679615020751956, 0.06170214462280273, 0.06160534286499023, 0.062036510467529296, 0.06174310302734375, 0.06171833419799805, 0.06193990325927735, 0.06172051239013672, 0.061558849334716795, 
0.06267417526245117, 0.06216780853271484, 0.06209516906738281, 0.06208262252807617, 0.06203251266479492, 0.061966335296630856, 0.06191286468505859, 0.061973953247070314, 0.06200713729858399, 0.06198064041137695, 0.061899742126464846, 0.06225884628295898, 0.0623271369934082, 0.06205974578857422, 0.06230643081665039, 0.06189123153686524, 0.06219571304321289, 0.06230796813964844, 0.06200735855102539, 0.06235168075561524, 0.06231654357910156, 0.06202777481079102, 0.06237139129638672, 0.06225145721435547, 0.062042110443115236, 0.0623636474609375, 0.06239846420288086, 0.062461952209472656, 0.062390239715576175, 0.062351390838623046, 0.062473217010498044, 0.0625301742553711, 0.06266073608398437, 0.0625516471862793]",tokens/s,16.14208065813934,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.747328,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,3552.100352,5264.83456,0.0,4869.586944,4520.068608,s,1,11.08853125,11.08853125,0.0,11.08853125,11.08853125,11.08853125,11.08853125,[11.08853125],,kWh,0.00012195984266666641,1.3445429172698094e-05,5.28433756079999e-05,0.0001882486474473644,,MB,1489.997824,5298.388992,0.0,4888.461312,4194.016256,s,10,1.8471275177001956,0.1847127517700195,0.0002522035746094528,0.18474337768554688,0.18496663665771484,0.18501363754272462,0.1850512382507324,"[0.1843956756591797, 0.18467015075683593, 0.18439353942871095, 0.18472857666015624, 0.18491842651367188, 0.18506063842773438, 0.1847581787109375, 0.18495619201660157, 0.18431417846679687, 0.1849319610595703]",tokens/s,1385.935716656629,kWh,5.451094363425919e-06,6.011560006089856e-07,3.6200286161851924e-06,9.672278980220097e-06,tokens/kWh,26467392.07207758,MB,1494.26176,5306.7776,0.0,4896.84992,4194.018816,s,10,19.221016967773437,1.9221016967773437,0.009146835434526338,1.919507080078125,1.9376085083007812,1.9383769470214842,1.9389916979980468,"[1.9136322021484375, 1.9104893798828124, 1.9193065185546876, 1.9391453857421874, 1.9268912353515626, 1.9148179931640625, 1.9212440185546875, 1.9197076416015626, 1.937437744140625, 1.9183448486328125]",tokens/s,32.77662160416787,kWh,5.602947284699082e-05,6.179889168601416e-06,3.400133481421482e-05,9.621069682980705e-05,tokens/kWh,654812.8438508717,,s,630,19.21847046661375,0.030505508677164714,0.0005148219707423202,0.030384927749633788,0.030966467475891112,0.03126797103881836,0.03259703628540041,"[0.030611455917358397, 0.0304967041015625, 0.03031612777709961, 0.03030825614929199, 0.030020160675048826, 0.03007916831970215, 0.02998806381225586, 0.03006729507446289, 0.030107648849487304, 0.03014860725402832, 0.03042099189758301, 0.03059926414489746, 0.03043315124511719, 0.030795808792114257, 0.03019161605834961, 0.029981887817382813, 0.03011257553100586, 0.03006800079345703, 0.030456544876098633, 0.030057823181152344, 0.030179967880249025, 0.03121148872375488, 0.030181087493896485, 0.030123680114746094, 0.03037798309326172, 0.030918527603149414, 0.030266048431396485, 0.03034441566467285, 0.03014748764038086, 0.03022233581542969, 0.03014816093444824, 0.030793407440185546, 0.03023641586303711, 0.030313472747802734, 0.03048979187011719, 0.0304167366027832, 0.030376352310180665, 0.030234367370605468, 0.03030054473876953, 0.030327232360839843, 0.030291584014892577, 0.03019523239135742, 0.030243679046630858, 0.030310144424438478, 0.03008505630493164, 0.030300384521484376, 0.030256288528442383, 0.0303175048828125, 0.03044528007507324, 0.030292255401611328, 0.03018547248840332, 0.03046553611755371, 0.030894559860229494, 0.03169833564758301, 0.03156156730651855, 0.030501535415649414, 0.03039580726623535, 0.03025302314758301, 0.03061631965637207, 0.030244287490844728, 0.030304832458496092, 0.030232576370239257, 0.030437376022338865, 0.03140604782104492, 
0.030475711822509764, 0.03024947166442871, 0.030238624572753905, 0.03006278419494629, 0.030111167907714845, 0.030222623825073243, 0.030118207931518554, 0.030168895721435548, 0.0301628475189209, 0.030060800552368164, 0.03014656066894531, 0.03018511962890625, 0.030277023315429686, 0.030139520645141603, 0.030576448440551757, 0.030291807174682616, 0.031021120071411133, 0.030328832626342773, 0.03016713523864746, 0.030324703216552736, 0.030236703872680664, 0.030521343231201172, 0.03006185531616211, 0.03010223960876465, 0.03005571174621582, 0.03045449638366699, 0.030038015365600586, 0.030361600875854492, 0.030174463272094727, 0.03027587127685547, 0.03036412811279297, 0.03016089630126953, 0.03032678413391113, 0.030220064163208007, 0.03021436882019043, 0.030103296279907227, 0.030090911865234375, 0.030140960693359375, 0.03009440040588379, 0.030193824768066407, 0.030183744430541993, 0.030509599685668944, 0.030469120025634764, 0.030327808380126952, 0.03028540802001953, 0.030431455612182617, 0.030309919357299805, 0.030304927825927735, 0.030203903198242187, 0.0317130241394043, 0.03052569580078125, 0.030285663604736328, 0.03050511932373047, 0.030242687225341798, 0.030770816802978516, 0.030367584228515626, 0.03044576072692871, 0.030266847610473633, 0.030605663299560548, 0.03019843292236328, 0.030209344863891603, 0.030167743682861327, 0.030597343444824218, 0.030380319595336915, 0.03059542465209961, 0.0301844482421875, 0.030128448486328126, 0.030419647216796877, 0.030078975677490235, 0.030016992568969728, 0.030314687728881837, 0.030212448120117186, 0.03016089630126953, 0.03022371292114258, 0.030679712295532225, 0.030266687393188475, 0.031111871719360352, 0.03082854461669922, 0.030248960494995116, 0.030345216751098632, 0.030269439697265626, 0.03068025588989258, 0.030255935668945313, 0.030277631759643556, 0.030208000183105467, 0.030242816925048828, 0.030633983612060548, 0.029933536529541015, 0.030087200164794922, 0.03015872001647949, 0.0317664966583252, 0.03291126251220703, 0.03051241683959961, 0.030219232559204102, 0.030239744186401366, 0.030258176803588867, 0.030234624862670898, 0.030229888916015624, 0.030140863418579102, 0.03004419136047363, 0.029991071701049806, 0.030099456787109374, 0.030066495895385743, 0.030138559341430664, 0.030103424072265624, 0.03129766464233399, 0.03032281684875488, 0.030350784301757812, 0.03137750434875488, 0.031072608947753905, 0.031014495849609375, 0.030578432083129884, 0.03056889533996582, 0.030410144805908205, 0.03062380790710449, 0.030356224060058595, 0.03048963165283203, 0.030395456314086914, 0.030265247344970703, 0.030717376708984376, 0.030202432632446288, 0.030410144805908205, 0.03052729606628418, 0.03093788719177246, 0.03063382339477539, 0.03118275260925293, 0.03058492851257324, 0.030607423782348632, 0.03052739143371582, 0.030689311981201173, 0.030881919860839845, 0.030838655471801757, 0.030696767807006836, 0.03129206466674805, 0.030791711807250977, 0.030519296646118164, 0.0315773754119873, 0.030790367126464845, 0.03060105514526367, 0.030642175674438478, 0.031178911209106444, 0.03192422485351563, 0.03083395195007324, 0.030804704666137696, 0.030619199752807618, 0.030751104354858397, 0.03188115119934082, 0.03060736083984375, 0.03053785514831543, 0.03124224090576172, 0.030826496124267577, 0.030605600357055663, 0.03043008041381836, 0.03022729682922363, 0.030490591049194337, 0.030468128204345704, 0.03041187286376953, 0.030379104614257812, 0.030431039810180666, 0.03054591941833496, 0.030697471618652345, 0.030644224166870116, 0.03058892822265625, 0.03056844711303711, 
0.030535680770874023, 0.030697471618652345, 0.030904031753540038, 0.030615840911865234, 0.03139750480651855, 0.03066713523864746, 0.030514528274536133, 0.03111737632751465, 0.03050761604309082, 0.031086591720581053, 0.030651424407958986, 0.030878688812255858, 0.030686784744262695, 0.030537216186523438, 0.030923263549804687, 0.030676511764526366, 0.0346899528503418, 0.030697471618652345, 0.03087273597717285, 0.030311264038085938, 0.030062559127807618, 0.029908544540405275, 0.02998524856567383, 0.03007014465332031, 0.031105152130126955, 0.03035158348083496, 0.030337087631225584, 0.03022198486328125, 0.030728256225585938, 0.030312448501586913, 0.030133855819702147, 0.03121308708190918, 0.030201919555664064, 0.0301279354095459, 0.030178207397460938, 0.03059107208251953, 0.03018547248840332, 0.03327590560913086, 0.0354029426574707, 0.030475072860717774, 0.030375999450683595, 0.03066873550415039, 0.030102880477905273, 0.03002169609069824, 0.03034720039367676, 0.03043600082397461, 0.030636032104492186, 0.030402559280395508, 0.03019161605834961, 0.030438783645629883, 0.030186111450195313, 0.030207103729248046, 0.030231039047241212, 0.030253440856933593, 0.030195520401000975, 0.030369632720947264, 0.03022662353515625, 0.030741695404052735, 0.030847967147827147, 0.031092735290527345, 0.031047679901123046, 0.030496768951416016, 0.030525440216064452, 0.03132803153991699, 0.0305948486328125, 0.030443199157714845, 0.030224992752075196, 0.030177440643310547, 0.030492767333984375, 0.03016387176513672, 0.030243839263916016, 0.030523168563842774, 0.030275808334350587, 0.030661983489990233, 0.030265567779541015, 0.03020025634765625, 0.030352415084838866, 0.030823328018188476, 0.030553632736206055, 0.030460447311401368, 0.03033888053894043, 0.03011599922180176, 0.030244319915771485, 0.030324384689331053, 0.03070044708251953, 0.031247840881347657, 0.0308897590637207, 0.03153033638000488, 0.03061222457885742, 0.03055548858642578, 0.031179328918457032, 0.0301976318359375, 0.03037811279296875, 0.03037705612182617, 0.03068156814575195, 0.030427583694458006, 0.030507104873657227, 0.030369440078735353, 0.030424575805664062, 0.03028438377380371, 0.03105299186706543, 0.030401248931884766, 0.030175487518310548, 0.02999817657470703, 0.03037648010253906, 0.030269792556762695, 0.030133920669555662, 0.030085216522216796, 0.0300097599029541, 0.02999692726135254, 0.030054399490356445, 0.030224384307861327, 0.030529535293579102, 0.03021004867553711, 0.0300948486328125, 0.0299967041015625, 0.030170175552368166, 0.030283008575439453, 0.03041539192199707, 0.030515199661254884, 0.030770591735839844, 0.030912351608276368, 0.03222195053100586, 0.03142422485351563, 0.03087286376953125, 0.030434303283691407, 0.030088384628295897, 0.030649375915527344, 0.030649343490600587, 0.030597984313964845, 0.03044550323486328, 0.030382080078125, 0.030166463851928713, 0.03075129508972168, 0.030023263931274413, 0.030110111236572267, 0.02999603271484375, 0.030320928573608397, 0.0302653751373291, 0.030071487426757814, 0.02999091148376465, 0.02999888038635254, 0.030226367950439453, 0.03009564781188965, 0.030091232299804687, 0.030115583419799804, 0.029980960845947265, 0.030042015075683593, 0.030230079650878906, 0.03013644790649414, 0.03187132835388184, 0.03054739189147949, 0.030852735519409178, 0.030753471374511718, 0.03092710494995117, 0.030672895431518556, 0.03037183952331543, 0.030285823822021486, 0.03029747200012207, 0.0302139835357666, 0.03052796745300293, 0.030175552368164063, 0.030325983047485353, 0.030103935241699218, 0.03027395248413086, 
0.030060543060302734, 0.03061555290222168, 0.030087167739868165, 0.03005232048034668, 0.030088512420654297, 0.030175968170166014, 0.03040870475769043, 0.03060736083984375, 0.030324735641479493, 0.030318208694458008, 0.030363872528076173, 0.030349472045898437, 0.030619327545166015, 0.030816064834594727, 0.03069958305358887, 0.031072608947753905, 0.030594175338745117, 0.0304486083984375, 0.03116009521484375, 0.030369279861450195, 0.03093987274169922, 0.03052470397949219, 0.030745311737060545, 0.0303636474609375, 0.0305930233001709, 0.030492671966552733, 0.030668800354003906, 0.030799680709838868, 0.030869056701660156, 0.030283872604370116, 0.030300703048706055, 0.03027507209777832, 0.030757375717163086, 0.030457855224609375, 0.030432479858398438, 0.030546079635620116, 0.030389184951782226, 0.03034819221496582, 0.03034601593017578, 0.030511104583740234, 0.030489952087402343, 0.03041961669921875, 0.030304256439208983, 0.030320640563964843, 0.03027974319458008, 0.03046803283691406, 0.03036787223815918, 0.030250591278076173, 0.031180320739746095, 0.031064544677734375, 0.0312740478515625, 0.03057561683654785, 0.03028540802001953, 0.030256959915161134, 0.030503456115722655, 0.029980640411376953, 0.030416608810424805, 0.03062175941467285, 0.030799615859985353, 0.03031068801879883, 0.0303635196685791, 0.030277984619140625, 0.03021414375305176, 0.030228479385375977, 0.030256959915161134, 0.030097440719604494, 0.030074911117553713, 0.029995231628417968, 0.030224288940429687, 0.03016499137878418, 0.03010291290283203, 0.030349952697753906, 0.030225439071655272, 0.032750240325927736, 0.030828672409057616, 0.030382303237915038, 0.03193795204162598, 0.031382080078125, 0.030369792938232422, 0.030279680252075194, 0.03035308837890625, 0.030306623458862304, 0.03018288040161133, 0.0312956485748291, 0.030235008239746095, 0.030103551864624024, 0.030146175384521485, 0.03048838424682617, 0.030067520141601564, 0.03021798324584961, 0.03007814407348633, 0.03024675178527832, 0.030100448608398438, 0.03024508857727051, 0.030223648071289064, 0.030235008239746095, 0.030326271057128908, 0.030328895568847655, 0.030364448547363282, 0.03023849678039551, 0.030316543579101563, 0.030277631759643556, 0.03018320083618164, 0.030655807495117187, 0.03055094337463379, 0.03061350440979004, 0.030510719299316407, 0.031060575485229492, 0.0304136962890625, 0.030366016387939454, 0.030956127166748046, 0.031328128814697265, 0.030971904754638672, 0.030769151687622072, 0.0314368953704834, 0.03103324890136719, 0.030501184463500978, 0.03054707145690918, 0.031260543823242185, 0.031103679656982422, 0.030983423233032225, 0.030567167282104492, 0.03056572723388672, 0.03061417579650879, 0.03046505546569824, 0.03080086326599121, 0.030818304061889647, 0.030668256759643554, 0.03075494384765625, 0.03051510429382324, 0.030677696228027344, 0.030580543518066407, 0.030556095123291015, 0.030853151321411133, 0.030576671600341797, 0.03041689682006836, 0.031282432556152345, 0.030485248565673827, 0.030951648712158202, 0.030715808868408204, 0.03064944076538086, 0.03096598434448242, 0.030384384155273437, 0.030718271255493163, 0.030664703369140626, 0.030631872177124025, 0.03115014457702637, 0.030367647171020508, 0.030263391494750977, 0.03472812652587891, 0.03085433578491211, 0.030767744064331054, 0.030790847778320314, 0.03068191909790039, 0.030478336334228515, 0.030332063674926756, 0.030435583114624024, 0.03021660804748535, 0.030455711364746094, 0.030801216125488282, 0.03039740753173828, 0.03041587257385254, 0.03041587257385254, 0.030411903381347655, 
0.030601247787475586, 0.030695711135864258, 0.030782079696655272, 0.030457792282104493, 0.030520704269409178, 0.03143948745727539, 0.030482431411743165, 0.03033497619628906, 0.030459903717041017, 0.030648319244384766, 0.03125923156738281, 0.030444896697998047, 0.030462623596191406, 0.030713472366333008, 0.030172576904296877, 0.03026972770690918, 0.03018822479248047, 0.029996320724487303, 0.030165151596069337, 0.03036422348022461, 0.02992243194580078, 0.030058624267578125, 0.030214912414550783, 0.030038015365600586, 0.030147743225097657, 0.0304116153717041, 0.030117887496948242, 0.030084703445434572, 0.02998271942138672, 0.030419231414794922, 0.030262880325317383, 0.030174816131591797, 0.030970815658569337, 0.030072832107543947, 0.029845504760742186, 0.03015679931640625, 0.03033087921142578, 0.030048255920410157, 0.030242816925048828, 0.030380064010620118, 0.030223808288574218, 0.030192159652709962, 0.030849023818969725, 0.03017932891845703, 0.030269439697265626, 0.03038969612121582, 0.030593791961669923, 0.03027315139770508, 0.0333776969909668, 0.03040540885925293, 0.030117887496948242, 0.030264768600463867, 0.030300735473632812, 0.030349184036254882, 0.03065011215209961, 0.03065894317626953, 0.03038547134399414, 0.030784191131591795, 0.03055615997314453, 0.030610912322998046, 0.03055039978027344, 0.030518688201904298, 0.03083139228820801, 0.03067286491394043, 0.03058278465270996, 0.03106390380859375, 0.03065667152404785, 0.0305664005279541, 0.030693376541137695, 0.030803359985351563, 0.03039673614501953, 0.03093849563598633, 0.030460832595825195]",tokens/s,32.78096459832393,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,5178.14272,6356.33664,0.0,5953.814528,5766.738432,s,1,12.3250703125,12.3250703125,0.0,12.3250703125,12.3250703125,12.3250703125,12.3250703125,[12.3250703125],,kWh,0.00015930497196250143,1.7565454984387384e-05,7.100227902399892e-05,0.00024787270597088776,,MB,1785.48736,6412.959744,0.0,5995.757568,5260.089344,s,10,2.703243072509766,0.27032430725097656,0.0004384154131524255,0.27030769348144534,0.27084059753417966,0.2709648391723633,0.2710642324829102,"[0.27081298828125, 0.2697884826660156, 0.26960369873046874, 0.2702043151855469, 0.27066400146484376, 0.27048593139648436, 0.270217041015625, 0.27039834594726564, 0.26997918701171875, 0.2710890808105469]",tokens/s,947.010657692438,kWh,7.970622544594377e-06,8.790163676940003e-07,5.3184802307564644e-06,1.4168119143044843e-05,tokens/kWh,18068735.68858086,MB,1791.40608,6429.73696,0.0,6012.534784,5260.091904,s,10,29.142009521484376,2.9142009521484376,0.008035036704213188,2.912658935546875,2.922499267578125,2.9281297607421877,2.932634155273438,"[2.913010498046875, 2.911853271484375, 2.921248046875, 2.90704248046875, 2.912307373046875, 2.913518798828125, 2.93376025390625, 2.915164794921875, 2.90215185546875, 
2.9119521484375]",tokens/s,21.618275827394292,kWh,8.465634869290242e-05,9.337715414066874e-06,4.9431563569243906e-05,0.00014342562767621317,tokens/kWh,439252.0431719778,,s,630,29.139460975647008,0.0462531126597571,0.0005517035869884898,0.04615441513061523,0.04678251838684082,0.047081591796874996,0.0479963404083252,"[0.04774518585205078, 0.04645273590087891, 0.046206592559814456, 0.046292510986328125, 0.04617916870117188, 0.04628012847900391, 0.04595753479003906, 0.04606291198730469, 0.046398303985595704, 0.0461693115234375, 0.045929088592529296, 0.046030017852783205, 0.04602975845336914, 0.04598787307739258, 0.04597760009765625, 0.046170112609863284, 0.04651007843017578, 0.04625516891479492, 0.046293952941894534, 0.046219264984130856, 0.04791257476806641, 0.04662908935546875, 0.04642140960693359, 0.04613606262207031, 0.04730803298950195, 0.04614022445678711, 0.046096126556396486, 0.04619283294677735, 0.046065376281738284, 0.04592259216308594, 0.04640681457519531, 0.046107616424560544, 0.04584979248046875, 0.046029151916503905, 0.04606601715087891, 0.04603257751464844, 0.046053409576416016, 0.04578704071044922, 0.04607424163818359, 0.04606060791015625, 0.04587820816040039, 0.04599193572998047, 0.04595711898803711, 0.046433345794677734, 0.04580857467651367, 0.04581171035766601, 0.04580672073364258, 0.04578598403930664, 0.045848575592041016, 0.04616726303100586, 0.04656528091430664, 0.04614972686767578, 0.04679280090332031, 0.045941665649414064, 0.04618012619018555, 0.0456376953125, 0.04834707260131836, 0.04634009552001953, 0.0461168327331543, 0.045887264251708984, 0.04607395172119141, 0.04704425430297852, 0.04574079895019531, 0.04724335861206055, 0.046518463134765625, 0.04611936187744141, 0.04593824005126953, 0.04596096038818359, 0.04612716674804687, 0.04618624114990234, 0.04603772735595703, 0.045846527099609374, 0.04603433609008789, 0.046102142333984374, 0.045727840423583986, 0.04580646514892578, 0.04586905670166016, 0.04588553619384766, 0.04569692611694336, 0.046205024719238284, 0.04654070281982422, 0.04639334487915039, 0.04645273590087891, 0.04626432037353516, 0.04613951873779297, 0.046223262786865234, 0.04646307373046875, 0.046179710388183595, 0.04611123275756836, 0.04600627136230469, 0.04653587341308594, 0.046208961486816406, 0.046297950744628905, 0.046298561096191404, 0.0458570556640625, 0.046059295654296874, 0.04582249450683594, 0.046087646484375, 0.04579792022705078, 0.04581171035766601, 0.04639337539672852, 0.045999839782714845, 0.04596908950805664, 0.045873729705810544, 0.04587491226196289, 0.04604240036010742, 0.04595523071289063, 0.04604399871826172, 0.04637286376953125, 0.046003871917724606, 0.04675823974609375, 0.04637446212768555, 0.046198238372802736, 0.046559326171875, 0.046758785247802734, 0.04634019088745117, 0.046249889373779295, 0.04622732925415039, 0.04614771270751953, 0.047263744354248044, 0.04603017425537109, 0.04626684951782226, 0.0466495361328125, 0.04673331069946289, 0.04706304168701172, 0.04657353591918945, 0.04735795211791992, 0.04673529434204102, 0.0465263671875, 0.04702838516235352, 0.0473702392578125, 0.04654489517211914, 0.04710329437255859, 0.04637260818481445, 0.04702313613891602, 0.046145633697509764, 0.046253215789794924, 0.04618921661376953, 0.04686438369750977, 0.04683161544799805, 0.0461962890625, 0.04594073486328125, 0.0457507209777832, 0.045891681671142576, 0.045617057800292966, 0.046034942626953124, 0.04559872055053711, 0.04573798370361328, 0.045848575592041016, 0.04841676712036133, 0.04747673416137695, 0.04674355316162109, 0.04737433624267578, 
0.047107425689697266, 0.04632793426513672, 0.046201377868652346, 0.04627852630615235, 0.0463504638671875, 0.046268417358398435, 0.04633599853515625, 0.045853919982910156, 0.04663580703735352, 0.04607932662963867, 0.04585948944091797, 0.0457050895690918, 0.045697406768798826, 0.045792991638183594, 0.04593257522583008, 0.04544307327270508, 0.04557158279418945, 0.04578867340087891, 0.04575641632080078, 0.045825023651123044, 0.04607747268676758, 0.04560854339599609, 0.0456938247680664, 0.045630687713623046, 0.04549407958984375, 0.04551164627075195, 0.045440479278564455, 0.04547638320922852, 0.04563123321533203, 0.04562112045288086, 0.05362019348144531, 0.047694400787353514, 0.046408031463623045, 0.046399486541748046, 0.04633190536499023, 0.046542240142822267, 0.046919105529785156, 0.04627523040771484, 0.04675312042236328, 0.04612982559204101, 0.045819263458251956, 0.045658206939697264, 0.04589420700073242, 0.046442462921142576, 0.046266368865966793, 0.046260223388671876, 0.047001598358154296, 0.04634009552001953, 0.046155006408691406, 0.046123775482177734, 0.0462213134765625, 0.046223518371582034, 0.04585865783691406, 0.04588083267211914, 0.045926559448242185, 0.04585292816162109, 0.04592959976196289, 0.045954238891601565, 0.046255359649658205, 0.04645142364501953, 0.046100288391113284, 0.04614937591552734, 0.046078208923339845, 0.04603200149536133, 0.0459967041015625, 0.04589158248901367, 0.04690761566162109, 0.046559009552001956, 0.046846176147460936, 0.04607292938232422, 0.04647350311279297, 0.046043777465820314, 0.046045185089111325, 0.046183521270751954, 0.04595132827758789, 0.04578524780273437, 0.04581382369995117, 0.04574425506591797, 0.04592233657836914, 0.04601456069946289, 0.045936767578125, 0.0469073600769043, 0.04610662460327149, 0.04643868637084961, 0.04619440078735351, 0.046176254272460936, 0.04601795196533203, 0.04589424133300781, 0.04594073486328125, 0.04646912002563477, 0.04613513565063477, 0.04612521743774414, 0.046532222747802734, 0.04566873550415039, 0.045778942108154294, 0.04596451187133789, 0.04586492919921875, 0.04575260925292969, 0.04568937683105469, 0.04700774383544922, 0.04646912002563477, 0.04593423843383789, 0.04582851028442383, 0.045768447875976566, 0.047919296264648435, 0.04696652984619141, 0.04820355224609375, 0.0462504653930664, 0.04621507263183594, 0.04615900802612305, 0.046449600219726564, 0.04624998474121094, 0.0461416015625, 0.04615971374511719, 0.04619996643066406, 0.04603696060180664, 0.046254974365234375, 0.046218303680419924, 0.045984897613525394, 0.04624278259277344, 0.045945697784423825, 0.045879295349121094, 0.04598921585083008, 0.04593302536010742, 0.04590326309204101, 0.04611692810058594, 0.046018943786621094, 0.04588374328613281, 0.04605644989013672, 0.04609331130981445, 0.04625612640380859, 0.04623516845703125, 0.046467391967773435, 0.04587535858154297, 0.04598374557495117, 0.04601446533203125, 0.04597350311279297, 0.04602470397949219, 0.046102527618408204, 0.04610867309570312, 0.046063617706298826, 0.046045185089111325, 0.04598169708251953, 0.04605142211914062, 0.04606563186645508, 0.04652431869506836, 0.04645395278930664, 0.04606140899658203, 0.046322689056396485, 0.046230880737304685, 0.04619740676879883, 0.04647836685180664, 0.046324703216552736, 0.046170112609863284, 0.046419326782226565, 0.04615407943725586, 0.04612441635131836, 0.04609116744995117, 0.04614553451538086, 0.04635443115234375, 0.046219520568847657, 0.04605926513671875, 0.047072704315185544, 0.04625468826293945, 0.045959423065185544, 0.04593635177612305, 0.046206817626953126, 
0.04638326263427734, 0.04589577484130859, 0.0458361930847168, 0.04607385635375977, 0.04569619369506836, 0.04600300979614258, 0.04592639923095703, 0.04596646499633789, 0.046074047088623046, 0.046246654510498045, 0.04613318252563477, 0.046193695068359374, 0.04607075119018555, 0.04620719909667969, 0.04609820938110352, 0.045991390228271485, 0.04650243377685547, 0.04660224151611328, 0.046298431396484374, 0.04639936065673828, 0.04631827163696289, 0.04665318298339844, 0.04700198364257813, 0.04631542587280273, 0.04638934326171875, 0.04635033416748047, 0.04623974227905273, 0.04628275299072265, 0.04636671829223633, 0.046502113342285156, 0.046298686981201174, 0.046255584716796874, 0.04660915374755859, 0.04652793502807617, 0.046346561431884765, 0.04660623931884766, 0.04635683059692383, 0.046378463745117185, 0.046651199340820314, 0.04659904098510742, 0.046722911834716795, 0.0461844482421875, 0.04665753555297852, 0.0464640007019043, 0.04615475082397461, 0.04598374557495117, 0.04640918350219726, 0.04617279815673828, 0.04612700653076172, 0.04611196899414063, 0.04609452819824219, 0.046111328125, 0.045803295135498044, 0.04584265518188477, 0.04590790557861328, 0.04584454345703125, 0.04585017776489258, 0.0457589111328125, 0.048027809143066404, 0.04746236801147461, 0.0467044792175293, 0.04641574478149414, 0.04659804916381836, 0.04617814254760742, 0.04617254257202148, 0.04619590377807617, 0.04628153610229492, 0.04637305450439453, 0.04635424041748047, 0.04615929412841797, 0.046182945251464845, 0.046034591674804684, 0.04594019317626953, 0.04593468856811524, 0.04606473541259765, 0.046070590972900394, 0.046238624572753906, 0.04642611312866211, 0.04626457595825195, 0.04618374252319336, 0.046805438995361326, 0.04674969482421875, 0.04671420669555664, 0.04728057479858398, 0.04632393646240234, 0.046546112060546874, 0.0463306884765625, 0.048299072265625, 0.04678137588500977, 0.04666803359985352, 0.04651987075805664, 0.046266433715820315, 0.046083263397216793, 0.04609939193725586, 0.04629500961303711, 0.04618787384033203, 0.045988544464111325, 0.04722483062744141, 0.04590796661376953, 0.04636467361450195, 0.04620492935180664, 0.04612300872802735, 0.04714863967895508, 0.04644291305541992, 0.046311424255371096, 0.047002880096435544, 0.04653952026367188, 0.04647731018066406, 0.046524417877197265, 0.046548736572265624, 0.04681868743896484, 0.04715932846069336, 0.04979539108276367, 0.04691203308105469, 0.046663681030273435, 0.04653875350952148, 0.04636876678466797, 0.04656742477416992, 0.04647446441650391, 0.046426910400390625, 0.04676403045654297, 0.046827808380126956, 0.04593068695068359, 0.04563763046264648, 0.045645183563232425, 0.04591680145263672, 0.045706687927246095, 0.04723769760131836, 0.046619873046875, 0.04615862274169922, 0.04561305618286133, 0.04571750259399414, 0.04620492935180664, 0.04566377639770508, 0.045781471252441405, 0.046493663787841796, 0.047755294799804685, 0.047876094818115236, 0.04602470397949219, 0.046111934661865236, 0.046304065704345705, 0.045604896545410153, 0.04578915023803711, 0.045914112091064455, 0.04571945571899414, 0.04604118347167969, 0.04614371109008789, 0.046617919921875, 0.046428352355957034, 0.04625747299194336, 0.046631519317626956, 0.04645312118530273, 0.04643430328369141, 0.046450431823730466, 0.04634991836547851, 0.046666561126708986, 0.047088863372802735, 0.04698563385009766, 0.04688329696655273, 0.04679423904418945, 0.04651443099975586, 0.04698931121826172, 0.046440414428710934, 0.046618656158447264, 0.046636768341064457, 0.04622979354858398, 0.046223297119140624, 
0.046456897735595706, 0.04657356643676758, 0.046153728485107424, 0.04592630386352539, 0.0459200325012207, 0.04604467010498047, 0.04602329635620117, 0.0458139533996582, 0.04589363098144531, 0.045830142974853515, 0.04627382278442383, 0.04594761657714844, 0.045913246154785155, 0.04586783981323242, 0.04584041595458985, 0.046085758209228514, 0.046207359313964844, 0.046821918487548825, 0.04633676910400391, 0.04636438369750977, 0.04639894485473633, 0.04610512161254883, 0.045930240631103514, 0.045889793395996095, 0.045774848937988284, 0.04581990432739258, 0.04574003219604492, 0.04583625411987305, 0.04645225524902344, 0.04569916915893555, 0.04585718536376953, 0.04603811264038086, 0.04640041732788086, 0.04614486312866211, 0.04664204788208008, 0.045964672088623044, 0.045687198638916016, 0.045610881805419924, 0.04556198501586914, 0.04560198211669922, 0.04590431976318359, 0.04609830474853516, 0.04610108947753906, 0.04585052871704102, 0.045770751953125, 0.04569497680664063, 0.0456407356262207, 0.04575945663452148, 0.046288127899169924, 0.04608895874023437, 0.04597555160522461, 0.04581785583496094, 0.0457891845703125, 0.0460544319152832, 0.0458741455078125, 0.046333953857421874, 0.046729217529296874, 0.04607318496704101, 0.04588816070556641, 0.04604927825927734, 0.045938304901123043, 0.0460986557006836, 0.0461416015625, 0.046464447021484376, 0.04646060943603516, 0.04585881423950195, 0.04601926422119141, 0.04643859100341797, 0.046172096252441404, 0.046477375030517576, 0.04609439849853516, 0.046465984344482424, 0.04589161682128906, 0.04583033752441406, 0.04585472106933594, 0.04605212783813477, 0.04616185760498047, 0.046497440338134764, 0.04660255813598633, 0.04593878555297851, 0.04703535842895508, 0.04596886444091797, 0.04609481430053711, 0.0460041618347168, 0.04593376159667969, 0.04566723251342773, 0.04573596954345703, 0.04774899291992187, 0.04736556625366211, 0.04636867141723633, 0.04605212783813477, 0.04603315353393555, 0.04579635238647461, 0.04597772979736328, 0.04701043319702149, 0.045969406127929685, 0.04577219009399414, 0.046070369720458984, 0.0458158073425293, 0.045840385437011716, 0.045698623657226566, 0.04577734375, 0.04632166290283203, 0.0457088623046875, 0.046354881286621095, 0.045674495697021485, 0.04621481704711914, 0.04623344039916992, 0.046582206726074216, 0.0459835205078125, 0.04596700668334961, 0.046283393859863284, 0.04618191909790039, 0.046176734924316405, 0.04617327880859375, 0.046855072021484374, 0.04639539337158203, 0.04699679946899414, 0.046064319610595705, 0.04568262481689453, 0.045744190216064455, 0.04585062408447266, 0.04637491226196289, 0.04616396713256836, 0.046292991638183595, 0.04669235229492188, 0.04634147262573242, 0.04642825698852539, 0.046209598541259764, 0.04773273468017578, 0.04646201705932617, 0.04615055847167969, 0.04598992156982422, 0.04691292953491211, 0.04608265686035156, 0.045873153686523435, 0.046301185607910154, 0.046458881378173826, 0.04609548950195313, 0.046051712036132814, 0.04597840118408203, 0.04587094497680664, 0.04603891372680664]",tokens/s,21.620166568163924,,, 4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) 
Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,881.225728,697.237504,0.0,301.989888,282.769408,s,1,8.115388671875,8.115388671875,0.0,8.115388671875,8.115388671875,8.115388671875,8.115388671875,[8.115388671875],,kWh,2.7295277566675697e-05,3.003633164850173e-06,8.516117923990096e-06,3.881502865551596e-05,,MB,1224.056832,751.763456,0.0,341.835776,318.94528,s,14,0.19344745635986327,0.013817675454275948,7.485959433415556e-05,0.013801631927490236,0.01389693422317505,0.013936592054367066,0.013989008321762085,"[0.013711968421936034, 0.013839743614196777, 0.013796352386474609, 0.013757760047912598, 0.01400211238861084, 0.013886719703674317, 0.013901311874389649, 0.013763296127319335, 0.013855520248413087, 0.01378662395477295, 0.01382975959777832, 0.013800127983093262, 0.013803135871887208, 0.013713024139404296]",tokens/s,18526.994706680533,kWh,4.08081155355449e-07,4.500398054212974e-08,2.2897026864617723e-07,6.82055404543756e-07,tokens/kWh,375336077.23736876,MB,1260.453888,776.92928,0.0,367.0016,318.94784,s,14,10.310911926269531,0.7364937090192523,0.00821429704017505,0.733315673828125,0.7502115112304688,0.7535501770019531,0.7550960217285156,"[0.731744384765625, 0.7309376831054688, 0.7525097045898438, 0.7390380249023437, 0.7554824829101563, 0.7297362060546875, 0.7448490600585937, 0.7279030151367187, 0.7310556640625, 0.7342277221679687, 0.7320957641601562, 0.73263671875, 0.73399462890625, 0.7347008666992187]",tokens/s,85.54044553061233,kWh,2.142347510089356e-05,2.3626636365616697e-06,8.067167320637933e-06,3.1853306058093164e-05,tokens/kWh,1977816.6789061823,,s,882,10.304133318901059,0.011682690837756306,0.00026965252532891844,0.011595471858978272,0.012006979274749755,0.012102902269363402,0.01250075856208801,"[0.011332127571105957, 0.011513216018676758, 0.011523903846740723, 0.011514687538146972, 0.011567104339599609, 0.011506976127624512, 0.011500255584716797, 0.0115032958984375, 0.01157145595550537, 0.0115, 0.011535967826843262, 0.011537504196166993, 0.01150044822692871, 0.011519519805908204, 0.0116843204498291, 0.011497119903564453, 0.01149955177307129, 0.011528512001037598, 0.011507712364196777, 0.011517951965332032, 0.011522047996520996, 0.011536383628845214, 0.011618304252624511, 0.011584639549255371, 0.011707263946533203, 0.011537440299987793, 0.011595840454101563, 0.011653375625610352, 0.011615232467651367, 0.011658559799194336, 0.01165552043914795, 0.011603872299194335, 0.011617440223693848, 0.011604096412658691, 0.011585856437683105, 0.011633152008056641, 0.011616095542907715, 0.011680031776428223, 0.011685759544372558, 0.011615551948547364, 0.012221119880676269, 0.012294143676757812, 0.011687935829162598, 0.0116627197265625, 0.011536416053771972, 0.011536928176879882, 0.011585408210754395, 0.011544768333435058, 0.011587583541870117, 0.011808095932006836, 0.01165385627746582, 0.011642399787902833, 0.011640895843505859, 0.011566911697387695, 0.011641375541687012, 0.011624544143676759, 0.011571104049682618, 0.011580960273742676, 0.01161411190032959, 0.011581727981567383, 0.011815199851989746, 0.011599871635437011, 0.011587648391723633, 0.011315327644348144, 0.011515968322753907, 0.011562463760375976, 0.01171078395843506, 0.011559200286865235, 0.011632351875305176, 0.011551039695739746, 0.011545696258544923, 0.011518176078796388, 0.01150819206237793, 0.011536288261413574, 0.01151961612701416, 0.011553152084350586, 
0.011536479949951172, 0.011547967910766602, 0.011608384132385253, 0.011607647895812988, 0.011583935737609864, 0.011637248039245606, 0.01163424015045166, 0.011589216232299806, 0.01148953628540039, 0.011503968238830566, 0.011534336090087891, 0.011538432121276856, 0.011568927764892578, 0.0115, 0.011554559707641602, 0.011624287605285645, 0.011762911796569825, 0.012058688163757323, 0.011580479621887208, 0.01152835178375244, 0.011554143905639648, 0.01158176040649414, 0.011716608047485352, 0.011552032470703124, 0.011535072326660156, 0.01155840015411377, 0.011536288261413574, 0.011457344055175782, 0.011515456199645996, 0.011505887985229493, 0.011585536003112793, 0.011697440147399903, 0.01161308765411377, 0.011927359580993653, 0.011561152458190917, 0.011576319694519043, 0.011567935943603516, 0.01153228759765625, 0.01159718418121338, 0.011604000091552734, 0.011825887680053711, 0.0116778564453125, 0.011640095710754394, 0.011684288024902343, 0.011738880157470703, 0.011684160232543945, 0.011620287895202636, 0.01164083194732666, 0.011595168113708497, 0.011536992073059082, 0.01131503963470459, 0.011548831939697265, 0.011655232429504395, 0.01158950424194336, 0.011568544387817382, 0.011610783576965333, 0.011775296211242675, 0.01202451229095459, 0.01194598388671875, 0.012062399864196778, 0.012091103553771973, 0.012057151794433594, 0.012130335807800293, 0.01204428768157959, 0.01205008029937744, 0.012075584411621093, 0.012076160430908204, 0.012047136306762695, 0.012345120429992676, 0.012214367866516113, 0.012165472030639649, 0.012131999969482422, 0.012068863868713378, 0.012277728080749511, 0.012010720252990723, 0.012003840446472168, 0.012003775596618652, 0.012091360092163085, 0.012036224365234376, 0.012035136222839355, 0.012047072410583497, 0.012056032180786132, 0.01209603214263916, 0.012072352409362793, 0.012259936332702637, 0.012213600158691407, 0.012230751991271972, 0.012175200462341308, 0.012130144119262696, 0.01219264030456543, 0.012103263854980468, 0.012030367851257323, 0.01241702365875244, 0.011921759605407714, 0.011951552391052247, 0.011896608352661132, 0.011863840103149415, 0.01187446403503418, 0.011878911972045898, 0.011974656105041503, 0.011937791824340821, 0.011804672241210937, 0.01170956802368164, 0.011801471710205078, 0.011701536178588868, 0.011725407600402832, 0.011652544021606445, 0.011580096244812012, 0.01157027244567871, 0.011555744171142577, 0.011534144401550293, 0.0115033597946167, 0.011524543762207032, 0.011343199729919434, 0.011522591590881348, 0.012023584365844726, 0.011730112075805665, 0.011649087905883789, 0.011541248321533203, 0.011573247909545899, 0.011509759902954102, 0.011574655532836915, 0.01151363182067871, 0.0115, 0.011467136383056641, 0.011556832313537598, 0.01162393569946289, 0.012193408012390137, 0.01223737621307373, 0.011980192184448242, 0.012771552085876464, 0.011760095596313476, 0.01160217571258545, 0.011673407554626465, 0.011946175575256348, 0.012265472412109376, 0.01174726390838623, 0.01163987159729004, 0.01158358383178711, 0.011530207633972169, 0.011858880043029785, 0.011736767768859863, 0.01168569564819336, 0.01163868808746338, 0.011626239776611327, 0.011565919876098632, 0.011524191856384277, 0.01151369571685791, 0.011475071907043458, 0.011552096366882324, 0.011550944328308105, 0.01182755184173584, 0.011616288185119629, 0.011571328163146972, 0.01153990364074707, 0.011553215980529784, 0.011589728355407714, 0.011542431831359863, 0.011595775604248047, 0.011716608047485352, 0.011577343940734864, 0.011577695846557616, 0.011578335762023925, 0.011540767669677734, 
0.011717023849487305, 0.012767168045043945, 0.012411231994628907, 0.011845343589782715, 0.011935744285583496, 0.01173468780517578, 0.01175369644165039, 0.011649151802062989, 0.011759360313415528, 0.011724672317504882, 0.011703968048095703, 0.01190112018585205, 0.011593376159667969, 0.011972288131713867, 0.01187724781036377, 0.011890175819396973, 0.012007328033447265, 0.012232447624206543, 0.012208767890930176, 0.012140768051147461, 0.011964735984802246, 0.012011072158813477, 0.011962431907653809, 0.01389475154876709, 0.012280799865722657, 0.011997183799743653, 0.012040191650390625, 0.012062687873840332, 0.012019424438476563, 0.012007583618164062, 0.012017312049865723, 0.011946463584899902, 0.011941184043884277, 0.01198703956604004, 0.012292736053466797, 0.0120381441116333, 0.012011520385742188, 0.012001279830932618, 0.012007424354553223, 0.012056063652038575, 0.012044960021972657, 0.01201360034942627, 0.011940863609313965, 0.01200111961364746, 0.011930368423461913, 0.012023039817810059, 0.011953120231628417, 0.011968159675598144, 0.011946335792541504, 0.011949888229370117, 0.011941056251525878, 0.011911520004272461, 0.011969344139099121, 0.01201750373840332, 0.012036447525024413, 0.012076607704162598, 0.012013471603393555, 0.012150976181030273, 0.012000864028930663, 0.012104096412658692, 0.011859968185424804, 0.011877663612365722, 0.011769760131835937, 0.011840255737304687, 0.01169651222229004, 0.01168992042541504, 0.011776864051818848, 0.01173580837249756, 0.011754816055297851, 0.011872320175170899, 0.011838560104370116, 0.011685888290405273, 0.011697855949401856, 0.011759615898132325, 0.011684096336364747, 0.01166207981109619, 0.012231936454772949, 0.012161952018737793, 0.01175551986694336, 0.01164083194732666, 0.011558912277221679, 0.011542528152465821, 0.011533408164978028, 0.011537311553955078, 0.011517663955688477, 0.011511520385742187, 0.01152467155456543, 0.011520000457763671, 0.011620415687561034, 0.011564448356628418, 0.011528736114501953, 0.011510880470275878, 0.01150864028930664, 0.011520000457763671, 0.011505120277404786, 0.011620927810668945, 0.011525888442993164, 0.011514080047607423, 0.01162649631500244, 0.011543711662292481, 0.01148316764831543, 0.011576416015625, 0.011519712448120117, 0.01151580810546875, 0.011475040435791015, 0.011517951965332032, 0.011468128204345703, 0.01154319953918457, 0.01158950424194336, 0.01152233600616455, 0.011503456115722656, 0.0115316801071167, 0.011618304252624511, 0.01157910442352295, 0.01156604766845703, 0.011505023956298828, 0.011503423690795898, 0.011542431831359863, 0.011625280380249023, 0.011480287551879883, 0.011540287971496583, 0.011539423942565918, 0.011518272399902343, 0.011556480407714844, 0.011644191741943359, 0.011641632080078125, 0.011630592346191406, 0.01161734390258789, 0.011641792297363281, 0.011567104339599609, 0.011521696090698242, 0.011560640335083007, 0.0115513916015625, 0.011511136054992675, 0.011535008430480958, 0.011539744377136231, 0.011581279754638673, 0.011590527534484864, 0.011395071983337402, 0.011618304252624511, 0.011712512016296387, 0.01175551986694336, 0.011811967849731445, 0.011758208274841309, 0.01182102394104004, 0.011841823577880859, 0.011727968215942382, 0.011727519989013672, 0.011652511596679687, 0.011747872352600098, 0.011711071968078614, 0.011814271926879883, 0.011823391914367676, 0.011880703926086426, 0.011900992393493652, 0.011986687660217285, 0.011876352310180664, 0.011781503677368164, 0.011715200424194336, 0.011671039581298828, 0.01183561611175537, 0.011622976303100586, 0.011896896362304687, 
0.011615903854370117, 0.01163263988494873, 0.011630271911621094, 0.011764032363891602, 0.011763584136962891, 0.011780447959899902, 0.01176915168762207, 0.011719136238098145, 0.011647168159484864, 0.011626303672790527, 0.011620287895202636, 0.011577407836914062, 0.011646688461303711, 0.011890975952148438, 0.013569855690002441, 0.011784192085266113, 0.011737600326538086, 0.01165392017364502, 0.011698207855224609, 0.011684224128723145, 0.011962880134582519, 0.012072480201721192, 0.012175456047058105, 0.012153056144714356, 0.011876640319824218, 0.01184921646118164, 0.011894240379333496, 0.011815199851989746, 0.011923839569091797, 0.0117838716506958, 0.011772480010986328, 0.011993056297302246, 0.012031167984008788, 0.011957088470458984, 0.011843584060668945, 0.011685888290405273, 0.0120381441116333, 0.011640735626220703, 0.011487456321716308, 0.011500639915466309, 0.01153660774230957, 0.011543007850646972, 0.011582688331604004, 0.01153536033630371, 0.011565919876098632, 0.011690976142883301, 0.01158739185333252, 0.011526432037353516, 0.011648896217346192, 0.011470848083496094, 0.011530495643615723, 0.011460639953613282, 0.011527296066284179, 0.011508543968200684, 0.011511839866638184, 0.01148697566986084, 0.011498720169067382, 0.011485568046569824, 0.011500127792358398, 0.011495519638061523, 0.011516960144042968, 0.011516448020935058, 0.011573408126831055, 0.011765151977539063, 0.011571840286254883, 0.0115730562210083, 0.011597984313964844, 0.011526240348815917, 0.011577247619628906, 0.0115382080078125, 0.011534784317016602, 0.01153606414794922, 0.011613823890686036, 0.011520480155944824, 0.011546624183654786, 0.011493535995483398, 0.011507776260375977, 0.01162012767791748, 0.011538335800170899, 0.011587679862976074, 0.011708064079284668, 0.01150921630859375, 0.011522111892700195, 0.01178502368927002, 0.011551936149597168, 0.011539263725280762, 0.011499423980712891, 0.011528287887573242, 0.01153212833404541, 0.011534496307373047, 0.011478848457336426, 0.011493120193481445, 0.011534784317016602, 0.011536383628845214, 0.011517951965332032, 0.011537887573242188, 0.01152451229095459, 0.011483200073242187, 0.011541855812072755, 0.011548416137695312, 0.01157868766784668, 0.011396991729736327, 0.011537023544311523, 0.011621824264526367, 0.011586112022399902, 0.011549823760986328, 0.011544575691223144, 0.011541600227355958, 0.011531583786010742, 0.011555295944213867, 0.01155628776550293, 0.011544480323791503, 0.011502495765686035, 0.011529088020324707, 0.011513888359069824, 0.011526880264282226, 0.01162604808807373, 0.0116778564453125, 0.011740639686584472, 0.011762175559997558, 0.01168124771118164, 0.011699168205261231, 0.011655391693115235, 0.01164675235748291, 0.011538432121276856, 0.011552767753601074, 0.011548064231872558, 0.011569408416748047, 0.011609727859497071, 0.011680480003356933, 0.011663359642028808, 0.011616255760192871, 0.01163263988494873, 0.011616255760192871, 0.011575296401977539, 0.011560959815979004, 0.011608063697814941, 0.011528191566467285, 0.01159552001953125, 0.011566559791564941, 0.011600543975830078, 0.01161023998260498, 0.011583488464355468, 0.011576416015625, 0.011649824142456055, 0.011569024085998535, 0.011546879768371583, 0.011523648262023926, 0.01151961612701416, 0.011710559844970703, 0.01160582447052002, 0.011629376411437988, 0.01158902359008789, 0.011659168243408203, 0.01159177589416504, 0.011670080184936524, 0.011669887542724609, 0.011583488464355468, 0.011606975555419922, 0.011600319862365722, 0.011616640090942383, 0.011644448280334473, 0.011595487594604492, 
0.011618176460266113, 0.011320863723754883, 0.011556511878967285, 0.011838272094726562, 0.011838848114013673, 0.011623040199279785, 0.011645024299621581, 0.01162019157409668, 0.01157481575012207, 0.01157583999633789, 0.01150169563293457, 0.011534208297729493, 0.011498592376708984, 0.011508480072021484, 0.011548831939697265, 0.011511808395385742, 0.011521663665771485, 0.011630784034729004, 0.011590047836303711, 0.01153769588470459, 0.011499456405639649, 0.01150972843170166, 0.011551008224487305, 0.011491647720336914, 0.011468223571777344, 0.01149395179748535, 0.011523455619812011, 0.011504544258117675, 0.012517087936401368, 0.012246591567993164, 0.011889087677001952, 0.011517760276794433, 0.01162054443359375, 0.01155510425567627, 0.011564224243164063, 0.011545120239257812, 0.011607328414916991, 0.011733663558959962, 0.011609312057495116, 0.01159654426574707, 0.011566240310668946, 0.011588607788085938, 0.011586784362792969, 0.011612895965576173, 0.012664352416992188, 0.011820704460144042, 0.01181116771697998, 0.011679679870605468, 0.011630240440368653, 0.011608960151672364, 0.011599007606506348, 0.011879263877868652, 0.011613887786865234, 0.011565343856811523, 0.011607328414916991, 0.011614975929260253, 0.011653120040893555, 0.011548095703125, 0.011572992324829102, 0.011595871925354004, 0.011946720123291016, 0.01164236831665039, 0.01163929557800293, 0.011675647735595703, 0.01133135986328125, 0.01159001636505127, 0.011608256340026855, 0.011569439888000488, 0.011585151672363281, 0.011748895645141602, 0.011701087951660156, 0.011600128173828125, 0.011696063995361328, 0.011553824424743652, 0.011541536331176758, 0.01151097583770752, 0.01154105567932129, 0.011599552154541016, 0.011548992156982421, 0.011579392433166504, 0.011636735916137696, 0.011538432121276856, 0.011517951965332032, 0.011525471687316895, 0.01165334415435791, 0.011605631828308106, 0.01167033576965332, 0.01181056022644043, 0.012308735847473145, 0.01163263988494873, 0.01151200008392334, 0.011519743919372558, 0.011556672096252442, 0.011782400131225586, 0.011712608337402343, 0.011829440116882323, 0.011890399932861328, 0.01166748809814453, 0.01164425563812256, 0.011558943748474122, 0.011532032012939453, 0.011584287643432617, 0.011538335800170899, 0.011622688293457032, 0.011929471969604493, 0.011593824386596679, 0.011601087570190429, 0.01155072021484375, 0.011549152374267579, 0.01158739185333252, 0.01166096019744873, 0.011571999549865722, 0.011548704147338867, 0.011544544219970703, 0.011554816246032714, 0.011589119911193848, 0.01154099178314209, 0.011610112190246581, 0.011581631660461425, 0.011583423614501953, 0.011599552154541016, 0.011527456283569337, 0.011563424110412598, 0.011548768043518067, 0.011579808235168456, 0.011499520301818847, 0.011547807693481446, 0.011324224472045898, 0.011605631828308106, 0.011601535797119141, 0.011564064025878906, 0.011595456123352051, 0.011597791671752929, 0.01163811206817627, 0.011672287940979005, 0.011622400283813476, 0.011603232383728027, 0.0115894718170166, 0.011871328353881836, 0.011739263534545899, 0.011597087860107422, 0.011570624351501465, 0.011594688415527344, 0.01157692813873291, 0.011547039985656739, 0.011569087982177734, 0.011532383918762207, 0.011570879936218262, 0.011544927597045899, 0.01155679988861084, 0.011587583541870117, 0.011525728225708009, 0.011614432334899902, 0.011542719841003418, 0.011679136276245117, 0.011667648315429687, 0.011620063781738282, 0.011629440307617188, 0.011591487884521485, 0.011610112190246581, 0.011587039947509765, 0.011653663635253907, 0.011584799766540527, 
0.011586272239685058, 0.011583488464355468, 0.011591679573059082, 0.011585536003112793, 0.011562368392944337, 0.011555456161499023, 0.011595168113708497, 0.011614879608154298, 0.011576319694519043, 0.011750304222106933, 0.011598976135253906, 0.011638848304748536, 0.011655488014221192, 0.011684384346008301, 0.011706368446350097, 0.01215283203125, 0.01173846435546875, 0.011721376419067384, 0.011668512344360352, 0.011633952140808106, 0.011560416221618653, 0.011532511711120606, 0.011565055847167969, 0.011964415550231934, 0.011577343940734864, 0.011609279632568359, 0.011574175834655762, 0.011409952163696288, 0.011574560165405274, 0.011592576026916504, 0.011579039573669434, 0.015159584045410157, 0.011781120300292968, 0.011895520210266113, 0.012040543556213378, 0.011678912162780762, 0.012071392059326173, 0.011631967544555664, 0.01158784008026123, 0.01154911994934082, 0.011625760078430176, 0.011839327812194824, 0.011596639633178711, 0.01169321632385254, 0.011586175918579102, 0.011512127876281739, 0.011509663581848144, 0.011529919624328613, 0.011582783699035645, 0.011521023750305176, 0.011522047996520996, 0.0118538236618042, 0.011583488464355468, 0.011623871803283692, 0.011559103965759277, 0.011528479576110839, 0.011558112144470214, 0.011527039527893067, 0.011599871635437011, 0.011497792243957519, 0.011528127670288086, 0.011503680229187012, 0.011514880180358887, 0.011549375534057616, 0.011481087684631347, 0.011513855934143067, 0.011479040145874024, 0.01144217586517334, 0.0117391357421875, 0.011477215766906738, 0.011482591629028321, 0.011491168022155761, 0.01149180793762207, 0.011520288467407226, 0.011497407913208009, 0.011453568458557129, 0.011601951599121094, 0.01155123233795166, 0.0115316162109375, 0.011485183715820312, 0.011467552185058594, 0.01149948787689209, 0.011585568428039551, 0.01153228759765625, 0.011511808395385742, 0.011530336380004882, 0.011515263557434083, 0.01188054370880127, 0.011671327590942382, 0.011587488174438476, 0.011274720191955566, 0.01171664047241211, 0.011661791801452636, 0.01176576042175293, 0.013993472099304198, 0.011579744338989258, 0.01155072021484375, 0.011563167572021485, 0.011564127922058106, 0.01154963207244873, 0.01157475185394287, 0.011520511627197265, 0.011618399620056152, 0.011521471977233886, 0.011629023551940919, 0.011529888153076172, 0.011524383544921874, 0.01148902416229248, 0.011532352447509766, 0.01157260799407959, 0.011989279747009278, 0.01283465576171875, 0.012496928215026855, 0.01159068775177002, 0.011574975967407227, 0.011568991661071778, 0.011477375984191894, 0.011532416343688965, 0.01158249568939209, 0.011495743751525879, 0.011513343811035156, 0.011567520141601563, 0.011548895835876465, 0.011514368057250977, 0.011483039855957031, 0.011503168106079101, 0.01147494411468506, 0.011527520179748535, 0.011537280082702637, 0.011521183967590332, 0.011533087730407714, 0.011484895706176758, 0.011499327659606933, 0.011513728141784668, 0.011469471931457519, 0.011448320388793945, 0.011683296203613282, 0.011519840240478516, 0.011526368141174316, 0.011581600189208984, 0.011582880020141602, 0.011568127632141113, 0.011606176376342774, 0.011882240295410157, 0.011601119995117188, 0.011520575523376465, 0.011532192230224609, 0.01196678352355957, 0.011513855934143067, 0.011581439971923829, 0.011605792045593262, 0.01234556770324707, 0.011591679573059082]",tokens/s,85.59671858885318,,, 
4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4203.692032,5925.43744,0.0,5530.189824,5138.859008,s,1,12.0731376953125,12.0731376953125,0.0,12.0731376953125,12.0731376953125,12.0731376953125,12.0731376953125,[12.0731376953125],,kWh,0.00014106460684169329,1.555255706320597e-05,6.053282620399614e-05,0.0002171499901088954,,MB,1346.072576,5944.311808,0.0,5534.384128,4845.009408,s,10,2.1223248291015624,0.21223248291015628,0.0006155042745064004,0.2121497802734375,0.2128214599609375,0.21313273010253905,0.21338174621582032,"[0.21275228881835936, 0.21145152282714844, 0.21251513671875, 0.2122291259765625, 0.21344400024414062, 0.2127508087158203, 0.2120704345703125, 0.21166184997558593, 0.21206002807617189, 0.21138963317871093]",tokens/s,1206.2244030211516,kWh,6.256342691046083e-06,6.899540493929816e-07,4.159814202554111e-06,1.1106110942993175e-05,tokens/kWh,23050373.016623784,MB,1355.706368,5944.311808,0.0,5534.384128,5014.504448,s,10,19.151216674804687,1.9151216674804687,0.007566801084310754,1.9153659057617187,1.9236949584960936,1.9251369689941407,1.926290577392578,"[1.9125977783203125, 1.918134033203125, 1.92337451171875, 1.9195284423828125, 1.922497802734375, 1.9265789794921875, 1.902783447265625, 1.9088680419921875, 1.9096376953125, 1.9072159423828126]",tokens/s,32.896082306291646,kWh,5.530638489020411e-05,6.10016108972048e-06,3.6190023632844804e-05,9.759656961276938e-05,tokens/kWh,645514.4914412769,,s,630,19.147601362228393,0.030393018035283163,0.00043964754739872025,0.030350192070007324,0.030686198234558107,0.030910873889923095,0.032269759674072265,"[0.030111743927001954, 0.03027020835876465, 0.03071379280090332, 0.03030841636657715, 0.030068735122680663, 0.029963552474975588, 0.030040800094604494, 0.02991836738586426, 0.029935903549194336, 0.029993152618408202, 0.029962623596191406, 0.029997055053710937, 0.030203903198242187, 0.029853696823120116, 0.029997055053710937, 0.0299233283996582, 0.029970016479492188, 0.03009987258911133, 0.029996768951416015, 0.029968671798706055, 0.030044160842895507, 0.02992259216308594, 0.02990358352661133, 0.030365695953369142, 0.030026815414428712, 0.029924287796020507, 0.029881696701049804, 0.02996291160583496, 0.029847551345825195, 0.03037593650817871, 0.03012326431274414, 0.03005721664428711, 0.03009324836730957, 0.029945440292358398, 0.03094099235534668, 0.030613279342651366, 0.030752767562866212, 0.030438047409057617, 0.030339296340942384, 0.030453760147094725, 0.03043328094482422, 0.03059097671508789, 0.030914560317993164, 0.030317760467529296, 0.03035219192504883, 0.030464000701904297, 0.032626686096191404, 0.03220684814453125, 0.030823871612548827, 0.030476863861083985, 0.03043328094482422, 0.030664831161499023, 0.030436735153198242, 0.03016329574584961, 0.0300501766204834, 0.030085407257080077, 0.030236671447753907, 0.03025712013244629, 0.030279647827148436, 0.030145631790161134, 
0.032142303466796876, 0.03215520095825195, 0.030665151596069334, 0.031100128173828127, 0.030376031875610353, 0.030333568572998047, 0.03298099136352539, 0.03464191818237305, 0.030572256088256835, 0.030384416580200194, 0.03064944076538086, 0.030303136825561523, 0.030513151168823242, 0.030337024688720703, 0.03073023986816406, 0.03046735954284668, 0.030585567474365236, 0.03040208053588867, 0.030433759689331055, 0.030470144271850585, 0.030676992416381835, 0.030463903427124024, 0.030350976943969727, 0.03015113639831543, 0.03026063919067383, 0.030040672302246094, 0.03006195259094238, 0.030019264221191406, 0.03032159996032715, 0.029902847290039062, 0.030099456787109374, 0.030369760513305664, 0.030090816497802736, 0.02996067237854004, 0.03039961624145508, 0.029934080123901367, 0.030137887954711916, 0.03006265640258789, 0.030348064422607422, 0.030129695892333986, 0.030308191299438476, 0.0302475528717041, 0.030225887298583984, 0.030269983291625977, 0.030364959716796876, 0.030122623443603516, 0.030242912292480467, 0.030113023757934572, 0.030255199432373047, 0.030081695556640625, 0.030390272140502928, 0.030162784576416017, 0.031088800430297853, 0.030289920806884765, 0.030410751342773438, 0.03058483123779297, 0.03037183952331543, 0.030263263702392577, 0.030572576522827147, 0.030259103775024415, 0.03019759941101074, 0.030613759994506835, 0.03020595169067383, 0.03015852737426758, 0.030529216766357423, 0.030356096267700194, 0.03056928062438965, 0.03031180763244629, 0.03024140739440918, 0.03038969612121582, 0.030655040740966796, 0.030468095779418947, 0.030488576889038086, 0.030186592102050783, 0.030479263305664063, 0.030461727142333986, 0.03057072067260742, 0.030486528396606444, 0.03033907127380371, 0.030273536682128906, 0.030465599060058593, 0.030517696380615234, 0.030280927658081054, 0.030517791748046873, 0.03057481575012207, 0.03056630325317383, 0.030379167556762697, 0.03073302459716797, 0.03033113670349121, 0.030498815536499024, 0.030693376541137695, 0.030467391967773438, 0.030349407196044922, 0.030277408599853516, 0.030374496459960938, 0.030388448715209963, 0.030449663162231445, 0.03056025505065918, 0.030265344619750976, 0.030341119766235353, 0.030308351516723633, 0.03054080009460449, 0.030527679443359376, 0.030509536743164062, 0.03051136016845703, 0.0305828800201416, 0.030494720458984374, 0.03044918441772461, 0.030603744506835937, 0.030301664352416994, 0.030540063858032228, 0.030445823669433592, 0.03073023986816406, 0.030957536697387697, 0.030633087158203124, 0.0305447998046875, 0.030683008193969727, 0.030642303466796875, 0.030565759658813477, 0.03045235252380371, 0.030962976455688476, 0.03043190383911133, 0.030562368392944336, 0.030648319244384766, 0.030541696548461915, 0.03199180793762207, 0.030679168701171874, 0.030472192764282226, 0.030715904235839843, 0.03075984001159668, 0.03059712028503418, 0.030496768951416016, 0.030487936019897462, 0.03031923294067383, 0.030514976501464845, 0.03051968002319336, 0.030489824295043946, 0.03124083137512207, 0.03050271987915039, 0.030428800582885742, 0.030476512908935546, 0.03075926399230957, 0.030524768829345704, 0.030716575622558594, 0.030598560333251954, 0.030528095245361327, 0.030494144439697266, 0.030455968856811524, 0.03044598388671875, 0.03057663917541504, 0.030512992858886718, 0.03046620750427246, 0.030471839904785157, 0.030439231872558595, 0.0304051513671875, 0.030347488403320313, 0.0303470401763916, 0.03042889595031738, 0.030332704544067383, 0.030433792114257813, 0.030316511154174806, 0.03055619239807129, 0.030390272140502928, 0.030312448501586913, 
0.03035955238342285, 0.030373664855957033, 0.03033830451965332, 0.030558847427368165, 0.03045984077453613, 0.030433696746826173, 0.030516799926757814, 0.030269887924194334, 0.030166816711425782, 0.030505184173583985, 0.030373664855957033, 0.030480607986450196, 0.030416448593139647, 0.030368160247802735, 0.03035753631591797, 0.030810111999511718, 0.030310400009155275, 0.030365695953369142, 0.03040377616882324, 0.030417728424072265, 0.03061350440979004, 0.030361600875854492, 0.030398464202880858, 0.03031603240966797, 0.030466560363769532, 0.030402559280395508, 0.030273120880126955, 0.030325151443481444, 0.030507007598876954, 0.030537727355957032, 0.030351327896118163, 0.030394399642944336, 0.03010748863220215, 0.030150815963745116, 0.030119935989379884, 0.030257152557373046, 0.03213478469848633, 0.030421375274658203, 0.030488576889038086, 0.030382080078125, 0.03051215934753418, 0.030374399185180666, 0.030853599548339845, 0.032107967376708985, 0.030583040237426758, 0.030409023284912108, 0.030296064376831053, 0.030485792160034178, 0.030391008377075195, 0.030265344619750976, 0.030251007080078125, 0.030310144424438478, 0.030330528259277345, 0.030099103927612305, 0.030154783248901366, 0.02994451141357422, 0.030168544769287108, 0.030397184371948244, 0.030201343536376952, 0.030189088821411133, 0.030267488479614257, 0.03003481674194336, 0.03074662399291992, 0.03017318344116211, 0.030287872314453124, 0.030224384307861327, 0.030311424255371092, 0.030409568786621093, 0.0302873592376709, 0.03179952049255371, 0.030422847747802736, 0.030447263717651368, 0.03040470314025879, 0.030659231185913086, 0.0305296630859375, 0.030359647750854493, 0.030535680770874023, 0.030402559280395508, 0.03208396911621094, 0.030612512588500975, 0.0306428165435791, 0.030719648361206053, 0.030622047424316408, 0.030695039749145506, 0.030767839431762697, 0.030653472900390624, 0.03061356735229492, 0.03075142478942871, 0.030472415924072266, 0.030642175674438478, 0.03036774444580078, 0.030435392379760742, 0.030281728744506835, 0.030298112869262695, 0.030162464141845702, 0.030256959915161134, 0.03004182434082031, 0.03035545539855957, 0.030292224884033204, 0.030651071548461913, 0.030357503890991212, 0.03013599967956543, 0.03015488052368164, 0.030148799896240235, 0.030093311309814453, 0.030363616943359376, 0.030388191223144533, 0.030367807388305666, 0.030338144302368163, 0.030413728713989258, 0.03038617515563965, 0.03061459159851074, 0.030147008895874024, 0.03045743942260742, 0.030383007049560547, 0.032734783172607425, 0.03169148826599121, 0.030531295776367186, 0.030488576889038086, 0.03058278465270996, 0.030636032104492186, 0.03043328094482422, 0.03022355270385742, 0.030589183807373046, 0.030279487609863282, 0.03058127975463867, 0.03035772705078125, 0.03110019111633301, 0.030381792068481444, 0.031214591979980468, 0.03341299057006836, 0.030363775253295897, 0.030689279556274415, 0.030549760818481445, 0.030476543426513673, 0.03040480041503906, 0.03111302375793457, 0.030889951705932617, 0.03028995132446289, 0.030504959106445313, 0.030463008880615233, 0.03046067237854004, 0.030439008712768556, 0.032295455932617186, 0.03132425689697266, 0.030584735870361326, 0.03040835189819336, 0.03035385513305664, 0.030187519073486328, 0.03016908836364746, 0.030121984481811522, 0.030240480422973632, 0.03037392044067383, 0.030737695693969728, 0.030601184844970705, 0.03056057548522949, 0.030744512557983397, 0.03058937644958496, 0.030879072189331055, 0.030685855865478517, 0.030629888534545898, 0.030621696472167968, 0.030549375534057618, 0.030614080429077147, 
0.03070572853088379, 0.030707712173461913, 0.030502912521362304, 0.030300096511840822, 0.03045792007446289, 0.030325759887695314, 0.030354080200195314, 0.030138303756713867, 0.03027395248413086, 0.029941024780273436, 0.03006892776489258, 0.02993779182434082, 0.030180927276611327, 0.029942623138427736, 0.029995008468627928, 0.029981887817382813, 0.030022464752197265, 0.03015065574645996, 0.030127519607543944, 0.030013408660888672, 0.03005299186706543, 0.030041919708251954, 0.02998700714111328, 0.029913087844848633, 0.02999075126647949, 0.029989023208618164, 0.030013439178466796, 0.02984137535095215, 0.030012895584106445, 0.030450239181518554, 0.029998336791992188, 0.030595584869384764, 0.030294271469116212, 0.02992742347717285, 0.029977983474731445, 0.029700191497802734, 0.029899295806884767, 0.030192800521850586, 0.029997919082641603, 0.029870080947875976, 0.02998681640625, 0.03001740837097168, 0.030025856018066406, 0.030323808670043945, 0.030186336517333986, 0.029966400146484374, 0.030269439697265626, 0.02998886489868164, 0.030216192245483397, 0.029929471969604493, 0.03000115203857422, 0.030109407424926758, 0.030103519439697267, 0.03058278465270996, 0.030396127700805665, 0.030263584136962892, 0.03027916717529297, 0.03021670341491699, 0.030006303787231445, 0.03005721664428711, 0.030121984481811522, 0.030309823989868163, 0.03015555191040039, 0.03029747200012207, 0.030239360809326172, 0.030218271255493163, 0.030117855072021485, 0.03035545539855957, 0.030353279113769532, 0.030174911499023436, 0.030254592895507814, 0.030229440689086916, 0.03030406379699707, 0.030234783172607423, 0.03050048065185547, 0.030351392745971678, 0.030359935760498048, 0.03153494453430176, 0.03038419151306152, 0.03043132781982422, 0.030774911880493163, 0.030474143981933592, 0.030265567779541015, 0.030646528244018555, 0.030522687911987305, 0.03060505676269531, 0.031136703491210938, 0.030646047592163085, 0.030492895126342772, 0.030332927703857423, 0.03118489646911621, 0.030777183532714844, 0.030232128143310548, 0.03016969680786133, 0.030136320114135744, 0.030135616302490235, 0.0309418888092041, 0.03007279968261719, 0.030005279541015624, 0.029941631317138673, 0.029935743331909178, 0.02998182487487793, 0.029864095687866212, 0.02985443115234375, 0.02983526420593262, 0.029890560150146486, 0.030318592071533205, 0.030068735122680663, 0.0299683837890625, 0.030003200531005858, 0.030011392593383788, 0.03036947250366211, 0.03003004837036133, 0.030107135772705077, 0.029987424850463868, 0.03007036781311035, 0.03045327949523926, 0.030236671447753907, 0.030155231475830078, 0.03245859146118164, 0.03033660888671875, 0.030024255752563477, 0.030182655334472657, 0.030153120040893554, 0.03018544006347656, 0.030271232604980467, 0.030103168487548827, 0.030098432540893554, 0.03013222312927246, 0.030058496475219725, 0.030325759887695314, 0.03008358383178711, 0.030177791595458983, 0.03039641571044922, 0.030144512176513674, 0.030094688415527343, 0.030398719787597655, 0.030152416229248045, 0.030263168334960937, 0.03021238327026367, 0.030152767181396485, 0.030220767974853516, 0.030274688720703127, 0.03040656089782715, 0.030340063095092774, 0.030588287353515625, 0.030394016265869142, 0.03026019287109375, 0.030478271484375, 0.03053984069824219, 0.03086329650878906, 0.030373760223388672, 0.030612895965576172, 0.030649120330810548, 0.030397663116455077, 0.03019024085998535, 0.030354848861694338, 0.030249696731567383, 0.030195711135864257, 0.03016703987121582, 0.030170848846435547, 0.030165056228637695, 0.03015292739868164, 0.030208000183105467, 
0.0301711368560791, 0.030066688537597655, 0.030289920806884765, 0.030162399291992187, 0.030050559997558592, 0.030440864562988282, 0.02984815979003906, 0.029804832458496095, 0.029726720809936522, 0.031883264541625975, 0.03082035255432129, 0.030238719940185548, 0.03007279968261719, 0.03004128074645996, 0.030151519775390625, 0.030527488708496094, 0.030224384307861327, 0.030353408813476562, 0.030468095779418947, 0.030312448501586913, 0.030200895309448243, 0.03041289520263672, 0.030544736862182616, 0.03042483139038086, 0.030314559936523437, 0.03032048034667969, 0.030363359451293946, 0.030118528366088866, 0.03025820732116699, 0.030196704864501954, 0.03020956802368164, 0.030077407836914063, 0.030193119049072265, 0.03006217575073242, 0.030053312301635743, 0.030906368255615234, 0.03016499137878418, 0.030170175552368166, 0.029983680725097658, 0.02998886489868164, 0.0301711368560791, 0.030263296127319338, 0.03037539291381836, 0.030380319595336915, 0.030226688385009765, 0.030731264114379882, 0.03039468765258789, 0.030456512451171876, 0.030529535293579102, 0.030395584106445314, 0.030328832626342773, 0.030462783813476564, 0.03057663917541504, 0.030527488708496094, 0.030404287338256834, 0.030144832611083985, 0.03027276802062988, 0.030313215255737304, 0.03034275245666504, 0.030339296340942384, 0.030138559341430664, 0.030513151168823242, 0.03026896095275879, 0.030047903060913084, 0.03033171272277832, 0.030033151626586915, 0.03007753562927246, 0.02992483139038086, 0.030057151794433593, 0.030052352905273437, 0.03018137550354004, 0.0301560001373291, 0.0300501766204834, 0.03047222328186035, 0.030079872131347656, 0.030093311309814453, 0.03006224060058594, 0.029860191345214844]",tokens/s,32.90229350830191,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7128.715264,9262.989312,0.0,8860.4672,8344.1792,s,1,14.806181640625,14.806181640625,0.0,14.806181640625,14.806181640625,14.806181640625,14.806181640625,[14.806181640625],,kWh,0.00021484008227501666,2.3687199203734497e-05,9.755507804401486e-05,0.000336082359522766,,MB,3076.44416,9277.669376,0.0,8860.4672,7566.148096,s,10,3.6060410766601563,0.36060410766601564,0.0007679263872102494,0.36039358520507814,0.36144390258789066,0.3619397705078125,0.36233646484375,"[0.3600245666503906, 0.36243563842773435, 0.3613337097167969, 0.3596086120605469, 0.360388916015625, 0.360833740234375, 0.36039825439453127, 0.35999432373046875, 0.36023501586914064, 0.36078829956054687]",tokens/s,709.919811110699,kWh,1.0538146939582778e-05,1.162164930514351e-06,6.98638455735768e-06,1.868669642745481e-05,tokens/kWh,13699585.745069444,MB,3090.952192,9277.669376,0.0,8860.4672,7827.641344,s,10,25.331605468750002,2.533160546875,0.05219611557115325,2.51319580078125,2.5612449951171876,2.6238214721679687,2.673882653808594,"[2.68639794921875, 2.547339111328125, 2.514108154296875, 2.51187548828125, 2.51683447265625, 2.5090791015625, 2.514581298828125, 
2.510867431640625, 2.508239013671875, 2.512283447265625]",tokens/s,24.870117323483154,kWh,7.317319206541773e-05,8.071063971952213e-06,4.8731271127845244e-05,0.00012997552716521518,tokens/kWh,484706.6318870868,,s,630,25.321290199279776,0.04019252412584093,0.0009449381615799566,0.039879600524902345,0.042254981994628904,0.04259270172119141,0.042983562011718754,"[0.04258307266235352, 0.04236771011352539, 0.04214803314208984, 0.04218009567260742, 0.042267135620117184, 0.04227481460571289, 0.04224179077148438, 0.04228326416015625, 0.04224204635620117, 0.042231807708740236, 0.04219903945922852, 0.04218265533447266, 0.04232396697998047, 0.04243251037597656, 0.042231552124023436, 0.04269900894165039, 0.042313728332519535, 0.04260454559326172, 0.0423853759765625, 0.04263529586791992, 0.04233420944213867, 0.042485759735107424, 0.04248371124267578, 0.042487808227539066, 0.04248691177368164, 0.042697601318359375, 0.04238943862915039, 0.042596416473388674, 0.04299078369140625, 0.04242950439453125, 0.04250908660888672, 0.042441566467285155, 0.04272038269042969, 0.04237209701538086, 0.042718334197998045, 0.042453121185302735, 0.04278316879272461, 0.04279123306274414, 0.04239155197143555, 0.0428337287902832, 0.04279110336303711, 0.04253615951538086, 0.04267046356201172, 0.0428322868347168, 0.04258816146850586, 0.042635265350341796, 0.04290969467163086, 0.04281315231323242, 0.04254483032226562, 0.04270336151123047, 0.04293833541870117, 0.04308185577392578, 0.04292403030395508, 0.04423884963989258, 0.04295999908447266, 0.042886016845703125, 0.04299555206298828, 0.042985633850097654, 0.0429854736328125, 0.0429788818359375, 0.04330131149291992, 0.04274486541748047, 0.04297417449951172, 0.04262380981445312, 0.04238131332397461, 0.04212940979003906, 0.042249248504638674, 0.042243038177490234, 0.04231967926025391, 0.042253631591796875, 0.04212377548217774, 0.04237744140625, 0.04223401641845703, 0.04221116638183594, 0.042202625274658206, 0.04120991897583008, 0.03967036819458008, 0.03985612869262695, 0.03962879943847656, 0.039722782135009765, 0.03966793441772461, 0.039684097290039064, 0.039720287322998045, 0.039723648071289065, 0.03972249603271484, 0.03988918304443359, 0.039919872283935544, 0.039851169586181644, 0.03995939254760742, 0.03999456024169922, 0.03983769607543945, 0.039889728546142575, 0.039900192260742186, 0.03984476852416992, 0.03976812744140625, 0.03983929443359375, 0.04239763259887695, 0.03982147216796875, 0.03967216110229492, 0.03973315048217774, 0.0398111686706543, 0.03986841583251953, 0.04001910400390625, 0.039828319549560544, 0.039970367431640626, 0.0397644157409668, 0.03981913757324219, 0.03986240005493164, 0.03983107376098633, 0.03989753723144531, 0.03985753631591797, 0.03987094497680664, 0.03994643020629883, 0.039953857421875, 0.040046142578125, 0.0399288330078125, 0.03991756820678711, 0.039923713684082034, 0.03987263870239258, 0.04012598419189453, 0.039979358673095704, 0.04007731246948242, 0.042540958404541016, 0.03998691177368164, 0.039962974548339844, 0.03997903823852539, 0.03992972946166992, 0.03957363128662109, 0.03940380859375, 0.03935203170776367, 0.03944243240356445, 0.03944243240356445, 0.039684097290039064, 0.03948953628540039, 0.039651329040527344, 0.03987046432495117, 0.04016128158569336, 0.0411126708984375, 0.03975987243652344, 0.04204022216796875, 0.03949977493286133, 0.039532543182373044, 0.03954687881469727, 0.03973302459716797, 0.039575775146484374, 0.039553024291992187, 0.03957350540161133, 0.03963916778564453, 0.03971263885498047, 0.039657470703125, 0.03977791976928711, 
0.039840129852294924, 0.03991551971435547, 0.039731201171875, 0.03966566467285156, 0.03970457458496094, 0.04013260650634766, 0.03976742553710937, 0.03976454544067383, 0.039753791809082034, 0.040291904449462894, 0.039782047271728516, 0.039714591979980465, 0.03974028778076172, 0.04106252670288086, 0.03983769607543945, 0.03984931182861328, 0.039737022399902344, 0.04045833587646484, 0.03985702514648438, 0.039755489349365236, 0.03990556716918945, 0.03993190383911133, 0.03996160125732422, 0.03997967910766602, 0.04002409744262695, 0.0399299201965332, 0.03998662567138672, 0.03988768005371094, 0.039907329559326174, 0.03988060760498047, 0.03988079833984375, 0.040005630493164065, 0.04002406311035156, 0.039990303039550784, 0.0400742073059082, 0.040218624114990234, 0.04024838256835937, 0.04004345703125, 0.039868927001953124, 0.039610942840576174, 0.039513248443603516, 0.03954972839355469, 0.03952844619750977, 0.03952640151977539, 0.03940966415405273, 0.039532222747802735, 0.03951852798461914, 0.03967180633544922, 0.0396893424987793, 0.03961708831787109, 0.03975609588623047, 0.039725055694580076, 0.03965462493896484, 0.039672607421875, 0.0396943359375, 0.03972710418701172, 0.03970048141479492, 0.03970809555053711, 0.0398372802734375, 0.03973638534545899, 0.039693984985351566, 0.03977798461914062, 0.039811649322509766, 0.03993190383911133, 0.03989465713500977, 0.039857696533203125, 0.03980783843994141, 0.03988275146484375, 0.03972467041015625, 0.039823745727539064, 0.039796417236328124, 0.039762241363525394, 0.03978374481201172, 0.03989939117431641, 0.039970497131347656, 0.03978316879272461, 0.0399109115600586, 0.03986614227294922, 0.04013948822021484, 0.03992166519165039, 0.03983260726928711, 0.039978080749511716, 0.03988614273071289, 0.03993443298339844, 0.0400425910949707, 0.03994764709472656, 0.039985790252685546, 0.04007654571533203, 0.03998796844482422, 0.0400643196105957, 0.0399879035949707, 0.039995391845703124, 0.040011009216308596, 0.03993471908569336, 0.04014678573608398, 0.04009795379638672, 0.040101791381835936, 0.0409150390625, 0.040110015869140626, 0.04016953659057617, 0.040202239990234374, 0.0401490249633789, 0.03961363220214844, 0.03964396667480469, 0.03970048141479492, 0.039613792419433594, 0.039639198303222656, 0.03969001770019531, 0.039686145782470705, 0.03966953659057617, 0.03959494400024414, 0.039657470703125, 0.03969635009765625, 0.03966944122314453, 0.039668064117431644, 0.0404213752746582, 0.039846912384033206, 0.03981619262695312, 0.03991756820678711, 0.03979436874389648, 0.039737407684326174, 0.039882591247558594, 0.03981148910522461, 0.039808769226074216, 0.039897216796875, 0.04001366424560547, 0.04084355163574219, 0.03989299011230469, 0.039847934722900394, 0.039772159576416014, 0.039839744567871094, 0.03988889694213867, 0.03982940673828125, 0.039894977569580076, 0.03978188705444336, 0.03997763061523438, 0.03990118408203125, 0.039879905700683595, 0.03982416152954102, 0.03999718475341797, 0.03984550476074219, 0.03997145462036133, 0.03988230514526367, 0.04002656173706055, 0.040008705139160154, 0.0398837776184082, 0.03992105484008789, 0.03997756958007812, 0.04003635025024414, 0.04002566528320312, 0.04017542266845703, 0.04016147232055664, 0.04016582489013672, 0.040013824462890625, 0.04000972747802734, 0.04009369659423828, 0.04025548934936524, 0.04003631973266602, 0.040287391662597656, 0.040231807708740235, 0.04015222549438477, 0.040192863464355466, 0.040325119018554685, 0.040134654998779294, 0.0399700813293457, 0.03960905456542969, 0.039569408416748046, 0.03956531143188476, 
0.039601825714111326, 0.039575904846191404, 0.03952230453491211, 0.039618560791015625, 0.03955231857299805, 0.039568096160888674, 0.03966886520385742, 0.03953647994995117, 0.03956326293945313, 0.03966787338256836, 0.03963081741333008, 0.039631744384765626, 0.03958988952636719, 0.03970361709594727, 0.03980384063720703, 0.039584926605224606, 0.039946624755859375, 0.039691009521484376, 0.03976739120483398, 0.039765792846679686, 0.03978096008300781, 0.039814334869384765, 0.03966633605957031, 0.03999760055541992, 0.039788352966308595, 0.039681377410888674, 0.03982627105712891, 0.03988787078857422, 0.039655712127685545, 0.03968700790405273, 0.03982457733154297, 0.03980953598022461, 0.0398070068359375, 0.039796897888183594, 0.039815040588378904, 0.039696510314941404, 0.039815166473388675, 0.03992166519165039, 0.03981881713867187, 0.039917312622070315, 0.039856830596923826, 0.039990913391113284, 0.03992361450195313, 0.04000032043457031, 0.040029857635498045, 0.04004156875610351, 0.04004451370239258, 0.04004140853881836, 0.04000316619873047, 0.04013302230834961, 0.040019966125488284, 0.03996057510375976, 0.0400261116027832, 0.03998284912109375, 0.04008780670166016, 0.03997228622436524, 0.039995166778564455, 0.04000844955444336, 0.04007120132446289, 0.03995334243774414, 0.0413034553527832, 0.04244240188598633, 0.03937580871582031, 0.03926835250854492, 0.039686145782470705, 0.039739391326904294, 0.03952435302734375, 0.03947897720336914, 0.03950214385986328, 0.03962265777587891, 0.03957692718505859, 0.039421695709228516, 0.03952454376220703, 0.03958844757080078, 0.039507423400878906, 0.03953014373779297, 0.03947212982177734, 0.03967795181274414, 0.03961193466186524, 0.03960047912597656, 0.03955686569213867, 0.03957183837890625, 0.039608318328857424, 0.03971072006225586, 0.03974553680419922, 0.039800830841064457, 0.0397209587097168, 0.039669631958007816, 0.039660831451416016, 0.039897953033447266, 0.04254105758666992, 0.03985612869262695, 0.039708671569824217, 0.039962623596191404, 0.03972915267944336, 0.03983116912841797, 0.03996902465820312, 0.03990867233276367, 0.03982937622070312, 0.03983385467529297, 0.039771839141845705, 0.03989811325073242, 0.03989913558959961, 0.039982334136962894, 0.03997520065307617, 0.04011228942871094, 0.03999059295654297, 0.04013564682006836, 0.03999132919311523, 0.04000080108642578, 0.04000604629516601, 0.0399117431640625, 0.03995443344116211, 0.03991551971435547, 0.04016332626342774, 0.040013824462890625, 0.03998220825195312, 0.04011711883544922, 0.04017766571044922, 0.03983564758300781, 0.04003180694580078, 0.040042945861816406, 0.039738849639892576, 0.03964057540893555, 0.03927123260498047, 0.039395584106445315, 0.0393807373046875, 0.039634944915771485, 0.039392704010009764, 0.039397342681884766, 0.039481407165527345, 0.04001984024047851, 0.03944063949584961, 0.03957187271118164, 0.03946495819091797, 0.03951001739501953, 0.03951993560791016, 0.03947552108764649, 0.039618560791015625, 0.039716606140136716, 0.03949747085571289, 0.03961420822143555, 0.03951283264160156, 0.039585792541503906, 0.04023295974731445, 0.04110063934326172, 0.03968272018432617, 0.04002336120605469, 0.0396929931640625, 0.039964672088623046, 0.03970854568481445, 0.03980710220336914, 0.03966156768798828, 0.03969638442993164, 0.03964108657836914, 0.039979007720947264, 0.03975987243652344, 0.03982720184326172, 0.039788799285888674, 0.039691360473632815, 0.039801761627197264, 0.0396943359375, 0.03995180892944336, 0.039858753204345704, 0.03987251281738281, 0.03991328048706055, 0.03986246490478516, 
0.039876609802246096, 0.039847934722900394, 0.039952064514160154, 0.040196414947509765, 0.04152284622192383, 0.040142688751220706, 0.04012492752075195, 0.040013824462890625, 0.03998310470581055, 0.04000515365600586, 0.04000611114501953, 0.039882049560546876, 0.040063678741455076, 0.04004044723510742, 0.04005478286743164, 0.04062003326416016, 0.04014211273193359, 0.040084190368652343, 0.04006083297729492, 0.039725151062011715, 0.03949059295654297, 0.03981417465209961, 0.039459808349609375, 0.03967427062988281, 0.039512382507324216, 0.03940524673461914, 0.0394901123046875, 0.03957964706420898, 0.039602081298828126, 0.0398193588256836, 0.039616512298583983, 0.03955472183227539, 0.03962812805175781, 0.03956217575073242, 0.03957356643676758, 0.03962060928344727, 0.039650943756103514, 0.039690624237060546, 0.03971072006225586, 0.03967795181274414, 0.039774208068847655, 0.039792640686035156, 0.03983564758300781, 0.03989670562744141, 0.039879039764404295, 0.03979673767089844, 0.03974758529663086, 0.03968806457519531, 0.03978585433959961, 0.039731231689453125, 0.039748321533203124, 0.039731201171875, 0.039831550598144534, 0.03976806259155274, 0.039725055694580076, 0.039706623077392575, 0.03971481704711914, 0.039815166473388675, 0.03989059066772461, 0.03979683303833008, 0.03986048126220703, 0.03981439971923828, 0.039860801696777345, 0.03996067047119141, 0.039948383331298826, 0.03996057510375976, 0.04006057739257812, 0.04006742477416992, 0.04007494354248047, 0.040033760070800783, 0.04002620697021484, 0.04006300735473633, 0.040028766632080076, 0.04026367950439453, 0.04007302474975586, 0.039993152618408204, 0.03993036651611328, 0.04001177597045898, 0.040019966125488284, 0.040134654998779294, 0.040048641204833986, 0.040116382598876954, 0.03958972930908203, 0.03952025604248047, 0.039636993408203126, 0.03959603118896484, 0.039394367218017576, 0.03937558364868164, 0.03937071990966797, 0.039491710662841795, 0.03953782272338867, 0.0396165771484375, 0.03968707275390625, 0.03963657760620117, 0.03951043319702149, 0.03951958465576172, 0.03952707290649414, 0.03958988952636719, 0.039642784118652345, 0.03954927825927734, 0.03957964706420898, 0.03974348831176758, 0.039667007446289065, 0.039701183319091796, 0.03975363159179687, 0.03985337448120117, 0.039879295349121095, 0.0397270393371582, 0.03986175918579102, 0.040067806243896484, 0.03978854370117187, 0.04003801727294922, 0.040210464477539065, 0.03976227188110352, 0.03992115020751953, 0.039797248840332033, 0.03985203170776367, 0.039815166473388675, 0.041488384246826174, 0.041109504699707033, 0.03977830505371094, 0.039914527893066404, 0.03978041458129883, 0.03980790328979492, 0.039923713684082034, 0.03982131195068359, 0.03996464157104492, 0.03991555023193359, 0.03990438461303711, 0.04006758499145508, 0.04013606262207031, 0.04001408004760742, 0.03995225524902344, 0.03996556854248047, 0.040097793579101565, 0.040123809814453126, 0.04000419235229492, 0.04000972747802734, 0.0401363525390625, 0.040171871185302736, 0.04002345657348633, 0.04001612854003906, 0.040007999420166016, 0.03998716735839844]",tokens/s,24.880248796244953,,, 
4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4029.550592,5435.686912,0.0,5033.1648,4589.126656,s,1,11.2000361328125,11.2000361328125,0.0,11.2000361328125,11.2000361328125,11.2000361328125,11.2000361328125,[11.2000361328125],,kWh,0.00011918361253747964,1.313938915741468e-05,5.201254160999702e-05,0.00018433554330489135,,MB,2075.693056,5450.366976,0.0,5033.1648,4111.93856,s,10,1.9780308685302734,0.19780308685302733,0.0006776805277923074,0.19789279937744142,0.19847427215576172,0.19874645156860352,0.19896419509887694,"[0.1963311309814453, 0.19755996704101564, 0.19732917785644533, 0.1974596862792969, 0.19901863098144532, 0.19841378784179686, 0.19780181884765624, 0.19798377990722657, 0.198100830078125, 0.1980320587158203]",tokens/s,1294.2164051778143,kWh,5.795671439215849e-06,6.389642633070332e-07,3.853521601333761e-06,1.0288157303856644e-05,tokens/kWh,24882978.791939273,MB,2091.462656,5450.366976,0.0,5033.1648,4261.580288,s,10,15.192255004882814,1.519225500488281,0.0036341453593062467,1.5192026977539062,1.522462255859375,1.5245869018554687,1.5262866186523438,"[1.5267115478515625, 1.518823974609375, 1.521878662109375, 1.5203453369140625, 1.5195814208984375, 1.5219901123046875, 1.5181517333984376, 1.515883544921875, 1.515383544921875, 1.513505126953125]",tokens/s,41.46849824450137,kWh,4.435149741287166e-05,4.891507171414622e-06,2.9567644024465573e-05,7.881064860875183e-05,tokens/kWh,799384.361277848,,s,630,15.184199739456174,0.024101904348343136,0.00024404193940307404,0.024082335472106933,0.024316592407226563,0.024416054725646973,0.024870368118286138,"[0.025761791229248047, 0.0241213436126709, 0.02424448013305664, 0.023999423980712892, 0.023877664566040038, 0.023904895782470702, 0.02387984085083008, 0.023901567459106446, 0.02392127990722656, 0.02395699119567871, 0.02397439956665039, 0.023723136901855468, 0.023806400299072265, 0.02394156837463379, 0.023990272521972656, 0.02404118347167969, 0.02402854347229004, 0.024015520095825197, 0.026206207275390626, 0.024262912750244142, 0.02410870361328125, 0.023996768951416017, 0.024143999099731445, 0.024040544509887695, 0.02407708740234375, 0.023973247528076173, 0.0239617919921875, 0.024026624679565428, 0.024092767715454103, 0.024052032470703123, 0.02418451118469238, 0.024020992279052734, 0.024108959197998048, 0.02414863967895508, 0.024157600402832033, 0.02409356880187988, 0.024246271133422852, 0.024231103897094725, 0.02452297592163086, 0.02424687957763672, 0.02421062469482422, 0.024244800567626953, 0.024339775085449218, 0.024378303527832032, 0.024249536514282227, 0.024222528457641602, 0.026549280166625975, 0.024212448120117188, 0.024161983489990234, 0.024176895141601564, 0.024096832275390626, 0.024135679244995118, 0.024197120666503907, 0.02412544059753418, 0.02416640090942383, 0.024108064651489257, 0.024417152404785158, 0.02434467124938965, 0.024200511932373048, 0.024264896392822265, 0.024307647705078126, 
0.024283071517944337, 0.024492671966552734, 0.02449292755126953, 0.024211456298828125, 0.024082239151000977, 0.024002208709716796, 0.023908895492553713, 0.02391859245300293, 0.023959808349609375, 0.023760639190673827, 0.02393087959289551, 0.02393907165527344, 0.023928287506103516, 0.023912992477416992, 0.023898080825805666, 0.023799840927124023, 0.023750656127929686, 0.023953407287597657, 0.024038496017456053, 0.023946144104003905, 0.023998367309570313, 0.02394643211364746, 0.023984256744384765, 0.023960351943969727, 0.023957056045532228, 0.02383238410949707, 0.023841312408447266, 0.023914239883422853, 0.02393942451477051, 0.02396735954284668, 0.023998367309570313, 0.02402556800842285, 0.023996416091918944, 0.023961599349975587, 0.0240185604095459, 0.02404582405090332, 0.024178112030029297, 0.02396214485168457, 0.024051679611206054, 0.024155935287475585, 0.024328607559204102, 0.02409267234802246, 0.024123392105102538, 0.024231935501098634, 0.024168352127075195, 0.024088191986083984, 0.024092639923095703, 0.0241461124420166, 0.02418515205383301, 0.02416364860534668, 0.024444704055786134, 0.02449705505371094, 0.02429952049255371, 0.024242176055908202, 0.02432204818725586, 0.024436063766479492, 0.024254463195800782, 0.024217248916625977, 0.024255487442016603, 0.02428927993774414, 0.024294784545898437, 0.02441484832763672, 0.0243507194519043, 0.024440832138061523, 0.024422399520874022, 0.024407712936401368, 0.02447190475463867, 0.024174591064453126, 0.0241246395111084, 0.024109088897705078, 0.023978208541870116, 0.02396214485168457, 0.024102624893188478, 0.02398236846923828, 0.023992319107055664, 0.024057567596435545, 0.02400230407714844, 0.02415056037902832, 0.023916543960571288, 0.02408448028564453, 0.02389401626586914, 0.024000511169433594, 0.024104543685913086, 0.02415862464904785, 0.023973888397216796, 0.02393017578125, 0.023980703353881836, 0.02389360046386719, 0.02392643165588379, 0.024007455825805664, 0.024008703231811524, 0.0239554557800293, 0.024016576766967772, 0.023994688034057618, 0.02439727973937988, 0.024058015823364257, 0.024039615631103517, 0.024037567138671875, 0.02402899169921875, 0.024002752304077148, 0.02412931251525879, 0.024079872131347657, 0.024175327301025392, 0.02424838447570801, 0.024131519317626953, 0.024381439208984376, 0.024170495986938476, 0.024193023681640623, 0.02430067253112793, 0.024129663467407227, 0.02411801528930664, 0.024139392852783204, 0.024256128311157227, 0.02415407943725586, 0.024152320861816408, 0.024142175674438476, 0.024161567687988283, 0.024173471450805666, 0.024351871490478516, 0.024243072509765626, 0.02432614326477051, 0.02431158447265625, 0.02430793571472168, 0.024440576553344726, 0.02425267219543457, 0.02466815948486328, 0.024485567092895507, 0.024465728759765625, 0.024375648498535157, 0.024195072174072265, 0.024049663543701173, 0.023979616165161134, 0.023941183090209962, 0.023924671173095702, 0.023935392379760743, 0.02389811134338379, 0.023874784469604494, 0.024058624267578124, 0.02396691131591797, 0.023884639739990235, 0.023921791076660155, 0.024003456115722657, 0.023977855682373046, 0.02392691230773926, 0.023969760894775392, 0.024010784149169923, 0.023990272521972656, 0.023936800003051758, 0.023996320724487305, 0.02396326446533203, 0.023925439834594726, 0.023870847702026368, 0.024039327621459963, 0.02398691177368164, 0.02416748809814453, 0.023976768493652344, 0.02404159927368164, 0.024057151794433594, 0.024179391860961914, 0.024000511169433594, 0.0240447998046875, 0.024273664474487304, 0.02414182472229004, 0.024048927307128907, 
0.024026111602783205, 0.02402275276184082, 0.024178144454956054, 0.02425075149536133, 0.024243839263916017, 0.024131263732910156, 0.024240991592407227, 0.024085535049438476, 0.02416329574584961, 0.024147968292236328, 0.024473600387573242, 0.024114816665649415, 0.024154495239257813, 0.024133567810058595, 0.024151968002319335, 0.024304832458496094, 0.025035423278808595, 0.024426528930664063, 0.024242464065551757, 0.0245166072845459, 0.02431001663208008, 0.024325887680053712, 0.02428927993774414, 0.02434048080444336, 0.02434662437438965, 0.02442176055908203, 0.024416543960571288, 0.024358432769775392, 0.02419731140136719, 0.024109344482421875, 0.02390630340576172, 0.023893247604370116, 0.023896736145019533, 0.023799903869628908, 0.023754463195800782, 0.023927391052246092, 0.023942848205566407, 0.02403887939453125, 0.023982624053955078, 0.023975936889648438, 0.024014848709106446, 0.02407423973083496, 0.024030879974365236, 0.0240130558013916, 0.024002048492431642, 0.024059776306152345, 0.023974624633789063, 0.02401875114440918, 0.023999807357788085, 0.024055904388427734, 0.024019744873046874, 0.024094720840454102, 0.02391859245300293, 0.02401251220703125, 0.02399875259399414, 0.02409676742553711, 0.024047296524047853, 0.02401888084411621, 0.024111455917358398, 0.024304927825927733, 0.02410163116455078, 0.02402252769470215, 0.024124992370605468, 0.024116096496582032, 0.024105024337768555, 0.024319040298461915, 0.024107967376708984, 0.024190975189208985, 0.024164352416992187, 0.024193023681640623, 0.02427654457092285, 0.024205759048461915, 0.024180736541748047, 0.024426496505737305, 0.02412544059753418, 0.024280607223510744, 0.024183263778686525, 0.024213024139404297, 0.024189407348632813, 0.02413532829284668, 0.02426095962524414, 0.02427631950378418, 0.02410563278198242, 0.02418454360961914, 0.024149471282958985, 0.0242491512298584, 0.024247968673706054, 0.024396127700805664, 0.02428927993774414, 0.02427903938293457, 0.024415456771850585, 0.02424297523498535, 0.024027135848999022, 0.02387942314147949, 0.023914751052856446, 0.02391859245300293, 0.024004608154296874, 0.024016735076904296, 0.024383583068847657, 0.02377529525756836, 0.023984128952026368, 0.023967744827270508, 0.023934783935546874, 0.02394745635986328, 0.024016319274902345, 0.024030815124511717, 0.024202207565307617, 0.023871488571166992, 0.02412950325012207, 0.024088096618652344, 0.023996927261352538, 0.023916543960571288, 0.024080287933349608, 0.023928064346313477, 0.02397270393371582, 0.024152063369750978, 0.023996416091918944, 0.024176639556884767, 0.024036863327026366, 0.024021503448486328, 0.024070144653320313, 0.02405900764465332, 0.02414476776123047, 0.024188512802124022, 0.024316320419311522, 0.024100448608398436, 0.024099231719970703, 0.02430723190307617, 0.02415452766418457, 0.024506431579589844, 0.024213504791259766, 0.024176607131958006, 0.02427609634399414, 0.024197919845581055, 0.02411532783508301, 0.024260608673095704, 0.024369152069091796, 0.024104639053344725, 0.024379392623901368, 0.024271167755126954, 0.024221696853637696, 0.024215551376342775, 0.024200864791870117, 0.024168800354003907, 0.024268800735473633, 0.024260608673095704, 0.024122976303100587, 0.024316320419311522, 0.024686304092407227, 0.02420115280151367, 0.024430784225463867, 0.024320159912109375, 0.0243624324798584, 0.024325567245483397, 0.024199840545654296, 0.023969024658203126, 0.02385558319091797, 0.024143871307373048, 0.024242143630981445, 0.023968063354492187, 0.024014751434326173, 0.023873823165893555, 0.023918399810791014, 
0.023828479766845705, 0.02371788787841797, 0.023883424758911132, 0.023871839523315428, 0.023872671127319337, 0.02376380729675293, 0.02395248031616211, 0.023927711486816407, 0.02390575981140137, 0.023890464782714844, 0.02412291145324707, 0.02393247985839844, 0.02388675117492676, 0.023830528259277343, 0.02387775993347168, 0.024135551452636718, 0.023899904251098635, 0.023906463623046874, 0.024026880264282225, 0.0240064640045166, 0.024050207138061525, 0.023962656021118165, 0.02394799995422363, 0.024070016860961913, 0.025825536727905274, 0.02431161689758301, 0.02409503936767578, 0.024077983856201173, 0.024174943923950195, 0.02416419219970703, 0.024182048797607422, 0.02413420867919922, 0.0241312313079834, 0.024185504913330078, 0.024408063888549804, 0.024147968292236328, 0.024212575912475585, 0.024095647811889647, 0.024149503707885742, 0.024134143829345703, 0.024065216064453124, 0.024116031646728514, 0.024102912902832032, 0.02426630401611328, 0.024105056762695313, 0.024092479705810545, 0.02413587188720703, 0.024131935119628908, 0.024129535675048826, 0.02417024040222168, 0.02416646385192871, 0.024373088836669922, 0.024267103195190428, 0.02440825653076172, 0.024179328918457033, 0.02399388885498047, 0.02394313621520996, 0.023824352264404297, 0.023857696533203125, 0.023736320495605468, 0.023758848190307616, 0.02374185562133789, 0.023711456298828124, 0.023876480102539063, 0.023895263671875, 0.02387228775024414, 0.023980031967163085, 0.023755775451660157, 0.023839744567871093, 0.02407344055175781, 0.023943584442138673, 0.02402342414855957, 0.023900159835815428, 0.023938976287841796, 0.023783519744873048, 0.02386534309387207, 0.02388787269592285, 0.023980192184448242, 0.02401820755004883, 0.023947839736938478, 0.024027135848999022, 0.024033279418945314, 0.02404902458190918, 0.024009344100952148, 0.023915647506713867, 0.024124288558959962, 0.02393497657775879, 0.024010751724243166, 0.024024896621704102, 0.024104639053344725, 0.024197376251220704, 0.024162559509277343, 0.02418070411682129, 0.024184415817260742, 0.02414022445678711, 0.0242890567779541, 0.024131807327270507, 0.02413363265991211, 0.02415737533569336, 0.024152896881103517, 0.024059808731079102, 0.02413350486755371, 0.0240928955078125, 0.0241213436126709, 0.024143871307373048, 0.02413132858276367, 0.02425062370300293, 0.02427427291870117, 0.024226400375366212, 0.024475616455078127, 0.024256479263305663, 0.02428531265258789, 0.024184608459472658, 0.024232160568237304, 0.02426470375061035, 0.024182783126831055, 0.02436479949951172, 0.024204128265380858, 0.024010751724243166, 0.023903839111328123, 0.02390243148803711, 0.023828191757202147, 0.02368057632446289, 0.024048063278198244, 0.023917152404785157, 0.023842592239379883, 0.023998559951782225, 0.023982080459594726, 0.02407619285583496, 0.02393302345275879, 0.023967744827270508, 0.024091743469238282, 0.02530384063720703, 0.023947391510009765, 0.02386911964416504, 0.02372435188293457, 0.02384054374694824, 0.023939296722412108, 0.0239052791595459, 0.023781600952148436, 0.023845663070678712, 0.02391449546813965, 0.023965375900268555, 0.02397011184692383, 0.02405580711364746, 0.023963647842407225, 0.02399420738220215, 0.023914655685424804, 0.023988256454467772, 0.02395747184753418, 0.024008703231811524, 0.024012544631958007, 0.024033536911010744, 0.024104480743408204, 0.024144351959228514, 0.024066047668457033, 0.024053407669067384, 0.024019296646118165, 0.024014848709106446, 0.024020383834838867, 0.024314464569091795, 0.02415558433532715, 0.024068960189819334, 0.024018112182617186, 
0.02407263946533203, 0.024153888702392576, 0.024109376907348632, 0.024070144653320313, 0.024084512710571288, 0.02409619140625, 0.02418729591369629, 0.024160383224487304, 0.024139776229858398, 0.024133344650268555, 0.024103200912475586, 0.024057472229003906, 0.02413532829284668, 0.0241691837310791, 0.02416214370727539, 0.024270240783691405, 0.02412441635131836, 0.023932928085327147, 0.023928064346313477, 0.02381286430358887, 0.02369740867614746, 0.023809568405151367, 0.023613920211791994, 0.023746559143066406, 0.023760351181030273, 0.02370614433288574, 0.02387308883666992, 0.023922111511230467, 0.023864320755004883, 0.02405580711364746, 0.023844415664672852, 0.023824960708618163, 0.023911808013916017, 0.023918304443359375, 0.023976255416870117, 0.023908832550048827, 0.023810047149658203, 0.023838464736938476, 0.02386307144165039, 0.023959648132324218, 0.023946687698364257, 0.023962495803833007, 0.023924415588378906, 0.023888256072998045, 0.023852607727050782, 0.02385145568847656, 0.0238768310546875, 0.02394806480407715, 0.023994367599487306, 0.023977535247802734, 0.023922496795654297, 0.023928800582885743, 0.02393564796447754, 0.024000640869140624, 0.024905567169189454, 0.024784191131591797, 0.024185056686401366, 0.02418841552734375, 0.024167423248291017, 0.024127391815185546, 0.024114463806152345, 0.024032415390014647, 0.024006303787231446, 0.02407587242126465, 0.02409823989868164, 0.024036319732666015, 0.024016895294189454, 0.0240762882232666, 0.024176544189453125, 0.02411734390258789, 0.02408243179321289, 0.024244096755981444, 0.024196319580078125, 0.02412950325012207, 0.024099552154541015, 0.024192480087280272, 0.02435763168334961, 0.024180736541748047]",tokens/s,41.49049741244799,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) 
File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,2166.718464,2964.258816,0.0,2569.0112,2295.745536,s,1,11.1279580078125,11.1279580078125,0.0,11.1279580078125,11.1279580078125,11.1279580078125,11.1279580078125,[11.1279580078125],,kWh,8.049906672503463e-05,8.872083881687885e-06,3.1210302745998675e-05,0.00012058145335272118,,MB,2131.853312,2981.036032,0.0,2571.108352,2282.97216,s,10,1.0005862045288085,0.10005862045288086,0.0002416026830852026,0.09998668670654297,0.10033361968994141,0.10044057159423828,0.10052613311767578,"[0.10054752349853516, 0.09991827392578125, 0.10029766082763672, 0.09975218963623046, 0.10008512115478516, 0.09992655944824219, 0.09990780639648437, 0.10004681396484374, 0.10030985260009766, 0.09979440307617188]",tokens/s,2558.5001955983826,kWh,2.975335105260814e-06,3.2812588964280005e-07,1.9688595997777816e-06,5.272320594681396e-06,tokens/kWh,48555469.15304189,MB,2145.849344,2981.036032,0.0,2571.108352,2390.926848,s,10,18.799360107421872,1.8799360107421872,0.006567797939365876,1.8798137207031251,1.8885275390624998,1.8895375122070313,1.8903454907226562,"[1.8782554931640625, 1.8793629150390625, 1.8883031005859374, 1.8768994140625, 1.872440185546875, 1.88361767578125, 1.86726904296875, 1.8802645263671875, 1.8824002685546875, 1.8905474853515625]",tokens/s,33.511778932905266,kWh,5.4661805717654654e-05,6.028934780673097e-06,2.725338600022186e-05,8.794412649854961e-05,tokens/kWh,716363.9291026331,,s,630,18.796350204467796,0.029835476515028207,0.0005983430423490145,0.029766528129577634,0.030310063362121584,0.030632141304016113,0.031643901500701906,"[0.030085248947143553, 0.029583232879638672, 0.029198335647583007, 0.02931679916381836, 0.029225055694580077, 0.029317344665527344, 0.02921401596069336, 0.029323200225830078, 0.029260543823242186, 0.029469791412353515, 0.02990572738647461, 0.029855840682983397, 0.029741056442260744, 0.029847423553466798, 0.029919359207153322, 0.029820928573608397, 0.02989414405822754, 0.029972736358642577, 0.030054079055786134, 0.02980512046813965, 0.031064064025878906, 0.02976153564453125, 0.02956819152832031, 0.02992620849609375, 0.029839519500732423, 0.030032928466796876, 0.029621055603027344, 0.029650016784667967, 0.029678688049316407, 0.0295032958984375, 0.029908992767333983, 0.029560831069946288, 0.029489152908325194, 0.029691904067993165, 0.029730815887451172, 0.029847328186035155, 0.029926719665527343, 
0.03020240020751953, 0.02942223930358887, 0.029333215713500976, 0.02976742362976074, 0.029165184020996094, 0.029265792846679687, 0.029497791290283203, 0.029544160842895507, 0.02988912010192871, 0.029908319473266602, 0.03003664016723633, 0.029949951171875, 0.029896703720092774, 0.029853567123413086, 0.029939775466918946, 0.029836576461791994, 0.03012588882446289, 0.02983228874206543, 0.03363622283935547, 0.030794815063476564, 0.030011871337890624, 0.030013824462890627, 0.029782112121582032, 0.02975129508972168, 0.029582399368286133, 0.029332416534423828, 0.03008950424194336, 0.030223487854003906, 0.02970899200439453, 0.02986582374572754, 0.029860191345214844, 0.029863616943359376, 0.02967788887023926, 0.02999225616455078, 0.029788576126098632, 0.0298253116607666, 0.030033023834228515, 0.03035628890991211, 0.029812896728515625, 0.029628320693969725, 0.029677568435668947, 0.02965839958190918, 0.029483552932739257, 0.029447456359863282, 0.029877151489257812, 0.029326751708984376, 0.029239200592041017, 0.029747903823852537, 0.02914303970336914, 0.029288448333740235, 0.0291778564453125, 0.02940025520324707, 0.029926464080810546, 0.029871871948242187, 0.02970572853088379, 0.029803007125854493, 0.029767679214477538, 0.029775871276855468, 0.030281503677368163, 0.030458080291748048, 0.03000060844421387, 0.029930015563964844, 0.029560831069946288, 0.02973014450073242, 0.029724735260009766, 0.029626976013183592, 0.02963046455383301, 0.03023251152038574, 0.029461952209472658, 0.029513696670532226, 0.029413856506347657, 0.029448352813720702, 0.02932649612426758, 0.029716800689697266, 0.029676095962524414, 0.02971785545349121, 0.029509504318237303, 0.03016783905029297, 0.030033567428588866, 0.029991264343261718, 0.030118976593017578, 0.030094272613525392, 0.02994380760192871, 0.030119327545166014, 0.02994867134094238, 0.030297952651977537, 0.030095039367675783, 0.031193344116210938, 0.0310413761138916, 0.030129407882690428, 0.030309343338012697, 0.032150558471679684, 0.029980863571166992, 0.029887264251708984, 0.0297574405670166, 0.029665599822998046, 0.029699775695800783, 0.029628416061401368, 0.029665279388427734, 0.029933631896972655, 0.030123071670532228, 0.030094144821166992, 0.03025632095336914, 0.03012188720703125, 0.030086111068725586, 0.030674943923950194, 0.03030611228942871, 0.030195903778076173, 0.03039967918395996, 0.030130176544189452, 0.03195167922973633, 0.03177004814147949, 0.03033964729309082, 0.030338752746582032, 0.030285888671875, 0.030490432739257813, 0.030300256729125976, 0.030142431259155274, 0.03044384002685547, 0.02969759941101074, 0.029743616104125976, 0.029548511505126954, 0.029746591567993166, 0.029438783645629883, 0.02929030418395996, 0.029298688888549803, 0.029460479736328125, 0.029659135818481445, 0.029471967697143556, 0.029178688049316406, 0.029284128189086912, 0.029315263748168945, 0.029471872329711914, 0.029698335647583007, 0.029837919235229493, 0.02977110481262207, 0.030045984268188476, 0.02981977653503418, 0.029849599838256836, 0.029838815689086914, 0.03002217674255371, 0.02982863998413086, 0.029892095565795897, 0.02987241554260254, 0.029946559906005858, 0.03013804817199707, 0.02986342430114746, 0.029653663635253905, 0.029558944702148437, 0.029310976028442383, 0.029703519821166993, 0.029508256912231447, 0.030160959243774415, 0.030202144622802733, 0.030005247116088866, 0.029933088302612303, 0.029571744918823244, 0.029478208541870117, 0.029190656661987304, 0.029443840026855468, 0.029624576568603515, 0.029888511657714844, 0.029828447341918946, 0.029809024810791014, 
0.029731103897094727, 0.029644735336303712, 0.029750751495361327, 0.029972192764282226, 0.0306712646484375, 0.031049503326416015, 0.030140256881713866, 0.03026620864868164, 0.02990835189819336, 0.030366016387939454, 0.030270816802978516, 0.03101590347290039, 0.029638656616210936, 0.029618175506591796, 0.02934169578552246, 0.029640480041503905, 0.02933366394042969, 0.029259103775024414, 0.02925788879394531, 0.030773279190063476, 0.029911808013916016, 0.02962156867980957, 0.029792032241821288, 0.029617984771728514, 0.029362752914428712, 0.031062240600585937, 0.029593727111816407, 0.02985487937927246, 0.02917433547973633, 0.029239551544189453, 0.02920243263244629, 0.029418655395507812, 0.02922991943359375, 0.029826879501342773, 0.02961987113952637, 0.029676063537597656, 0.029664543151855467, 0.029814592361450197, 0.02968796730041504, 0.029827840805053712, 0.02974028778076172, 0.029827840805053712, 0.0295280647277832, 0.029663232803344725, 0.029491071701049806, 0.029993087768554687, 0.030443519592285157, 0.029928447723388672, 0.029484031677246093, 0.02932067108154297, 0.02922140884399414, 0.029472543716430665, 0.030872543334960936, 0.03060736083984375, 0.030689376831054688, 0.02974710464477539, 0.029437952041625977, 0.02920355224609375, 0.029173664093017578, 0.029076480865478517, 0.029206527709960937, 0.029144800186157227, 0.029233024597167968, 0.029114784240722655, 0.029507583618164062, 0.02961974334716797, 0.029769983291625977, 0.029713951110839843, 0.029706943511962892, 0.02970591926574707, 0.02970195198059082, 0.02967193603515625, 0.029710111618041993, 0.029522144317626953, 0.029548032760620117, 0.029778432846069337, 0.02940928077697754, 0.02947715187072754, 0.02964796829223633, 0.02971526336669922, 0.029382463455200195, 0.02915705680847168, 0.029378879547119142, 0.029170719146728516, 0.029237600326538087, 0.029221120834350585, 0.029501535415649413, 0.030172767639160155, 0.030245567321777345, 0.030287679672241212, 0.030163135528564453, 0.030652416229248046, 0.03022233581542969, 0.02998054313659668, 0.03015465545654297, 0.030274015426635742, 0.030367359161376953, 0.03029209518432617, 0.03015065574645996, 0.030454111099243165, 0.029914783477783202, 0.02969331169128418, 0.029909631729125977, 0.029621919631958007, 0.0298110408782959, 0.029617855072021484, 0.029526336669921875, 0.029259967803955077, 0.029345504760742186, 0.029325408935546873, 0.030076927185058593, 0.029222911834716796, 0.029394559860229492, 0.029737632751464845, 0.030038015365600586, 0.030072832107543947, 0.029962175369262694, 0.030449600219726564, 0.03038630485534668, 0.03036742401123047, 0.030045536041259764, 0.030057439804077147, 0.029677568435668947, 0.029569023132324217, 0.02955264091491699, 0.029934783935546876, 0.030255935668945313, 0.0296812801361084, 0.029543807983398437, 0.02978508758544922, 0.02969148826599121, 0.029986623764038087, 0.029956703186035157, 0.029669376373291017, 0.029597183227539063, 0.029983232498168946, 0.03004377555847168, 0.029957727432250978, 0.030054912567138672, 0.0300546875, 0.02983526420593262, 0.029861055374145507, 0.02989084815979004, 0.030095903396606446, 0.029834592819213867, 0.0299465274810791, 0.029872127532958984, 0.0297609920501709, 0.02942416000366211, 0.029459808349609377, 0.029678367614746095, 0.03013996887207031, 0.02974083137512207, 0.029759584426879884, 0.029883968353271485, 0.029614368438720704, 0.02948975944519043, 0.02936800003051758, 0.0294050235748291, 0.029413856506347657, 0.030033344268798827, 0.029803071975708008, 0.029933216094970704, 0.029972095489501954, 
0.02988310432434082, 0.029998367309570312, 0.029862783432006837, 0.02997587203979492, 0.029763647079467773, 0.029921760559082033, 0.030057600021362305, 0.029989023208618164, 0.029456256866455078, 0.03273779296875, 0.029587808609008788, 0.02976563262939453, 0.029737056732177733, 0.029645023345947267, 0.02952556800842285, 0.02945510482788086, 0.02941939163208008, 0.029394559860229492, 0.03005900764465332, 0.030144800186157228, 0.02975062370300293, 0.03127539253234863, 0.0316682243347168, 0.029464799880981444, 0.029251359939575197, 0.02935980796813965, 0.02940883255004883, 0.029600704193115234, 0.02950124740600586, 0.029359455108642577, 0.029311647415161134, 0.0293919677734375, 0.02939792060852051, 0.029566719055175782, 0.029745248794555663, 0.02936412811279297, 0.029284000396728516, 0.029137504577636718, 0.029298688888549803, 0.029719680786132813, 0.029369216918945312, 0.029211872100830077, 0.029487136840820313, 0.029492992401123047, 0.029477632522583008, 0.02922710418701172, 0.02931727981567383, 0.0291843204498291, 0.02930246353149414, 0.029169824600219725, 0.029331296920776368, 0.02924473571777344, 0.029831584930419923, 0.029538591384887694, 0.030424383163452147, 0.029377056121826173, 0.029310367584228517, 0.029333248138427734, 0.02927622413635254, 0.02944000053405762, 0.0298853759765625, 0.029875232696533204, 0.029969375610351564, 0.02990825653076172, 0.029983583450317382, 0.02995712089538574, 0.03043008041381836, 0.030110879898071288, 0.030100320816040037, 0.029831167221069335, 0.029730815887451172, 0.029609024047851564, 0.029567935943603515, 0.030072832107543947, 0.02960963249206543, 0.029482656478881836, 0.030195072174072267, 0.030063072204589845, 0.0297043514251709, 0.02994380760192871, 0.030019872665405272, 0.03011555290222168, 0.02977177619934082, 0.03092230415344238, 0.029642784118652343, 0.029543872833251952, 0.029543392181396483, 0.02936832046508789, 0.029239456176757814, 0.02945212745666504, 0.0292938232421875, 0.029304927825927734, 0.02918671989440918, 0.02951366424560547, 0.029510751724243164, 0.02927631950378418, 0.029223167419433593, 0.02925609588623047, 0.029228799819946288, 0.029395360946655274, 0.02918604850769043, 0.031028608322143554, 0.029380640029907226, 0.02972313690185547, 0.029298784255981446, 0.029265920639038087, 0.029138751983642578, 0.029325504302978516, 0.02927568054199219, 0.029395423889160156, 0.029183935165405274, 0.029254880905151368, 0.02918684768676758, 0.029564992904663086, 0.02959564781188965, 0.0296376953125, 0.029434816360473633, 0.030011392593383788, 0.02958736038208008, 0.029698495864868165, 0.029296287536621092, 0.02936627197265625, 0.029203903198242186, 0.029497919082641603, 0.029418943405151367, 0.029855615615844728, 0.02991276741027832, 0.030143487930297853, 0.02991663932800293, 0.030113855361938478, 0.030249088287353516, 0.031141504287719727, 0.0313863353729248, 0.030803359985351563, 0.03851068878173828, 0.03006719970703125, 0.030251007080078125, 0.02997452735900879, 0.02997452735900879, 0.029560224533081055, 0.02951420783996582, 0.02946249580383301, 0.02939091110229492, 0.029276287078857422, 0.030003200531005858, 0.029709440231323242, 0.03000831985473633, 0.030301504135131836, 0.030228191375732422, 0.029833280563354492, 0.029932319641113283, 0.02980659294128418, 0.030270463943481447, 0.029850336074829103, 0.029861440658569337, 0.02988051223754883, 0.029539936065673827, 0.029411808013916015, 0.029499040603637696, 0.030976736068725585, 0.031403392791748044, 0.029716512680053712, 0.029505632400512696, 0.029166240692138672, 0.029353919982910155, 
0.029355167388916015, 0.029335712432861327, 0.030500831604003905, 0.029557472229003907, 0.02954217529296875, 0.02983900833129883, 0.029892255783081054, 0.030061311721801758, 0.029940191268920897, 0.03137251281738281, 0.030804128646850587, 0.030271488189697264, 0.030097408294677733, 0.030144832611083985, 0.030078975677490235, 0.030187295913696288, 0.03158435249328613, 0.03023052787780762, 0.029703807830810548, 0.029869951248168946, 0.029751840591430663, 0.029884511947631837, 0.02935807991027832, 0.029362239837646485, 0.029264032363891603, 0.02976915168762207, 0.029820255279541016, 0.0297740478515625, 0.029489023208618164, 0.02946345520019531, 0.02955299186706543, 0.029916000366210938, 0.029719711303710938, 0.029836959838867187, 0.029792255401611328, 0.02980624008178711, 0.02968726348876953, 0.030058496475219725, 0.029822975158691405, 0.0294150390625, 0.02950387191772461, 0.02989023971557617, 0.030142784118652344, 0.02983065605163574, 0.029726688385009765, 0.02975119972229004, 0.030175775527954102, 0.030308448791503906, 0.030334848403930664, 0.0303187198638916, 0.030316543579101563, 0.030291263580322265, 0.030335647583007812, 0.030335008621215822, 0.030283775329589844, 0.03026918411254883, 0.030382335662841795, 0.030130048751831055, 0.03030847930908203, 0.0301977596282959, 0.030289920806884765, 0.03026652717590332, 0.03044233512878418, 0.030398527145385743, 0.030152191162109376, 0.030237024307250976, 0.03035935974121094, 0.030349599838256837, 0.030451488494873048, 0.030513568878173827, 0.030404415130615235, 0.030298112869262695, 0.03015884780883789, 0.02994937515258789, 0.03004195213317871, 0.02995248031616211, 0.02988368034362793, 0.029749887466430664, 0.030007232666015626, 0.030136735916137695, 0.031016960144042968, 0.030116031646728516, 0.030105152130126954, 0.030177087783813478, 0.030249408721923828, 0.02996396827697754, 0.0296342716217041, 0.029513919830322265, 0.0296595516204834, 0.02968560028076172, 0.029726879119873047, 0.02941244888305664, 0.029422464370727538, 0.029306304931640624, 0.029297088623046873, 0.029221023559570312, 0.029294048309326172, 0.02928220748901367, 0.02932150459289551, 0.029618528366088866]",tokens/s,33.517145251435736,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 
4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1339.273216,1148.125184,0.0,752.877568,710.554112,s,1,9.06067578125,9.06067578125,0.0,9.06067578125,9.06067578125,9.06067578125,9.06067578125,[9.06067578125],,kWh,4.953646999583725e-05,5.456865979846261e-06,1.666501333200121e-05,7.165834930768472e-05,,MB,1380.43392,1429.143552,0.0,1019.215872,949.09696,s,10,0.2942716503143311,0.02942716503143311,0.00022323368126339786,0.029456704139709473,0.029709081268310546,0.029712220954895018,0.029714732704162597,"[0.02908736038208008, 0.029266624450683593, 0.02917398452758789, 0.029661823272705078, 0.02954355239868164, 0.029525312423706054, 0.02971536064147949, 0.02938809585571289, 0.029201152801513672, 0.029708383560180664]",tokens/s,8699.44487437201,kWh,8.653325942554742e-07,9.542787400274307e-08,5.681242217574055e-07,1.528884690015623e-06,tokens/kWh,167442320.3213475,MB,1411.227648,1437.53216,0.0,1027.60448,949.09952,s,10,14.7783876953125,1.47783876953125,0.012717323493488161,1.4748740844726562,1.4969176635742187,1.4986131286621094,1.4999695007324219,"[1.465909423828125, 1.487019775390625, 1.4622545166015626, 1.4743558349609376, 1.475392333984375, 1.50030859375, 1.4965408935546876, 1.4825546875, 1.4701060791015625, 1.463945556640625]",tokens/s,42.629819503234934,kWh,4.354316999407808e-05,4.802410100368045e-06,1.7512890243041744e-05,6.585847033748786e-05,tokens/kWh,956596.7699699098,,s,630,14.776034406661996,0.02345402286771744,0.0004678781912243277,0.023406271934509275,0.023798483657836916,0.023929944038391115,0.025378705310821544,"[0.02317145538330078, 0.023236608505249022, 0.023126047134399415, 0.023090911865234376, 0.023014816284179687, 0.023077728271484375, 0.023382015228271484, 0.02467020797729492, 0.023322336196899413, 0.023066335678100586, 0.023068256378173828, 0.023079103469848632, 0.023062944412231445, 0.02304787254333496, 0.02313222312927246, 0.023034496307373045, 0.02304956817626953, 0.02298908805847168, 0.023044479370117187, 0.023377920150756838, 0.02310495948791504, 0.02314687919616699, 0.02325654411315918, 0.023218624114990233, 0.023033760070800782, 0.023341440200805665, 0.023050048828125, 0.023063743591308594, 0.02334617614746094, 0.023126016616821288, 0.023224319458007812, 0.023160991668701173, 0.02316476821899414, 0.02305638313293457, 0.023698720932006836, 0.023110368728637695, 0.023233888626098632, 0.023124639511108398, 0.02306252861022949, 0.023969280242919923, 0.023087615966796874, 0.023363136291503907, 0.02329439926147461, 0.02344927978515625, 0.023587039947509766, 0.0241112003326416, 0.023644159317016602, 0.02357855987548828, 0.023468095779418944, 0.02367692756652832, 0.023396127700805663, 0.02362758445739746, 0.023465599060058594, 0.02331119918823242, 0.023117759704589843, 0.023085279464721678, 0.0230664005279541, 0.02309939193725586, 0.023205791473388672, 0.023170528411865236, 0.02310403251647949, 0.023055551528930664, 
0.02319388771057129, 0.025132352828979493, 0.024007328033447267, 0.023142208099365236, 0.023210016250610352, 0.023070272445678712, 0.02308134460449219, 0.023132511138916016, 0.023209856033325194, 0.023405664443969725, 0.02321878433227539, 0.02308127975463867, 0.022891584396362304, 0.02290598487854004, 0.022908512115478515, 0.02299888038635254, 0.023011711120605467, 0.02288640022277832, 0.02293350410461426, 0.023472127914428712, 0.025620479583740235, 0.02332467269897461, 0.023330976486206054, 0.02352943992614746, 0.023367551803588866, 0.023373823165893554, 0.023383071899414062, 0.023323776245117188, 0.023493568420410157, 0.024407167434692383, 0.02378838348388672, 0.023742591857910156, 0.027173215866088868, 0.0237957763671875, 0.02355036735534668, 0.023366975784301757, 0.0232159366607666, 0.023216991424560546, 0.02326265525817871, 0.02343382453918457, 0.023395360946655272, 0.023634784698486327, 0.02388595199584961, 0.023769216537475588, 0.023877504348754883, 0.023607295989990236, 0.0236810245513916, 0.02367487907409668, 0.023668512344360352, 0.023748735427856445, 0.02376006317138672, 0.023824480056762694, 0.023689888000488282, 0.023719776153564454, 0.02391276741027832, 0.02393087959289551, 0.023842815399169923, 0.02393414306640625, 0.023944000244140624, 0.023554048538208007, 0.023588863372802735, 0.023379968643188476, 0.023971839904785155, 0.023398143768310547, 0.023480319976806642, 0.0229171199798584, 0.023529504776000975, 0.022869983673095704, 0.02287820816040039, 0.023367679595947266, 0.02339801597595215, 0.022933759689331056, 0.023593088150024415, 0.022965824127197266, 0.022795808792114257, 0.022799264907836913, 0.022814399719238283, 0.022935552597045897, 0.02283344078063965, 0.023304224014282227, 0.02288435173034668, 0.02278121566772461, 0.022868703842163086, 0.023011327743530274, 0.022796287536621093, 0.02305366325378418, 0.022975135803222656, 0.022882368087768553, 0.022757408142089843, 0.022945632934570314, 0.022964544296264648, 0.023022592544555662, 0.02297318458557129, 0.023085056304931642, 0.023082080841064452, 0.02289142417907715, 0.02294988822937012, 0.023019392013549803, 0.023131391525268555, 0.023349151611328126, 0.023194271087646483, 0.02331875228881836, 0.023220191955566405, 0.023195199966430664, 0.023343679428100585, 0.023236608505249022, 0.023218175888061524, 0.02325836753845215, 0.023541631698608397, 0.023339616775512696, 0.023406879425048828, 0.02332054328918457, 0.023735519409179687, 0.02461948776245117, 0.023928800582885743, 0.023382144927978514, 0.023486719131469727, 0.02337366485595703, 0.023539871215820313, 0.023298240661621093, 0.02380985641479492, 0.023310111999511718, 0.023339231491088866, 0.023396352767944335, 0.023333919525146483, 0.0234935359954834, 0.02352521514892578, 0.023863296508789062, 0.023743839263916017, 0.023628448486328123, 0.023662591934204103, 0.02360691261291504, 0.023711679458618164, 0.023652799606323244, 0.023738367080688477, 0.023756799697875978, 0.02371583938598633, 0.02372425651550293, 0.02371708869934082, 0.02373075294494629, 0.023746559143066406, 0.02385305595397949, 0.023570432662963867, 0.02344550323486328, 0.023468032836914062, 0.023334527969360353, 0.023372159957885743, 0.02330419158935547, 0.023189504623413085, 0.02327756881713867, 0.023117279052734373, 0.023043615341186523, 0.02304921531677246, 0.022982656478881838, 0.023136255264282226, 0.023289279937744142, 0.023705312728881836, 0.023285888671875, 0.023255775451660157, 0.023250143051147462, 0.022958848953247072, 0.023107616424560547, 0.023052288055419923, 0.023072383880615235, 
0.023005855560302733, 0.023137472152709962, 0.023271455764770507, 0.023640575408935546, 0.023326719284057617, 0.023373823165893554, 0.023471296310424803, 0.02328454399108887, 0.02345564842224121, 0.023334304809570314, 0.023296703338623048, 0.023244800567626952, 0.023464960098266603, 0.02330294418334961, 0.023417055130004884, 0.023402496337890624, 0.023444896697998048, 0.02325926399230957, 0.023286239624023437, 0.02321343994140625, 0.02341337585449219, 0.023267328262329103, 0.02328371238708496, 0.02326323127746582, 0.023371776580810546, 0.023750879287719726, 0.023793664932250977, 0.023723167419433595, 0.023555168151855467, 0.023559328079223632, 0.0236343994140625, 0.02379952049255371, 0.024199167251586915, 0.023945152282714845, 0.023740896224975584, 0.02360691261291504, 0.023716224670410156, 0.02370560073852539, 0.023721376419067384, 0.023691871643066405, 0.023741952896118163, 0.023687679290771483, 0.023416831970214845, 0.02338819122314453, 0.023413856506347655, 0.023558080673217772, 0.02348700714111328, 0.02352783966064453, 0.023338464736938475, 0.023236896514892576, 0.023181343078613283, 0.023082399368286134, 0.022969152450561522, 0.02307449531555176, 0.023085535049438475, 0.02314019203186035, 0.02312735939025879, 0.023029632568359375, 0.022984575271606446, 0.023086015701293944, 0.022937599182128905, 0.023077024459838866, 0.023221824645996095, 0.023001279830932617, 0.023040096282958986, 0.023013376235961915, 0.023195232391357422, 0.023187871932983398, 0.023090463638305664, 0.02329248046875, 0.02319545555114746, 0.023404895782470705, 0.02353561592102051, 0.023602176666259765, 0.023654783248901367, 0.02366054344177246, 0.023454208374023438, 0.023224447250366213, 0.023326208114624023, 0.023238367080688476, 0.02331523132324219, 0.023338207244873045, 0.02357107162475586, 0.023419008255004883, 0.023457632064819336, 0.02363376045227051, 0.02369980812072754, 0.023704671859741212, 0.02367990493774414, 0.023982912063598632, 0.023767040252685546, 0.02367283248901367, 0.023817344665527342, 0.023664928436279296, 0.023730783462524413, 0.02369126319885254, 0.023807136535644532, 0.023767904281616212, 0.023736480712890626, 0.02377507209777832, 0.023744543075561522, 0.023680959701538086, 0.023650335311889648, 0.02379481506347656, 0.023645055770874022, 0.023661983489990233, 0.023683679580688476, 0.02389561653137207, 0.023673280715942383, 0.02364975929260254, 0.02377987289428711, 0.02373222351074219, 0.023748607635498048, 0.023738304138183595, 0.023799840927124023, 0.023688608169555665, 0.024232576370239258, 0.02368511962890625, 0.02374678421020508, 0.023551040649414063, 0.023628480911254884, 0.023742496490478517, 0.02365644836425781, 0.02382601547241211, 0.02377564811706543, 0.023743871688842774, 0.023613632202148436, 0.02386089515686035, 0.023902048110961915, 0.023872447967529298, 0.023666688919067383, 0.02384092712402344, 0.023738208770751952, 0.023586816787719726, 0.023619583129882812, 0.023737760543823243, 0.023648799896240233, 0.023586879730224608, 0.024459264755249024, 0.02366464042663574, 0.023686559677124023, 0.023734880447387696, 0.023787519454956055, 0.023777280807495117, 0.023819583892822266, 0.024060415267944335, 0.027090335845947267, 0.023963775634765625, 0.023654752731323243, 0.023760831832885743, 0.023578079223632812, 0.023777408599853514, 0.023635551452636717, 0.026352415084838866, 0.024042751312255858, 0.023329376220703125, 0.023359199523925782, 0.023655872344970703, 0.023469024658203125, 0.023701536178588868, 0.02365644836425781, 0.023704992294311524, 0.023681631088256837, 
0.023830528259277343, 0.023762943267822266, 0.023797760009765623, 0.023834367752075196, 0.0238287353515625, 0.02368953514099121, 0.023764896392822265, 0.023774848937988282, 0.025479328155517577, 0.023715167999267577, 0.023869472503662108, 0.023683040618896485, 0.023740800857543945, 0.023559839248657226, 0.02375529670715332, 0.023938655853271484, 0.02389596748352051, 0.023798368453979493, 0.023786815643310547, 0.023610048294067383, 0.02366208076477051, 0.02362201690673828, 0.02364166450500488, 0.026810943603515627, 0.023846015930175782, 0.023659040451049804, 0.023546207427978517, 0.023611391067504883, 0.023537664413452147, 0.023576448440551758, 0.02380112075805664, 0.023543840408325196, 0.02344428825378418, 0.023556095123291015, 0.02345779228210449, 0.023764480590820314, 0.024295583724975586, 0.023822687149047853, 0.023564287185668945, 0.023492448806762694, 0.023418367385864256, 0.02348646354675293, 0.02353753662109375, 0.023382816314697265, 0.023333984375, 0.023215007781982423, 0.023286912918090822, 0.023335807800292967, 0.023590911865234376, 0.023217504501342773, 0.02325980758666992, 0.023297695159912108, 0.023316352844238282, 0.02369430351257324, 0.023160928726196288, 0.023456703186035155, 0.023466976165771484, 0.023586816787719726, 0.02346598434448242, 0.023613439559936524, 0.023773151397705077, 0.023831615447998045, 0.023699615478515627, 0.023706432342529296, 0.02375267219543457, 0.02358415985107422, 0.023638656616210937, 0.023571903228759766, 0.023636064529418944, 0.023607776641845702, 0.023603200912475586, 0.023717439651489258, 0.02368115234375, 0.023900480270385743, 0.02367852783203125, 0.023697599411010743, 0.02348876762390137, 0.023616640090942383, 0.02352547264099121, 0.023438112258911133, 0.02332819175720215, 0.02324502372741699, 0.023039743423461913, 0.023020000457763673, 0.023199167251586914, 0.02288096046447754, 0.023060480117797853, 0.02306252861022949, 0.023109792709350586, 0.022951776504516602, 0.02309939193725586, 0.02471731185913086, 0.024356576919555666, 0.023531808853149414, 0.0232193603515625, 0.023532384872436522, 0.02350694465637207, 0.023272960662841798, 0.02346031951904297, 0.02322003173828125, 0.023435487747192382, 0.023418079376220702, 0.023251007080078125, 0.02333283233642578, 0.023489280700683592, 0.023491775512695313, 0.02330291175842285, 0.023273536682128906, 0.023215551376342774, 0.02326585578918457, 0.026685440063476562, 0.024603679656982423, 0.02335968017578125, 0.02330307197570801, 0.023185279846191405, 0.023202175140380858, 0.023189952850341797, 0.02337980842590332, 0.024762367248535155, 0.02369126319885254, 0.023716896057128907, 0.023649248123168945, 0.023715232849121092, 0.02361782455444336, 0.023650623321533202, 0.023622848510742187, 0.023598047256469728, 0.0236596794128418, 0.023494560241699217, 0.02361369514465332, 0.023732351303100585, 0.02361180877685547, 0.023609344482421874, 0.02371379280090332, 0.023690528869628906, 0.02357232093811035, 0.023599071502685545, 0.02333590316772461, 0.023291839599609374, 0.02329190444946289, 0.023371776580810546, 0.023184480667114257, 0.02317932891845703, 0.023011199951171873, 0.023100383758544923, 0.023101152420043944, 0.02303932762145996, 0.022924448013305666, 0.022990623474121095, 0.022844959259033203, 0.02294950485229492, 0.022952800750732423, 0.0229965763092041, 0.022861888885498047, 0.022987104415893553, 0.023044095993041993, 0.022943744659423827, 0.022927007675170898, 0.022965824127197266, 0.02305718421936035, 0.023094688415527344, 0.02359766387939453, 0.023175167083740233, 0.023107168197631835, 
0.023281791687011718, 0.02340892791748047, 0.023162879943847657, 0.02308006477355957, 0.023212095260620118, 0.022958528518676757, 0.023078367233276366, 0.0230960636138916, 0.023122079849243166, 0.02345779228210449, 0.02369945526123047, 0.023629472732543944, 0.023630176544189453, 0.023600927352905275, 0.02382681655883789, 0.02376153564453125, 0.023654399871826173, 0.0237076473236084, 0.023617536544799804, 0.02377462387084961, 0.023581151962280274, 0.0236627197265625, 0.02374390411376953, 0.023738975524902343, 0.02369126319885254, 0.023463455200195313, 0.02326188850402832, 0.02343497657775879, 0.023358848571777342, 0.023343807220458986, 0.023342975616455076, 0.02332480049133301, 0.023236608505249022, 0.023220224380493162, 0.023011327743530274, 0.022924383163452147, 0.023034624099731445, 0.02332054328918457, 0.023392671585083007, 0.023652128219604492, 0.023131519317626952, 0.023063167572021485, 0.022898496627807616, 0.02309872055053711, 0.022985727310180663, 0.02335750389099121, 0.02323843193054199, 0.023554048538208007, 0.023201791763305665, 0.023233983993530275, 0.02313007926940918, 0.02294371223449707, 0.023199392318725587, 0.0229385929107666, 0.023277183532714844, 0.022931840896606444, 0.022960256576538086, 0.02356825637817383, 0.023244800567626952, 0.022974464416503908, 0.022996768951416016, 0.023230495452880858, 0.02297056007385254, 0.02301686477661133, 0.022899295806884764, 0.022974464416503908, 0.022999040603637694, 0.022975839614868165, 0.022910816192626953, 0.023233343124389648, 0.02287820816040039, 0.022988128662109374, 0.022862112045288086, 0.0229748477935791, 0.022902624130249023, 0.02289004707336426, 0.023021440505981445]",tokens/s,42.63660889392322,,, 4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1947.234304,2886.664192,0.0,2491.416576,2425.650176,s,1,9.868951171875,9.868951171875,0.0,9.868951171875,9.868951171875,9.868951171875,9.868951171875,[9.868951171875],,kWh,8.412446947921428e-05,9.272347202353469e-06,3.036085762200824e-05,0.00012375767430357598,,MB,1804.824576,3115.25376,0.0,2705.32608,2606.127616,s,10,0.5254048614501953,0.05254048614501953,0.0002773108713715526,0.05247787284851074,0.05277234954833984,0.05302678260803223,0.05323032905578613,"[0.05328121566772461, 0.05221033477783203, 0.05246867370605469, 0.05251366424560547, 0.0524870719909668, 0.05235599899291992, 0.05256211090087891, 0.05242294311523438, 0.052387039184570314, 0.0527158088684082]",tokens/s,4872.433027996773,kWh,1.6226055165509899e-06,1.7894313008149922e-07,1.076778639200039e-06,2.878327285832528e-06,tokens/kWh,88940545.87192453,MB,1813.352448,3115.25376,0.0,2705.32608,2606.130176,s,10,15.354823120117187,1.535482312011719,0.007036248048440928,1.5348394165039063,1.544455078125,1.546784729003906,1.5486484497070312,"[1.541155517578125, 1.53364013671875, 1.5439373779296874, 1.5290914306640624, 1.5290074462890626, 1.5267310791015625, 1.5369732666015625, 
1.5360386962890624, 1.5491143798828124, 1.5291337890625]",tokens/s,41.02945342135545,kWh,4.509273926678394e-05,4.972856784603393e-06,2.1461878280601573e-05,7.15274743319889e-05,tokens/kWh,880780.4356070323,,s,630,15.352706731796268,0.024369375764755976,0.0005069567612018552,0.024270143508911133,0.024675234603881834,0.024878185272216795,0.026551848983764656,"[0.02491788864135742, 0.024565887451171876, 0.0244770565032959, 0.024411935806274414, 0.02661030387878418, 0.024983264923095702, 0.0243450870513916, 0.024272768020629883, 0.024323999404907228, 0.024269023895263673, 0.02430771255493164, 0.024483808517456053, 0.024207040786743163, 0.024369504928588866, 0.024154111862182616, 0.024174591064453126, 0.024319583892822266, 0.02429497528076172, 0.02416057586669922, 0.024277536392211915, 0.024219648361206055, 0.02426211166381836, 0.02471785545349121, 0.02448588752746582, 0.024548864364624022, 0.024412511825561523, 0.024575359344482423, 0.024414079666137695, 0.02506166458129883, 0.02458687973022461, 0.024655744552612303, 0.024481311798095703, 0.02459062385559082, 0.024663936614990233, 0.024816064834594725, 0.024495519638061524, 0.024796831130981446, 0.024913919448852538, 0.02438265609741211, 0.024207103729248048, 0.024388896942138673, 0.024675039291381835, 0.024383487701416014, 0.02430486488342285, 0.0243023681640625, 0.024389535903930663, 0.024212671279907227, 0.024167327880859374, 0.024248064041137694, 0.02421513557434082, 0.0241814079284668, 0.024163328170776367, 0.02418307113647461, 0.0242511043548584, 0.024449024200439453, 0.024363008499145508, 0.024188928604125977, 0.02473574447631836, 0.024076255798339843, 0.02415119934082031, 0.024197248458862303, 0.025268543243408204, 0.02424671936035156, 0.02435686492919922, 0.024133600234985352, 0.024125471115112304, 0.024120927810668946, 0.024091039657592773, 0.0241213436126709, 0.02452479934692383, 0.024240224838256837, 0.02448784065246582, 0.024131584167480468, 0.024469152450561523, 0.024158559799194335, 0.024456960678100586, 0.02447529602050781, 0.024283071517944337, 0.02412816047668457, 0.02415001678466797, 0.02444595146179199, 0.0241530876159668, 0.025384960174560548, 0.024129247665405272, 0.024501663208007812, 0.024080543518066405, 0.024268960952758788, 0.024013376235961913, 0.024236032485961914, 0.024066047668457033, 0.024022687911987306, 0.024242496490478514, 0.024862943649291994, 0.024149824142456054, 0.024110815048217774, 0.024254751205444337, 0.024385055541992186, 0.0242935676574707, 0.0243973445892334, 0.02438015937805176, 0.02449612808227539, 0.024347679138183594, 0.02435183906555176, 0.024362880706787108, 0.02440323257446289, 0.024329984664916992, 0.02425663948059082, 0.024199840545654296, 0.024123584747314453, 0.024694688796997072, 0.02494268798828125, 0.025475072860717773, 0.024489984512329102, 0.024132768630981447, 0.02417897605895996, 0.02423846435546875, 0.024490175247192384, 0.024427743911743165, 0.02427532768249512, 0.02434486389160156, 0.024114496231079103, 0.024268672943115233, 0.024243135452270508, 0.024573951721191405, 0.02438924789428711, 0.024439168930053712, 0.02679408073425293, 0.024801471710205077, 0.024880863189697264, 0.02500422477722168, 0.02476851272583008, 0.02455129623413086, 0.024522880554199218, 0.02453913688659668, 0.024527008056640626, 0.02464726448059082, 0.02409075164794922, 0.02443481636047363, 0.024141023635864258, 0.024180896759033205, 0.024269439697265624, 0.02411110305786133, 0.02403459167480469, 0.024122079849243163, 0.024184959411621094, 0.02412460708618164, 0.02407049560546875, 0.02406844711303711, 
0.026804224014282226, 0.024825599670410155, 0.02429567909240723, 0.024436895370483398, 0.024587488174438475, 0.024371679306030274, 0.024483776092529295, 0.02472368049621582, 0.02511052894592285, 0.026408735275268554, 0.024821792602539063, 0.024660160064697265, 0.02470639991760254, 0.02463212776184082, 0.024571264266967773, 0.024616512298583984, 0.024423328399658203, 0.02430143928527832, 0.024234111785888673, 0.02428108787536621, 0.024268640518188476, 0.02439593505859375, 0.02534809684753418, 0.02426006317138672, 0.02417513656616211, 0.024055871963500976, 0.024084415435791016, 0.024159904479980468, 0.024237823486328126, 0.024143552780151366, 0.02419126319885254, 0.024114816665649415, 0.02407526397705078, 0.024625152587890626, 0.024453119277954103, 0.024302783966064452, 0.024208192825317384, 0.024152095794677735, 0.02412950325012207, 0.02411724853515625, 0.024061119079589844, 0.024164608001708984, 0.024045568466186523, 0.023969791412353517, 0.024037376403808593, 0.023969791412353517, 0.024252256393432616, 0.024037023544311524, 0.02409926414489746, 0.02412073516845703, 0.024058528900146484, 0.024093759536743163, 0.024011711120605468, 0.024014848709106446, 0.024059616088867187, 0.024013120651245116, 0.023990463256835938, 0.023992095947265625, 0.024006656646728516, 0.02395516777038574, 0.024068191528320314, 0.024070335388183595, 0.024271936416625978, 0.024287904739379883, 0.024219104766845703, 0.024275552749633788, 0.025054527282714845, 0.02495580863952637, 0.024461311340332033, 0.02451059150695801, 0.024422271728515625, 0.024414207458496092, 0.024213504791259766, 0.02425651168823242, 0.024061952590942383, 0.024023040771484375, 0.02414406394958496, 0.024198720932006836, 0.024229631423950196, 0.024041919708251952, 0.024020448684692382, 0.02403388786315918, 0.024102752685546874, 0.02405392074584961, 0.02427494430541992, 0.02405299186706543, 0.024089344024658205, 0.02427510452270508, 0.024893280029296874, 0.024829727172851562, 0.024539552688598632, 0.024463104248046874, 0.02458336067199707, 0.02460697555541992, 0.024541824340820313, 0.024600479125976564, 0.024522207260131837, 0.024422431945800783, 0.02445577621459961, 0.02449612808227539, 0.024416255950927734, 0.024532991409301756, 0.024619007110595705, 0.024383487701416014, 0.02473119926452637, 0.02456825637817383, 0.02431795120239258, 0.024145919799804686, 0.024079551696777345, 0.02426755142211914, 0.024102943420410156, 0.024147136688232422, 0.02408121681213379, 0.02422777557373047, 0.024100479125976564, 0.024144319534301757, 0.024143039703369142, 0.024445215225219728, 0.02456438446044922, 0.02451363182067871, 0.024351520538330076, 0.024325824737548827, 0.024243776321411132, 0.024346752166748045, 0.024236671447753905, 0.02416640090942383, 0.024270847320556642, 0.02425961685180664, 0.024072288513183594, 0.024208255767822266, 0.02413155174255371, 0.024219551086425782, 0.024234111785888673, 0.02422528076171875, 0.02413030433654785, 0.024110336303710938, 0.024125951766967774, 0.024100351333618163, 0.02402707290649414, 0.02399087905883789, 0.024426464080810548, 0.024178688049316405, 0.024363168716430662, 0.02418262481689453, 0.024164352416992187, 0.02413542366027832, 0.02446518325805664, 0.024483392715454102, 0.024111072540283204, 0.024242591857910157, 0.024030815124511717, 0.02402400016784668, 0.023950368881225585, 0.024064064025878906, 0.0242653751373291, 0.024221952438354493, 0.02444803237915039, 0.02432249641418457, 0.02445100784301758, 0.024355424880981445, 0.02449308776855469, 0.024634336471557616, 0.02464156723022461, 0.02458006477355957, 
0.02456166458129883, 0.024377344131469726, 0.02426192092895508, 0.024551616668701173, 0.024418304443359375, 0.02443894386291504, 0.02487491226196289, 0.024250303268432617, 0.024170495986938476, 0.024422399520874022, 0.02424028778076172, 0.024110944747924804, 0.02407769584655762, 0.024058496475219727, 0.024041471481323243, 0.024016895294189454, 0.024401376724243164, 0.02448134422302246, 0.024273248672485353, 0.024083072662353516, 0.024023040771484375, 0.024041471481323243, 0.024012800216674804, 0.023941375732421874, 0.02399411201477051, 0.024029375076293946, 0.02405958366394043, 0.023966943740844727, 0.02407072067260742, 0.024025279998779295, 0.02399247932434082, 0.0240883846282959, 0.024090240478515625, 0.024138463973999023, 0.0239532470703125, 0.024010751724243166, 0.023991712570190428, 0.024105247497558595, 0.0239619197845459, 0.02464358329772949, 0.024160255432128908, 0.024006752014160155, 0.024213407516479494, 0.02408982467651367, 0.02416111946105957, 0.0240250244140625, 0.024039424896240235, 0.024345983505249025, 0.024253183364868165, 0.024173824310302735, 0.02447132873535156, 0.02424233627319336, 0.024338144302368164, 0.024071392059326173, 0.024104736328125, 0.024045536041259766, 0.02416640090942383, 0.024143871307373048, 0.024447200775146484, 0.024690464019775392, 0.024743295669555663, 0.0246954231262207, 0.02458380889892578, 0.02504742431640625, 0.024618431091308592, 0.024568384170532226, 0.024742528915405272, 0.024450111389160156, 0.024130495071411132, 0.024127008438110352, 0.024111583709716798, 0.02427903938293457, 0.02424998474121094, 0.024211839675903322, 0.024123008728027345, 0.024150400161743163, 0.024207359313964845, 0.024465408325195313, 0.02418214416503906, 0.02460678482055664, 0.02413420867919922, 0.024373439788818358, 0.02439727973937988, 0.02429952049255371, 0.024465503692626952, 0.02458415985107422, 0.02464531135559082, 0.024703584671020507, 0.024583839416503907, 0.024742240905761718, 0.026992639541625976, 0.0249715518951416, 0.024709888458251953, 0.02475257682800293, 0.024504480361938478, 0.02460300827026367, 0.02446272087097168, 0.024658559799194336, 0.02448355293273926, 0.02441859245300293, 0.024284320831298827, 0.024478368759155274, 0.02457209587097168, 0.024401920318603516, 0.024336383819580077, 0.02411315155029297, 0.024164608001708984, 0.02422915267944336, 0.024218080520629882, 0.02418467140197754, 0.024252576828002928, 0.024236032485961914, 0.02422515106201172, 0.024207231521606445, 0.024414560317993165, 0.024518112182617187, 0.024367391586303713, 0.024200927734375, 0.02420832061767578, 0.02421766471862793, 0.024131519317626953, 0.02406399917602539, 0.02428486442565918, 0.024400192260742186, 0.024139776229858398, 0.024030975341796875, 0.024069631576538086, 0.024177408218383788, 0.024125471115112304, 0.024378591537475586, 0.02430668830871582, 0.02407414436340332, 0.02410710334777832, 0.024014015197753907, 0.024122175216674806, 0.024137216567993162, 0.024224256515502928, 0.024209407806396483, 0.024223743438720705, 0.024122432708740236, 0.024537439346313476, 0.02452537536621094, 0.02471529579162598, 0.02416204833984375, 0.024046079635620117, 0.025385887145996093, 0.024093536376953124, 0.024141504287719728, 0.02414419174194336, 0.02427244758605957, 0.024266271591186522, 0.024355424880981445, 0.024312095642089845, 0.024427999496459962, 0.024455135345458984, 0.024408672332763674, 0.024533151626586914, 0.02455331230163574, 0.024594432830810548, 0.024424448013305664, 0.024371200561523438, 0.024679807662963866, 0.02475289535522461, 0.024424320220947267, 
0.024319616317749024, 0.024344415664672853, 0.02436355209350586, 0.02431782341003418, 0.024361087799072267, 0.024178016662597657, 0.024369375228881836, 0.024133407592773437, 0.02411587142944336, 0.02410495948791504, 0.024376447677612306, 0.02413043212890625, 0.024188928604125977, 0.02450227165222168, 0.024898591995239257, 0.025083999633789062, 0.024423295974731446, 0.024483840942382814, 0.024465408325195313, 0.024423488616943358, 0.024568767547607423, 0.024407840728759764, 0.0245100154876709, 0.02451318359375, 0.024414207458496092, 0.024391679763793944, 0.024535039901733398, 0.024408063888549804, 0.024434528350830077, 0.024848543167114257, 0.03255497741699219, 0.02445913505554199, 0.024359071731567383, 0.02433987236022949, 0.024180511474609374, 0.02423891258239746, 0.024481792449951172, 0.024482879638671875, 0.024099775314331055, 0.02409881591796875, 0.024102144241333008, 0.024246240615844728, 0.02449398422241211, 0.024426591873168944, 0.024271007537841796, 0.024676992416381837, 0.024213375091552733, 0.025733247756958007, 0.02564499282836914, 0.024516096115112306, 0.024421024322509765, 0.024242080688476563, 0.024379392623901368, 0.02418227195739746, 0.02401055908203125, 0.023927488327026368, 0.02445516777038574, 0.02411337661743164, 0.02411065673828125, 0.024148191452026367, 0.02405344009399414, 0.027379167556762694, 0.024437599182128907, 0.024240127563476564, 0.024256607055664063, 0.02449807929992676, 0.024294559478759765, 0.02436751937866211, 0.024376960754394533, 0.02448214340209961, 0.024354848861694336, 0.02804278373718262, 0.024661088943481447, 0.024467199325561524, 0.024287296295166017, 0.024188287734985353, 0.02416908836364746, 0.024227840423583984, 0.024358911514282225, 0.024211456298828125, 0.024205312728881836, 0.024326047897338866, 0.024227519989013672, 0.024127904891967773, 0.02417568016052246, 0.024312768936157227, 0.024505952835083007, 0.02422559928894043, 0.02405846405029297, 0.02419478416442871, 0.024277280807495118, 0.02467875289916992, 0.024370111465454102, 0.024073152542114257, 0.02409267234802246, 0.0240579833984375, 0.02412646484375, 0.024182687759399413, 0.024072288513183594, 0.0239847354888916, 0.024076576232910155, 0.024061952590942383, 0.02400048065185547, 0.02405379295349121, 0.024061952590942383, 0.02405075263977051, 0.024011711120605468, 0.024035327911376952, 0.024068063735961914, 0.02400464057922363, 0.02408198356628418, 0.024074432373046874, 0.024084735870361328, 0.023984128952026368, 0.023998464584350586, 0.024231712341308595, 0.024223167419433592, 0.024347423553466797, 0.024008512496948242, 0.024084512710571288, 0.024098560333251952, 0.024241888046264648, 0.024070207595825194, 0.024179328918457033, 0.024851999282836913, 0.02484272003173828, 0.024449024200439453, 0.024577823638916016, 0.024501920700073242, 0.024541471481323244, 0.024518943786621093, 0.02466815948486328, 0.02464143943786621, 0.024663488388061525, 0.024627231597900392, 0.02489936065673828, 0.02526908874511719, 0.024513792037963868, 0.024355392456054687, 0.02428447914123535, 0.02428761672973633, 0.02427471923828125, 0.024236255645751954, 0.024314367294311523, 0.024212799072265624, 0.02418659210205078, 0.02428003120422363, 0.024075647354125977, 0.024154752731323243, 0.024156160354614258, 0.024112672805786134, 0.02406857681274414, 0.0242259521484375, 0.02435206413269043]",tokens/s,41.03510937880659,,, 
4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,848.891904,558.825472,0.0,163.577856,154.631168,s,1,7.92300146484375,7.92300146484375,0.0,7.92300146484375,7.92300146484375,7.92300146484375,7.92300146484375,[7.92300146484375],,kWh,2.2599866625023424e-05,2.4855643478874524e-06,7.5397282540135e-06,3.262515922692438e-05,,MB,1229.242368,609.15712,0.0,199.22944,187.147776,s,25,0.19424972772598267,0.007769989109039306,0.0002163029875023168,0.007699391841888428,0.007972064208984376,0.008142291069030761,0.008529240531921386,"[0.008082976341247558, 0.007766304016113282, 0.007660768032073975, 0.007634943962097168, 0.00768995189666748, 0.008157119750976563, 0.007699391841888428, 0.007702784061431885, 0.007675039768218994, 0.0078056960105895995, 0.007701280117034912, 0.007663839817047119, 0.007749184131622315, 0.007765471935272217, 0.007688096046447754, 0.007707424163818359, 0.007665760040283203, 0.00864675235748291, 0.007653791904449463, 0.007707615852355957, 0.007694464206695556, 0.007737055778503418, 0.0076406078338623045, 0.007692543983459472, 0.007660863876342773]",tokens/s,32947.279128381204,kWh,2.25515588333984e-07,2.4870321807205812e-08,1.0638918866615677e-07,3.567750988073466e-07,tokens/kWh,717538866.5178012,MB,1265.037312,611.254272,0.0,201.326592,187.150336,s,25,9.943050720214842,0.3977220288085938,0.0026772775040471266,0.3964454040527344,0.40168916625976564,0.40314699096679685,0.4038981884765625,"[0.39874029541015626, 0.396388671875, 0.39532431030273435, 0.39672189331054686, 0.3955487976074219, 0.4033482971191406, 0.39606280517578124, 0.396182861328125, 0.40071026611328125, 0.39798458862304686, 0.39671002197265626, 0.4002525329589844, 0.3995301513671875, 0.40234176635742186, 0.3990752563476562, 0.3964454040527344, 0.39581195068359376, 0.39555010986328126, 0.39614224243164065, 0.394832275390625, 0.3995881652832031, 0.40407183837890626, 0.395975341796875, 0.3956080627441406, 0.3941028137207031]",tokens/s,158.40208848557177,kWh,1.1427905487627088e-05,1.2603048490791262e-06,4.2150779437173385e-06,1.690328828042355e-05,tokens/kWh,3727085.461410671,,s,1575,9.931675614833836,0.006305825787196084,0.00013328746779218132,0.006275167942047119,0.006393958282470704,0.00645662407875061,0.006788728313446045,"[0.00632425594329834, 0.006336639881134034, 0.006358399868011475, 0.006400479793548584, 0.006379551887512207, 0.006355231761932373, 0.006375135898590088, 0.006479743957519531, 0.006611072063446045, 0.006379263877868652, 0.006393983840942383, 0.006370463848114014, 0.006386655807495117, 0.006393919944763184, 0.006348608016967773, 0.006373600006103515, 0.006319712162017823, 0.006422719955444336, 0.006295711994171143, 0.006266848087310791, 0.006301695823669433, 0.006322336196899414, 0.0063047041893005374, 0.0062432317733764646, 0.006288896083831787, 0.006272736072540283, 0.00624124813079834, 0.006289472103118896, 0.006262527942657471, 
0.006469632148742676, 0.006276288032531738, 0.0063187842369079586, 0.0062772479057312015, 0.006254591941833496, 0.006276447772979737, 0.00623683214187622, 0.006290976047515869, 0.0062243518829345704, 0.006272895812988281, 0.006290719985961914, 0.006237184047698975, 0.006279104232788086, 0.006223775863647461, 0.006323904037475586, 0.0064085121154785155, 0.006354944229125976, 0.006266880035400391, 0.006272928237915039, 0.006350272178649903, 0.006375967979431152, 0.006404287815093994, 0.0063056640625, 0.006303616046905517, 0.0064347519874572755, 0.006286816120147705, 0.0062882242202758785, 0.006277376174926757, 0.006291071891784668, 0.0062873601913452145, 0.0062583680152893065, 0.006248832225799561, 0.006264512062072754, 0.006249855995178222, 0.006164480209350586, 0.00655072021484375, 0.006279232025146485, 0.00624886417388916, 0.006273248195648193, 0.006254720211029053, 0.006307839870452881, 0.0062341117858886715, 0.006309887886047363, 0.006237279891967773, 0.00656060791015625, 0.006297440052032471, 0.006347040176391602, 0.006348832130432129, 0.006268703937530517, 0.006287487983703613, 0.006260831832885742, 0.006250400066375733, 0.006262784004211426, 0.006247424125671387, 0.006267199993133545, 0.006670559883117676, 0.006274687767028809, 0.006243167877197265, 0.006375423908233643, 0.006297535896301269, 0.006223936080932618, 0.006307839870452881, 0.006260960102081299, 0.00632374382019043, 0.006303999900817871, 0.006266880035400391, 0.006305791854858398, 0.006315455913543701, 0.006288032054901123, 0.006385471820831299, 0.006283360004425049, 0.006331552028656006, 0.006260608196258545, 0.006252960205078125, 0.006264736175537109, 0.006234399795532227, 0.006238463878631592, 0.006240384101867676, 0.006254687786102295, 0.0062585282325744625, 0.006261856079101563, 0.006203904151916504, 0.006265312194824219, 0.0062392959594726564, 0.006230815887451172, 0.006254752159118652, 0.006246399879455566, 0.006259007930755615, 0.006232992172241211, 0.006255231857299805, 0.006213151931762696, 0.006255231857299805, 0.006246399879455566, 0.006313983917236328, 0.006225344181060791, 0.006244448184967041, 0.006275263786315918, 0.006176224231719971, 0.006279359817504883, 0.006270624160766601, 0.006258592128753662, 0.006256896018981934, 0.006257279872894287, 0.006246335983276367, 0.006243807792663574, 0.006271168231964112, 0.006259168148040771, 0.006256480216979981, 0.006260735988616943, 0.006270976066589356, 0.006250495910644531, 0.006233312129974365, 0.006301631927490234, 0.006218592166900635, 0.006266240119934082, 0.00625113582611084, 0.006225599765777588, 0.006259263992309571, 0.006233376026153564, 0.006246880054473877, 0.006316031932830811, 0.006276256084442139, 0.006233280181884766, 0.006305439949035645, 0.006236447811126709, 0.0062297282218933105, 0.006248672008514405, 0.006231840133666992, 0.00625875186920166, 0.0062687678337097165, 0.006275167942047119, 0.006266880035400391, 0.006264832019805908, 0.006252543926239014, 0.006266975879669189, 0.0062626562118530274, 0.0062259521484375, 0.006542816162109375, 0.006330111980438233, 0.00651958417892456, 0.006299647808074951, 0.006224095821380615, 0.006341792106628418, 0.0062798080444335935, 0.006247424125671387, 0.006288064002990723, 0.006248767852783203, 0.00626854419708252, 0.006244927883148193, 0.006358079910278321, 0.006265120029449463, 0.006246880054473877, 0.006242303848266601, 0.006231520175933838, 0.006254655838012695, 0.006242784023284912, 0.006279232025146485, 0.006221663951873779, 0.00624070405960083, 0.00625216007232666, 0.006130335807800293, 
0.006248703956604004, 0.006416384220123291, 0.006322495937347412, 0.006252416133880615, 0.006225152015686035, 0.006239840030670166, 0.006224575996398926, 0.0062507839202880855, 0.006217728137969971, 0.006745888233184815, 0.006349023818969727, 0.006329535961151123, 0.006263519763946533, 0.006234208106994629, 0.0062791681289672855, 0.0063647680282592775, 0.006246816158294677, 0.006238399982452392, 0.006223552227020264, 0.006260863780975342, 0.006278783798217774, 0.006269567966461181, 0.0062362561225891115, 0.006280064105987549, 0.006247200012207031, 0.006243743896484375, 0.006263391971588135, 0.006245471954345703, 0.006249375820159912, 0.0062864961624145505, 0.006345759868621826, 0.006485568046569824, 0.0062650880813598635, 0.0062997121810913085, 0.0062709121704101565, 0.006297567844390869, 0.006250847816467285, 0.006289087772369385, 0.006276224136352539, 0.006245376110076905, 0.006358687877655029, 0.00623583984375, 0.006246943950653076, 0.0062624640464782715, 0.006252128124237061, 0.006352640151977539, 0.006327616214752197, 0.006256288051605225, 0.006252543926239014, 0.006299903869628906, 0.006231808185577393, 0.0066557440757751465, 0.006322303771972656, 0.006248640060424805, 0.006321216106414795, 0.006267583847045898, 0.006265024185180664, 0.006244351863861084, 0.0062887039184570314, 0.006308544158935547, 0.006233407974243164, 0.006361023902893066, 0.006199711799621582, 0.006338560104370118, 0.006198463916778564, 0.0062576642036437985, 0.0062575039863586425, 0.006229184150695801, 0.00625055980682373, 0.0063318080902099606, 0.006246719837188721, 0.006238207817077636, 0.006250016212463379, 0.006258495807647705, 0.00624502420425415, 0.006221824169158936, 0.0062912960052490235, 0.006246560096740723, 0.006223423957824707, 0.00627785587310791, 0.006225535869598389, 0.0062403521537780764, 0.006242303848266601, 0.006223104000091553, 0.00622873592376709, 0.006243360042572021, 0.006232704162597656, 0.006225344181060791, 0.0063283839225769046, 0.006265696048736572, 0.0062566399574279785, 0.006235616207122803, 0.006335008144378662, 0.006242144107818604, 0.006236288070678711, 0.006267168045043945, 0.006235712051391602, 0.006287136077880859, 0.006274911880493164, 0.0062911357879638675, 0.006257887840270996, 0.006253600120544434, 0.006275008201599121, 0.006243008136749268, 0.006293504238128662, 0.0062873601913452145, 0.006377632141113281, 0.006265791893005371, 0.006282144069671631, 0.006260479927062988, 0.006238592147827148, 0.006284480094909668, 0.006279871940612793, 0.006266975879669189, 0.006275167942047119, 0.0062707839012146, 0.0064280638694763186, 0.0062715840339660645, 0.006373472213745117, 0.006281407833099365, 0.006274208068847657, 0.006323775768280029, 0.006359263896942139, 0.0063366079330444336, 0.006359744071960449, 0.006301055908203125, 0.006356095790863037, 0.006304448127746582, 0.006328991889953613, 0.006317823886871338, 0.00630844783782959, 0.006391551971435547, 0.006316287994384766, 0.006325888156890869, 0.006321728229522705, 0.006282080173492432, 0.00641542387008667, 0.006314911842346191, 0.0063448319435119626, 0.006287231922149659, 0.006339615821838379, 0.006298592090606689, 0.006273024082183838, 0.006355103969573975, 0.00626694393157959, 0.006270815849304199, 0.0062557759284973145, 0.006306367874145508, 0.006287327766418457, 0.006254591941833496, 0.006276832103729248, 0.00624294376373291, 0.006369184017181397, 0.006205440044403076, 0.006284639835357666, 0.006269696235656738, 0.006244256019592285, 0.006270175933837891, 0.006236735820770264, 0.00629315185546875, 0.006281792163848877, 
0.00628710412979126, 0.006305056095123291, 0.0062659201622009275, 0.008454015731811523, 0.007951648235321044, 0.008065024375915527, 0.006589119911193848, 0.006395359992980957, 0.0063060479164123535, 0.006271327972412109, 0.006377503871917724, 0.006299647808074951, 0.006254591941833496, 0.0062945599555969236, 0.006308095932006836, 0.006292479991912842, 0.006317759990692139, 0.006281216144561768, 0.006293504238128662, 0.006262784004211426, 0.006288735866546631, 0.006302015781402588, 0.006268671989440918, 0.006264575958251953, 0.006251359939575195, 0.006540544033050537, 0.006279935836791992, 0.006230016231536865, 0.006299647808074951, 0.006289408206939697, 0.006318079948425293, 0.006252543926239014, 0.006307136058807373, 0.0063487358093261715, 0.006302464008331299, 0.006268479824066162, 0.006267327785491944, 0.006260447978973389, 0.006269216060638428, 0.006350751876831055, 0.006238304138183594, 0.006271071910858154, 0.006249792098999023, 0.006255199909210205, 0.006254496097564698, 0.006228223800659179, 0.006246560096740723, 0.00640172815322876, 0.006290880203247071, 0.006277696132659912, 0.006280511856079102, 0.006306496143341065, 0.006252352237701416, 0.006337056159973145, 0.0063056960105896, 0.006321983814239502, 0.006362559795379638, 0.006279679775238037, 0.006338560104370118, 0.006280511856079102, 0.006258912086486816, 0.006222303867340088, 0.006268928050994873, 0.006238207817077636, 0.006246399879455566, 0.006225056171417236, 0.0062258877754211425, 0.006245247840881348, 0.006251935958862305, 0.00628111982345581, 0.006244160175323486, 0.00625548791885376, 0.006274144172668457, 0.006263967990875244, 0.006248479843139649, 0.006255616188049316, 0.006298336029052734, 0.0062770237922668455, 0.0062763838768005375, 0.006269248008728028, 0.006275839805603027, 0.006303487777709961, 0.006368576049804687, 0.0062921600341796875, 0.006301152229309082, 0.0062960958480834965, 0.006281216144561768, 0.0062873601913452145, 0.006274432182312012, 0.006257279872894287, 0.006252416133880615, 0.0063155522346496585, 0.006242847919464111, 0.006289472103118896, 0.00630076789855957, 0.006275296211242676, 0.006277440071105957, 0.006345088005065918, 0.006296671867370605, 0.006236576080322266, 0.00625113582611084, 0.006260128021240235, 0.006271455764770508, 0.006264832019805908, 0.006281216144561768, 0.006276576042175293, 0.0062811517715454105, 0.006320064067840576, 0.006250815868377686, 0.006255008220672607, 0.006264768123626709, 0.006460608005523682, 0.006278240203857422, 0.006270495891571045, 0.006329792022705078, 0.006284095764160156, 0.0062626562118530274, 0.006258431911468506, 0.006240575790405273, 0.006241312026977539, 0.0062509760856628415, 0.006294015884399414, 0.006264927864074707, 0.006258656024932861, 0.006291391849517822, 0.006289087772369385, 0.006288000106811523, 0.006418047904968261, 0.006561855792999268, 0.006266047954559326, 0.006316864013671875, 0.006317696094512939, 0.0063564801216125484, 0.006299967765808105, 0.006314559936523438, 0.006270624160766601, 0.006259039878845215, 0.006367231845855713, 0.006303743839263916, 0.006240287780761719, 0.006257855892181396, 0.006267360210418701, 0.00625273609161377, 0.006236447811126709, 0.006238048076629639, 0.0062156801223754886, 0.0062475199699401854, 0.006228608131408691, 0.006213503837585449, 0.006215744018554688, 0.006242144107818604, 0.0062510080337524416, 0.006208896160125732, 0.006189824104309082, 0.006248447895050049, 0.006220863819122314, 0.006255551815032959, 0.0062259521484375, 0.006207647800445556, 0.006321792125701904, 0.00624454402923584, 
0.006227968215942382, 0.006244383811950684, 0.006303711891174317, 0.006207712173461914, 0.006317952156066895, 0.006251488208770752, 0.0062486081123352055, 0.00623199987411499, 0.006267744064331055, 0.006269152164459228, 0.0062902398109436035, 0.006291615962982178, 0.006279232025146485, 0.006379776000976563, 0.006286848068237305, 0.006292448043823242, 0.006273087978363037, 0.006295519828796387, 0.006340608119964599, 0.006309023857116699, 0.006693439960479736, 0.0064085121154785155, 0.006391776084899902, 0.0064264960289001465, 0.006381120204925537, 0.006402624130249023, 0.006449183940887451, 0.00639792013168335, 0.00643891191482544, 0.006432767868041992, 0.006354144096374512, 0.0063695359230041505, 0.0062911038398742675, 0.006320223808288574, 0.0063762240409851074, 0.00649945592880249, 0.006423423767089844, 0.006432672023773193, 0.006397151947021484, 0.006376319885253906, 0.006450943946838379, 0.0064330239295959475, 0.00665177583694458, 0.006407392024993896, 0.006359039783477783, 0.006445919990539551, 0.006416160106658936, 0.0063753600120544434, 0.006433343887329102, 0.0063810238838195804, 0.006379839897155762, 0.006497536182403564, 0.006398719787597656, 0.006404096126556396, 0.006377471923828125, 0.006307487964630127, 0.006374015808105469, 0.006306591987609863, 0.006316991806030273, 0.0062724161148071285, 0.006341504096984863, 0.006419456005096436, 0.00633519983291626, 0.0063448319435119626, 0.006334335803985596, 0.0063506560325622555, 0.006322112083435059, 0.006353151798248291, 0.006311295986175537, 0.006304384231567383, 0.006322495937347412, 0.006391456127166748, 0.006346784114837647, 0.006322175979614258, 0.0063089919090271, 0.0062960958480834965, 0.006547808170318603, 0.006307839870452881, 0.006264512062072754, 0.006261055946350098, 0.006291456222534179, 0.006262784004211426, 0.006296832084655762, 0.006230144023895263, 0.0063023362159729006, 0.006274720191955566, 0.006260704040527343, 0.006273248195648193, 0.006238368034362793, 0.006301695823669433, 0.00628227186203003, 0.006292416095733643, 0.006277503967285156, 0.006240992069244384, 0.006273983955383301, 0.006253888130187988, 0.006259359836578369, 0.006230048179626465, 0.0062947521209716795, 0.006226336002349854, 0.0062527041435241695, 0.006242400169372559, 0.006208767890930176, 0.006452095985412598, 0.006266880035400391, 0.006481696128845215, 0.006406688213348389, 0.006383296012878418, 0.006326272010803223, 0.006285312175750732, 0.00630790376663208, 0.006295263767242432, 0.0063021121025085445, 0.006345823764801025, 0.00628604793548584, 0.006320032119750976, 0.006305888175964356, 0.006338560104370118, 0.006201727867126465, 0.006266848087310791, 0.006258336067199707, 0.006269504070281982, 0.006284639835357666, 0.00627561616897583, 0.006352799892425537, 0.006219808101654053, 0.006291359901428223, 0.006285215854644775, 0.006238304138183594, 0.006237504005432129, 0.006244351863861084, 0.006366112232208252, 0.006246367931365967, 0.006301504135131836, 0.006260735988616943, 0.006248447895050049, 0.006242303848266601, 0.006228032112121582, 0.006262239933013916, 0.006261216163635254, 0.006346848011016846, 0.0063610877990722655, 0.006315872192382812, 0.0063134398460388185, 0.006328927993774414, 0.006370719909667969, 0.006318848133087158, 0.00630460786819458, 0.00628223991394043, 0.006262207984924316, 0.006305823802947998, 0.006288127899169922, 0.006323999881744384, 0.006288671970367431, 0.006337247848510742, 0.006264832019805908, 0.0062494721412658695, 0.006247456073760986, 0.006245696067810059, 0.006503071784973145, 0.006307680130004883, 
0.006264832019805908, 0.006268576145172119, 0.006253056049346924, 0.006260064125061035, 0.006251167774200439, 0.006290976047515869, 0.006300127983093261, 0.006275072097778321, 0.0062871999740600586, 0.006264256000518799, 0.006273439884185791, 0.0062709121704101565, 0.006283648014068603, 0.006273024082183838, 0.0064445118904113766, 0.006412831783294678, 0.006270271778106689, 0.006389696121215821, 0.006247168064117432, 0.006272192001342773, 0.006156383991241455, 0.006267104148864746, 0.006404128074645996, 0.00634444808959961, 0.0063201279640197755, 0.006359039783477783, 0.006348639965057373, 0.006332575798034668, 0.0063079681396484375, 0.00633132791519165, 0.006339200019836426, 0.006425151824951172, 0.006409984111785888, 0.0063396477699279785, 0.006316991806030273, 0.006336703777313233, 0.006516191959381103, 0.006395840167999268, 0.006369696140289306, 0.0063569917678833006, 0.00632422399520874, 0.006453248023986816, 0.006362720012664795, 0.006946879863739014, 0.0063487358093261715, 0.006372096061706543, 0.006344351768493652, 0.006385824203491211, 0.006303872108459473, 0.006370272159576416, 0.006394591808319092, 0.006349184036254883, 0.006352543830871582, 0.006352479934692383, 0.006359231948852539, 0.006341023921966553, 0.006371007919311523, 0.006418560028076172, 0.006389920234680176, 0.006337823867797851, 0.006339200019836426, 0.006421792030334473, 0.0063248958587646485, 0.00636736011505127, 0.006352672100067139, 0.006278783798217774, 0.00635753583908081, 0.00631388807296753, 0.006289535999298095, 0.00626691198348999, 0.006365056037902832, 0.006254591941833496, 0.00627126407623291, 0.006285024166107178, 0.006236192226409912, 0.006303711891174317, 0.0062733120918273926, 0.006229375839233399, 0.006288127899169922, 0.006276000022888184, 0.006283967971801758, 0.0062811517715454105, 0.006283328056335449, 0.006199488162994385, 0.006357728004455566, 0.0062873601913452145, 0.006280863761901855, 0.006335872173309326, 0.006331103801727295, 0.0063448319435119626, 0.006312320232391358, 0.006326015949249268, 0.006534527778625488, 0.006357759952545166, 0.006456480026245117, 0.006288127899169922, 0.00628323221206665, 0.00628329610824585, 0.006284543991088867, 0.006314720153808594, 0.006363135814666748, 0.006285056114196778, 0.006293759822845459, 0.006281216144561768, 0.0062804799079895016, 0.0062687678337097165, 0.006324831962585449, 0.00630134391784668, 0.00632806396484375, 0.0063678078651428225, 0.0063155522346496585, 0.0063414077758789066, 0.006293600082397461, 0.006311840057373047, 0.006293280124664307, 0.006534463882446289, 0.006325151920318604, 0.006338655948638916, 0.006338272094726562, 0.006335872173309326, 0.00626691198348999, 0.006290559768676758, 0.006364831924438477, 0.006379903793334961, 0.0063478717803955075, 0.006375679969787598, 0.006342944145202637, 0.0063079681396484375, 0.006341887950897217, 0.006321023941040039, 0.006342400074005127, 0.00628326416015625, 0.006283328056335449, 0.006297632217407227, 0.006313759803771973, 0.006305439949035645, 0.006285952091217041, 0.006347936153411865, 0.006408192157745361, 0.0063434882164001465, 0.006381440162658692, 0.006381472110748291, 0.006401631832122803, 0.006439424037933349, 0.006376448154449463, 0.006406816005706787, 0.006405439853668213, 0.00638431978225708, 0.006354527950286865, 0.00634716796875, 0.00638156795501709, 0.00632422399520874, 0.006409728050231934, 0.006494751930236816, 0.0065392317771911625, 0.0065270719528198245, 0.00644700813293457, 0.006414239883422852, 0.0064245758056640625, 0.006457568168640137, 0.006467455863952636, 
0.006451263904571534, 0.006430655956268311, 0.006416704177856445, 0.006377151966094971, 0.006493951797485352, 0.006445312023162842, 0.0064364480972290035, 0.0066926078796386715, 0.006433440208435059, 0.006426784038543701, 0.006456960201263428, 0.006361120223999024, 0.006400191783905029, 0.006391295909881592, 0.0063861761093139645, 0.006475872039794922, 0.006407423973083496, 0.006449215888977051, 0.006502079963684082, 0.006380447864532471, 0.006391903877258301, 0.006363103866577149, 0.006337471961975098, 0.00633900785446167, 0.006267615795135498, 0.006304671764373779, 0.006321087837219238, 0.00631987190246582, 0.0065796799659729, 0.006327199935913086, 0.00630944013595581, 0.006258624076843262, 0.0062993597984313966, 0.00624502420425415, 0.0062791681289672855, 0.0062967357635498045, 0.006275936126708985, 0.006252543926239014, 0.006520671844482422, 0.0062837119102478025, 0.006249311923980713, 0.00628111982345581, 0.006298592090606689, 0.006244351863861084, 0.006291456222534179, 0.006252543926239014, 0.006277376174926757, 0.006235904216766357, 0.006196415901184082, 0.006286079883575439, 0.006248511791229248, 0.0062975997924804685, 0.006271008014678955, 0.006270944118499756, 0.006253632068634033, 0.006267519950866699, 0.0062262721061706544, 0.006284416198730469, 0.006220287799835205, 0.006232416152954102, 0.006240096092224121, 0.006668511867523193, 0.006512576103210449, 0.0073207998275756836, 0.006783103942871094, 0.007082367897033691, 0.006322751998901368, 0.006258399963378907, 0.006317088127136231, 0.00649721622467041, 0.0062724480628967285, 0.0062756800651550294, 0.00625267219543457, 0.006288544178009033, 0.006296288013458252, 0.0062873601913452145, 0.006266719818115234, 0.0062667841911315915, 0.00628111982345581, 0.006277472019195557, 0.006285312175750732, 0.006262784004211426, 0.006254591941833496, 0.006804736137390137, 0.006252543926239014, 0.006297440052032471, 0.006222752094268799, 0.006291584014892578, 0.006242176055908203, 0.006236159801483154, 0.006239456176757812, 0.006253471851348877, 0.006243936061859131, 0.006242591857910157, 0.006262911796569824, 0.006238080024719238, 0.006258272171020508, 0.00625705623626709, 0.006238207817077636, 0.006264832019805908, 0.006292992115020752, 0.006300159931182861, 0.006258240222930908, 0.006294015884399414, 0.006293439865112304, 0.006313983917236328, 0.006287295818328858, 0.006248095989227295, 0.006281023979187012, 0.006247231960296631, 0.006327648162841797, 0.006221663951873779, 0.006268928050994873, 0.006279200077056885, 0.006264800071716308, 0.006273024082183838, 0.0062770237922668455, 0.006230112075805664, 0.006246592044830322, 0.006284255981445313, 0.006269023895263672, 0.006227871894836426, 0.0062592320442199706, 0.006229599952697754, 0.006263519763946533, 0.006266880035400391, 0.006248640060424805, 0.006243775844573975, 0.006273056030273437, 0.006257279872894287, 0.0062665920257568355, 0.0062975997924804685, 0.00628329610824585, 0.006264095783233643, 0.006272960186004639, 0.006250912189483642, 0.006274496078491211, 0.006241087913513184, 0.006289343833923339, 0.006260896205902099, 0.006292704105377197, 0.006295360088348388, 0.006290207862854004, 0.006310080051422119, 0.006266079902648926, 0.00632092809677124, 0.00630790376663208, 0.006395423889160156, 0.006318272113800049, 0.006302175998687744, 0.0062605757713317875, 0.006264736175537109, 0.006280191898345947, 0.006251520156860352, 0.006247776031494141, 0.0062772479057312015, 0.006261312007904053, 0.006285280227661133, 0.006236159801483154, 0.006283455848693847, 0.0062782721519470215, 
0.006895679950714112, 0.0062531838417053225, 0.006502655982971192, 0.006285151958465576, 0.006270463943481445, 0.006252960205078125, 0.006293600082397461, 0.006241663932800293, 0.0063023362159729006, 0.0062683200836181644, 0.0062848000526428225, 0.0062715840339660645, 0.0062674241065979, 0.006451456069946289, 0.006232160091400147, 0.006247712135314941, 0.006236415863037109, 0.006248640060424805, 0.006234367847442627, 0.006223680019378662, 0.006268864154815674, 0.006231552124023438, 0.006394464015960694, 0.006393439769744873, 0.006303296089172363, 0.0063348479270935055, 0.006238592147827148, 0.0062631359100341795, 0.0062442879676818846, 0.006273791790008545, 0.006232639789581299, 0.006269343852996826, 0.006243775844573975, 0.006294047832489014, 0.00625267219543457, 0.006239776134490967, 0.0062552962303161624, 0.006210432052612304, 0.0062485761642456054, 0.00621449613571167, 0.006285120010375977, 0.006254623889923096, 0.006243552207946777, 0.006236959934234619, 0.006246399879455566, 0.006273087978363037, 0.006323616027832032, 0.0062626562118530274, 0.006291584014892578, 0.00624294376373291, 0.006264736175537109, 0.006240032196044922, 0.006239808082580566, 0.006235136032104492, 0.0063004159927368165, 0.00628550386428833, 0.006257184028625488, 0.006703296184539795, 0.0062873601913452145, 0.0063240318298339844, 0.006254496097564698, 0.006289696216583252, 0.006277376174926757, 0.006264448165893555, 0.006277440071105957, 0.006242112159729004, 0.006285312175750732, 0.006232063770294189, 0.0063162240982055666, 0.006264800071716308, 0.006237343788146972, 0.006292479991912842, 0.006251967906951904, 0.006262976169586182, 0.0062564477920532225, 0.006283360004425049, 0.006361055850982666, 0.006250527858734131, 0.006270976066589356, 0.006272928237915039, 0.006226016044616699, 0.006244448184967041, 0.006238111972808838, 0.006270944118499756, 0.006274240016937256, 0.006273087978363037, 0.006253344058990478, 0.006234272003173828, 0.006255871772766113, 0.006248832225799561, 0.006252511978149414, 0.0062416958808898925, 0.006324160099029541, 0.006214560031890869, 0.006366720199584961, 0.006285823822021484, 0.006260704040527343, 0.006276800155639649, 0.006256991863250732, 0.006266880035400391, 0.0062293438911437985, 0.0064330239295959475, 0.006250815868377686, 0.006483104228973389, 0.006273600101470947, 0.006270463943481445, 0.006306943893432618, 0.006348800182342529, 0.006260640144348144, 0.006311935901641846, 0.006283167839050293, 0.0063089919090271, 0.006257472038269043, 0.006264832019805908, 0.006259744167327881, 0.006259456157684327, 0.006246592044830322, 0.006248032093048096, 0.006264383792877197, 0.00625708818435669, 0.006275519847869873, 0.006221824169158936, 0.006254591941833496, 0.006244351863861084, 0.006227680206298828, 0.006258975982666015, 0.006256735801696777, 0.006309887886047363, 0.006239456176757812, 0.00628601598739624, 0.0062425599098205565, 0.006232895851135254, 0.006237343788146972, 0.0062379841804504396, 0.006275392055511474, 0.006261760234832763, 0.006304768085479737, 0.00623583984375, 0.00625977611541748, 0.006173567771911621, 0.0062518720626831056, 0.006240672111511231, 0.0062485761642456054, 0.006281248092651367, 0.006244512081146241, 0.006244192123413086, 0.006242303848266601, 0.00633241605758667, 0.006272543907165527, 0.00624832010269165, 0.006243040084838867, 0.006241663932800293, 0.006275712013244629, 0.0062772479057312015, 0.006254432201385498, 0.006270847797393799, 0.0062639999389648435, 0.006292319774627685, 0.006475776195526123, 0.006301695823669433, 0.006267072200775147, 
0.006258272171020508, 0.006263008117675781, 0.00621779203414917, 0.006234047889709472, 0.006221183776855469, 0.006277184009552002, 0.0062039680480957034, 0.006266464233398437, 0.006418848037719726, 0.006275072097778321, 0.006299808025360108, 0.0062709121704101565, 0.006291615962982178, 0.006285056114196778, 0.006262784004211426, 0.006287487983703613, 0.006285183906555176, 0.006297632217407227, 0.006461408138275147, 0.006439008235931396, 0.0063036479949951174, 0.006251935958862305, 0.006254432201385498, 0.0062594242095947265, 0.00636521577835083, 0.006262784004211426, 0.006293504238128662, 0.006352896213531494, 0.006297088146209716, 0.006301983833312989, 0.006256864070892334, 0.006305791854858398, 0.006248288154602051, 0.006270592212677002, 0.006257343769073487, 0.006254176139831543, 0.00627455997467041, 0.006310656070709229, 0.006266880035400391, 0.006272895812988281, 0.0062706880569458005, 0.006168511867523194, 0.006259488105773926, 0.006267839908599853, 0.006246816158294677, 0.0062674241065979, 0.0062486081123352055, 0.0062623038291931155, 0.0062549118995666505, 0.006237887859344483, 0.006244671821594238, 0.006264832019805908, 0.006278240203857422, 0.0062271361351013185, 0.0062707839012146, 0.0062135357856750485, 0.006249567985534668, 0.006261663913726807, 0.006238111972808838, 0.0062278079986572265, 0.006250080108642578, 0.00625929594039917, 0.006279200077056885, 0.006290976047515869, 0.0062708802223205564, 0.006257184028625488, 0.00626694393157959, 0.006254496097564698, 0.006248544216156006, 0.0062644162178039555, 0.00627785587310791, 0.006296607971191406, 0.006261248111724854, 0.0062707200050354005, 0.006236703872680664, 0.006281184196472168, 0.006264736175537109, 0.006282400131225586, 0.00627513599395752, 0.0063001918792724605, 0.006287551879882812, 0.0062644162178039555, 0.00627891206741333, 0.006243040084838867, 0.00627839994430542, 0.006238815784454346, 0.0063161921501159665, 0.006266111850738525, 0.006252768039703369, 0.006273280143737793, 0.006256927967071533, 0.006317440032958984, 0.006226560115814209, 0.006291744232177735, 0.006258399963378907, 0.006244351863861084, 0.006237215995788574, 0.00623305606842041, 0.0062566399574279785, 0.006223872184753418, 0.0062782721519470215, 0.006230656147003174, 0.006261023998260498, 0.006296800136566162, 0.006216224193572998, 0.00625171184539795, 0.006247104167938233, 0.006291584014892578, 0.006242303848266601, 0.006240479946136475, 0.006253632068634033, 0.006261792182922363, 0.006266687870025635, 0.006242527961730957, 0.006435743808746338, 0.0062369279861450196, 0.006309728145599366, 0.006254303932189942, 0.006236288070678711, 0.006258399963378907, 0.006265503883361816, 0.006269951820373535, 0.006219871997833252, 0.006244671821594238, 0.006274752140045166, 0.006218048095703125, 0.006244895935058593, 0.006241407871246338, 0.006232831954956055, 0.006230144023895263, 0.006254720211029053, 0.0062280001640319825, 0.006255807876586914, 0.0062614078521728515, 0.0062791681289672855, 0.006295008182525635, 0.006264768123626709, 0.006323935985565185, 0.0062592320442199706, 0.006313695907592774, 0.006310527801513672, 0.006293248176574707, 0.006299903869628906, 0.006294623851776123, 0.006308544158935547, 0.006260223865509033, 0.006295296192169189, 0.0062863039970397945, 0.006252128124237061, 0.00642300796508789, 0.006753888130187988, 0.006943071842193603, 0.006821887969970703, 0.006658239841461182, 0.0064141440391540525, 0.006651904106140137, 0.006684671878814697, 0.006401472091674805, 0.006343232154846191, 0.00632422399520874, 0.006473567962646485, 
0.006299392223358154, 0.006473696231842041, 0.006341055870056152, 0.0062791681289672855, 0.006301695823669433, 0.006491583824157715, 0.006342751979827881, 0.0064204797744750975, 0.006262432098388672, 0.006377024173736572, 0.006424479961395264, 0.006283199787139893, 0.006775743961334228, 0.00670246410369873, 0.006959519863128662, 0.0068089919090271, 0.007252799987792969, 0.006895616054534912, 0.006481919765472412, 0.006317887783050537, 0.006246592044830322, 0.006331391811370849, 0.006251296043395996, 0.006375775814056396, 0.0062317438125610355, 0.006334047794342041, 0.006496863842010498, 0.006286431789398193, 0.0063108158111572265, 0.006391808032989502, 0.006318079948425293, 0.006287295818328858, 0.006444799900054932, 0.006317567825317383, 0.0062533760070800784, 0.006293280124664307, 0.006252960205078125, 0.006401855945587159, 0.006316031932830811, 0.006393856048583985, 0.006300864219665528, 0.006323008060455322, 0.0062975997924804685, 0.006252031803131103, 0.006243008136749268, 0.0062252159118652345, 0.006219615936279297, 0.006240287780761719, 0.006293791770935059, 0.006281568050384522, 0.006218976020812989, 0.006217599868774414, 0.006232287883758545, 0.006275072097778321, 0.006242208003997803, 0.0062791681289672855, 0.006472479820251465, 0.006246304035186768, 0.006688064098358155, 0.007977759838104248, 0.0068579201698303225, 0.006299935817718506, 0.006314527988433838, 0.006479743957519531, 0.006246528148651123, 0.006273024082183838, 0.006256095886230469, 0.006265376091003418, 0.006238207817077636, 0.006192704200744629, 0.006247168064117432, 0.006213632106781006, 0.006246399879455566, 0.006217728137969971, 0.006225279808044434, 0.00625113582611084, 0.006287040233612061, 0.006231904029846191, 0.006289055824279785, 0.006222655773162842, 0.006229568004608155, 0.0062408318519592285, 0.006275167942047119, 0.006278528213500977, 0.006250912189483642, 0.006295551776885986, 0.006320032119750976, 0.006289120197296143, 0.006264319896697998, 0.006271232128143311, 0.0062689599990844724, 0.006253151893615722, 0.0065699520111083985, 0.00628329610824585, 0.006309887886047363, 0.006336095809936523, 0.006281407833099365, 0.006318304061889648, 0.006274687767028809, 0.006252927780151367, 0.006271232128143311, 0.006301375865936279, 0.0062997121810913085, 0.0062854719161987305, 0.006301536083221436, 0.0062791681289672855, 0.006267039775848388, 0.0062605757713317875, 0.006238207817077636, 0.006261856079101563, 0.006257887840270996, 0.006311552047729492, 0.006278592109680176, 0.00625932788848877, 0.006268928050994873, 0.006244703769683838, 0.006276768207550049, 0.006246399879455566, 0.006301504135131836, 0.00623635196685791, 0.006328320026397705, 0.006318016052246094, 0.006276768207550049, 0.006310304164886474, 0.006305791854858398, 0.0063055682182312014, 0.00625654411315918, 0.006313663959503174, 0.006288000106811523, 0.006291456222534179, 0.006323775768280029, 0.006269375801086426, 0.006173855781555175, 0.006361440181732177, 0.0062551040649414065, 0.006252543926239014, 0.006240255832672119, 0.006291615962982178, 0.006243807792663574, 0.006300064086914062, 0.006241919994354248, 0.006277184009552002, 0.006230303764343262, 0.006213280200958252, 0.006240511894226074, 0.006237887859344483, 0.0062708802223205564, 0.006257152080535889, 0.006257984161376953, 0.006230720043182373, 0.006249983787536621, 0.006265120029449463, 0.006246335983276367, 0.006269216060638428, 0.006256383895874023, 0.006301951885223389, 0.006223296165466309, 0.006487711906433106, 0.00624351978302002, 0.006235328197479248, 0.006238751888275147, 
0.006245471954345703, 0.0062837119102478025, 0.006265312194824219, 0.006277120113372803, 0.006254591941833496, 0.006259903907775879, 0.00626361608505249, 0.006453248023986816, 0.006285312175750732, 0.006227968215942382, 0.006282400131225586, 0.006325088024139404, 0.006240255832672119, 0.006281472206115722, 0.006238175868988037, 0.006252448081970215, 0.0061950721740722655, 0.006275072097778321, 0.006256608009338379, 0.006293536186218262, 0.006274816036224365, 0.006251071929931641, 0.006248127937316894, 0.006250048160552978, 0.006434271812438965, 0.006230591773986816, 0.006214144229888916, 0.006283167839050293, 0.006219168186187744, 0.006740831851959229, 0.006266623973846436, 0.006231904029846191, 0.006234272003173828, 0.006238207817077636, 0.006150144100189209, 0.0062585601806640625, 0.006305920124053955, 0.006299647808074951, 0.006256192207336426, 0.006259136199951172, 0.0062566399574279785, 0.006288415908813476, 0.006255424022674561, 0.0063062400817871095, 0.006241824150085449, 0.0062791681289672855, 0.006275263786315918, 0.006250495910644531, 0.006266816139221192, 0.0062321281433105466, 0.006232063770294189, 0.006231328010559082, 0.006256703853607178, 0.006214303970336914, 0.006273024082183838, 0.006266880035400391, 0.006236480236053467, 0.006244031906127929, 0.006229279994964599, 0.006238111972808838, 0.006222655773162842, 0.0062583680152893065, 0.006248767852783203, 0.006248640060424805, 0.0062724161148071285, 0.006238143920898437, 0.0062646718025207515, 0.006250495910644531, 0.006266655921936035, 0.006242335796356201, 0.006273471832275391, 0.006252287864685058, 0.006232992172241211, 0.006260735988616943, 0.006231872081756592, 0.006239488124847412, 0.006232160091400147, 0.006246304035186768, 0.006224544048309326, 0.0062259202003479, 0.006242400169372559, 0.006246304035186768, 0.006242303848266601, 0.006213183879852295, 0.006242144107818604, 0.006223936080932618, 0.006259039878845215, 0.006245855808258057, 0.0062326078414916995, 0.006244095802307129, 0.006222271919250488, 0.00628326416015625, 0.006227968215942382, 0.006256415843963623, 0.006240479946136475, 0.006266240119934082, 0.006233823776245117]",tokens/s,158.58351209614608,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1847.468032,3181.248512,0.0,2778.7264,2485.458944,s,1,9.1980478515625,9.1980478515625,0.0,9.1980478515625,9.1980478515625,9.1980478515625,9.1980478515625,[9.1980478515625],,kWh,6.0446132979162326e-05,6.660180298332034e-06,2.3353907571993515e-05,9.046022084948788e-05,,MB,1923.166208,3208.511488,0.0,2791.309312,2192.393728,s,10,0.6957987823486328,0.06957987823486328,0.00019205109045825584,0.06954795455932616,0.06979649047851562,0.06992168655395507,0.07002184341430663,"[0.07004688262939453, 0.0694933090209961, 0.06957395172119141, 0.0694862060546875, 0.06935145568847656, 0.06952528381347656, 0.06957062530517578, 0.06937324523925781, 0.06976866912841796, 
0.06960915374755859]",tokens/s,3679.224604789983,kWh,2.083399863652439e-06,2.2976146407695612e-07,1.3832733682411582e-06,3.696434695970553e-06,tokens/kWh,69255923.89852391,MB,1927.65952,3208.511488,0.0,2791.309312,2192.396288,s,10,16.6251416015625,1.66251416015625,0.09475645038249406,1.6275960083007812,1.6827520629882813,1.8138452697753904,1.918719835205078,"[1.9449384765625, 1.6249873046875, 1.632620361328125, 1.6227357177734374, 1.61879296875, 1.6262879638671874, 1.628904052734375, 1.6235780029296876, 1.6536202392578125, 1.648676513671875]",tokens/s,37.894414080707136,kWh,4.7596070965090686e-05,5.249487500456165e-06,2.460024741856028e-05,7.744580588410713e-05,tokens/kWh,813472.069672509,,s,630,16.623141096115113,0.026385938247801766,0.0018345248029130855,0.025810991287231445,0.026793289375305174,0.03217377490997315,0.03244344123840332,"[0.03233033752441406, 0.03245475387573242, 0.03253958511352539, 0.03250390243530273, 0.032221248626708984, 0.03230803298950195, 0.03219046401977539, 0.03214745712280274, 0.032292865753173826, 0.03233996963500976, 0.03227033615112305, 0.032178176879882815, 0.032415744781494144, 0.032233470916748046, 0.03212492752075195, 0.032302463531494144, 0.032257823944091796, 0.032133983612060546, 0.03216998291015625, 0.03212831878662109, 0.032119487762451174, 0.03214950561523437, 0.036057086944580076, 0.03263897705078125, 0.03256729507446289, 0.03224140930175781, 0.03221452713012695, 0.032180065155029296, 0.03209056091308594, 0.032272289276123044, 0.03225657653808594, 0.03222937774658203, 0.03217203140258789, 0.032137313842773435, 0.03219244766235352, 0.032175201416015625, 0.032091007232666016, 0.03208806228637695, 0.03209340667724609, 0.03221372985839844, 0.03206118392944336, 0.032102718353271484, 0.03219251251220703, 0.032315391540527344, 0.041304222106933595, 0.03237257766723633, 0.03222323226928711, 0.031614912033081054, 0.027897920608520508, 0.02546892738342285, 0.025394271850585938, 0.025530687332153322, 0.0254552001953125, 0.025540607452392578, 0.02551094436645508, 0.025490272521972657, 0.02551411247253418, 0.02553446388244629, 0.025511936187744142, 0.025409536361694338, 0.025374143600463868, 0.025412160873413085, 0.025796159744262696, 0.026581151962280274, 0.026212831497192383, 0.026631008148193358, 0.025918399810791016, 0.026060800552368164, 0.02573855972290039, 0.025846527099609374, 0.025805791854858397, 0.02577097511291504, 0.02553446388244629, 0.025587711334228515, 0.025689184188842775, 0.025472959518432616, 0.025539487838745118, 0.025589824676513672, 0.02618704032897949, 0.025731775283813478, 0.025687135696411133, 0.025555744171142578, 0.025970848083496093, 0.026046464920043946, 0.02570240020751953, 0.025812992095947264, 0.025745183944702148, 0.025518112182617188, 0.02552560043334961, 0.025496416091918946, 0.02549964714050293, 0.02546067237854004, 0.025444416046142577, 0.025530303955078125, 0.025411935806274415, 0.02546659278869629, 0.025458688735961913, 0.025382911682128906, 0.025438175201416016, 0.025961919784545897, 0.02581679916381836, 0.025692384719848634, 0.025664159774780274, 0.02614476776123047, 0.025888736724853516, 0.026146848678588866, 0.0262936954498291, 0.026034751892089845, 0.026332160949707032, 0.026036928176879883, 0.02588857650756836, 0.025967103958129883, 0.025769535064697265, 0.025749887466430664, 0.02573516845703125, 0.025884735107421876, 0.025949344635009766, 0.02574380874633789, 0.025801055908203124, 0.02579462432861328, 0.025772031784057618, 0.02598294448852539, 0.025829408645629885, 0.025675775527954102, 0.025710559844970702, 
0.025476543426513672, 0.02586038398742676, 0.02593791961669922, 0.025783456802368165, 0.025840480804443358, 0.025870336532592773, 0.02595587158203125, 0.026116575241088867, 0.02578963279724121, 0.025846368789672853, 0.02588489532470703, 0.02614067268371582, 0.025971935272216796, 0.02582156753540039, 0.025983135223388673, 0.025833728790283204, 0.02588457679748535, 0.025733280181884765, 0.02569209671020508, 0.02560540771484375, 0.025739999771118165, 0.025579519271850586, 0.02570444869995117, 0.025636863708496094, 0.025653247833251954, 0.02569215965270996, 0.027790367126464845, 0.02611305618286133, 0.02605254364013672, 0.026052352905273437, 0.02596028709411621, 0.02577449607849121, 0.02572287940979004, 0.025870336532592773, 0.025790464401245116, 0.025753152847290038, 0.025833824157714843, 0.025828832626342772, 0.02596108818054199, 0.025815040588378906, 0.025851903915405275, 0.025681087493896484, 0.025675743103027344, 0.025645919799804687, 0.025614336013793947, 0.025757696151733397, 0.02574527931213379, 0.02563017654418945, 0.026092191696166993, 0.025857215881347657, 0.025651968002319336, 0.026716224670410155, 0.027488256454467775, 0.026779647827148437, 0.02619503974914551, 0.025926559448242188, 0.025777151107788086, 0.025995712280273437, 0.026057279586791993, 0.026009599685668947, 0.025610240936279297, 0.025669088363647462, 0.02555958366394043, 0.025542272567749023, 0.025808448791503905, 0.026636735916137695, 0.02708684730529785, 0.02611958312988281, 0.02605731201171875, 0.025765888214111327, 0.025765888214111327, 0.02561664009094238, 0.025816831588745117, 0.025616031646728515, 0.02562428855895996, 0.02560063934326172, 0.025782175064086914, 0.025626144409179687, 0.025606719970703126, 0.02580271911621094, 0.025735200881958006, 0.02568121528625488, 0.025570207595825196, 0.025636640548706055, 0.025839616775512695, 0.0257410888671875, 0.025722208023071288, 0.025645952224731445, 0.02557513618469238, 0.026073375701904298, 0.02553798484802246, 0.025651039123535155, 0.025559776306152342, 0.02571468734741211, 0.025638463973999024, 0.02578915214538574, 0.025718048095703126, 0.025737056732177733, 0.025743967056274415, 0.02557542419433594, 0.02589673614501953, 0.025606367111206056, 0.025998752593994142, 0.025882911682128907, 0.025669023513793944, 0.0255861759185791, 0.025682336807250978, 0.025561088562011718, 0.025579519271850586, 0.025769983291625977, 0.025574623107910158, 0.025717344284057617, 0.025632959365844726, 0.025655231475830077, 0.025607776641845704, 0.02560611152648926, 0.025686527252197267, 0.025779712677001954, 0.02571731185913086, 0.025683712005615235, 0.025653472900390627, 0.02608892822265625, 0.025655807495117186, 0.025937631607055665, 0.02571833610534668, 0.02563759994506836, 0.02573516845703125, 0.025869279861450194, 0.025892704010009766, 0.025754751205444334, 0.026033023834228515, 0.025866239547729493, 0.025849855422973633, 0.025912416458129882, 0.025666463851928712, 0.025612064361572266, 0.025730815887451172, 0.025625024795532227, 0.025659423828125, 0.025671680450439452, 0.025729280471801758, 0.02572876739501953, 0.025820480346679688, 0.025717439651489257, 0.025759328842163087, 0.025772127151489257, 0.02573139190673828, 0.025574975967407227, 0.025661792755126953, 0.025675647735595702, 0.025624799728393554, 0.025593856811523437, 0.025624191284179688, 0.025567615509033203, 0.025555999755859374, 0.025639392852783202, 0.02562508773803711, 0.025604032516479493, 0.025738847732543944, 0.025555423736572266, 0.025651199340820312, 0.02556064033508301, 0.025675935745239256, 0.02571676826477051, 
0.026022144317626953, 0.025624576568603515, 0.02609676742553711, 0.025598367691040038, 0.025536991119384764, 0.025589759826660157, 0.02574950408935547, 0.025708127975463867, 0.02565977668762207, 0.025497472763061524, 0.025585664749145507, 0.02549907112121582, 0.02561484718322754, 0.025890687942504882, 0.025643360137939452, 0.025701791763305663, 0.025688671112060548, 0.025610240936279297, 0.02556710433959961, 0.025505056381225587, 0.02572496032714844, 0.025780927658081054, 0.02565951919555664, 0.02580009651184082, 0.025643487930297852, 0.025559167861938476, 0.025808895111083984, 0.025931776046752928, 0.025712127685546874, 0.025702911376953123, 0.025586847305297852, 0.028973567962646486, 0.026228448867797852, 0.02581158447265625, 0.02568716812133789, 0.025572223663330076, 0.025948160171508788, 0.025819040298461913, 0.02567350387573242, 0.025634687423706056, 0.025655744552612304, 0.025796607971191408, 0.025867359161376953, 0.025764768600463867, 0.025839616775512695, 0.025824640274047853, 0.025734880447387695, 0.025820064544677734, 0.025786367416381836, 0.025868288040161135, 0.025665536880493164, 0.025680160522460936, 0.025712352752685547, 0.02568806457519531, 0.025669631958007814, 0.025661439895629884, 0.025841279983520506, 0.025700128555297852, 0.025743967056274415, 0.025802528381347656, 0.02576406478881836, 0.02570240020751953, 0.025579519271850586, 0.025726144790649413, 0.0257030086517334, 0.025624799728393554, 0.02567913627624512, 0.025543455123901368, 0.025630655288696288, 0.025558303833007813, 0.025602783203125, 0.02574131202697754, 0.025542272567749023, 0.025760128021240235, 0.025589696884155272, 0.02662816047668457, 0.025651199340820312, 0.02554265594482422, 0.025780223846435548, 0.025837087631225587, 0.025648832321166992, 0.02573187255859375, 0.025810783386230468, 0.025714111328125, 0.025670368194580077, 0.02571468734741211, 0.02583763122558594, 0.026490816116333006, 0.026076608657836915, 0.02573513603210449, 0.025651615142822267, 0.02568547248840332, 0.025698911666870116, 0.025617504119873048, 0.025612863540649414, 0.025790367126464844, 0.02565065574645996, 0.025631103515625, 0.02556777572631836, 0.02794905662536621, 0.025730464935302736, 0.025606752395629883, 0.02559119987487793, 0.02557923126220703, 0.025596799850463866, 0.025687744140625, 0.02556755256652832, 0.025513343811035157, 0.02558835220336914, 0.02552176094055176, 0.025575040817260742, 0.025731584548950196, 0.025559295654296876, 0.025533695220947266, 0.025762592315673828, 0.02587993621826172, 0.02595702362060547, 0.026018943786621094, 0.025831584930419923, 0.025733280181884765, 0.025663583755493165, 0.025648704528808595, 0.026066816329956055, 0.025971424102783202, 0.02583171272277832, 0.027856895446777344, 0.027648000717163085, 0.026458112716674805, 0.02628630447387695, 0.026010431289672852, 0.026790271759033202, 0.02586662483215332, 0.025811199188232423, 0.025834815979003906, 0.02590787124633789, 0.025628351211547853, 0.025674047470092772, 0.02570240020751953, 0.025647104263305662, 0.025616064071655273, 0.02557776069641113, 0.02565849685668945, 0.025655904769897462, 0.025659711837768554, 0.02587238311767578, 0.025748960494995116, 0.02587504005432129, 0.02564499282836914, 0.025683263778686523, 0.025578176498413086, 0.02609542465209961, 0.025591808319091795, 0.025800703048706054, 0.02592697525024414, 0.025714815139770506, 0.025778751373291015, 0.025640960693359374, 0.025630239486694337, 0.02569059181213379, 0.025612287521362305, 0.025769983291625977, 0.025614271163940428, 0.025507904052734374, 0.025577472686767577, 
0.025562400817871093, 0.02562940788269043, 0.025544704437255858, 0.025616064071655273, 0.025567359924316406, 0.025608383178710937, 0.025602304458618164, 0.025650432586669922, 0.025522687911987304, 0.025597280502319335, 0.025608863830566406, 0.025578784942626952, 0.02566012763977051, 0.025561088562011718, 0.025562688827514647, 0.02554515266418457, 0.025503103256225585, 0.025534431457519532, 0.02563702392578125, 0.02565990447998047, 0.025585664749145507, 0.025777856826782228, 0.025864511489868163, 0.026053951263427733, 0.02590105628967285, 0.02590585517883301, 0.025866239547729493, 0.026041696548461914, 0.02585625648498535, 0.025659807205200197, 0.0256058235168457, 0.025749824523925782, 0.02570240020751953, 0.025922752380371093, 0.025908031463623048, 0.025896127700805665, 0.02584566307067871, 0.02588559913635254, 0.025767391204833984, 0.025921440124511717, 0.026147584915161132, 0.026065887451171874, 0.02606319999694824, 0.026124063491821288, 0.025996063232421877, 0.026187776565551758, 0.026292224884033204, 0.026062847137451172, 0.0259051513671875, 0.025869728088378906, 0.025911903381347655, 0.026372064590454103, 0.02608140754699707, 0.02640822410583496, 0.02604435157775879, 0.026192319869995116, 0.026130495071411134, 0.02635372734069824, 0.0266296329498291, 0.026399360656738282, 0.026177536010742186, 0.026140159606933593, 0.02626201629638672, 0.026251264572143555, 0.026054367065429688, 0.026165536880493164, 0.02633932876586914, 0.02622377586364746, 0.02621116828918457, 0.027432512283325196, 0.026198335647583008, 0.026705856323242187, 0.026282175064086914, 0.02636595153808594, 0.02622371292114258, 0.026172319412231446, 0.02638768005371094, 0.026459135055541993, 0.026187456130981446, 0.026227903366088868, 0.026258623123168946, 0.02641481590270996, 0.02612224006652832, 0.0260849609375, 0.026114463806152344, 0.026113504409790038, 0.026081663131713867, 0.025981088638305665, 0.02602934455871582, 0.026052736282348634, 0.02620844841003418, 0.026956192016601564, 0.025882591247558595, 0.02584307289123535, 0.027308704376220704, 0.02636390495300293, 0.026647808074951172, 0.026219263076782226, 0.026232416152954102, 0.026233247756958008, 0.026163200378417968, 0.026164640426635744, 0.026042783737182617, 0.025830848693847656, 0.026085311889648438, 0.026065343856811522, 0.026175872802734375, 0.026067232131958006, 0.02604412841796875, 0.025931776046752928, 0.025812063217163086, 0.026135456085205077, 0.02646188735961914, 0.026196287155151366, 0.026820447921752928, 0.026077312469482423, 0.02610630416870117, 0.026248384475708007, 0.026141504287719726, 0.02707865524291992, 0.026263551712036134, 0.02608870315551758, 0.025883392333984376, 0.025939327239990234, 0.025930368423461914, 0.025849855422973633, 0.026040319442749024, 0.026286079406738282, 0.025907199859619142, 0.026183679580688478, 0.02607513618469238, 0.02710527992248535, 0.02650111961364746, 0.026077184677124023, 0.025858047485351563, 0.025847328186035155, 0.02597052764892578, 0.025792896270751955, 0.02562073516845703, 0.025687936782836915, 0.025689727783203126, 0.02583616065979004, 0.026206079483032226, 0.02605174446105957, 0.026121055603027344, 0.025896608352661134, 0.025781919479370117, 0.026004032135009767, 0.026257535934448243, 0.0264040641784668, 0.026153760910034178, 0.026232831954956053, 0.025988672256469728, 0.026193952560424803, 0.02631839942932129, 0.026214815139770507, 0.026300832748413085, 0.026243040084838867, 0.026195968627929687, 0.026169408798217775, 0.026344831466674806, 0.026239423751831054, 0.02643574333190918, 0.026822687149047852, 
0.026496000289916992, 0.02672947120666504, 0.026092639923095705, 0.02622876739501953, 0.026231775283813475, 0.026349472045898437, 0.026038272857666016, 0.02619913673400879, 0.02610425567626953, 0.026081695556640624, 0.026110015869140624, 0.026199840545654298, 0.026107616424560547]",tokens/s,37.89897446922551,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4758.388736,7635.59936,0.0,7233.077248,6543.2832,s,1,12.3408505859375,12.3408505859375,0.0,12.3408505859375,12.3408505859375,12.3408505859375,12.3408505859375,[12.3408505859375],,kWh,0.00015202254792084206,1.6761836748916948e-05,6.771255416998523e-05,0.00023649693883974422,,MB,1796.972544,7654.473728,0.0,7237.271552,5960.482816,s,10,2.4328533630371094,0.24328533630371094,0.00024076226537256992,0.24331623077392578,0.2436219467163086,0.24362496109008788,0.24362737258911132,"[0.2436279754638672, 0.24308323669433593, 0.24345068359375, 0.2428390655517578, 0.24317744445800782, 0.2430455322265625, 0.24337568664550782, 0.2433539581298828, 0.24327850341796875, 0.24362127685546875]",tokens/s,1052.2623512352443,kWh,7.159461614836304e-06,7.895647222964416e-07,4.757937410682496e-06,1.2706963747815242e-05,tokens/kWh,20146433.489590704,MB,1805.529088,7654.473728,0.0,7237.271552,5960.485376,s,10,24.600891357421876,2.4600891357421872,0.006768554399055857,2.458739013671875,2.469114306640625,2.4721555419921875,2.474588530273438,"[2.461261474609375, 2.4684384765625, 2.450878173828125, 2.459242431640625, 2.458235595703125, 2.47519677734375, 2.454518798828125, 2.4560732421875, 2.4555439453125, 2.46150244140625]",tokens/s,25.608828186216698,kWh,7.209802875140809e-05,7.951352123776644e-06,4.6588214777320706e-05,0.0001266375956525054,tokens/kWh,497482.59729182255,,s,630,24.598819301605253,0.0390457449231829,0.0004906395163426348,0.03893424034118652,0.03935225143432618,0.039595933532714844,0.041793840713501,"[0.03940937423706055, 0.038965984344482424, 0.03891404724121094, 0.03896319961547851, 0.038954113006591795, 0.038886398315429685, 0.03889123153686523, 0.038945087432861326, 0.039042911529541015, 0.038874176025390624, 0.039119041442871094, 0.03895782470703125, 0.03890380859375, 0.03881167984008789, 0.038938560485839845, 0.03873311996459961, 0.038892257690429685, 0.03884646224975586, 0.038895263671875, 0.03893036651611328, 0.038913951873779294, 0.03952633666992188, 0.04026547241210938, 0.039432960510253905, 0.0392765121459961, 0.03907696151733398, 0.039060478210449216, 0.03898931121826172, 0.03896166229248047, 0.039098590850830076, 0.03896297454833984, 0.03925564956665039, 0.0390742073059082, 0.038913246154785155, 0.039010272979736325, 0.03891900634765625, 0.039075809478759764, 0.0390032958984375, 0.03930316925048828, 0.03898863983154297, 0.03884239959716797, 0.038870433807373046, 0.03882451248168945, 0.03888092803955078, 0.03882735824584961, 0.03887804794311524, 0.038891681671142576, 0.038985919952392575, 0.038893184661865234, 0.038932769775390626, 0.039319454193115236, 0.03901571273803711, 0.03885654449462891, 0.03887993621826172, 0.03882368087768555, 0.03882783889770508, 0.03878483200073242, 0.039125823974609376, 0.039206687927246096, 0.041990081787109376, 0.03907612609863281, 0.03921100616455078, 0.039124000549316404, 0.039411231994628905, 0.03890470504760742, 0.038929534912109376, 0.03873494338989258, 0.038892478942871095, 0.03878380966186523, 0.038735870361328126, 0.038940673828125, 0.039153087615966795, 0.03888364791870117, 0.0387597770690918, 0.03864643096923828, 0.038675937652587894, 0.038676864624023435, 0.03874038314819336, 0.03885635375976562, 0.04260006332397461, 0.0390786247253418, 0.039163902282714845, 0.03889152145385742, 0.039007423400878906, 0.03891398239135742, 0.040021984100341794, 0.03888220977783203, 0.03916799926757813, 0.04069580841064453, 0.03954687881469727, 0.03884848022460938, 0.039368736267089845, 0.03918985748291016, 
0.03896377563476563, 0.039268417358398436, 0.038998046875, 0.038981407165527344, 0.039180511474609374, 0.03915129470825195, 0.03895273590087891, 0.0389268798828125, 0.03888079833984375, 0.03908451080322266, 0.03902195358276367, 0.03918707275390625, 0.04039680099487305, 0.03935004806518555, 0.04303484725952148, 0.03949363327026367, 0.03933910369873047, 0.0389532470703125, 0.03888601684570313, 0.038835262298583986, 0.03936761474609375, 0.03896115112304688, 0.03897753524780274, 0.03894476699829102, 0.03897465515136719, 0.03982009506225586, 0.03903622436523437, 0.038609569549560546, 0.03865603256225586, 0.03872358322143555, 0.03873129653930664, 0.03870361709594727, 0.03872883224487305, 0.04008550262451172, 0.0393994255065918, 0.03884585571289063, 0.03891267013549805, 0.03885254287719726, 0.03844300842285156, 0.03856083297729492, 0.03851769638061524, 0.03860684967041016, 0.038583648681640624, 0.03894905471801758, 0.03892473602294922, 0.0384400634765625, 0.038507423400878905, 0.038504638671875, 0.03879916763305664, 0.038795265197753906, 0.03881574249267578, 0.03886630249023437, 0.038969406127929686, 0.038730304718017576, 0.03877097702026367, 0.03889123153686523, 0.03892428970336914, 0.03886899185180664, 0.039016448974609375, 0.03891190338134766, 0.03908822250366211, 0.03877062225341797, 0.039193790435791014, 0.038816638946533207, 0.038821983337402347, 0.039120800018310545, 0.03889766311645508, 0.0389568977355957, 0.03875596618652344, 0.03881808090209961, 0.03882329559326172, 0.039220096588134766, 0.03881369781494141, 0.03891766357421875, 0.03871382522583008, 0.038782913208007815, 0.03879455947875977, 0.03884313583374024, 0.03902259063720703, 0.038851966857910154, 0.03894121551513672, 0.0389422721862793, 0.03904931259155273, 0.0389442253112793, 0.038938945770263675, 0.039053985595703125, 0.039220767974853514, 0.03887692642211914, 0.03919126510620117, 0.03879673767089844, 0.03900473785400391, 0.039044670104980465, 0.03884601593017578, 0.039097217559814455, 0.03887855911254883, 0.03932022476196289, 0.0391615982055664, 0.0393177604675293, 0.03933116912841797, 0.038924896240234375, 0.03913119888305664, 0.03913318252563477, 0.03906083297729492, 0.039322208404541016, 0.03901155090332031, 0.039061439514160155, 0.03894979095458984, 0.038927425384521486, 0.03898054504394531, 0.03880550384521484, 0.0390077133178711, 0.03889360046386719, 0.03883468627929688, 0.038983680725097655, 0.0388746223449707, 0.03907020950317383, 0.03895500946044922, 0.0389051513671875, 0.03898847961425781, 0.03885193634033203, 0.03888723373413086, 0.03880624008178711, 0.03869286346435547, 0.039096000671386716, 0.03892268753051758, 0.03892979049682617, 0.039062145233154294, 0.03947436904907226, 0.038949249267578125, 0.03906508636474609, 0.03884719848632812, 0.03909276962280273, 0.03900998306274414, 0.039032833099365234, 0.038844417572021485, 0.039858272552490234, 0.03919891357421875, 0.03890147018432617, 0.03914547348022461, 0.03891571044921875, 0.03944460678100586, 0.039534847259521486, 0.03891155242919922, 0.03890991973876953, 0.038935009002685546, 0.03889078521728516, 0.038916831970214845, 0.03898112106323242, 0.03893056106567383, 0.03908143997192383, 0.03894496154785156, 0.03915235137939453, 0.038993408203125, 0.03908377456665039, 0.03897542572021485, 0.03905414581298828, 0.039049217224121094, 0.03906576156616211, 0.03897078323364258, 0.03991961669921875, 0.03997228622436524, 0.03915423965454102, 0.03904512023925781, 0.0398287353515625, 0.038873855590820315, 0.038954208374023434, 0.03896809768676758, 0.03890995025634766, 
0.03885174560546875, 0.03882992172241211, 0.038849536895751956, 0.038836223602294925, 0.03884025573730469, 0.038956737518310545, 0.039147903442382816, 0.038801406860351564, 0.03921516799926758, 0.038774688720703124, 0.038937950134277345, 0.03894467163085937, 0.03881999969482422, 0.03895951843261719, 0.038866241455078124, 0.03891190338134766, 0.038994945526123044, 0.039071998596191405, 0.039128768920898435, 0.03892575836181641, 0.0388614387512207, 0.038772705078125, 0.038783008575439454, 0.038752254486083985, 0.03878297424316406, 0.03889152145385742, 0.038793216705322264, 0.038835582733154295, 0.038980224609375, 0.038894622802734376, 0.03926233673095703, 0.03907884979248047, 0.039451583862304684, 0.03959033584594727, 0.039139198303222655, 0.039030910491943356, 0.03900374221801758, 0.03922438430786133, 0.039446590423583984, 0.03902649688720703, 0.038819839477539066, 0.03900553512573242, 0.03898134231567383, 0.03887430572509765, 0.03907110214233398, 0.03892057418823242, 0.039546497344970705, 0.03881612777709961, 0.03898777770996094, 0.038803455352783206, 0.03886489486694336, 0.03887308883666992, 0.03878911972045898, 0.03877478408813476, 0.04041932678222656, 0.03958988952636719, 0.03937014389038086, 0.039045726776123044, 0.039167102813720704, 0.04391411209106445, 0.03945676803588867, 0.03916799926757813, 0.03921004867553711, 0.03922998428344727, 0.03984425735473633, 0.0392355842590332, 0.039057407379150394, 0.03911270523071289, 0.039118335723876956, 0.03905382537841797, 0.0389788818359375, 0.039088287353515626, 0.03909276962280273, 0.039102527618408205, 0.03907929611206055, 0.0395530891418457, 0.0393666877746582, 0.04197548675537109, 0.03928758239746094, 0.03958972930908203, 0.03909571075439453, 0.03941616058349609, 0.039080352783203126, 0.03909836959838867, 0.03901760101318359, 0.038763294219970705, 0.03885065460205078, 0.03879935836791992, 0.03966326522827148, 0.04027571105957031, 0.03898428726196289, 0.03899737548828125, 0.03910297775268555, 0.03881587219238281, 0.03901638412475586, 0.038870624542236325, 0.03879590225219726, 0.03908182525634766, 0.03890790557861328, 0.039059070587158205, 0.03880287933349609, 0.03897849655151367, 0.03925993728637695, 0.039147743225097655, 0.03996057510375976, 0.0388403205871582, 0.03881942367553711, 0.03883980941772461, 0.038812576293945314, 0.03877478408813476, 0.038844383239746094, 0.03900380706787109, 0.039216640472412106, 0.039487712860107424, 0.03930588912963867, 0.03918438339233398, 0.03891302490234375, 0.03935065460205078, 0.039505470275878904, 0.03934422302246094, 0.039113216400146485, 0.03902195358276367, 0.039310302734375, 0.03914662551879883, 0.03895795059204102, 0.03887625503540039, 0.03890060806274414, 0.038868385314941405, 0.03883814239501953, 0.03883494567871094, 0.03878911972045898, 0.038794750213623046, 0.03886540985107422, 0.038733505249023435, 0.03884201431274414, 0.03878086471557617, 0.0389700813293457, 0.03879510498046875, 0.03904528045654297, 0.03906259155273437, 0.038927295684814456, 0.038798686981201175, 0.03884918212890625, 0.038785247802734374, 0.038731361389160154, 0.03896121597290039, 0.039141120910644533, 0.038958942413330075, 0.039247486114501955, 0.039489727020263675, 0.03911753463745117, 0.0388175048828125, 0.03892457580566406, 0.03877199935913086, 0.03876873779296875, 0.03878073501586914, 0.03880428695678711, 0.03870105743408203, 0.03902873611450195, 0.038757568359375, 0.03878790283203125, 0.03871929550170899, 0.03873811340332031, 0.03874534225463867, 0.03877145767211914, 0.03876249694824219, 0.038688640594482425, 
0.038715137481689456, 0.03877856063842773, 0.03874035263061523, 0.038909343719482424, 0.041128032684326174, 0.03910943984985352, 0.03902860641479492, 0.038905406951904295, 0.03897171020507813, 0.03910403060913086, 0.038933216094970705, 0.03887855911254883, 0.03879177474975586, 0.03893945693969727, 0.038760448455810545, 0.03884848022460938, 0.03878710556030274, 0.038975265502929686, 0.03880508804321289, 0.03877974319458008, 0.038730751037597655, 0.038738719940185545, 0.0388218879699707, 0.03861503982543945, 0.03913868713378906, 0.038775424957275394, 0.039032833099365234, 0.03943328094482422, 0.0392790412902832, 0.03887974548339844, 0.03885670471191406, 0.039144798278808596, 0.03873654556274414, 0.038849536895751956, 0.03894579315185547, 0.03890380859375, 0.0392806396484375, 0.03892838287353516, 0.03898540878295898, 0.03885023880004883, 0.039532318115234374, 0.03894953536987305, 0.03888105773925781, 0.038848896026611325, 0.03890800094604492, 0.038889408111572266, 0.03898732757568359, 0.038863296508789065, 0.03883779144287109, 0.0390599365234375, 0.03890585708618164, 0.03915996932983398, 0.03884755325317383, 0.03889846420288086, 0.039038433074951175, 0.03910915374755859, 0.03907328033447265, 0.03887273788452148, 0.03904393768310547, 0.03892633438110352, 0.0389119987487793, 0.03889900970458984, 0.04251513671875, 0.039137279510498044, 0.03896115112304688, 0.038866943359375, 0.03895840072631836, 0.03890591812133789, 0.038946624755859374, 0.03886163330078125, 0.03873788833618164, 0.03879280090332031, 0.038695358276367185, 0.038719486236572266, 0.03873177719116211, 0.038770046234130856, 0.039544223785400394, 0.038933086395263675, 0.03889152145385742, 0.03886463928222656, 0.03876268768310547, 0.03907916641235352, 0.03880121612548828, 0.03881881713867188, 0.038772319793701174, 0.039893409729003904, 0.03883580780029297, 0.038801441192626955, 0.03884070587158203, 0.03873523330688477, 0.03870783996582031, 0.03870719909667969, 0.038551231384277344, 0.038721153259277344, 0.03887174224853516, 0.03881574249267578, 0.038722911834716794, 0.038832801818847656, 0.03884787368774414, 0.03876671981811523, 0.03878144073486328, 0.03890380859375, 0.03892019271850586, 0.03893862533569336, 0.03873936080932617, 0.038890079498291014, 0.03883929443359375, 0.038745025634765624, 0.03894892883300781, 0.038793087005615234, 0.03883414459228516, 0.03878927993774414, 0.038749664306640626, 0.038752159118652346, 0.03870169448852539, 0.03878508758544922, 0.03869279861450195, 0.03871539306640625, 0.03870719909667969, 0.03873904037475586, 0.039158432006835935, 0.03887744140625, 0.03901030349731445, 0.039057407379150394, 0.039216159820556644, 0.03893468856811524, 0.038951744079589845, 0.03901808166503906, 0.03901180648803711, 0.03930003356933594, 0.03901993560791016, 0.042244735717773436, 0.03936662292480469, 0.039122943878173826, 0.040060127258300784, 0.03897423934936523, 0.039128192901611326, 0.03892060852050781, 0.03887766265869141, 0.0397127685546875, 0.03899091339111328, 0.03985299301147461, 0.04026115036010742, 0.03973168182373047, 0.039329792022705076, 0.03903395080566406, 0.03946588897705078, 0.03913228988647461, 0.038962047576904295, 0.0390261116027832, 0.03909619140625, 0.038967647552490235, 0.03907823944091797, 0.04134912109375, 0.039198719024658206, 0.038900894165039064, 0.038949024200439455, 0.03892838287353516, 0.03870361709594727, 0.03895724868774414, 0.03892150497436524, 0.038865150451660155, 0.03897305679321289, 0.0387940788269043, 0.03893395233154297, 0.0390948486328125, 0.03884198379516601, 0.039600513458251954, 
0.04008563232421875, 0.038943809509277345, 0.039019168853759764, 0.038863006591796874, 0.0388935661315918, 0.03881347274780273, 0.03878121566772461, 0.03896867370605469, 0.03901702499389648, 0.03935030364990234, 0.039008255004882815, 0.038972766876220706, 0.03891801452636719, 0.03883292770385742, 0.038754528045654296, 0.03881347274780273, 0.0388935661315918, 0.03889120101928711, 0.03885087966918945, 0.03879116821289062, 0.0387193603515625, 0.038875263214111326, 0.0388403205871582, 0.038849536895751956, 0.03882291030883789, 0.039155006408691406, 0.038859455108642575, 0.038934528350830076, 0.03885030364990234, 0.03876480102539062, 0.03883174514770508, 0.03883456039428711, 0.038888671875, 0.03895366287231445]",tokens/s,25.6109853190754,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,891.793408,578.68288,0.0,176.160768,154.500608,s,1,7.68460009765625,7.68460009765625,0.0,7.68460009765625,7.68460009765625,7.68460009765625,7.68460009765625,[7.68460009765625],,kWh,1.8725651933330783e-05,2.0554159316113788e-06,5.712226791992903e-06,2.6493294656935067e-05,,MB,1347.403776,660.471808,0.0,243.269632,200.402944,s,15,0.21683673763275146,0.014455782508850098,0.00015123674706497335,0.014476991653442383,0.01462774429321289,0.014664598846435546,0.014711172943115235,"[0.01453660774230957, 0.014423999786376953, 0.01448851203918457, 0.014207296371459961, 0.014370016098022461, 0.0146396484375, 0.014177344322204589, 0.0144170560836792, 0.014301471710205078, 0.014526432037353515, 0.014609888076782226, 0.014476991653442383, 0.014722816467285156, 0.014351296424865722, 0.014587360382080078]",tokens/s,17709.176230568773,kWh,3.3981241503204394e-07,3.747519960016414e-08,1.8161442546214828e-07,5.589020400943564e-07,tokens/kWh,458040911.70749867,MB,1382.301696,675.151872,0.0,257.949696,200.405504,s,15,10.160081970214845,0.6773387980143231,0.0027783873214522093,0.6773782348632813,0.6793719604492188,0.6810576232910156,0.6836510754394531,"[0.674166748046875, 0.6788673095703125, 0.6730918579101562, 0.677441162109375, 0.6842994384765625, 0.6778885498046875, 0.67672998046875, 0.6723080444335937, 0.6771316528320312, 0.6796682739257812, 0.6766846313476562, 0.678927490234375, 0.6767113647460937, 0.6773782348632813, 0.6787872314453125]",tokens/s,93.01106061647424,kWh,1.5136034747907643e-05,1.6692354952332711e-06,5.71055403095921e-06,2.251582427410012e-05,tokens/kWh,2798032.1409982177,,s,945,10.151029086112963,0.010741829720754471,0.00022057065331269437,0.010686271667480469,0.010957222747802734,0.01102350082397461,0.011512038421630857,"[0.010279999732971192, 0.010664896011352539, 0.010714655876159667, 0.010606335639953614, 0.01066057586669922, 0.010692383766174316, 0.010649824142456054, 0.010770048141479492, 0.010979711532592773, 0.010706944465637207, 0.01055510425567627, 0.010655263900756835, 0.010637248039245605, 0.010549504280090332, 0.010650176048278809, 0.010557727813720703, 
0.010732416152954102, 0.010826592445373534, 0.010631104469299317, 0.010571840286254883, 0.01058726406097412, 0.011006848335266113, 0.010585951805114745, 0.010667967796325683, 0.010679648399353028, 0.010626208305358887, 0.010700480461120605, 0.010680352210998535, 0.010854399681091309, 0.010916128158569335, 0.010910655975341797, 0.010785568237304687, 0.010716992378234863, 0.01080953598022461, 0.011024479866027831, 0.010991519927978515, 0.010791168212890626, 0.010743295669555664, 0.010641663551330567, 0.010672127723693848, 0.01063321590423584, 0.010613887786865235, 0.01066096019744873, 0.01064463996887207, 0.010744416236877441, 0.010774016380310059, 0.010678815841674804, 0.010587679862976073, 0.010626527786254883, 0.010585247993469238, 0.010651359558105469, 0.010585247993469238, 0.01058512020111084, 0.010620223999023437, 0.010703007698059082, 0.010776608467102052, 0.01060905647277832, 0.010634655952453614, 0.010670720100402832, 0.010616479873657226, 0.01069705581665039, 0.010676223754882813, 0.01071718406677246, 0.010389856338500977, 0.010749600410461425, 0.010729696273803711, 0.010866815567016602, 0.010852000236511231, 0.010887167930603027, 0.011044704437255859, 0.010938528060913087, 0.010992704391479492, 0.010962976455688476, 0.010991583824157715, 0.010963904380798339, 0.010743647575378419, 0.010666144371032716, 0.01067948818206787, 0.010691295623779297, 0.01071132755279541, 0.01065123176574707, 0.010719039916992187, 0.010602911949157716, 0.01063929557800293, 0.010678336143493653, 0.01065727996826172, 0.010674688339233398, 0.010771743774414063, 0.010649503707885742, 0.010703807830810546, 0.01067404842376709, 0.010564831733703614, 0.010758943557739258, 0.010554944038391113, 0.010558015823364257, 0.010657343864440918, 0.010782848358154297, 0.010629311561584472, 0.010681728363037109, 0.010651264190673828, 0.011169088363647462, 0.010733247756958008, 0.010627360343933105, 0.01081929588317871, 0.010958911895751954, 0.010947903633117676, 0.010812031745910644, 0.010832159996032716, 0.010831808090209961, 0.010966815948486329, 0.0109650239944458, 0.010880991935729981, 0.010813440322875977, 0.010700799942016602, 0.010645503997802735, 0.010699007987976074, 0.010608480453491212, 0.010636768341064452, 0.01074835205078125, 0.010598400115966796, 0.010651647567749023, 0.010595808029174805, 0.010566207885742187, 0.010587712287902833, 0.012001440048217774, 0.010765727996826171, 0.010354496002197266, 0.010633631706237793, 0.010639103889465332, 0.010617088317871094, 0.010598079681396484, 0.010541312217712403, 0.010537343978881836, 0.010583744049072266, 0.010522624015808106, 0.01058182430267334, 0.010516672134399415, 0.01054297637939453, 0.010573951721191407, 0.010653696060180663, 0.010562815666198731, 0.010858816146850586, 0.010801600456237794, 0.010620351791381835, 0.010653535842895507, 0.010653663635253906, 0.010664896011352539, 0.01069654369354248, 0.010840031623840332, 0.010905599594116211, 0.010878720283508302, 0.010809599876403808, 0.010804703712463379, 0.010813440322875977, 0.010957504272460937, 0.011140735626220703, 0.01094435214996338, 0.010793343544006347, 0.010670080184936523, 0.010569184303283692, 0.010746560096740722, 0.010661664009094239, 0.010607775688171386, 0.010591039657592774, 0.010647775650024414, 0.010668959617614746, 0.010677215576171874, 0.010693920135498048, 0.01055964756011963, 0.010576704025268554, 0.010634943962097168, 0.010631232261657715, 0.010698495864868164, 0.01087923240661621, 0.010716192245483399, 0.010716128349304199, 0.010670080184936523, 0.010605600357055664, 
0.010628064155578614, 0.010683551788330078, 0.010582880020141601, 0.01073971176147461, 0.010710911750793457, 0.010640768051147461, 0.01062169647216797, 0.010612704277038575, 0.01063644790649414, 0.010542271614074707, 0.010589023590087891, 0.010396832466125489, 0.010746975898742676, 0.010649151802062988, 0.010658143997192383, 0.01064345645904541, 0.010799103736877442, 0.010901503562927246, 0.010921983718872071, 0.010807295799255372, 0.010812543869018554, 0.010826432228088378, 0.011161888122558594, 0.01090726375579834, 0.010878399848937988, 0.011352383613586425, 0.011305503845214844, 0.010778623580932617, 0.010741375923156738, 0.010758336067199708, 0.010840512275695801, 0.010661631584167481, 0.010622976303100586, 0.010687487602233887, 0.01063219165802002, 0.01062502384185791, 0.010748031616210938, 0.010695743560791015, 0.010593440055847168, 0.010761055946350098, 0.010561663627624512, 0.010599231719970703, 0.01059119987487793, 0.010611680030822754, 0.010588288307189941, 0.010606143951416016, 0.010533120155334473, 0.010584063529968261, 0.010530816078186036, 0.010548928260803222, 0.010590432167053222, 0.01061302375793457, 0.0106179838180542, 0.01078979206085205, 0.010619775772094726, 0.010640288352966308, 0.010817343711853028, 0.010936287879943848, 0.01083993625640869, 0.010829407691955567, 0.010810111999511719, 0.01084006404876709, 0.01101414394378662, 0.010946335792541504, 0.010802399635314942, 0.010822784423828126, 0.010729344367980957, 0.01075609588623047, 0.010755264282226562, 0.010656255722045899, 0.010715456008911132, 0.010672127723693848, 0.01061683177947998, 0.01072537612915039, 0.010323040008544922, 0.010621952056884766, 0.01066966438293457, 0.010635583877563477, 0.010548959732055664, 0.01059055995941162, 0.011160991668701171, 0.011860480308532715, 0.013412384033203125, 0.010743743896484375, 0.010637375831604004, 0.01063430404663086, 0.0110797119140625, 0.010714015960693359, 0.01075216007232666, 0.01074124813079834, 0.010715807914733887, 0.010652671813964844, 0.010678976058959962, 0.010921183586120606, 0.011111200332641602, 0.010868736267089844, 0.010874879837036134, 0.011687616348266601, 0.010969759941101075, 0.010970175743103027, 0.010916064262390137, 0.010956352233886718, 0.010865471839904784, 0.010657792091369628, 0.011024415969848633, 0.01069161605834961, 0.01067910385131836, 0.010639264106750488, 0.01067478370666504, 0.010641056060791016, 0.010624704360961914, 0.010637887954711914, 0.010694208145141602, 0.010885279655456543, 0.010748191833496094, 0.010606304168701172, 0.010594304084777833, 0.010677311897277832, 0.0105481595993042, 0.010682368278503418, 0.010639264106750488, 0.011419743537902831, 0.010964447975158692, 0.011695839881896973, 0.01076307201385498, 0.010686271667480469, 0.010700991630554199, 0.01069593620300293, 0.010673215866088867, 0.010641087532043457, 0.010841535568237306, 0.010763999938964843, 0.010758336067199708, 0.010670463562011718, 0.010836159706115723, 0.010894720077514648, 0.010967840194702148, 0.01060540771484375, 0.010850144386291503, 0.010934432029724122, 0.011144255638122559, 0.011037631988525391, 0.010795007705688477, 0.010743935585021973, 0.010698816299438477, 0.010655008316040038, 0.010721823692321778, 0.010645279884338379, 0.010612832069396973, 0.010725503921508788, 0.01071232032775879, 0.010745984077453613, 0.010741663932800292, 0.010723551750183106, 0.010656255722045899, 0.01064140796661377, 0.010684255599975587, 0.010614944458007812, 0.010631168365478515, 0.010747808456420899, 0.01052236843109131, 0.010605088233947754, 0.01054700756072998, 
0.010588191986083984, 0.010559488296508789, 0.01065782356262207, 0.010550399780273438, 0.010613568305969238, 0.01078656005859375, 0.010844415664672851, 0.010716863632202148, 0.010782272338867187, 0.010855168342590331, 0.011224672317504883, 0.010920255661010743, 0.010831968307495117, 0.01115328025817871, 0.0109202241897583, 0.011089247703552246, 0.011006464004516601, 0.010820927619934083, 0.010767295837402343, 0.010790143966674805, 0.010697216033935546, 0.010684415817260743, 0.010681695938110351, 0.010717856407165527, 0.01075814437866211, 0.01065510368347168, 0.010613375663757325, 0.010660927772521972, 0.010832351684570312, 0.01082140827178955, 0.010654399871826172, 0.010698528289794921, 0.010955264091491699, 0.010659008026123047, 0.01076483154296875, 0.010637311935424805, 0.010589183807373047, 0.01028115177154541, 0.010577728271484375, 0.010636863708496093, 0.01056991958618164, 0.010762495994567871, 0.010563584327697753, 0.010852352142333984, 0.010780672073364257, 0.010700799942016602, 0.010671520233154297, 0.010703455924987794, 0.011001248359680176, 0.010914400100708007, 0.011012415885925293, 0.010812512397766113, 0.010881759643554688, 0.010964896202087402, 0.010911264419555665, 0.010895808219909668, 0.010819583892822266, 0.010706720352172851, 0.010692192077636718, 0.01070361614227295, 0.01066380786895752, 0.010801024436950683, 0.01070479965209961, 0.01065113639831543, 0.010574463844299317, 0.010636672019958496, 0.010602911949157716, 0.010621248245239258, 0.010673983573913574, 0.01061843204498291, 0.010541728019714355, 0.010590144157409668, 0.010626239776611328, 0.010619872093200684, 0.010604415893554688, 0.010550656318664552, 0.010591168403625489, 0.010560352325439453, 0.010537887573242187, 0.010585887908935547, 0.010983584403991699, 0.010658047676086425, 0.010821536064147949, 0.01072441577911377, 0.010707743644714355, 0.010887104034423828, 0.010994912147521973, 0.010972319602966308, 0.010901535987854004, 0.010860032081604003, 0.010778783798217773, 0.010870719909667969, 0.010960960388183593, 0.010903552055358886, 0.010863743782043456, 0.010686431884765625, 0.010668288230895996, 0.010783391952514649, 0.01064140796661377, 0.010692607879638672, 0.010355615615844726, 0.010640768051147461, 0.010667648315429688, 0.01067311954498291, 0.010596256256103515, 0.010606623649597169, 0.010561632156372071, 0.010584287643432617, 0.010623071670532227, 0.010573023796081544, 0.010586112022399903, 0.010594783782958984, 0.01056339168548584, 0.01062492847442627, 0.010600735664367675, 0.010598400115966796, 0.010606111526489258, 0.01055724811553955, 0.010637248039245605, 0.010577983856201172, 0.010576448440551757, 0.010579168319702148, 0.010695743560791015, 0.010823840141296387, 0.010689824104309083, 0.01065334415435791, 0.01067081642150879, 0.010653696060180663, 0.010758272171020508, 0.010874015808105469, 0.010842847824096679, 0.010831744194030761, 0.01082198429107666, 0.010732640266418457, 0.01076307201385498, 0.010921728134155274, 0.010863743782043456, 0.010812159538269042, 0.010752256393432618, 0.010637311935424805, 0.010834272384643556, 0.010699999809265138, 0.010682175636291504, 0.01060102367401123, 0.010679360389709472, 0.010597215652465821, 0.010623392105102538, 0.01064627170562744, 0.010613759994506837, 0.01068671989440918, 0.010663680076599121, 0.010663935661315918, 0.010665984153747558, 0.010612735748291016, 0.01070899200439453, 0.010672287940979004, 0.01068937587738037, 0.010568384170532227, 0.010663455963134765, 0.010611040115356445, 0.010569472312927247, 0.010598912239074706, 0.0105863037109375, 
0.010293536186218262, 0.010579968452453613, 0.010595840454101562, 0.010590720176696777, 0.010557439804077149, 0.010586112022399903, 0.010639360427856445, 0.010727104187011718, 0.010915295600891113, 0.010998623847961426, 0.010988544464111329, 0.010959168434143067, 0.010855392456054687, 0.010882783889770507, 0.010886943817138672, 0.010924256324768067, 0.011106304168701172, 0.010942208290100097, 0.010866368293762207, 0.010747776031494141, 0.010740415573120117, 0.010782719612121582, 0.010770432472229004, 0.010687904357910156, 0.010664192199707032, 0.01059055995941162, 0.010629119873046875, 0.010573823928833008, 0.010616479873657226, 0.010577471733093261, 0.010611328125, 0.010581567764282226, 0.010565695762634277, 0.011066944122314452, 0.010680928230285644, 0.010694111824035644, 0.010706015586853027, 0.010548800468444823, 0.010572383880615235, 0.01062492847442627, 0.01055510425567627, 0.0106080961227417, 0.010600383758544923, 0.01057859230041504, 0.01059615993499756, 0.010660287857055663, 0.010634655952453614, 0.01061673641204834, 0.01065782356262207, 0.010676223754882813, 0.010885791778564453, 0.010931936264038086, 0.011247615814208984, 0.010792736053466798, 0.010954976081848145, 0.010921983718872071, 0.010964991569519043, 0.010979328155517578, 0.010866911888122558, 0.010700575828552246, 0.010801152229309082, 0.010657792091369628, 0.010716575622558594, 0.01077507209777832, 0.010710240364074707, 0.010706080436706542, 0.0106975679397583, 0.010694720268249512, 0.010617440223693847, 0.010623392105102538, 0.010662816047668456, 0.010675168037414551, 0.010655743598937988, 0.01059772777557373, 0.010594911575317383, 0.010577247619628907, 0.010613408088684082, 0.0106080961227417, 0.010596960067749024, 0.010629119873046875, 0.010698752403259277, 0.010606176376342773, 0.010634976387023926, 0.010617440223693847, 0.010612223625183105, 0.010734175682067871, 0.01074995231628418, 0.010880703926086427, 0.011202719688415528, 0.0110796480178833, 0.010967231750488282, 0.010868608474731445, 0.010868672370910645, 0.010958656311035157, 0.010959232330322265, 0.010849887847900391, 0.010787232398986817, 0.010706239700317383, 0.01068511962890625, 0.010716768264770507, 0.010675840377807617, 0.011102080345153809, 0.010883808135986328, 0.010670271873474121, 0.010706751823425294, 0.010725407600402833, 0.010712448120117188, 0.010640192031860351, 0.010676192283630372, 0.010595552444458008, 0.010703680038452148, 0.010671456336975098, 0.01295584011077881, 0.011499584197998047, 0.011118944168090821, 0.010601759910583496, 0.010830911636352539, 0.010724191665649413, 0.01079804801940918, 0.01068819236755371, 0.010605055809020996, 0.010632800102233888, 0.010590304374694824, 0.010755488395690918, 0.010619647979736328, 0.010659680366516114, 0.010285056114196778, 0.01059779167175293, 0.010652192115783691, 0.010706303596496582, 0.010922623634338378, 0.010886207580566406, 0.010890175819396972, 0.010815648078918457, 0.010725055694580078, 0.010800352096557617, 0.010798111915588379, 0.010891488075256348, 0.010890144348144531, 0.010789664268493652, 0.01070899200439453, 0.010651040077209472, 0.010835616111755372, 0.010723711967468262, 0.010664511680603027, 0.010696160316467284, 0.010699359893798829, 0.010706879615783692, 0.010774687767028808, 0.010665823936462402, 0.01063542366027832, 0.01063100814819336, 0.01058396816253662, 0.010637408256530761, 0.010687904357910156, 0.010629728317260742, 0.01069702434539795, 0.010652959823608399, 0.010588671684265137, 0.010578975677490235, 0.010754336357116699, 0.010705696105957032, 0.010581024169921875, 
0.010576671600341796, 0.010577759742736816, 0.01059823989868164, 0.010602432250976563, 0.010543583869934082, 0.01100380802154541, 0.010657119750976562, 0.010568160057067871, 0.01068607997894287, 0.010668383598327637, 0.010628576278686524, 0.0106626558303833, 0.011048959732055665, 0.010893312454223633, 0.01095680046081543, 0.010962207794189454, 0.010841919898986816, 0.010807295799255372, 0.010904512405395507, 0.010991583824157715, 0.010950400352478027, 0.010856032371520996, 0.01073423957824707, 0.010845184326171875, 0.010714112281799316, 0.010668031692504883, 0.010411487579345704, 0.010664640426635742, 0.01067311954498291, 0.011049823760986328, 0.010860575675964355, 0.010740799903869629, 0.010736607551574706, 0.010692352294921876, 0.010686688423156739, 0.010638751983642579, 0.010615391731262207, 0.01065939235687256, 0.010660287857055663, 0.010636832237243652, 0.010674943923950195, 0.010643168449401856, 0.010626943588256835, 0.01122111988067627, 0.012665984153747558, 0.010678336143493653, 0.010635168075561523, 0.010672831535339355, 0.010731712341308593, 0.010577376365661622, 0.010643775939941407, 0.011329024314880372, 0.01101696014404297, 0.011043231964111328, 0.010861408233642579, 0.010828543663024902, 0.010815008163452149, 0.01085251235961914, 0.010946880340576172, 0.010845952033996582, 0.010748160362243653, 0.010869983673095704, 0.010785471916198731, 0.010748160362243653, 0.010644800186157226, 0.01068070411682129, 0.010678432464599609, 0.010593695640563965, 0.01069279956817627, 0.010613151550292969, 0.010682463645935059, 0.010961088180541992, 0.010730815887451171, 0.010662303924560548, 0.010604224205017089, 0.010633536338806152, 0.010601887702941895, 0.01059286403656006, 0.01062707233428955, 0.01062495994567871, 0.010754112243652345, 0.010679424285888672, 0.01064844799041748, 0.01084812831878662, 0.010745183944702148, 0.010631839752197265, 0.010685728073120118, 0.010725312232971192, 0.010756352424621581, 0.010747551918029785, 0.010864800453186035, 0.010823583602905274, 0.010833919525146484, 0.010905568122863769, 0.010915871620178222, 0.010858495712280274, 0.010858495712280274, 0.010828800201416015, 0.01072214412689209, 0.01066204833984375, 0.010682687759399415, 0.010663616180419921, 0.010700799942016602, 0.010691776275634765, 0.010637791633605957, 0.010633567810058593, 0.010600447654724121, 0.010606111526489258, 0.010568351745605468, 0.010586976051330566, 0.010610783576965332, 0.010591360092163086, 0.010535807609558105, 0.010593152046203614, 0.010543104171752929, 0.010545151710510254, 0.01052467155456543, 0.010743807792663575, 0.010557727813720703, 0.010548959732055664, 0.010586112022399903, 0.010560864448547364, 0.01097590446472168, 0.010942463874816894, 0.010729056358337402, 0.010742176055908203, 0.010703136444091797, 0.010682080268859863, 0.010919936180114746, 0.010774463653564452, 0.010748000144958495, 0.010774080276489257, 0.0109651517868042, 0.01100592041015625, 0.010850879669189453, 0.010779744148254394, 0.01077948760986328, 0.010780447959899903, 0.01092636775970459, 0.010970047950744629, 0.010926943778991699, 0.010927776336669921, 0.010733856201171874, 0.010763680458068848, 0.010746463775634766, 0.010585247993469238, 0.010677151679992676, 0.01065766429901123, 0.01067024040222168, 0.010751263618469238, 0.010656479835510255, 0.010626144409179687, 0.010444831848144532, 0.010856703758239746, 0.011125151634216308, 0.010692095756530762, 0.010660384178161621, 0.01067523193359375, 0.01066851234436035, 0.010605055809020996, 0.010594623565673828, 0.01060649585723877, 0.010867584228515624, 
0.010563712120056152, 0.010585023880004882, 0.010569536209106445, 0.010614784240722656, 0.010640576362609863, 0.010728256225585937, 0.010684063911437988, 0.010629599571228027, 0.010555264472961425, 0.010635295867919921, 0.010739680290222167, 0.01104860782623291, 0.011326016426086426, 0.01152182388305664, 0.011294079780578612, 0.011765536308288574, 0.010937536239624023, 0.010874527931213379, 0.010870783805847169, 0.010936320304870606, 0.011030367851257324, 0.010997920036315919, 0.010816608428955078, 0.010720159530639648, 0.010676223754882813, 0.010737664222717285, 0.010700799942016602, 0.01061888027191162, 0.01063036823272705, 0.010644448280334473, 0.010853856086730958, 0.010635616302490235, 0.010657471656799316, 0.010619168281555175, 0.01061068820953369, 0.010656959533691406, 0.010574687957763671, 0.01057692813873291, 0.010595359802246095, 0.010792703628540038, 0.010671711921691895, 0.010584063529968261, 0.010519136428833007, 0.010589568138122559, 0.010605183601379395, 0.010583840370178222, 0.010618271827697754, 0.01057033634185791, 0.010610912322998047, 0.010637568473815919, 0.010684415817260743, 0.010630463600158692, 0.01054956817626953, 0.010983039855957031, 0.011019840240478515, 0.010889727592468262, 0.010776415824890136, 0.010903712272644042, 0.010856448173522949, 0.01107148838043213, 0.010870559692382813, 0.010917632102966309, 0.010721759796142578, 0.010612735748291016, 0.010681792259216309, 0.01065167999267578, 0.0106779203414917, 0.010682911872863769, 0.010659296035766602, 0.010599295616149902, 0.011964415550231934, 0.010713055610656738, 0.010688639640808106, 0.010599871635437012, 0.010625056266784668, 0.010624832153320312, 0.010578559875488282, 0.010532671928405762, 0.01058835220336914, 0.010526335716247559, 0.010602879524230956, 0.01057209587097168, 0.010543135643005372, 0.010546431541442872, 0.0106080322265625, 0.01059920024871826, 0.010756319999694824, 0.010743871688842773, 0.010854559898376465, 0.010778400421142578, 0.01084620761871338, 0.011167231559753419, 0.010930560111999511, 0.011013343811035156, 0.010873760223388672, 0.010793279647827149, 0.010773504257202148, 0.010958784103393555, 0.01095961570739746, 0.010836000442504884, 0.01080521583557129, 0.01071718406677246, 0.011177984237670899, 0.01084175968170166, 0.010774880409240723, 0.010765536308288574, 0.010801952362060547, 0.010791071891784668, 0.010618720054626465, 0.01056287956237793, 0.010582847595214844, 0.010611616134643554, 0.010671072006225586, 0.010597951889038086, 0.010533344268798829]",tokens/s,93.0940096795505,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 3 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 805, in _apply param_applied = fn(param) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.55 GiB is free. Process 222738 has 13.19 GiB memory in use. Of the allocated memory 13.09 GiB is allocated by PyTorch, and 880.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,950.829056,708.706304,0.0,306.184192,267.166208,s,1,8.2034541015625,8.2034541015625,0.0,8.2034541015625,8.2034541015625,8.2034541015625,8.2034541015625,[8.2034541015625],,kWh,2.5446036145823808e-05,2.7994162056712644e-06,8.805840378017793e-06,3.7051292729512866e-05,,MB,1384.390656,750.649344,0.0,333.447168,310.432256,s,10,0.21025465774536134,0.021025465774536133,0.0003659479102247272,0.021124863624572755,0.02140039768218994,0.02140045518875122,0.021400501194000242,"[0.020212799072265624, 0.021028480529785155, 0.021400384902954102, 0.0214005126953125, 0.021137439727783203, 0.021251487731933593, 0.020471616744995116, 0.021112287521362304, 0.021149824142456055, 0.02108982467651367]",tokens/s,12175.711242033016,kWh,8.002159355021843e-07,8.82412792681899e-08,4.4704297559453915e-07,1.3355001903649135e-06,tokens/kWh,191688478.85379207,MB,1420.120064,792.592384,0.0,375.390208,310.434816,s,10,10.17654473876953,1.017654473876953,0.013924501283801946,1.0233404541015625,1.0301390625,1.0307939575195313,1.0313178735351562,"[1.0239837036132813, 1.0299935302734375, 0.99323681640625, 1.0104454956054687, 1.0314488525390626, 1.0282720947265624, 0.9927484741210938, 1.0226972045898437, 1.0294189453125, 1.0142996215820312]",tokens/s,61.90706336698862,kWh,3.909986173241231e-05,4.312265712426072e-06,1.4235274325806476e-05,5.764740177064487e-05,tokens/kWh,1092850.6413983912,,s,630,10.16971279144287,0.016142401256258525,0.0004914318138524007,0.016289008140563964,0.016468935203552245,0.016620542621612548,0.017173493328094484,"[0.015096575736999511, 0.015402720451354981, 0.01537052822113037, 0.016185440063476563, 0.016226207733154297, 0.01638809585571289, 0.016459775924682618, 0.016269311904907227, 0.01633286476135254, 0.016361440658569336, 0.016274879455566407, 0.016388639450073243, 0.01628316879272461, 0.01650044822692871, 0.016326496124267578, 0.016284576416015627, 0.01620377540588379, 0.016229471206665038, 0.016472095489501952, 0.01623744010925293, 0.017188127517700196, 0.01635740852355957, 0.01616147232055664, 0.0163002872467041, 0.01636319923400879, 0.016231775283813477, 0.016598079681396486, 0.016239360809326173, 0.01615737533569336, 0.016186975479125978, 0.01634502410888672, 0.016304319381713867, 0.016396255493164064, 0.016368511199951172, 0.016874624252319337, 0.016372480392456056, 0.016287519454956056, 0.016644096374511717, 0.016350400924682616, 0.016347967147827148, 0.016436351776123046, 0.016284543991088866, 0.016291839599609375, 0.016200960159301756, 0.01624140739440918, 0.01620172882080078, 0.01620694351196289, 0.016292768478393553, 0.016258304595947265, 0.016112192153930664, 0.01581811237335205, 0.015599712371826172, 0.015440159797668458, 0.01578867244720459, 0.016471872329711913, 0.01629497528076172, 
0.0161942081451416, 0.016337184906005858, 0.016314367294311523, 0.01638400077819824, 0.016291744232177736, 0.016253023147583007, 0.016181055068969726, 0.016241472244262697, 0.016287744522094725, 0.01629497528076172, 0.016352031707763674, 0.016289152145385743, 0.01629862403869629, 0.01632236862182617, 0.01643948745727539, 0.016320512771606444, 0.016373760223388673, 0.01642527961730957, 0.016287424087524413, 0.01621401596069336, 0.016194719314575196, 0.016243551254272463, 0.016472063064575194, 0.016281600952148437, 0.0162795524597168, 0.016440351486206053, 0.016339935302734376, 0.016340991973876954, 0.016420543670654295, 0.016376127243041994, 0.016479583740234376, 0.016382944107055663, 0.016301984786987304, 0.016447328567504884, 0.016351167678833007, 0.016293472290039062, 0.0163209285736084, 0.016316095352172853, 0.016456319808959962, 0.01635196876525879, 0.016308544158935546, 0.016323232650756837, 0.01637977600097656, 0.01618751907348633, 0.016312320709228514, 0.01627689552307129, 0.016233055114746094, 0.01620355224609375, 0.016406848907470704, 0.017086143493652343, 0.016734432220458985, 0.016288959503173828, 0.016193792343139647, 0.016228832244873048, 0.016223583221435547, 0.016327072143554687, 0.01640892791748047, 0.016383455276489257, 0.01641072082519531, 0.01628828811645508, 0.016286624908447265, 0.016294464111328125, 0.016308671951293947, 0.016285696029663087, 0.01630406379699707, 0.016195648193359374, 0.016228351593017578, 0.016256799697875978, 0.0164354248046875, 0.016251232147216795, 0.01621718406677246, 0.016268064498901367, 0.016582784652709962, 0.016852352142333986, 0.016488319396972657, 0.016301151275634765, 0.01638502311706543, 0.017116992950439454, 0.01650147247314453, 0.01654924774169922, 0.016376575469970702, 0.016332799911499024, 0.01678108787536621, 0.016294111251831056, 0.01618070411682129, 0.016374303817749025, 0.016145599365234374, 0.015719264030456544, 0.015548447608947753, 0.015531968116760254, 0.015580256462097168, 0.015673888206481933, 0.015530431747436523, 0.015382464408874513, 0.015257951736450195, 0.01531599998474121, 0.015276351928710937, 0.015310943603515625, 0.015775967597961425, 0.015439871788024903, 0.015368032455444335, 0.015220895767211914, 0.015345664024353027, 0.015362239837646485, 0.015273983955383302, 0.015259455680847168, 0.015277567863464356, 0.015254015922546387, 0.015255552291870117, 0.015333375930786132, 0.015427264213562011, 0.015305024147033691, 0.015198464393615723, 0.015261728286743164, 0.015264703750610351, 0.015346464157104492, 0.015276320457458496, 0.01546793556213379, 0.015350079536437989, 0.015405055999755859, 0.015411264419555664, 0.015384511947631836, 0.015377632141113281, 0.01529651165008545, 0.015349984169006348, 0.015414048194885253, 0.015381471633911133, 0.01571331214904785, 0.016325664520263673, 0.01628441619873047, 0.016381759643554688, 0.016224607467651368, 0.01663968086242676, 0.016199680328369142, 0.01635443115234375, 0.016271743774414062, 0.016595264434814454, 0.017680511474609376, 0.016410688400268554, 0.01637580871582031, 0.017, 0.01642508888244629, 0.016498783111572265, 0.01649830436706543, 0.01665519905090332, 0.016291616439819336, 0.01631023979187012, 0.016252927780151367, 0.016199487686157227, 0.01629372787475586, 0.015857024192810057, 0.015673952102661134, 0.015434111595153809, 0.015359999656677246, 0.015284223556518555, 0.015226880073547363, 0.016141855239868164, 0.01535257625579834, 0.01537337589263916, 0.015343487739562988, 0.01545849609375, 0.01545299243927002, 0.015335200309753418, 0.015541983604431151, 
0.01575964832305908, 0.01711027145385742, 0.015600383758544922, 0.015335424423217774, 0.015319040298461914, 0.015327232360839844, 0.015327136039733886, 0.015387007713317871, 0.01531875228881836, 0.015462400436401368, 0.015333056449890137, 0.01538099193572998, 0.01605219268798828, 0.01622364807128906, 0.016353727340698242, 0.016369279861450196, 0.016283872604370118, 0.016243967056274414, 0.01641360092163086, 0.016251007080078126, 0.01624051284790039, 0.016425983428955078, 0.0162509765625, 0.016252063751220704, 0.01624393653869629, 0.0162774715423584, 0.01626620864868164, 0.01647724723815918, 0.016311071395874024, 0.01624448013305664, 0.01641472053527832, 0.01634000015258789, 0.016238527297973634, 0.016299455642700196, 0.016298559188842772, 0.016236543655395508, 0.017612607955932617, 0.01645359992980957, 0.01655743980407715, 0.01631932830810547, 0.016326656341552736, 0.016292991638183593, 0.01681043243408203, 0.01640233612060547, 0.016623264312744142, 0.016337791442871095, 0.016376863479614256, 0.016276639938354494, 0.016240127563476564, 0.016440927505493166, 0.01634124755859375, 0.01646860885620117, 0.01630396842956543, 0.01657241630554199, 0.01641823959350586, 0.01645382308959961, 0.016628095626831055, 0.01693804740905762, 0.016323488235473634, 0.01635638427734375, 0.015993856430053712, 0.01583334445953369, 0.015746527671813965, 0.015550271987915039, 0.01612233543395996, 0.01634873580932617, 0.01633513641357422, 0.0163637752532959, 0.01636751937866211, 0.01644857597351074, 0.01635424041748047, 0.01636307144165039, 0.016309791564941407, 0.016312831878662108, 0.01629020881652832, 0.016340991973876954, 0.016346399307250976, 0.01637798309326172, 0.016318111419677733, 0.01626985549926758, 0.0164071044921875, 0.01643440055847168, 0.016370431900024414, 0.01632431983947754, 0.016275775909423827, 0.0163022403717041, 0.01635091209411621, 0.016457984924316407, 0.01639193534851074, 0.01641472053527832, 0.01639423942565918, 0.01638604736328125, 0.016306528091430662, 0.016352384567260743, 0.01621609687805176, 0.01620047950744629, 0.016281600952148437, 0.016207807540893553, 0.016304447174072267, 0.016330720901489258, 0.016358240127563477, 0.01631097602844238, 0.01662518310546875, 0.016321247100830077, 0.016529407501220703, 0.016586751937866212, 0.01633695983886719, 0.016313535690307617, 0.016313087463378905, 0.01638764762878418, 0.016327104568481444, 0.016467872619628905, 0.016306560516357423, 0.016282495498657228, 0.01618979263305664, 0.016236480712890626, 0.01633951950073242, 0.01653555107116699, 0.016316640853881837, 0.01642438316345215, 0.016384672164916993, 0.016399936676025392, 0.01624239921569824, 0.016560352325439454, 0.01625721549987793, 0.016336896896362304, 0.01624678421020508, 0.01629952049255371, 0.016365440368652343, 0.01649113655090332, 0.016326656341552736, 0.016275455474853515, 0.016332799911499024, 0.016267040252685546, 0.016332319259643555, 0.016418527603149415, 0.01626108741760254, 0.016339744567871094, 0.016238399505615234, 0.016228767395019533, 0.0163143367767334, 0.016277727127075196, 0.01635852813720703, 0.01627939224243164, 0.01627120018005371, 0.016276639938354494, 0.016359424591064452, 0.016318496704101564, 0.01641436767578125, 0.01652560043334961, 0.01646076774597168, 0.01629475212097168, 0.01713567924499512, 0.015923328399658203, 0.015865728378295897, 0.015635519981384276, 0.015500160217285156, 0.01606048011779785, 0.015106047630310059, 0.015355903625488282, 0.015368288040161132, 0.015276127815246583, 0.015429023742675782, 0.015290559768676758, 0.015433664321899415, 
0.015427840232849122, 0.015619775772094727, 0.015482848167419433, 0.015429887771606445, 0.01536627197265625, 0.01529651165008545, 0.015366144180297851, 0.01528649616241455, 0.015336895942687988, 0.015320927619934081, 0.015327743530273438, 0.01548083209991455, 0.015328960418701172, 0.015336959838867188, 0.015302656173706054, 0.015340319633483887, 0.015355936050415039, 0.015357855796813966, 0.015319135665893555, 0.015370112419128418, 0.015374239921569824, 0.015325471878051758, 0.015378560066223144, 0.015643551826477052, 0.01521507167816162, 0.015197792053222657, 0.015244128227233887, 0.015289567947387695, 0.015324992179870605, 0.01544495964050293, 0.01604431915283203, 0.01626835250854492, 0.01629475212097168, 0.016279359817504883, 0.01630780792236328, 0.016279327392578126, 0.016396928787231445, 0.016227712631225587, 0.016251232147216795, 0.016217599868774413, 0.016208480834960938, 0.016392736434936522, 0.016289440155029297, 0.01627168083190918, 0.016363040924072265, 0.016343103408813477, 0.01633929634094238, 0.016617216110229493, 0.0163853759765625, 0.01640719985961914, 0.016322208404541017, 0.016304479598999024, 0.016283775329589845, 0.016304000854492188, 0.016238208770751952, 0.016234304428100584, 0.016046079635620117, 0.016424415588378906, 0.016237152099609374, 0.016404447555541993, 0.016969696044921875, 0.01621721649169922, 0.01643622398376465, 0.01623232078552246, 0.016246335983276367, 0.016269535064697267, 0.01639651107788086, 0.01624678421020508, 0.016328927993774413, 0.0163221435546875, 0.016273279190063477, 0.0162871036529541, 0.016236703872680665, 0.015921919822692872, 0.015626272201538085, 0.015465984344482422, 0.015356415748596192, 0.015267616271972656, 0.015372447967529297, 0.015304351806640625, 0.015417951583862305, 0.016082752227783204, 0.01630633544921875, 0.01630169677734375, 0.016511199951171875, 0.016224607467651368, 0.01626416015625, 0.01630678367614746, 0.01631007957458496, 0.01620748710632324, 0.016290367126464842, 0.01619321632385254, 0.01630384063720703, 0.016257728576660156, 0.016300031661987305, 0.016250495910644532, 0.016363040924072265, 0.016248863220214845, 0.016363935470581056, 0.016383455276489257, 0.0162805118560791, 0.01626316833496094, 0.016300064086914062, 0.01624662399291992, 0.016261247634887694, 0.01640447998046875, 0.016257024765014647, 0.01664748764038086, 0.01634169578552246, 0.016289632797241212, 0.016318687438964842, 0.016473087310791015, 0.016360383987426758, 0.016351551055908204, 0.01630339241027832, 0.017043296813964843, 0.016366464614868164, 0.016396127700805663, 0.016273183822631834, 0.016225503921508788, 0.01649072074890137, 0.0164071044921875, 0.01632655906677246, 0.016281728744506837, 0.0162890567779541, 0.016247488021850585, 0.016422624588012694, 0.016309984207153322, 0.0164071044921875, 0.01633875274658203, 0.01604748725891113, 0.015960927963256835, 0.016316192626953125, 0.016343488693237304, 0.016563968658447267, 0.016510623931884766, 0.016269535064697267, 0.016375263214111327, 0.016302015304565428, 0.016437984466552733, 0.01631382369995117, 0.016900640487670898, 0.016287744522094725, 0.016330175399780274, 0.016267744064331055, 0.016304224014282227, 0.016248960494995118, 0.016274560928344728, 0.01621878433227539, 0.016261247634887694, 0.016181215286254883, 0.01623859214782715, 0.016225503921508788, 0.016292991638183593, 0.01627510452270508, 0.016759904861450195, 0.016253055572509764, 0.01636841583251953, 0.016324607849121094, 0.016346399307250976, 0.01633535957336426, 0.016242080688476563, 0.016229183197021484, 0.01618297576904297, 
0.0162390079498291, 0.016319904327392578, 0.016303903579711915, 0.01654243278503418, 0.01629916763305664, 0.016429920196533204, 0.016349184036254884, 0.016281728744506837, 0.0162425594329834, 0.01632217597961426, 0.01631065559387207, 0.01631439971923828, 0.016368896484375, 0.016286432266235353, 0.01630044746398926, 0.016354911804199217, 0.016281248092651367, 0.016595424652099608, 0.01620172882080078, 0.016289791107177733, 0.01629929542541504, 0.016327392578125, 0.01630835151672363, 0.016332799911499024, 0.016361343383789063, 0.016390111923217772, 0.01650390434265137, 0.016335807800292968, 0.01641302490234375, 0.016363296508789062, 0.01627123260498047, 0.017223871231079102, 0.017280832290649414, 0.017585439682006834, 0.016698144912719728, 0.016382047653198242, 0.02137071990966797, 0.016629632949829103, 0.01636569595336914, 0.017137664794921875, 0.01632655906677246, 0.016332544326782227, 0.016312831878662108, 0.01629961585998535, 0.016331296920776367, 0.016305599212646484, 0.01625116729736328, 0.016300384521484374, 0.01639583969116211, 0.016389408111572266, 0.0163438720703125, 0.01641472053527832, 0.016407808303833007, 0.015964927673339843, 0.01570137596130371, 0.015621855735778808, 0.015541088104248047, 0.015385663986206055, 0.015399935722351075, 0.01534102439880371, 0.015301152229309082, 0.015369536399841309, 0.015323936462402344, 0.015310751914978027, 0.015291423797607421, 0.015258591651916504, 0.015306207656860351, 0.015353504180908204, 0.015321023941040039, 0.015391167640686034, 0.01535974407196045, 0.015485695838928223, 0.015463647842407226, 0.015368288040161132, 0.01567404842376709, 0.015582688331604004, 0.01540559959411621, 0.015374336242675781, 0.01572006416320801, 0.015303008079528809, 0.015376416206359864]",tokens/s,61.94865213205456,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in 
run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,7361.441792,9673.048064,0.0,9277.800448,8679.633408,s,1,14.699318359375,14.699318359375,0.0,14.699318359375,14.699318359375,14.699318359375,14.699318359375,[14.699318359375],,kWh,0.00022458091174167217,2.4765545809885814e-05,0.00010117424760598681,0.00035052070515754477,,MB,1702.240256,9687.728128,0.0,9277.800448,8206.575616,s,10,3.9106325683593752,0.3910632568359375,0.0018663111923173303,0.39105384826660156,0.3933038970947265,0.393489030456543,0.39363713714599613,"[0.3874965209960938, 0.391135498046875, 0.389873779296875, 0.39326275634765623, 0.389359375, 0.3922892150878906, 0.39097219848632814, 0.39278826904296876, 0.3936741638183594, 0.3897807922363281]",tokens/s,654.6255510458235,kWh,1.1371556762821183e-05,1.254069628126514e-06,7.523809437846008e-06,2.014943582879371e-05,tokens/kWh,12705070.364013562,MB,1709.735936,9687.728128,0.0,9277.800448,8480.92416,s,10,27.260906982421876,2.7260906982421877,0.004611978211197152,2.72476708984375,2.7334384521484374,2.7347204467773434,2.7357460424804687,"[2.73600244140625, 2.722218505859375, 2.724154296875, 2.7224716796875, 2.72430078125, 2.7252333984375, 2.733153564453125, 2.727108154296875, 2.72086767578125, 2.725396484375]",tokens/s,23.11001612698472,kWh,7.990248834301077e-05,8.813380497793079e-06,5.313982242635492e-05,0.00014185569126715872,tokens/kWh,444113.30583382276,,s,630,27.257424415588368,0.04326575304061648,0.0005153213214338492,0.04324404716491699,0.04369912796020508,0.04383510227203369,0.044511854667663574,"[0.044332801818847654, 0.0430247688293457, 0.04291785430908203, 0.0427848014831543, 0.04272111892700195, 0.042909854888916014, 0.04286873626708984, 0.042907230377197264, 0.04295644760131836, 0.04301491165161133, 0.043015167236328124, 0.043033695220947264, 0.04298160171508789, 0.042995391845703126, 0.043118495941162106, 0.043006046295166016, 0.043020286560058595, 0.04312473678588867, 0.04309961700439453, 0.04318832015991211, 0.04314156723022461, 0.04345644760131836, 0.04329177474975586, 0.04335647964477539, 0.043412094116210935, 0.04341964721679688, 0.04326604843139648, 0.04335030364990235, 0.04332073593139649, 0.043227455139160154, 0.0434442253112793, 0.04352329635620117, 0.043420448303222656, 0.04374489593505859, 0.043909503936767576, 
0.04348435211181641, 0.043379360198974606, 0.04346428680419922, 0.04323142242431641, 0.043342529296875, 0.04341292953491211, 0.043450366973876955, 0.04362060928344726, 0.04398899078369141, 0.044434593200683596, 0.043754112243652346, 0.043730335235595705, 0.04370105743408203, 0.04369612884521484, 0.04380876922607422, 0.043665279388427736, 0.04361804962158203, 0.043573631286621096, 0.043640830993652346, 0.043586784362792966, 0.043488033294677736, 0.04379852676391602, 0.043597824096679685, 0.043816543579101565, 0.04371836853027344, 0.04365382385253906, 0.044771007537841793, 0.043883968353271484, 0.043439903259277345, 0.04291609573364258, 0.042816062927246094, 0.04277199935913086, 0.04269481658935547, 0.04281788635253906, 0.04276633453369141, 0.04280316925048828, 0.04298873519897461, 0.042793502807617186, 0.04288262557983399, 0.042912513732910156, 0.04285356903076172, 0.04270703887939453, 0.042787105560302734, 0.042775104522705075, 0.04285222244262695, 0.04289945602416992, 0.0429035530090332, 0.04287692642211914, 0.04310015869140625, 0.04300799942016602, 0.04298956680297852, 0.04299980926513672, 0.04294451141357422, 0.042869888305664065, 0.043047264099121095, 0.04336080169677734, 0.04326604843139648, 0.04319027328491211, 0.04314726257324219, 0.04316310501098633, 0.043264545440673825, 0.04316364669799805, 0.04320460891723633, 0.043251361846923825, 0.043211105346679685, 0.043243518829345705, 0.04333286285400391, 0.04340403366088867, 0.04332748794555664, 0.04335558319091797, 0.043407745361328125, 0.04358777618408203, 0.043574783325195314, 0.043582977294921874, 0.04373811340332031, 0.04374262237548828, 0.043651649475097656, 0.043569183349609374, 0.043466625213623045, 0.043407360076904294, 0.043606143951416015, 0.043482208251953126, 0.0436069450378418, 0.04355481719970703, 0.04350960159301758, 0.043408576965332034, 0.043464702606201173, 0.043453407287597653, 0.04361795043945312, 0.04360022354125977, 0.043720703125, 0.04343603134155274, 0.042866687774658206, 0.04262236785888672, 0.04265635299682617, 0.04250848007202149, 0.04276416015625, 0.04268025588989258, 0.04278681564331055, 0.042782718658447266, 0.042970912933349606, 0.04290380859375, 0.04292809677124024, 0.042872127532958985, 0.04294112014770508, 0.042913791656494144, 0.042976734161376956, 0.04295100784301758, 0.04301433563232422, 0.043071487426757815, 0.04294595336914062, 0.042930782318115236, 0.04305817413330078, 0.04298425674438477, 0.043122879028320314, 0.0433144645690918, 0.04326268768310547, 0.043466110229492184, 0.043270496368408205, 0.043190559387207034, 0.043283870697021484, 0.04346303939819336, 0.043251361846923825, 0.04322361755371094, 0.043081729888916016, 0.04319968032836914, 0.04326688003540039, 0.043200511932373044, 0.04324966430664062, 0.043210750579833986, 0.04319232177734375, 0.0433889274597168, 0.04354457473754883, 0.04364287948608398, 0.043656864166259766, 0.0433606071472168, 0.04348089599609375, 0.04334012985229492, 0.04325334548950195, 0.04331315231323242, 0.04350592041015625, 0.04357120132446289, 0.04347916793823242, 0.043638656616210934, 0.043566497802734375, 0.04363734436035156, 0.04346255874633789, 0.04348118209838867, 0.043595775604248044, 0.04356025695800781, 0.04366815948486328, 0.04392499160766602, 0.04358563232421875, 0.04438457489013672, 0.0434400634765625, 0.04302403259277344, 0.04281135940551758, 0.04287238311767578, 0.042906494140625, 0.042796737670898435, 0.04268828964233398, 0.04268086242675781, 0.04282572937011719, 0.042692607879638675, 0.042780670166015625, 0.04280319976806641, 0.04269875335693359, 
0.04268032073974609, 0.04272272109985351, 0.043618144989013674, 0.04297395324707031, 0.0429567985534668, 0.043216705322265625, 0.04286896133422852, 0.04317385482788086, 0.04279500961303711, 0.04287078475952148, 0.042958175659179684, 0.042937183380126955, 0.042942272186279294, 0.04305820846557617, 0.04296803283691406, 0.04289535903930664, 0.04299353790283203, 0.04295897674560547, 0.04293017578125, 0.042976734161376956, 0.04286307144165039, 0.0429788818359375, 0.04293616104125977, 0.043006622314453125, 0.043169792175292966, 0.04346265411376953, 0.04341862487792969, 0.04374591827392578, 0.043534465789794925, 0.043596031188964844, 0.04351180648803711, 0.04350080108642578, 0.0434793586730957, 0.04360768127441406, 0.043600704193115236, 0.04374118423461914, 0.04394188690185547, 0.043499519348144534, 0.04333059310913086, 0.04335030364990235, 0.043527072906494144, 0.043568511962890626, 0.04360768127441406, 0.04363241577148438, 0.04357017517089844, 0.04393926239013672, 0.04435811233520508, 0.04372079849243164, 0.04365641784667969, 0.04374195098876953, 0.04372480010986328, 0.04337254333496094, 0.042802433013916015, 0.042893505096435545, 0.042897022247314454, 0.042959392547607424, 0.04281180953979492, 0.04286873626708984, 0.04279296112060547, 0.042755073547363284, 0.042733791351318356, 0.04282857513427735, 0.04297865676879883, 0.042900127410888673, 0.04295503997802735, 0.04300156784057617, 0.0429752311706543, 0.04315135955810547, 0.04296499252319336, 0.04272127914428711, 0.04278019332885742, 0.04293795013427734, 0.043039169311523434, 0.04310675048828125, 0.0429486083984375, 0.04297727966308594, 0.043018047332763674, 0.042866878509521485, 0.04282284927368164, 0.042826560974121096, 0.043001697540283206, 0.042977439880371095, 0.04295993423461914, 0.04301900863647461, 0.04299795150756836, 0.04308153533935547, 0.043063392639160154, 0.043012001037597655, 0.043753246307373046, 0.0432880973815918, 0.04354342269897461, 0.0436121597290039, 0.0437125129699707, 0.0436420783996582, 0.04352617645263672, 0.04347110366821289, 0.04354508972167969, 0.04364492797851562, 0.04367708969116211, 0.04364064025878906, 0.04353513717651367, 0.04346060943603516, 0.04364239883422852, 0.043638336181640626, 0.04368681716918945, 0.04362035369873047, 0.04375475311279297, 0.04371737670898437, 0.04384143829345703, 0.043843616485595704, 0.043763137817382815, 0.043866142272949216, 0.04393020629882813, 0.04337667083740234, 0.04297318267822266, 0.04278598403930664, 0.04279142379760742, 0.04281171035766602, 0.04288710403442383, 0.04270700836181641, 0.04290566253662109, 0.04292396926879883, 0.04293974304199219, 0.042969024658203125, 0.04317843246459961, 0.04302876663208008, 0.0429854736328125, 0.04279500961303711, 0.04274393463134766, 0.042785888671875, 0.042848865509033204, 0.042850494384765625, 0.04291788864135742, 0.043245567321777346, 0.0431280632019043, 0.043014881134033206, 0.04306742477416992, 0.04308287811279297, 0.04304780960083008, 0.043096065521240234, 0.04297929763793945, 0.043071521759033206, 0.04295232009887695, 0.04297881698608398, 0.043101375579833984, 0.04331692886352539, 0.043278335571289066, 0.04344627380371094, 0.04338220977783203, 0.04336934280395508, 0.043306686401367187, 0.04329062271118164, 0.0433704948425293, 0.043412734985351566, 0.04351052856445312, 0.04354281616210937, 0.043568031311035156, 0.0435513916015625, 0.04344380950927734, 0.043501758575439455, 0.04361644744873047, 0.04358777618408203, 0.04364278411865234, 0.0433337287902832, 0.04330223846435547, 0.0432831039428711, 0.04330617523193359, 0.04338771057128906, 
0.04368368148803711, 0.04362460708618164, 0.04395622253417969, 0.04508262252807617, 0.043671550750732424, 0.043589630126953126, 0.0437037124633789, 0.04382735824584961, 0.0434411506652832, 0.043061855316162106, 0.0432371826171875, 0.043262081146240236, 0.04292214584350586, 0.04306361770629883, 0.042711231231689455, 0.04272297668457031, 0.04269641494750977, 0.042654144287109376, 0.0426618881225586, 0.04265331268310547, 0.042936447143554685, 0.042895614624023436, 0.04315280151367187, 0.04305107116699219, 0.04309660720825195, 0.04348652648925781, 0.04330060958862305, 0.04322604751586914, 0.04332672119140625, 0.04326633453369141, 0.043377120971679686, 0.043259902954101564, 0.043151039123535156, 0.043180351257324216, 0.0432803840637207, 0.04316159820556641, 0.04325145721435547, 0.04331545639038086, 0.04329471969604492, 0.04463411331176758, 0.04345446395874023, 0.04345427322387695, 0.04338297653198242, 0.04348880004882812, 0.04342012786865234, 0.04355871963500976, 0.043509952545166014, 0.04357510375976562, 0.043535839080810546, 0.04369891357421875, 0.043617313385009765, 0.04362908935546875, 0.04385836791992188, 0.043703968048095704, 0.043563358306884764, 0.043622398376464845, 0.043619552612304685, 0.04364287948608398, 0.04361091232299805, 0.04351571273803711, 0.043788734436035155, 0.04378319931030274, 0.04368252944946289, 0.04387593460083008, 0.04371830368041992, 0.04358000183105469, 0.04358979034423828, 0.043606048583984376, 0.043748737335205075, 0.04362284851074219, 0.043652320861816404, 0.043533790588378904, 0.044195999145507814, 0.0440280647277832, 0.04262928009033203, 0.04283193588256836, 0.04286089706420899, 0.04354601669311523, 0.04268809509277344, 0.042877342224121096, 0.04268672180175781, 0.04270284652709961, 0.042987518310546875, 0.04306905746459961, 0.04310784149169922, 0.043103073120117186, 0.043154464721679685, 0.04303936004638672, 0.04306777572631836, 0.04338399887084961, 0.042932064056396484, 0.04306182479858398, 0.04311286544799805, 0.04320390319824219, 0.04305990219116211, 0.042977310180664065, 0.04290505599975586, 0.043020416259765625, 0.042996097564697265, 0.04331670379638672, 0.043221534729003905, 0.04317184066772461, 0.043024574279785156, 0.042960704803466795, 0.04293632125854492, 0.04305500793457031, 0.043144641876220705, 0.04311312103271484, 0.043075649261474606, 0.04313081741333008, 0.0433493766784668, 0.043168384552001955, 0.04335177612304687, 0.04317734527587891, 0.043283073425292966, 0.04338307189941406, 0.043340991973876954, 0.043348094940185544, 0.04329337692260742, 0.04325785446166992, 0.0434524154663086, 0.04361830520629883, 0.043548671722412106, 0.044611583709716796, 0.04356915283203125, 0.04355184173583984, 0.0434984016418457, 0.043705665588378906, 0.04373984146118164, 0.043888641357421876, 0.043869857788085935, 0.04391302490234375, 0.04386051177978516, 0.04400672149658203, 0.04355939102172852, 0.042865951538085936, 0.04258614349365234, 0.04248569488525391, 0.04257244873046875, 0.04274720001220703, 0.04273231887817383, 0.04284928131103516, 0.04286566543579102, 0.04287257766723633, 0.04290790557861328, 0.04290969467163086, 0.04290560150146484, 0.042997760772705076, 0.04299273681640625, 0.0431748161315918, 0.04299980926513672, 0.04316524887084961, 0.04323116683959961, 0.043059711456298826, 0.04292403030395508, 0.042923263549804684, 0.043018142700195314, 0.04315024185180664, 0.04326918411254883, 0.0434837760925293, 0.0433073616027832, 0.043552574157714845, 0.0432720947265625, 0.04322732925415039, 0.043202560424804685, 0.043128353118896484, 0.04307535934448242, 
0.043167774200439456, 0.043184383392333985, 0.04308022308349609, 0.04314825439453125, 0.04313999938964844, 0.04315545654296875, 0.043457855224609376, 0.04343836975097656, 0.04341292953491211, 0.043330303192138674, 0.04330928039550781, 0.043218944549560545, 0.04320367813110351, 0.04324457550048828, 0.04321225738525391, 0.04325827026367188, 0.04326604843139648, 0.04341705703735352, 0.04450358581542969, 0.0433540153503418, 0.04321699142456055, 0.0433438720703125, 0.043319297790527345, 0.04336435317993164, 0.04348518371582031, 0.04327980804443359, 0.043399646759033204, 0.04363888168334961, 0.04339712142944336, 0.04362035369873047, 0.04355619049072266, 0.043439071655273435, 0.042616832733154295, 0.04238240051269531, 0.04244985580444336, 0.042743743896484374, 0.04248735809326172, 0.042514240264892575, 0.04259910583496094, 0.042555679321289064, 0.0427168960571289, 0.04266099166870117, 0.04264230346679688, 0.042724895477294925, 0.04263958358764648, 0.04267852783203125, 0.04281958389282227, 0.043433822631835935, 0.04359164810180664, 0.04291193771362305, 0.04339731216430664, 0.04283577728271484, 0.04294451141357422, 0.042891456604003904, 0.042954879760742186, 0.042968769073486325, 0.04306534576416016, 0.042807296752929686, 0.042815486907958986, 0.04288713455200195, 0.04287491226196289, 0.04294041442871094, 0.04296467208862305, 0.04306771087646484, 0.043153408050537106, 0.0430582389831543, 0.04305811309814453, 0.04301619338989258, 0.04301824188232422, 0.04299980926513672, 0.04319811248779297, 0.043259326934814456, 0.04318048095703125, 0.04327267074584961, 0.04334796905517578, 0.04329062271118164, 0.04336435317993164, 0.044539905548095705, 0.043695903778076174, 0.04337276840209961, 0.04334796905517578, 0.0432988166809082, 0.043369823455810544, 0.04329439926147461, 0.04327932739257812, 0.04338687896728516, 0.04451523208618164, 0.05200291061401367, 0.043546302795410156, 0.04367148971557617, 0.04372848129272461, 0.04356380844116211, 0.043681793212890625]",tokens/s,23.112968796850307,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4142.239744,5959.974912,0.0,5557.4528,5102.547968,s,1,11.3579736328125,11.3579736328125,0.0,11.3579736328125,11.3579736328125,11.3579736328125,11.3579736328125,[11.3579736328125],,kWh,0.0001232327817875178,1.3586307343211346e-05,5.5161710796003405e-05,0.00019198079992673256,,MB,1471.340544,5993.529344,0.0,5576.327168,4701.113344,s,10,2.2378789062499997,0.22378789062499999,0.0002189927741978738,0.22382008361816408,0.22403562774658203,0.2240372688293457,0.22403858169555663,"[0.22368531799316407, 0.22403890991210937, 0.22381356811523437, 0.22393299865722657, 0.22403526306152344, 0.22382659912109376, 0.22357075500488283, 0.22367919921875, 0.2233137664794922, 0.22398252868652344]",tokens/s,1143.940359261787,kWh,6.600594317592031e-06,7.275468419877332e-07,4.385892397599604e-06,1.1714033557179369e-05,tokens/kWh,21854128.9599688,MB,1500.033024,5993.529344,0.0,5576.327168,4701.115904,s,10,32.09952001953125,3.2099520019531247,0.006455107516177166,3.2110751953125,3.216955102539062,3.2182889526367187,3.219356032714844,"[3.219622802734375, 3.212086669921875, 3.210063720703125, 3.2137177734375, 3.216244140625, 3.204347900390625, 3.20202587890625, 3.20470263671875, 3.2000498046875, 3.21665869140625]",tokens/s,19.626461692158347,kWh,8.13811449386552e-05,8.976775670583635e-06,4.489475813800094e-05,0.0001352526787472398,tokens/kWh,465794.84105992754,,s,630,32.097109970092745,0.05094779360332186,0.0006652971880553079,0.05079310417175293,0.05134559288024902,0.051735566139221194,0.054517566986083996,"[0.051076671600341794, 0.05143222427368164, 0.050884033203125, 0.05075392150878906, 0.05105635070800781, 0.05120582580566406, 0.05112313461303711, 0.05107843017578125, 0.05123555374145508, 0.05083750534057617, 0.05180329513549805, 0.051153759002685546, 0.050904193878173826, 0.05084454345703125, 0.050841598510742186, 0.050814304351806644, 0.050842304229736325, 0.05072803115844727, 0.05090188980102539, 0.05148876953125, 0.051253246307373046, 0.05142323303222656, 0.05083276748657226, 0.050821758270263674, 0.050816062927246095, 
0.05079545593261719, 0.05090428924560547, 0.050845985412597654, 0.05090764617919922, 0.05152758407592774, 0.051319168090820315, 0.05120995330810547, 0.053542911529541014, 0.053224609375, 0.050989921569824216, 0.051165184020996096, 0.0511420783996582, 0.05124563217163086, 0.051023872375488284, 0.05088236618041992, 0.05105478286743164, 0.051133438110351564, 0.05120745468139649, 0.051229633331298825, 0.05109430313110352, 0.051081214904785156, 0.050904640197753905, 0.051122177124023435, 0.051106239318847654, 0.050955455780029295, 0.05080966567993164, 0.05092665481567383, 0.051082176208496095, 0.05093497467041016, 0.050850624084472655, 0.05064089584350586, 0.05071644973754883, 0.05104252624511719, 0.050834686279296874, 0.05084393692016602, 0.05087014389038086, 0.05088521575927735, 0.05114291381835938, 0.05411430358886719, 0.05107097625732422, 0.050871391296386716, 0.05067663955688476, 0.05067366409301758, 0.05088665771484375, 0.05070412826538086, 0.050847808837890626, 0.05111648178100586, 0.050958080291748045, 0.05087631988525391, 0.05099529647827149, 0.051162784576416015, 0.05097507095336914, 0.050948223114013674, 0.05157612609863281, 0.05126364898681641, 0.050862495422363284, 0.050733055114746094, 0.05117279815673828, 0.050716705322265625, 0.0514381103515625, 0.050982559204101566, 0.05093791961669922, 0.050772254943847656, 0.05094332885742187, 0.05081974411010742, 0.05186511993408203, 0.05072310256958008, 0.05083564758300781, 0.05069823837280273, 0.05058956909179688, 0.050542881011962894, 0.05173641586303711, 0.05098086547851562, 0.05099260711669922, 0.05061904144287109, 0.0507632942199707, 0.05065718460083008, 0.0509376335144043, 0.050705055236816406, 0.05157273483276367, 0.05071462249755859, 0.05067078399658203, 0.05069667053222656, 0.05079280090332031, 0.050775230407714846, 0.050661598205566406, 0.05052876663208008, 0.0507105598449707, 0.05068601608276367, 0.05099929428100586, 0.050819072723388675, 0.05068556976318359, 0.050599521636962894, 0.05066831970214844, 0.05219747161865235, 0.0506932487487793, 0.05179475021362305, 0.051431102752685545, 0.0507470703125, 0.05102447891235352, 0.05064470291137695, 0.0512174072265625, 0.050893505096435546, 0.05096006393432617, 0.05557059097290039, 0.05131280136108399, 0.05095167922973633, 0.050783775329589845, 0.05084463882446289, 0.050679649353027344, 0.050706367492675784, 0.05059196853637695, 0.05398732757568359, 0.05195980834960937, 0.05135273742675781, 0.05074393463134766, 0.05056451034545899, 0.050629440307617186, 0.05044976043701172, 0.05066819381713867, 0.05050777435302734, 0.05043199920654297, 0.050710529327392576, 0.05053440093994141, 0.05083932876586914, 0.05068412780761719, 0.05069823837280273, 0.0505398063659668, 0.05101232147216797, 0.0506695671081543, 0.05173452758789063, 0.05102985763549805, 0.05090934371948242, 0.05110147094726562, 0.0508449592590332, 0.050764511108398434, 0.05107120132446289, 0.05079654312133789, 0.050724864959716794, 0.05084979248046875, 0.0511016960144043, 0.051133567810058594, 0.05052406311035156, 0.05112931060791016, 0.05079228973388672, 0.05078796768188477, 0.050614814758300784, 0.050644992828369144, 0.05044367980957031, 0.05062883377075195, 0.050618751525878906, 0.05065052795410156, 0.05058176040649414, 0.050921375274658204, 0.05121068954467774, 0.05122867202758789, 0.05089279937744141, 0.050855934143066404, 0.05076377487182617, 0.05058969497680664, 0.05068912124633789, 0.05053737640380859, 0.05051801681518555, 0.05064704132080078, 0.05114857482910156, 0.0506864013671875, 0.05107680130004883, 0.05107356643676758, 
0.05119388961791992, 0.050617664337158204, 0.05071270370483399, 0.05070249557495117, 0.05082287979125977, 0.05061443328857422, 0.05051408004760742, 0.05060240173339844, 0.051214336395263675, 0.05111983871459961, 0.0509884147644043, 0.05132380676269531, 0.051229759216308596, 0.050893470764160155, 0.0506104621887207, 0.050828544616699216, 0.05097468948364258, 0.050678558349609375, 0.050616222381591795, 0.05073632049560547, 0.05089782333374023, 0.05066342544555664, 0.050544639587402344, 0.05649407958984375, 0.051033344268798825, 0.0507850227355957, 0.05070342254638672, 0.050793407440185546, 0.051329025268554686, 0.05075276947021484, 0.05078678512573242, 0.050815265655517576, 0.05062223815917969, 0.050847072601318356, 0.050772865295410155, 0.050677761077880856, 0.05062451171875, 0.051129886627197266, 0.05098105621337891, 0.05091107177734375, 0.05130438232421875, 0.05149747085571289, 0.05111360168457031, 0.050913345336914065, 0.0508787841796875, 0.050937278747558594, 0.05063942337036133, 0.05083955383300781, 0.05086207962036133, 0.050939071655273435, 0.05088441467285156, 0.050936832427978515, 0.05095126342773437, 0.05097564697265625, 0.05092681503295898, 0.05080758285522461, 0.05081683349609375, 0.05229792022705078, 0.05180416107177734, 0.05123750305175781, 0.051073024749755856, 0.05084073638916015, 0.05115683364868164, 0.05096255874633789, 0.05111273574829102, 0.05199241638183594, 0.0508930549621582, 0.05079561614990234, 0.05065987014770508, 0.05065119934082031, 0.05058591842651367, 0.050887935638427736, 0.0509304313659668, 0.05065878295898438, 0.050950687408447264, 0.050907135009765625, 0.050695808410644534, 0.05072524642944336, 0.050685951232910156, 0.05057024002075195, 0.050810943603515624, 0.054608222961425784, 0.05160966491699219, 0.05123945617675781, 0.05098495864868164, 0.050735103607177735, 0.05205811309814453, 0.05100953674316406, 0.050661376953125, 0.0505643196105957, 0.05049628829956055, 0.050513313293457034, 0.05150371170043945, 0.050794559478759764, 0.051313697814941404, 0.05086025619506836, 0.050950847625732425, 0.05085184097290039, 0.05082643127441406, 0.05092147064208984, 0.0510206069946289, 0.05114380645751953, 0.05107392120361328, 0.05090256118774414, 0.05128220748901367, 0.05324614334106445, 0.05090508651733398, 0.05070800018310547, 0.050737632751464846, 0.05067161560058594, 0.050966304779052736, 0.051405025482177735, 0.050867584228515624, 0.0508355827331543, 0.050967041015625, 0.05050982284545898, 0.05083340835571289, 0.050670814514160153, 0.0507481918334961, 0.0510720329284668, 0.05099004745483399, 0.05210281753540039, 0.051689952850341794, 0.05105395126342773, 0.05075212860107422, 0.0506163215637207, 0.050566177368164066, 0.054764385223388674, 0.051368064880371093, 0.0515909423828125, 0.050972576141357424, 0.050993473052978515, 0.05065932846069336, 0.05069193649291992, 0.05073936080932617, 0.05066336059570312, 0.050745407104492185, 0.05106687927246094, 0.05060812759399414, 0.05082521438598633, 0.050773662567138673, 0.050530654907226566, 0.05046886444091797, 0.050423809051513675, 0.050528255462646485, 0.050718719482421876, 0.05097987365722656, 0.05050057601928711, 0.05057126235961914, 0.051078975677490236, 0.050466625213623044, 0.05086825561523438, 0.05062595367431641, 0.050428768157958985, 0.05045052719116211, 0.05062246322631836, 0.05209632110595703, 0.05138079833984375, 0.05161075210571289, 0.05142630386352539, 0.050972671508789064, 0.05087593460083008, 0.050618846893310546, 0.050950145721435545, 0.05094521713256836, 0.050647361755371094, 0.05053459167480469, 
0.050639041900634764, 0.05120975875854492, 0.05063238525390625, 0.05045340728759766, 0.05071993637084961, 0.0507523193359375, 0.050560192108154295, 0.05060076904296875, 0.05055692672729492, 0.05058355331420898, 0.05055606460571289, 0.05108111953735352, 0.05071270370483399, 0.05050067138671875, 0.05063577651977539, 0.050498302459716794, 0.05053401565551758, 0.050428321838378906, 0.051515296936035154, 0.05082944107055664, 0.05050320053100586, 0.05051772689819336, 0.0506929931640625, 0.05076553726196289, 0.050646305084228516, 0.050609153747558595, 0.0505999984741211, 0.051172767639160156, 0.05079024124145508, 0.050786880493164065, 0.050702465057373046, 0.05063884735107422, 0.050391040802001956, 0.05034598541259765, 0.05039839935302735, 0.05039187240600586, 0.05087798309326172, 0.05076588821411133, 0.05073516845703125, 0.05100374221801758, 0.05356748962402344, 0.05048524856567383, 0.050392097473144534, 0.050791393280029296, 0.050816158294677734, 0.05045129776000976, 0.050810657501220706, 0.0507465934753418, 0.05090611267089844, 0.051476478576660156, 0.05095219039916992, 0.05061014556884766, 0.05031945419311523, 0.051344799041748046, 0.05066729736328125, 0.050649856567382814, 0.05073651123046875, 0.0510142707824707, 0.051700992584228514, 0.05090361785888672, 0.050581695556640625, 0.05034950256347656, 0.05037318420410156, 0.05079001617431641, 0.05078825759887695, 0.05050614547729492, 0.0504730224609375, 0.05063065719604492, 0.050724864959716794, 0.05048320007324219, 0.05044140625, 0.05069702529907227, 0.050527679443359376, 0.051272254943847656, 0.05329305648803711, 0.05074703979492187, 0.050861534118652345, 0.051424129486083985, 0.050761985778808597, 0.05049932861328125, 0.05054873657226563, 0.0512239990234375, 0.05091702270507813, 0.05072579193115234, 0.05077811050415039, 0.05039875030517578, 0.05047872161865234, 0.05048201751708985, 0.05082931137084961, 0.05070438385009766, 0.050462718963623046, 0.05046393585205078, 0.051001792907714845, 0.050548095703125, 0.050961406707763675, 0.05050982284545898, 0.05065523147583008, 0.05060771179199219, 0.050591552734375, 0.050461280822753904, 0.05055487823486328, 0.050918655395507814, 0.05085465621948242, 0.05179593658447266, 0.05103788757324219, 0.050784255981445314, 0.05068809509277344, 0.050759937286376955, 0.05064908981323242, 0.050579456329345705, 0.05082668685913086, 0.05122304153442383, 0.051471710205078125, 0.050782943725585936, 0.050759681701660155, 0.05121023941040039, 0.05086800003051758, 0.05052390289306641, 0.05105894470214844, 0.05076192092895508, 0.05058972930908203, 0.050898944854736325, 0.050533409118652346, 0.05082572937011719, 0.05119228744506836, 0.05089033508300781, 0.05071299362182617, 0.050726558685302736, 0.05089724731445312, 0.05082112121582031, 0.050595584869384765, 0.050651039123535156, 0.05099555206298828, 0.05072832107543945, 0.05053299331665039, 0.05041971206665039, 0.05143756866455078, 0.050730239868164065, 0.05141142272949219, 0.053226974487304686, 0.052398910522460936, 0.05113375854492187, 0.05075833511352539, 0.05045043182373047, 0.055003135681152344, 0.05104230499267578, 0.050700286865234374, 0.050634750366210936, 0.05051391983032227, 0.050487297058105465, 0.05051801681518555, 0.05059318542480469, 0.05074367904663086, 0.050528480529785154, 0.050391040802001956, 0.05049958419799805, 0.0513331184387207, 0.05119180679321289, 0.05075062561035156, 0.05055065536499023, 0.05106710433959961, 0.05112704086303711, 0.051179424285888675, 0.050748863220214845, 0.050541088104248046, 0.05067129516601562, 0.05058195114135742, 
0.05069823837280273, 0.050638721466064456, 0.05058982467651367, 0.05054374313354492, 0.05044518280029297, 0.05086617660522461, 0.05113232040405274, 0.05089295959472656, 0.05070431900024414, 0.05054572677612305, 0.05045548629760742, 0.05061654281616211, 0.050450206756591794, 0.050603038787841795, 0.050789344787597654, 0.05088774490356445, 0.0505241584777832, 0.05035712051391601, 0.05066473770141602, 0.0504409294128418, 0.05111609649658203, 0.05079040145874023, 0.05066547012329101, 0.050826847076416014, 0.05059625625610351, 0.050683902740478515, 0.05060940933227539, 0.05049827194213867, 0.05112323379516601, 0.05110271835327149, 0.050743297576904295, 0.05068377685546875, 0.050812160491943356, 0.050723712921142576, 0.050783424377441405, 0.05133180618286133, 0.05065532684326172, 0.05059174346923828, 0.05072281646728516, 0.05052137756347656, 0.05080873489379883, 0.05138236618041992, 0.05060831832885742, 0.05053766250610352, 0.05055142211914063, 0.05303046417236328, 0.05129260635375977, 0.050840576171875, 0.050653888702392576, 0.05073078536987305, 0.051409503936767575, 0.05122655868530274, 0.05078015899658203, 0.050767871856689455, 0.05078835296630859, 0.051105792999267576, 0.05076496124267578, 0.0537026252746582, 0.05076873779296875, 0.05090307235717773, 0.05090284729003906, 0.05078790283203125, 0.05072342300415039, 0.05089436721801758, 0.05088275146484375, 0.05059932708740234, 0.050469375610351565, 0.05055702209472656, 0.05050556945800781, 0.05075606536865234, 0.05488601684570313, 0.050772350311279295, 0.05044182586669922, 0.05054265594482422, 0.050778465270996095, 0.05103411102294922, 0.050617408752441403, 0.05080313491821289, 0.05049135971069336, 0.05065577697753906, 0.0510750732421875, 0.05061427307128906, 0.050563201904296876, 0.05061344146728516, 0.05078713607788086, 0.050908798217773436, 0.05057279968261719, 0.05068057632446289, 0.05150067138671875, 0.05163251113891602, 0.054295616149902345, 0.05091628646850586, 0.05555801773071289, 0.05055910491943359, 0.0506262092590332, 0.05077164840698242, 0.05053916931152344, 0.05047865676879883, 0.05055327987670898, 0.05079040145874023, 0.050415584564208984, 0.050354209899902344, 0.05087343978881836]",tokens/s,19.627935368231505,,, 4bit-awq-exllama-v2-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1048.969216,965.67296,0.0,570.425344,536.326656,s,1,8.42441015625,8.42441015625,0.0,8.42441015625,8.42441015625,8.42441015625,8.42441015625,[8.42441015625],,kWh,3.914825401667864e-05,4.310742464555275e-06,1.281612136400101e-05,5.6275117845234926e-05,,MB,1284.702208,1034.878976,0.0,624.951296,594.377728,s,10,0.26368310546875,0.026368310546875003,0.0002944813357621705,0.026420432090759276,0.026643379402160643,0.026729434108734133,0.026798277873992922,"[0.026592159271240236, 0.026053504943847658, 0.026624256134033204, 0.02681548881530762, 0.026454591751098634, 0.02652511978149414, 0.02638627243041992, 0.025766719818115236, 
0.026129184722900392, 0.02633580780029297]",tokens/s,9708.623521590745,kWh,7.829544049486437e-07,8.630932426148413e-08,5.197487029598338e-07,1.3890124321699618e-06,tokens/kWh,184303605.98001865,MB,1320.767488,1049.55904,0.0,639.63136,607.71072,s,10,14.155046142578126,1.4155046142578125,0.020044638068717466,1.4226021728515623,1.4353281616210938,1.439636065673828,1.4430823889160156,"[1.4244794921875, 1.4072734375, 1.4439439697265626, 1.4047330322265625, 1.428071533203125, 1.4275911865234374, 1.3831318359375, 1.3807259521484374, 1.434370849609375, 1.420724853515625]",tokens/s,44.50709617293096,kWh,4.129228820421656e-05,4.554160190913781e-06,1.5726347627237077e-05,6.157279602236742e-05,tokens/kWh,1023179.1321789923,,s,630,14.148588958740227,0.022458077712286084,0.0006456810369411583,0.022481712341308593,0.022870140266418457,0.023023983955383298,0.024744324131011963,"[0.022230239868164064, 0.022661535263061524, 0.022646303176879882, 0.022819456100463868, 0.022765184402465822, 0.022737503051757812, 0.022618112564086915, 0.022514879226684572, 0.02265990447998047, 0.02262015914916992, 0.022624256134033204, 0.02262835121154785, 0.02246451187133789, 0.02240121650695801, 0.022331104278564454, 0.02237654495239258, 0.022538368225097655, 0.02249235153198242, 0.022801088333129882, 0.02239244842529297, 0.022370687484741213, 0.022194271087646485, 0.02226371192932129, 0.022259616851806642, 0.02264179229736328, 0.02233238410949707, 0.022403072357177735, 0.022494400024414062, 0.02242639923095703, 0.02242767906188965, 0.022456352233886718, 0.022386655807495118, 0.022379871368408202, 0.022436511993408202, 0.022386528015136718, 0.02252012825012207, 0.02275926399230957, 0.022765663146972655, 0.022750463485717774, 0.02276790428161621, 0.02277619171142578, 0.022676992416381835, 0.02260799980163574, 0.023404928207397462, 0.02253647994995117, 0.02274070358276367, 0.02272870445251465, 0.022656351089477538, 0.02271913528442383, 0.025694400787353515, 0.023480127334594727, 0.02301923179626465, 0.02263478469848633, 0.022355583190917967, 0.02245804786682129, 0.02247478485107422, 0.022327455520629882, 0.022335487365722655, 0.02236262321472168, 0.02229471969604492, 0.022413408279418946, 0.02223865509033203, 0.022210847854614257, 0.021541919708251953, 0.021699552536010743, 0.02169593620300293, 0.021744480133056642, 0.02181292724609375, 0.021600288391113283, 0.02167398452758789, 0.02247270393371582, 0.022636640548706056, 0.021931936264038086, 0.02292870330810547, 0.022119007110595702, 0.021884416580200194, 0.021832544326782225, 0.021957536697387696, 0.022623071670532225, 0.02195193672180176, 0.021917247772216798, 0.02222591972351074, 0.022218751907348632, 0.022341184616088867, 0.022106559753417968, 0.022046720504760742, 0.022157087326049804, 0.02312390327453613, 0.02233100891113281, 0.02221232032775879, 0.021914560317993163, 0.022068384170532227, 0.022149759292602537, 0.02252207946777344, 0.02199955177307129, 0.022005823135375975, 0.02215936088562012, 0.02231881523132324, 0.02212278366088867, 0.022176767349243166, 0.02241958427429199, 0.02247283172607422, 0.02234601593017578, 0.02240764808654785, 0.02254643249511719, 0.02271843147277832, 0.022761856079101563, 0.022601343154907225, 0.022374591827392577, 0.02256675148010254, 0.02273855972290039, 0.02259721565246582, 0.022784799575805665, 0.022550527572631835, 0.022378496170043945, 0.02249113655090332, 0.022565088272094726, 0.02289641571044922, 0.022597984313964845, 0.022608575820922853, 0.022694879531860352, 0.022681503295898436, 0.022941919326782228, 0.022915103912353515, 
0.022800479888916016, 0.022905664443969728, 0.022567487716674803, 0.023592384338378906, 0.02298486328125, 0.022836864471435545, 0.022844383239746094, 0.022733055114746093, 0.02266048049926758, 0.022923616409301757, 0.02274723243713379, 0.022918815612792968, 0.022958335876464845, 0.02289664077758789, 0.022748479843139647, 0.022710975646972657, 0.02279814338684082, 0.022712511062622072, 0.02285468864440918, 0.022933887481689452, 0.022849952697753906, 0.022847679138183592, 0.022763263702392577, 0.022997247695922853, 0.02292051124572754, 0.022944448471069336, 0.02304739189147949, 0.02282192039489746, 0.022801216125488282, 0.022708480834960937, 0.022758079528808595, 0.0226910400390625, 0.02277238464355469, 0.02323244857788086, 0.02267568016052246, 0.022734207153320314, 0.022651487350463868, 0.02277299118041992, 0.022581792831420897, 0.025107807159423828, 0.023477119445800783, 0.02338323211669922, 0.0227061767578125, 0.022720447540283205, 0.0229303035736084, 0.022675455093383787, 0.022935232162475585, 0.022612415313720702, 0.02292076873779297, 0.022589759826660158, 0.022910688400268556, 0.022802719116210936, 0.022703712463378906, 0.022647392272949218, 0.02275641632080078, 0.022675743103027345, 0.02266364860534668, 0.02272991943359375, 0.023200639724731444, 0.02551171112060547, 0.02286150360107422, 0.022741472244262696, 0.02254204750061035, 0.02268307113647461, 0.02280022430419922, 0.022249343872070313, 0.022563072204589845, 0.023061952590942382, 0.022554527282714842, 0.022545120239257813, 0.02247091293334961, 0.022449024200439455, 0.02235251235961914, 0.022627904891967775, 0.02249951934814453, 0.022523967742919922, 0.02254800033569336, 0.022458368301391602, 0.02219059181213379, 0.022142751693725586, 0.02208620834350586, 0.022031423568725585, 0.022264448165893555, 0.022249792098999025, 0.02225315284729004, 0.022200735092163085, 0.0219931526184082, 0.02188038444519043, 0.021945119857788086, 0.021933759689331055, 0.021831008911132814, 0.0241591682434082, 0.022920831680297852, 0.022227071762084962, 0.022195903778076172, 0.022137407302856446, 0.02189926338195801, 0.022084703445434572, 0.022037408828735353, 0.022169151306152345, 0.021963327407836915, 0.022085184097290038, 0.022168960571289063, 0.022094112396240234, 0.022012256622314454, 0.02210032081604004, 0.022177183151245117, 0.02250809669494629, 0.022284000396728516, 0.022129056930541992, 0.022218656539916993, 0.022446048736572265, 0.022401023864746093, 0.022374176025390626, 0.022182111740112306, 0.022257728576660155, 0.022124479293823242, 0.022063104629516602, 0.022116352081298828, 0.022044607162475586, 0.021835199356079103, 0.021891712188720703, 0.022025760650634767, 0.02226019287109375, 0.022475839614868164, 0.022703039169311524, 0.02260361671447754, 0.02277187156677246, 0.022630048751831056, 0.02281827163696289, 0.022805120468139647, 0.022776063919067384, 0.02276259231567383, 0.022795167922973633, 0.02275958442687988, 0.022668256759643554, 0.022815616607666015, 0.0228701114654541, 0.022869951248168947, 0.022789823532104493, 0.022687999725341797, 0.022605087280273436, 0.022616832733154298, 0.022611520767211915, 0.022647232055664063, 0.02268956756591797, 0.022579519271850586, 0.022534048080444336, 0.022419679641723634, 0.022355743408203125, 0.022331424713134766, 0.022402624130249023, 0.022618528366088866, 0.02252150344848633, 0.022464063644409178, 0.023083808898925782, 0.02253113555908203, 0.022402015686035157, 0.022338752746582032, 0.02244588851928711, 0.022416351318359375, 0.02265910339355469, 0.022857887268066406, 0.022740800857543944, 
0.022626304626464845, 0.022585567474365235, 0.022735967636108398, 0.02264950370788574, 0.022586784362792968, 0.022610048294067382, 0.022679391860961913, 0.022612640380859375, 0.022902431488037108, 0.02264713668823242, 0.02268707275390625, 0.022573728561401368, 0.02277484893798828, 0.02296703910827637, 0.023137887954711913, 0.022805248260498047, 0.022820512771606447, 0.022620351791381835, 0.0225316162109375, 0.022723039627075194, 0.02264678382873535, 0.02272774314880371, 0.022524864196777343, 0.022482751846313476, 0.02257734489440918, 0.022591487884521484, 0.02263382339477539, 0.022296575546264647, 0.02247270393371582, 0.022562816619873048, 0.023068672180175782, 0.02245193672180176, 0.02238902473449707, 0.02245449638366699, 0.022599584579467775, 0.0226507511138916, 0.022616064071655274, 0.022597696304321287, 0.022534080505371094, 0.022494815826416017, 0.02284160041809082, 0.022648000717163087, 0.022466527938842774, 0.022537023544311523, 0.022449695587158203, 0.022506143569946287, 0.02247478485107422, 0.022665088653564452, 0.022691904067993166, 0.022765727996826173, 0.02272233581542969, 0.022800479888916016, 0.022888511657714845, 0.022847423553466795, 0.022665023803710938, 0.022679391860961913, 0.022782304763793945, 0.022734943389892577, 0.022713504791259765, 0.02268227195739746, 0.022478527069091796, 0.02260419273376465, 0.022769664764404295, 0.02276483154296875, 0.022657024383544923, 0.022596384048461916, 0.02263238334655762, 0.022760831832885742, 0.02263104057312012, 0.02273689651489258, 0.022572927474975586, 0.022982847213745116, 0.02248067283630371, 0.022706239700317384, 0.022696287155151366, 0.022655040740966796, 0.022545568466186522, 0.022585887908935547, 0.02415407943725586, 0.022650943756103516, 0.022558496475219725, 0.02252783966064453, 0.0222291202545166, 0.02223459243774414, 0.022532863616943358, 0.022775423049926757, 0.022685279846191408, 0.022871936798095703, 0.022520223617553712, 0.022563488006591796, 0.02228000068664551, 0.022599231719970702, 0.02259868812561035, 0.02249510383605957, 0.022646432876586915, 0.02288627243041992, 0.022534624099731445, 0.022411264419555665, 0.022458240509033202, 0.022691999435424805, 0.022903871536254884, 0.02255459213256836, 0.022387744903564454, 0.022339487075805665, 0.022369440078735352, 0.02219424057006836, 0.02207414436340332, 0.02189926338195801, 0.021794591903686523, 0.021588191986083985, 0.02152448081970215, 0.021470399856567384, 0.021520511627197265, 0.02165158462524414, 0.021717567443847657, 0.021770240783691407, 0.021890432357788085, 0.02168230438232422, 0.02177689552307129, 0.021614912033081055, 0.02165318489074707, 0.021642303466796874, 0.02165171241760254, 0.021748416900634764, 0.02195590400695801, 0.0218221435546875, 0.0216760311126709, 0.021702783584594727, 0.02170252799987793, 0.02203251266479492, 0.021796735763549804, 0.02178483200073242, 0.021927679061889648, 0.021696512222290038, 0.02175699234008789, 0.02161961555480957, 0.021741600036621095, 0.021705823898315428, 0.021734304428100586, 0.02166169548034668, 0.0216712646484375, 0.02199344062805176, 0.02184671974182129, 0.021614591598510743, 0.021962751388549806, 0.021805055618286134, 0.021679519653320312, 0.021831392288208008, 0.021601152420043946, 0.021540864944458008, 0.0220897274017334, 0.021940223693847655, 0.021694784164428712, 0.021395456314086913, 0.021712896347045898, 0.021675775527954102, 0.02169481658935547, 0.02166543960571289, 0.021649663925170898, 0.021673887252807618, 0.021743520736694336, 0.021651168823242188, 0.021694015502929688, 0.02165443229675293, 
0.02172857666015625, 0.021656255722045898, 0.02170572853088379, 0.022193248748779298, 0.021867488861083983, 0.02181999969482422, 0.021706111907958986, 0.02170537567138672, 0.021672256469726564, 0.022026336669921875, 0.021776287078857422, 0.021911808013916016, 0.021849279403686524, 0.021915712356567384, 0.021823999404907226, 0.021964736938476562, 0.02200966453552246, 0.021903167724609374, 0.02172915267944336, 0.021859039306640626, 0.021864288330078124, 0.021890239715576174, 0.022182016372680663, 0.0219965763092041, 0.021898080825805664, 0.022161439895629884, 0.021939231872558595, 0.021934112548828124, 0.02176518440246582, 0.021797536849975586, 0.021700607299804688, 0.021745664596557617, 0.02184774398803711, 0.021850431442260742, 0.021765920639038087, 0.024104991912841798, 0.022753408432006836, 0.02234579277038574, 0.02225948715209961, 0.02225161552429199, 0.021964927673339844, 0.0218787841796875, 0.021749759674072267, 0.02194780731201172, 0.022061471939086915, 0.022114463806152344, 0.02198121643066406, 0.021916799545288086, 0.021775232315063477, 0.021831615447998047, 0.021843103408813475, 0.021963552474975587, 0.02179478454589844, 0.021985631942749023, 0.022177791595458983, 0.02188083267211914, 0.022029920578002928, 0.023253408432006836, 0.029998912811279296, 0.02225606346130371, 0.022163200378417968, 0.022011808395385742, 0.021987743377685547, 0.022122175216674804, 0.022115520477294922, 0.02216659164428711, 0.02209078407287598, 0.022051551818847655, 0.021993471145629884, 0.022378528594970703, 0.02201913642883301, 0.02197555160522461, 0.022140735626220702, 0.02336409568786621, 0.022612064361572266, 0.02259744071960449, 0.02255891227722168, 0.022618080139160158, 0.022609376907348634, 0.022784576416015626, 0.022644287109375, 0.02262063980102539, 0.022716384887695312, 0.022800479888916016, 0.022617279052734376, 0.022631135940551758, 0.022548479080200197, 0.022757343292236328, 0.02272591972351074, 0.02285385513305664, 0.02288092803955078, 0.02329737663269043, 0.022691743850708008, 0.02257369613647461, 0.022509567260742186, 0.02253363227844238, 0.022486623764038087, 0.0224736328125, 0.02253385543823242, 0.022399168014526367, 0.02242799949645996, 0.022594879150390625, 0.022448575973510743, 0.022367904663085938, 0.022295167922973633, 0.02226736068725586, 0.022261247634887696, 0.022811391830444335, 0.023198783874511717, 0.02328876876831055, 0.023117824554443358, 0.0228818244934082, 0.029682144165039063, 0.024685888290405272, 0.02232569694519043, 0.022047103881835936, 0.022380640029907226, 0.022466560363769532, 0.022769535064697265, 0.022400447845458984, 0.02231158447265625, 0.022340991973876952, 0.022120672225952147, 0.022204927444458008, 0.02229987144470215, 0.02232579231262207, 0.0221976318359375, 0.022252351760864257, 0.02232729530334473, 0.022468671798706055, 0.022394304275512696, 0.02230678367614746, 0.022261920928955077, 0.02233283233642578, 0.022385215759277342, 0.022448543548583985, 0.022361728668212892, 0.022400800704956054, 0.022436447143554687, 0.02249728012084961, 0.022548511505126954, 0.022783327102661132, 0.022585760116577147, 0.022823135375976564, 0.022595199584960937, 0.02256934356689453, 0.02257030487060547, 0.022589792251586915, 0.02250489616394043, 0.024867744445800782, 0.02332876777648926, 0.02274518394470215, 0.022632064819335936, 0.022769567489624023, 0.022870399475097655, 0.022716415405273437, 0.023000511169433593, 0.02267350387573242, 0.022567392349243164, 0.022333120346069334, 0.02231715202331543, 0.02249545669555664, 0.022681312561035158, 0.022849279403686522, 
0.022771360397338868, 0.022710752487182618, 0.022309024810791014, 0.02228268814086914, 0.023027872085571287, 0.024768192291259764, 0.022228511810302734, 0.022177600860595705, 0.02204640007019043, 0.021866592407226562, 0.021965919494628908, 0.022081567764282228, 0.022271711349487303, 0.022380544662475587]",tokens/s,44.52740848131149,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,2163.679232,2192.50688,0.0,1814.03648,1724.34432,s,1,8.6876279296875,8.6876279296875,0.0,8.6876279296875,8.6876279296875,8.6876279296875,8.6876279296875,[8.6876279296875],,kWh,5.2378508583327246e-05,5.770573031547885e-06,1.6856124595995836e-05,7.500520621087097e-05,,MB,2234.568704,2337.210368,0.0,1929.37984,1887.281152,s,10,1.687504837036133,0.16875048370361329,0.0007658928780544979,0.16883905792236328,0.16966636962890624,0.16974974212646485,0.16981644012451172,"[0.1671366424560547, 0.16897462463378907, 0.16917030334472657, 0.16802627563476563, 0.16818873596191405, 0.16919760131835937, 0.1687034912109375, 0.16964784240722655, 0.16983311462402345, 0.16862620544433593]",tokens/s,1517.0326886270047,kWh,5.101365707686887e-06,5.625916986270001e-07,3.3957594215861072e-06,9.059716827899995e-06,tokens/kWh,28256953.816882126,MB,2242.064384,2484.011008,0.0,2076.18048,1946.88,s,10,21.033904541015623,2.1033904541015622,0.014815617817911382,2.100609619140625,2.11775634765625,2.1223167724609375,2.1259651123046877,"[2.096475830078125, 2.126877197265625, 2.116742919921875, 2.073525146484375, 2.09481396484375, 2.11563916015625, 2.116057861328125, 2.102658935546875, 2.09255322265625, 2.098560302734375]",tokens/s,29.95164301385483,kWh,6.14890224543972e-05,6.78207965365139e-06,3.169151864821403e-05,9.99626207562626e-05,tokens/kWh,630235.5772925561,,s,630,21.030927616119378,0.03338242478749109,0.0005420377273182946,0.033304639816284176,0.033925838470458984,0.03415963306427002,0.03517127418518068,"[0.03385971069335938, 0.033560577392578124, 0.03357491302490234, 0.033230846405029296, 0.034095104217529294, 0.03323878479003906, 0.03341337585449219, 0.033593505859375, 0.033570655822753905, 0.03295363235473633, 0.033733345031738284, 0.033013343811035156, 0.032943649291992186, 0.032912254333496094, 0.03292144012451172, 0.03291971206665039, 0.033044063568115234, 0.032995742797851564, 0.03378364944458008, 0.03377577590942383, 0.033650688171386715, 0.03292131042480469, 0.03308915328979492, 0.03289155197143555, 0.033114017486572264, 0.033036384582519535, 0.03295379257202148, 0.032776447296142576, 0.032903488159179685, 0.03292940902709961, 0.03299161529541016, 0.033124351501464845, 0.033293663024902345, 0.03300214385986328, 0.0331038703918457, 0.03307900619506836, 0.0329587516784668, 0.03311174392700195, 0.033005889892578126, 0.03298099136352539, 0.033090847015380856, 0.03294486236572266, 0.03301580810546875, 0.03337830352783203, 0.03301715087890625, 0.03285881423950195, 0.032829536437988284, 0.033850528717041015, 0.03352652740478516, 0.0332344970703125, 0.03326556777954102, 0.032995872497558594, 0.03349903869628906, 0.03322889709472656, 
0.033060256958007815, 0.03326620864868164, 0.03399481582641602, 0.03362403106689453, 0.03370374298095703, 0.033810272216796874, 0.03382720184326172, 0.034293022155761715, 0.03377020645141601, 0.03381862258911133, 0.03363577651977539, 0.03324371337890625, 0.032933887481689454, 0.032774303436279295, 0.03368921661376953, 0.03720828628540039, 0.03580089569091797, 0.03385567855834961, 0.03385139083862305, 0.03459036636352539, 0.03438595199584961, 0.03406867218017578, 0.03383219146728516, 0.0340937614440918, 0.033742111206054685, 0.03396006393432617, 0.035255073547363285, 0.033924606323242186, 0.03343001556396484, 0.03358310317993164, 0.033274974822998044, 0.03343049621582031, 0.0337509765625, 0.03369574356079102, 0.03310953521728516, 0.033247711181640625, 0.03285606384277344, 0.03278643035888672, 0.03297644805908203, 0.03341094589233398, 0.03296723175048828, 0.03373875045776367, 0.03348889541625977, 0.03366214370727539, 0.033770305633544925, 0.03431398391723633, 0.03384345626831055, 0.03416064071655273, 0.033808383941650394, 0.03448384094238281, 0.033982463836669925, 0.03373708724975586, 0.03339491271972656, 0.03378972625732422, 0.03367279815673828, 0.033614238739013674, 0.03377971267700195, 0.03365599822998047, 0.03329926300048828, 0.03371615982055664, 0.033453376770019534, 0.03355519866943359, 0.03371177673339844, 0.034154624938964845, 0.034449630737304685, 0.03386777496337891, 0.03343155288696289, 0.03338444900512695, 0.03341862487792969, 0.03345996856689453, 0.03323936080932617, 0.03337273788452148, 0.03368521499633789, 0.03354431915283203, 0.03430825424194336, 0.03375059127807617, 0.03346886444091797, 0.033474529266357425, 0.03363174438476563, 0.03320876693725586, 0.033451904296875, 0.033191776275634764, 0.033188224792480465, 0.0332861442565918, 0.0332421760559082, 0.03330495834350586, 0.033464897155761716, 0.033388511657714844, 0.03366060638427734, 0.03333769607543945, 0.033521503448486326, 0.033161376953125, 0.03338854217529297, 0.033639904022216796, 0.03352617645263672, 0.03398665618896484, 0.033947681427001955, 0.03381452941894531, 0.03406972885131836, 0.034613407135009766, 0.034587265014648434, 0.033834720611572264, 0.033706272125244144, 0.03392451095581055, 0.03428208160400391, 0.03386777496337891, 0.033625278472900394, 0.03346688079833984, 0.033492767333984375, 0.033304222106933595, 0.033532798767089846, 0.03349462509155274, 0.03746815872192383, 0.0339439697265625, 0.03410124969482422, 0.03377356719970703, 0.0337606086730957, 0.033868446350097656, 0.033705215454101566, 0.0337599983215332, 0.03341516876220703, 0.03320732879638672, 0.033557472229003904, 0.033500831604003904, 0.033206623077392576, 0.033150558471679685, 0.03313910293579102, 0.03287161636352539, 0.032889503479003906, 0.03310198211669922, 0.03272496032714844, 0.03287817764282226, 0.03278006362915039, 0.03288336181640625, 0.03340220642089844, 0.03318198394775391, 0.03299881744384765, 0.03312428665161133, 0.03287926483154297, 0.03282124710083008, 0.03284172821044922, 0.032858081817626957, 0.03300102233886719, 0.03290358352661133, 0.03283062362670899, 0.03333987045288086, 0.0328974723815918, 0.03282124710083008, 0.032655361175537106, 0.03282473754882813, 0.03273936080932617, 0.03260883331298828, 0.03278438568115234, 0.03302387237548828, 0.03278451156616211, 0.032753662109375, 0.032732673645019535, 0.032700927734375, 0.03294003295898437, 0.03287788772583008, 0.03329683303833008, 0.03305472183227539, 0.032868606567382816, 0.03276201629638672, 0.03290476989746094, 0.032866241455078125, 0.03268803024291992, 0.03280704116821289, 
0.03270892715454102, 0.03266479873657226, 0.032897823333740236, 0.032860160827636715, 0.03278598403930664, 0.033753536224365235, 0.033355777740478515, 0.032812801361083985, 0.032798816680908206, 0.03283779144287109, 0.0325766716003418, 0.03320304107666015, 0.03269222259521484, 0.03264716720581055, 0.032830463409423825, 0.03281808090209961, 0.03263702392578125, 0.03293366241455078, 0.03273926544189453, 0.033585281372070314, 0.032747264862060546, 0.03314729690551758, 0.03314387130737305, 0.03361264038085938, 0.03289712142944336, 0.0328623046875, 0.03276335906982422, 0.03292963027954102, 0.03275414276123047, 0.03306918334960938, 0.034050239562988284, 0.03365017700195312, 0.033632606506347654, 0.03326755142211914, 0.03302431869506836, 0.03311001586914063, 0.03337625503540039, 0.03315273666381836, 0.03313897705078125, 0.03321446228027344, 0.03322985458374023, 0.03344688034057617, 0.03330841445922852, 0.03292505645751953, 0.03315801620483398, 0.03324313735961914, 0.033576961517333984, 0.03328409576416016, 0.03374694442749023, 0.033081344604492184, 0.033073150634765625, 0.03316940689086914, 0.03321651077270508, 0.03348889541625977, 0.03354214477539062, 0.0332677116394043, 0.033638111114501955, 0.033562145233154296, 0.03360015869140625, 0.033635520935058595, 0.03366118240356445, 0.03379008102416992, 0.03347715377807617, 0.03341107177734375, 0.033288192749023435, 0.03308544158935547, 0.03387587356567383, 0.03290940856933594, 0.03309721755981445, 0.03306752014160156, 0.03285811233520508, 0.03290521621704102, 0.033089183807373045, 0.0333043212890625, 0.033454689025878906, 0.03304025650024414, 0.0329686393737793, 0.032913600921630856, 0.03331071853637695, 0.033056766510009765, 0.03309568023681641, 0.03284313583374023, 0.032891521453857424, 0.03292979049682617, 0.032968894958496094, 0.03317695999145508, 0.03322515106201172, 0.03304652786254883, 0.03292505645751953, 0.03304307174682617, 0.03320012664794922, 0.03283967971801758, 0.03299916839599609, 0.033716350555419924, 0.03355420684814453, 0.03374028778076172, 0.03377020645141601, 0.033377281188964845, 0.03347967910766601, 0.03354828643798828, 0.033406368255615236, 0.033385055541992184, 0.033058815002441407, 0.03316873550415039, 0.03397216033935547, 0.033495777130126955, 0.033057022094726565, 0.03311999893188477, 0.032890689849853515, 0.032823486328125, 0.03287449645996094, 0.03291446304321289, 0.03304735946655273, 0.03329782485961914, 0.03356953430175781, 0.03359539031982422, 0.034549758911132815, 0.03382476806640625, 0.033462272644042966, 0.03362972640991211, 0.033513824462890626, 0.03347049713134766, 0.03353174209594727, 0.0334637451171875, 0.033057342529296874, 0.03319206237792969, 0.033285919189453124, 0.03392351913452148, 0.037513118743896484, 0.03350262451171875, 0.03330928039550781, 0.03320012664794922, 0.033058815002441407, 0.03306086349487305, 0.03359699249267578, 0.03366249465942383, 0.03359017562866211, 0.03569868850708008, 0.03471769714355469, 0.03386368179321289, 0.03354214477539062, 0.0336629753112793, 0.03361705780029297, 0.03377558517456054, 0.03364537429809571, 0.034116992950439455, 0.03341139221191406, 0.0332353286743164, 0.03340268707275391, 0.03365292739868164, 0.033685504913330076, 0.033438720703125, 0.033291263580322264, 0.03330355072021484, 0.03333017730712891, 0.0336732177734375, 0.03399318313598633, 0.03402873611450195, 0.034085025787353514, 0.03452399826049805, 0.034170047760009765, 0.03451372909545898, 0.034245662689208985, 0.03372540664672852, 0.03393692779541015, 0.03344966506958008, 0.03336272048950195, 
0.033664798736572264, 0.033495201110839846, 0.032940097808837894, 0.03482777786254883, 0.03350783920288086, 0.03280806350708008, 0.03309862518310547, 0.03299049758911133, 0.032809791564941404, 0.03305011367797851, 0.032855518341064455, 0.032779201507568356, 0.03310960006713867, 0.03358143997192383, 0.03342448043823242, 0.033923553466796874, 0.03364255905151367, 0.034142623901367186, 0.03404390335083008, 0.033883201599121095, 0.033684417724609374, 0.0337367057800293, 0.034162017822265626, 0.033317569732666016, 0.03331273651123047, 0.03359743881225586, 0.03336761474609375, 0.033398880004882815, 0.03318204879760742, 0.03297484970092773, 0.033266944885253905, 0.03320908737182617, 0.03306646347045898, 0.03337648010253906, 0.03314448165893555, 0.03336259078979492, 0.03365644836425781, 0.03355481719970703, 0.033372161865234375, 0.03390614318847656, 0.033617790222167966, 0.033581729888916015, 0.03376451110839844, 0.03436835098266602, 0.035823776245117185, 0.03371769714355469, 0.033347103118896486, 0.03326569747924805, 0.03351023864746094, 0.033367649078369144, 0.033055137634277344, 0.03310182571411133, 0.03496611022949219, 0.03398451232910156, 0.03394355010986328, 0.034095104217529294, 0.03377577590942383, 0.033883998870849606, 0.03415840148925781, 0.03369184112548828, 0.03364799880981445, 0.033479297637939456, 0.0332578239440918, 0.03310147094726563, 0.03318374252319336, 0.03325337600708008, 0.03328409576416016, 0.033067008972167966, 0.0328007698059082, 0.0329051513671875, 0.0329420166015625, 0.03274665451049805, 0.03314527893066406, 0.0331063346862793, 0.033242271423339846, 0.03375151824951172, 0.03375155258178711, 0.03426537704467773, 0.03389616012573242, 0.033799232482910155, 0.033616703033447264, 0.03368889617919922, 0.034030399322509765, 0.03352883148193359, 0.03343215942382813, 0.033283809661865234, 0.033149631500244144, 0.03405974578857422, 0.03387241744995117, 0.0333496322631836, 0.033105247497558596, 0.03296527862548828, 0.03279199981689453, 0.03287823867797852, 0.03285903930664062, 0.03285308837890625, 0.03309241485595703, 0.033447776794433594, 0.03357708740234375, 0.03310172653198242, 0.032917728424072264, 0.03273689651489258, 0.032944190979003904, 0.033113792419433595, 0.03277423858642578, 0.03305292892456055, 0.03291164779663086, 0.03301580810546875, 0.03326566314697266, 0.033081344604492184, 0.03301375961303711, 0.03326678466796875, 0.03349382400512695, 0.033380447387695314, 0.033562625885009766, 0.03363616180419922, 0.03343993759155273, 0.03353571319580078, 0.03390288162231445, 0.03304560089111328, 0.03294739151000976, 0.03288444900512695, 0.032737281799316405, 0.03295830535888672, 0.03291561508178711, 0.03273932647705078, 0.034062335968017575, 0.03370729446411133, 0.033757919311523436, 0.03357491302490234, 0.03354009628295898, 0.033352832794189456, 0.03361667251586914, 0.03428691101074219, 0.03369859313964844, 0.03370598220825195, 0.03339820861816406, 0.033016319274902346, 0.03312236785888672, 0.033337215423583984, 0.033969280242919925, 0.033299102783203124, 0.03359737777709961, 0.03309609603881836, 0.03316326522827148, 0.03309113693237305, 0.03281894302368164, 0.03297548675537109, 0.03333740615844726, 0.03352576065063476, 0.03347171020507812, 0.033484928131103514, 0.033232864379882814, 0.03335443115234375, 0.03313638305664063, 0.03288691329956055, 0.033177310943603516, 0.03299343872070312, 0.03284137725830078, 0.032860767364501955, 0.03266559982299805, 0.032702465057373044, 0.032859233856201174, 0.033364414215087894, 0.032920032501220706, 0.03307724761962891, 
0.032860160827636715, 0.03280883026123047, 0.033236576080322267, 0.033003456115722654, 0.03274163055419922, 0.03289116668701172, 0.032873950958251956, 0.03356118392944336, 0.03304857635498047, 0.03289907073974609, 0.03273318481445313, 0.03281305694580078, 0.03365718460083008, 0.03350153732299805, 0.03354828643798828, 0.033462272644042966, 0.03361753463745117, 0.03348691177368164, 0.03340934371948242, 0.03301324844360352, 0.03316787338256836, 0.03316940689086914, 0.03322995376586914, 0.03366182327270508, 0.033355777740478515, 0.03299926376342773, 0.03310812759399414, 0.03298918533325195, 0.0330015983581543, 0.03304947280883789, 0.03323801422119141, 0.03365683364868164, 0.03399270248413086, 0.03379609680175781, 0.03347433471679687, 0.03352348709106445, 0.033752670288085936, 0.03345632171630859, 0.03367388916015625, 0.033726463317871096, 0.033465343475341795, 0.03341209411621094, 0.03336355209350586, 0.03355430221557617, 0.03329692840576172, 0.0344535026550293, 0.03321392059326172, 0.033102367401123045, 0.03326976013183594, 0.03285606384277344, 0.03290521621704102, 0.033314208984375, 0.03288275146484375, 0.03388022232055664, 0.033936958312988284, 0.03324169540405274, 0.033056991577148434, 0.03277983856201172, 0.03275411224365234, 0.03304243087768555, 0.03285948944091797, 0.032627201080322264, 0.033406238555908206, 0.03302652740478516, 0.0327982063293457, 0.032934814453125, 0.032903167724609376, 0.03273523330688476, 0.03288444900512695, 0.0328768310546875, 0.03390259170532227, 0.033056575775146486, 0.03292793655395508, 0.03297206497192383, 0.03476095962524414]",tokens/s,29.955882664782205,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File 
""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,3130.662912,4369.350656,0.0,3990.880256,3908.719616,s,1,10.236123046875,10.236123046875,0.0,10.236123046875,10.236123046875,10.236123046875,10.236123046875,[10.236123046875],,kWh,9.583114598749489e-05,1.056201819548879e-05,3.11369693540009e-05,0.00013753013353698458,,MB,3038.547968,4736.352256,0.0,4328.521728,4275.211264,s,10,2.2004496765136716,0.22004496765136716,0.0017202474563007386,0.22012940979003906,0.22196122589111328,0.22231882400512695,0.22260490249633788,"[0.21974082946777343, 0.21711001586914064, 0.21838937377929687, 0.21789814758300782, 0.22267642211914063, 0.22020541381835937, 0.22084617614746094, 0.22164813232421876, 0.22005340576171875, 0.2218817596435547]",tokens/s,1163.3985668129387,kWh,6.732886124338178e-06,7.425168213095678e-07,4.457497252863894e-06,1.193290019851164e-05,tokens/kWh,21453292.63978343,MB,3045.90848,4738.449408,0.0,4330.61888,4275.213824,s,10,33.801979003906254,3.3801979003906255,0.032645405571140824,3.3820683593750003,3.4161689453125,3.4249503173828124,3.4319754150390627,"[3.38634912109375, 3.319181640625, 3.352884765625, 3.361162353515625, 3.3530166015625, 3.414217529296875, 3.37778759765625, 3.395310546875, 3.433731689453125, 
3.408337158203125]",tokens/s,18.637961994094944,kWh,9.909167303066185e-05,1.0930001910182555e-05,5.104018603413411e-05,0.0001610618609749785,tokens/kWh,391154.0548372731,,s,630,33.79927659225466,0.05364964538453117,0.0010443882622783844,0.05361448097229004,0.05456036338806153,0.054943120002746586,0.05713659645080567,"[0.05480243301391602, 0.05358102416992187, 0.053728031158447265, 0.053386878967285153, 0.05339993667602539, 0.0534466552734375, 0.053437599182128905, 0.05309638214111328, 0.053158111572265625, 0.05378947067260742, 0.05319260787963867, 0.05398527908325195, 0.05310665512084961, 0.052916255950927735, 0.052574207305908206, 0.052795391082763675, 0.05241142272949219, 0.05253152084350586, 0.05255644989013672, 0.05395225524902344, 0.05399343872070313, 0.05269942474365234, 0.05240627288818359, 0.05375907135009766, 0.05234985733032226, 0.05235004806518555, 0.05232665634155274, 0.05287184143066406, 0.05320243072509766, 0.05378623962402344, 0.05387148666381836, 0.05370265579223633, 0.05356460952758789, 0.05338604736328125, 0.05316198348999023, 0.053712417602539066, 0.05423283386230469, 0.0544222412109375, 0.05471641540527344, 0.05437827301025391, 0.05440902328491211, 0.053658016204833986, 0.054441761016845704, 0.053739105224609375, 0.05370124816894531, 0.06377881622314453, 0.054335487365722655, 0.05368012619018555, 0.05439692687988281, 0.05446870422363281, 0.0536143684387207, 0.0535777587890625, 0.05308339309692383, 0.05554399871826172, 0.05357020950317383, 0.05351580810546875, 0.05327052688598633, 0.05431343841552734, 0.054075393676757816, 0.054130687713623046, 0.054017822265625, 0.05412681579589844, 0.05391360092163086, 0.0547993278503418, 0.05399027252197266, 0.053617855072021485, 0.054067134857177734, 0.05375475311279297, 0.05353014373779297, 0.05260131072998047, 0.052326400756835936, 0.05258444976806641, 0.05231814575195313, 0.052240478515625, 0.05203945541381836, 0.05205180740356445, 0.05215881729125976, 0.05219446563720703, 0.05219955062866211, 0.05222598266601562, 0.0520497932434082, 0.052155296325683595, 0.05246895980834961, 0.05209276962280274, 0.05224281692504883, 0.05212326431274414, 0.052026016235351566, 0.05203936004638672, 0.052095584869384766, 0.05211328125, 0.05256969451904297, 0.05288399887084961, 0.052119327545166017, 0.05207241439819336, 0.055653663635253904, 0.05628598403930664, 0.05245561599731445, 0.052325439453125, 0.0523922233581543, 0.05236086273193359, 0.05240224075317383, 0.05297657775878906, 0.052319297790527346, 0.05215078353881836, 0.052418846130371094, 0.05224204635620117, 0.05223040008544922, 0.05216899108886719, 0.052974689483642576, 0.05280828857421875, 0.052746337890625, 0.052480224609375, 0.052506622314453126, 0.0522608642578125, 0.052221630096435545, 0.05275174331665039, 0.05282271957397461, 0.05284377670288086, 0.05246054458618164, 0.05275155258178711, 0.05264851379394531, 0.05244099044799805, 0.052564319610595704, 0.05306163024902344, 0.05315523147583008, 0.05326704025268555, 0.05422288131713867, 0.05315785598754883, 0.052926464080810545, 0.05264384078979492, 0.05261011123657226, 0.05288032150268555, 0.05364726257324219, 0.053588062286376956, 0.05414425659179688, 0.053564159393310544, 0.05352352142333985, 0.05328377532958985, 0.05302377700805664, 0.053220096588134765, 0.05322476959228516, 0.052911006927490234, 0.052587966918945316, 0.05282787322998047, 0.05284745788574219, 0.053174049377441406, 0.053166305541992184, 0.05323980712890625, 0.053741214752197265, 0.05294457626342773, 0.05263836669921875, 0.057360511779785156, 0.05283785629272461, 
0.052854976654052734, 0.05238560104370117, 0.052455711364746097, 0.05264191818237305, 0.05219126510620117, 0.052195297241210935, 0.05267660903930664, 0.053542911529541014, 0.05321932983398438, 0.054080894470214844, 0.053460895538330076, 0.05340643310546875, 0.05302403259277344, 0.05317705535888672, 0.05312102508544922, 0.05313049697875977, 0.053166561126708985, 0.05286921691894531, 0.0528089599609375, 0.05349846267700195, 0.053534976959228514, 0.053491809844970706, 0.05464678573608398, 0.05489049530029297, 0.053629150390625, 0.05337680053710937, 0.053181854248046875, 0.05314982223510742, 0.05329072189331055, 0.053004257202148436, 0.05283900833129883, 0.052312255859375, 0.052453182220458985, 0.05285808181762695, 0.05335363388061523, 0.05276633453369141, 0.053014816284179686, 0.05275033569335937, 0.05291747283935547, 0.05288768005371094, 0.052937374114990235, 0.053233409881591795, 0.05321139144897461, 0.05363622283935547, 0.053539710998535155, 0.053501953125, 0.05447407913208008, 0.05378319931030273, 0.05381324768066406, 0.05372313690185547, 0.053855712890625, 0.05365011215209961, 0.05393718338012695, 0.05410268783569336, 0.05383411026000977, 0.053114654541015625, 0.052873374938964844, 0.052709217071533206, 0.052776607513427734, 0.052943199157714844, 0.05279081726074219, 0.05257577514648438, 0.05285577774047852, 0.052559009552001955, 0.052589374542236327, 0.052340862274169925, 0.05268191909790039, 0.05230662536621094, 0.058383647918701174, 0.05350230407714844, 0.053082496643066406, 0.053073566436767576, 0.05334460830688476, 0.0530882568359375, 0.05266236877441406, 0.05338723373413086, 0.05539158248901367, 0.05363772964477539, 0.05295849609375, 0.05274867248535156, 0.05369071960449219, 0.052776641845703125, 0.052865345001220705, 0.06351049423217774, 0.05330742263793945, 0.052776958465576174, 0.05253049468994141, 0.052746849060058595, 0.05250672149658203, 0.05286297607421875, 0.05277196884155273, 0.05285366439819336, 0.052571582794189456, 0.05265375900268555, 0.0525013427734375, 0.05266620635986328, 0.05246787261962891, 0.05311689758300781, 0.05257206344604492, 0.05323356628417969, 0.05270732879638672, 0.05248803329467774, 0.052455585479736326, 0.052432415008544925, 0.05242723083496094, 0.05248409652709961, 0.05254143905639649, 0.05251862335205078, 0.05250393676757813, 0.05237164688110352, 0.05238224029541016, 0.05292665481567383, 0.053370174407958985, 0.05408633422851562, 0.05387667083740234, 0.05377750396728516, 0.053212127685546874, 0.052916255950927735, 0.05769823837280273, 0.05325417709350586, 0.05283625411987305, 0.052516960144042966, 0.052585601806640625, 0.05281881713867188, 0.05350297546386719, 0.0535843505859375, 0.0533837776184082, 0.05354079818725586, 0.05336812973022461, 0.0533633918762207, 0.05304707336425781, 0.05301475143432617, 0.05306163024902344, 0.05318143844604492, 0.052946174621582034, 0.05290927886962891, 0.05265462493896484, 0.052739456176757814, 0.053451393127441404, 0.05321664047241211, 0.05294963073730469, 0.052836353302001954, 0.05279948806762695, 0.052727008819580076, 0.052625823974609375, 0.05322351837158203, 0.05362054443359375, 0.05343484878540039, 0.05328700637817383, 0.053423969268798825, 0.0535115852355957, 0.05415388870239258, 0.053287071228027345, 0.05335228729248047, 0.05337484741210938, 0.05406943893432617, 0.05361971282958984, 0.05344966506958008, 0.05361692810058594, 0.05404848098754883, 0.053907455444335936, 0.05401804733276367, 0.0551649284362793, 0.05720873641967773, 0.055070110321044925, 0.05451232147216797, 0.055285598754882814, 
0.05459743881225586, 0.05475363159179687, 0.054314590454101565, 0.054290752410888675, 0.0546038703918457, 0.05451718521118164, 0.05411078262329101, 0.054296577453613284, 0.054085601806640626, 0.05408476638793945, 0.05424342346191406, 0.05364556884765625, 0.05339926528930664, 0.05332064056396484, 0.05364108657836914, 0.05361868667602539, 0.05426287841796875, 0.05459241485595703, 0.054095775604248046, 0.054331489562988285, 0.054507518768310545, 0.05428377532958984, 0.053690879821777344, 0.05373299026489258, 0.05389503860473633, 0.05368057632446289, 0.053682239532470706, 0.05369356918334961, 0.05329110336303711, 0.053352222442626954, 0.05645004653930664, 0.05396684646606445, 0.05361459350585938, 0.052924415588378904, 0.05524684906005859, 0.05341129684448242, 0.053148193359375, 0.05316812896728516, 0.05402627182006836, 0.05388284683227539, 0.0543719367980957, 0.053967262268066404, 0.055332160949707034, 0.054155967712402345, 0.054216064453125, 0.054325630187988284, 0.054311038970947266, 0.05353279876708984, 0.05370022583007812, 0.05370425415039062, 0.053598880767822266, 0.05382364654541016, 0.05408563232421875, 0.054986751556396485, 0.054919361114501956, 0.053802814483642575, 0.05419766235351563, 0.05519011306762695, 0.054475616455078125, 0.05407299041748047, 0.05416585540771485, 0.053819393157958986, 0.05390950393676758, 0.053370208740234376, 0.05365731048583984, 0.052894271850585935, 0.05316755294799805, 0.05318041610717773, 0.05298067092895508, 0.05260902404785156, 0.05287526321411133, 0.05296537780761719, 0.05282524871826172, 0.053016704559326173, 0.05273225784301758, 0.05531292724609375, 0.05382128143310547, 0.05349980926513672, 0.053673439025878907, 0.05392601776123047, 0.05335897445678711, 0.05333414459228516, 0.05353267288208008, 0.05703472137451172, 0.05397414398193359, 0.053963615417480466, 0.05384304046630859, 0.05471942520141602, 0.05372214508056641, 0.053700702667236325, 0.05388092803955078, 0.053570369720458984, 0.05368012619018555, 0.053198623657226565, 0.05335881423950195, 0.05337519836425781, 0.053403167724609374, 0.0535340805053711, 0.05433433532714844, 0.05351222229003906, 0.053165950775146485, 0.05329446411132813, 0.05303104019165039, 0.05296758270263672, 0.05301248168945313, 0.05300883102416992, 0.05416454315185547, 0.05375481414794922, 0.05336441421508789, 0.05393644714355469, 0.0537784309387207, 0.05390729522705078, 0.053235870361328125, 0.05349577713012695, 0.05358969497680664, 0.05365184020996094, 0.054067073822021486, 0.05369865417480469, 0.053609664916992185, 0.05351916885375976, 0.053303295135498044, 0.05445817565917969, 0.05360655975341797, 0.05398940658569336, 0.053823486328125, 0.056296863555908204, 0.053725791931152345, 0.053583038330078124, 0.05364406585693359, 0.05339116668701172, 0.053655776977539066, 0.05349286270141602, 0.05282905578613281, 0.05293875122070312, 0.052884544372558594, 0.053205951690673825, 0.053703807830810545, 0.0538710708618164, 0.05379219055175781, 0.05412550354003906, 0.05394761657714844, 0.05360108947753906, 0.05417977523803711, 0.05408291244506836, 0.053891807556152346, 0.05382070541381836, 0.05356003189086914, 0.053585918426513675, 0.05346259307861328, 0.054109825134277346, 0.053957439422607424, 0.05406924819946289, 0.05398332977294922, 0.05420431900024414, 0.053846080780029296, 0.05406508636474609, 0.053661697387695315, 0.0538869743347168, 0.05381891250610352, 0.053750240325927734, 0.05360547256469726, 0.054317440032958984, 0.05370729446411133, 0.05378262329101562, 0.054206207275390626, 0.05376015853881836, 0.05360435104370117, 
0.05353267288208008, 0.05413273620605469, 0.054177310943603514, 0.05401033782958985, 0.05406105422973633, 0.053907455444335936, 0.053905406951904294, 0.05391769790649414, 0.05420851135253906, 0.05396480178833008, 0.056586238861083986, 0.053553150177001956, 0.05374156951904297, 0.05399347305297852, 0.053982273101806644, 0.05411718368530274, 0.053759391784667966, 0.054728382110595705, 0.054092353820800784, 0.05455974578857422, 0.05424630355834961, 0.05455062484741211, 0.0545054702758789, 0.055119873046875, 0.05449728012084961, 0.05472003173828125, 0.05480495834350586, 0.05450688171386719, 0.05454092788696289, 0.05405491256713867, 0.05400985717773438, 0.05432633590698242, 0.054047679901123045, 0.05411840057373047, 0.05493465423583985, 0.054558624267578126, 0.05595849609375, 0.055512863159179686, 0.05478422546386719, 0.05467750549316406, 0.05546303939819336, 0.05455756759643555, 0.05460508728027344, 0.05447679901123047, 0.05418819046020508, 0.05427872085571289, 0.05456592178344727, 0.0542852783203125, 0.05427123260498047, 0.054427745819091794, 0.05469046401977539, 0.05466726303100586, 0.054816959381103515, 0.05472172927856445, 0.05469862365722656, 0.05461814498901367, 0.054790111541748045, 0.054540287017822264, 0.054766719818115234, 0.05428627014160156, 0.056070240020751956, 0.05419865417480469, 0.0541065902709961, 0.05545574569702148, 0.054093822479248044, 0.054235038757324217, 0.05436380767822266, 0.05433152008056641, 0.054322784423828124, 0.054366943359375, 0.0540263671875, 0.05409747314453125, 0.05416172790527344, 0.05374156951904297, 0.05396835327148437, 0.053551361083984374, 0.054130817413330076, 0.054353664398193356, 0.05412918472290039, 0.054155136108398436, 0.055801246643066404, 0.05488259124755859, 0.053911872863769535, 0.05345619201660156, 0.0531297607421875, 0.05495004653930664, 0.054165695190429686, 0.0539824333190918, 0.054852096557617185, 0.05717820739746094, 0.05409817504882813, 0.054021472930908206, 0.054118080139160155, 0.054459102630615236, 0.053866497039794924, 0.053389022827148434, 0.053285152435302734, 0.05335881423950195, 0.053430049896240235, 0.053661697387695315, 0.05384396743774414, 0.05350809478759765, 0.053424095153808596, 0.0535142707824707, 0.05340678405761719, 0.053339073181152344, 0.05369651031494141, 0.054214656829833986, 0.05489254379272461, 0.05402563095092774, 0.05417417526245117, 0.054042369842529296, 0.05391580963134766, 0.053953758239746095, 0.053926719665527346, 0.0537081298828125, 0.05396128082275391, 0.05381148910522461, 0.05386614227294922, 0.05403596878051758, 0.05417407989501953, 0.05408979034423828, 0.05416387176513672, 0.05414912033081055, 0.05408723068237305, 0.054067615509033204, 0.05381289672851562, 0.053924095153808596, 0.05397862243652344, 0.054037120819091795, 0.05407660675048828, 0.05345158386230469, 0.053208927154541015, 0.05412665557861328, 0.05431856155395508, 0.056236671447753905, 0.05455462265014648, 0.054489086151123044, 0.05464995193481445, 0.05432352066040039, 0.054245281219482425, 0.05433529663085938, 0.054293376922607425]",tokens/s,18.639452187103004,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, 
Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4359.479296,4562.28864,0.0,4183.81824,4182.069248,s,1,10.2638212890625,10.2638212890625,0.0,10.2638212890625,10.2638212890625,10.2638212890625,10.2638212890625,[10.2638212890625],,kWh,9.306532716250094e-05,1.0258485197407685e-05,2.925252340199992e-05,0.00013257633576190854,,MB,1650.229248,4715.380736,0.0,4307.550208,4281.174016,s,10,4.167554779052734,0.41675547790527345,0.006229591291880606,0.41788583374023436,0.42322588806152345,0.42383643951416017,0.42432488067626956,"[0.40105911254882814, 0.4190422668457031, 0.4153018798828125, 0.41954266357421877, 0.42015341186523436, 0.4149872131347656, 0.4167294006347656, 0.4244469909667969, 0.41320162963867185, 
0.4230902099609375]",tokens/s,614.2690704072463,kWh,1.2115698900666626e-05,1.3361515482894504e-06,8.055739777919988e-06,2.1507590226876064e-05,tokens/kWh,11902774.662318991,MB,1658.617856,4730.0608,0.0,4322.230272,4281.176576,s,10,25.517523681640625,2.551752368164063,0.01786764099154408,2.5614346923828126,2.569007763671875,2.569834814453125,2.5704964550781253,"[2.522132080078125, 2.530519775390625, 2.5293154296875, 2.564641357421875, 2.541203125, 2.568823974609375, 2.567356689453125, 2.570661865234375, 2.560109130859375, 2.56276025390625]",tokens/s,24.68891605078721,kWh,7.458373593766659e-05,8.226687555871763e-06,4.913483375228005e-05,0.00013194525724581844,tokens/kWh,477470.7429053618,,s,630,25.51400104522709,0.04049841435750325,0.0006892768181309524,0.04049379348754883,0.04097646636962891,0.04123205833435059,0.043241351547241215,"[0.042024959564208986, 0.04011318588256836, 0.04326089477539063, 0.03966345596313477, 0.039863967895507814, 0.0402474250793457, 0.04042176055908203, 0.04027190399169922, 0.040433631896972654, 0.040605342864990235, 0.040393054962158205, 0.04018175888061523, 0.04040703964233398, 0.03999948883056641, 0.040253440856933595, 0.040062110900878904, 0.039905502319335935, 0.04002195358276367, 0.03972166442871094, 0.04012236785888672, 0.04004044723510742, 0.03980054473876953, 0.040242881774902345, 0.04012502288818359, 0.03992575836181641, 0.0397242546081543, 0.03997113418579101, 0.039577438354492185, 0.03962944030761719, 0.03973952102661133, 0.03973516845703125, 0.0397946891784668, 0.03964518356323242, 0.03965068817138672, 0.03959228897094726, 0.039540321350097656, 0.03948409652709961, 0.03997459030151367, 0.03957587051391601, 0.03965651321411133, 0.03950073623657226, 0.039591934204101564, 0.039573184967041014, 0.03990572738647461, 0.03992563247680664, 0.03967967987060547, 0.039696704864501955, 0.03959958267211914, 0.04003894424438476, 0.040003360748291014, 0.040433887481689454, 0.039711872100830076, 0.0398078727722168, 0.03953184127807617, 0.04002835083007812, 0.03998307037353516, 0.040083999633789065, 0.04035174560546875, 0.04012214279174805, 0.04027391815185547, 0.04023654556274414, 0.040282848358154294, 0.04009296035766602, 0.040704254150390626, 0.040509151458740233, 0.0403482551574707, 0.04049846267700195, 0.03994214248657227, 0.040106689453125, 0.03988275146484375, 0.040298526763916015, 0.04028006362915039, 0.04077772903442383, 0.04047052764892578, 0.04070707321166992, 0.04032553482055664, 0.04060425567626953, 0.040548351287841795, 0.04070352172851562, 0.04056111907958984, 0.04061199951171875, 0.04062006378173828, 0.041121505737304685, 0.04030620956420899, 0.040804000854492185, 0.0402276496887207, 0.04024124908447266, 0.04010332870483398, 0.040393310546875, 0.0407459831237793, 0.04041011047363281, 0.040449344635009765, 0.040462753295898435, 0.0397540168762207, 0.040031742095947266, 0.03975628662109375, 0.04025753784179688, 0.03969843292236328, 0.04002816009521484, 0.04012236785888672, 0.03965951919555664, 0.03975715255737305, 0.03980380630493164, 0.03998080062866211, 0.04007689666748047, 0.03982963180541992, 0.039870750427246096, 0.03970048141479492, 0.03947468948364258, 0.04010847854614258, 0.040107486724853515, 0.04016803359985351, 0.04023295974731445, 0.04002121734619141, 0.040126976013183595, 0.04000390243530273, 0.04004246520996094, 0.0396693115234375, 0.039635391235351564, 0.03999276733398437, 0.04019257736206055, 0.0398721923828125, 0.0401447982788086, 0.039532001495361326, 0.03940879821777344, 0.039409439086914064, 0.04073046493530273, 0.03975702285766602, 
0.039475326538085935, 0.04128441619873047, 0.0404637451171875, 0.03932223892211914, 0.03946700668334961, 0.039202495574951174, 0.039591392517089846, 0.039813983917236326, 0.04002921676635742, 0.04005542373657227, 0.03995846557617187, 0.04023484802246094, 0.040005760192871095, 0.04001327896118164, 0.039917888641357424, 0.039834270477294924, 0.03988595199584961, 0.039785343170166014, 0.0404400634765625, 0.041285377502441406, 0.040118270874023435, 0.039808158874511716, 0.039919742584228514, 0.03991328048706055, 0.04567337417602539, 0.040304641723632816, 0.039800670623779295, 0.03969244766235352, 0.03932160186767578, 0.03912908935546875, 0.039900672912597655, 0.04657526397705078, 0.039264865875244144, 0.0393177604675293, 0.04019200134277344, 0.03984588623046875, 0.0400096321105957, 0.03952239990234375, 0.039785823822021484, 0.04115635299682617, 0.03956828689575195, 0.039516353607177736, 0.03970790481567383, 0.03966172790527344, 0.039492000579833986, 0.039522014617919925, 0.0395041618347168, 0.03970265579223633, 0.039933792114257814, 0.040108062744140624, 0.040211967468261715, 0.040968097686767575, 0.04028886413574219, 0.04033126449584961, 0.04007753753662109, 0.04011110305786133, 0.04055324935913086, 0.04012441635131836, 0.040081409454345705, 0.04003424072265625, 0.03970054244995117, 0.04093801498413086, 0.04028460693359375, 0.04024300765991211, 0.04011644744873047, 0.04018694305419922, 0.040270751953125, 0.03990323257446289, 0.03978230285644531, 0.039792736053466796, 0.04064051055908203, 0.04005887985229492, 0.039948287963867186, 0.0407347183227539, 0.04444160079956055, 0.04019200134277344, 0.04010598373413086, 0.0405362548828125, 0.04045395278930664, 0.04122544097900391, 0.03994499206542969, 0.040182785034179686, 0.04040192031860351, 0.04061187362670898, 0.04299929428100586, 0.04352867126464844, 0.04187660980224609, 0.0406146240234375, 0.04064636611938476, 0.040658432006835936, 0.0412108154296875, 0.041166847229003906, 0.041850879669189454, 0.040337120056152344, 0.040735008239746094, 0.040461345672607424, 0.040503326416015624, 0.04103238296508789, 0.040874305725097655, 0.04072959899902344, 0.041140960693359374, 0.040607391357421876, 0.04076201629638672, 0.04060355377197265, 0.040953857421875, 0.04088134384155274, 0.04049798583984375, 0.040736766815185545, 0.04036422348022461, 0.04033107376098633, 0.04022502517700195, 0.040253150939941404, 0.04043328094482422, 0.040446369171142575, 0.04045004653930664, 0.040591327667236325, 0.04041350555419922, 0.04055420684814453, 0.0404029426574707, 0.040556640625, 0.04063606262207031, 0.04078208160400391, 0.04067737579345703, 0.04074905776977539, 0.040993152618408205, 0.040267105102539065, 0.040677089691162106, 0.040541118621826175, 0.04066304016113281, 0.04079001617431641, 0.04042956924438477, 0.040130561828613284, 0.040861473083496094, 0.04024956893920899, 0.040321025848388675, 0.041261409759521486, 0.040438526153564455, 0.04029872131347656, 0.04083097457885742, 0.04050348663330078, 0.040588993072509766, 0.04054713439941406, 0.04048896026611328, 0.040529918670654294, 0.04034764862060547, 0.04040230560302734, 0.03992214584350586, 0.04055177688598633, 0.04027475357055664, 0.04007302474975586, 0.04020659255981445, 0.0401673583984375, 0.04013187026977539, 0.04001603317260742, 0.03988323211669922, 0.04004079818725586, 0.03968547058105469, 0.03974185562133789, 0.03946700668334961, 0.04005062484741211, 0.039884864807128904, 0.04023859024047852, 0.04039487838745117, 0.040110462188720705, 0.04026777648925781, 0.039847934722900394, 0.039826816558837894, 
0.04026841735839844, 0.03987865447998047, 0.040097793579101565, 0.03988431930541992, 0.040243679046630856, 0.04003830337524414, 0.040115936279296875, 0.0399284782409668, 0.04013347244262695, 0.04031372833251953, 0.040922496795654295, 0.041091552734375, 0.04050307083129883, 0.040441505432128905, 0.040471038818359374, 0.04086806488037109, 0.04055449676513672, 0.04061980819702148, 0.04087968063354492, 0.040618656158447265, 0.04086083221435547, 0.040065696716308594, 0.03993414306640625, 0.04018096160888672, 0.04012521743774414, 0.04038444900512695, 0.04040620803833008, 0.040341793060302736, 0.045468257904052733, 0.04045209503173828, 0.040476673126220705, 0.04062822341918945, 0.04071014404296875, 0.04084326553344726, 0.04077772903442383, 0.04048073577880859, 0.0410747184753418, 0.04045318222045898, 0.040465087890625, 0.040495361328125, 0.04018380737304687, 0.04093648147583008, 0.0404694709777832, 0.040755199432373046, 0.04066099166870117, 0.04092927932739258, 0.04071219253540039, 0.04099071884155273, 0.04063846588134765, 0.04103366470336914, 0.041692607879638674, 0.04270758438110352, 0.040582782745361326, 0.04059324645996094, 0.04081699371337891, 0.04092252731323242, 0.0409505615234375, 0.040755199432373046, 0.040613887786865234, 0.04044521713256836, 0.040557281494140625, 0.040632320404052735, 0.04039680099487305, 0.04026367950439453, 0.04042342376708984, 0.04038057708740234, 0.04048374557495117, 0.04132863998413086, 0.04171846389770508, 0.04063257598876953, 0.040288257598876956, 0.04029974365234375, 0.04049148941040039, 0.040876415252685545, 0.04066841506958008, 0.0405203857421875, 0.04052195358276367, 0.04049283218383789, 0.040675296783447265, 0.04049513626098633, 0.04307353591918945, 0.04070604705810547, 0.04050534439086914, 0.041286209106445315, 0.04079430389404297, 0.04042956924438477, 0.040564289093017576, 0.040454593658447266, 0.04069481658935547, 0.04123747253417969, 0.04067327880859375, 0.04066227340698242, 0.040661758422851565, 0.04071219253540039, 0.040531967163085936, 0.040771583557128906, 0.04076339340209961, 0.04057452774047852, 0.04083324813842774, 0.04036608123779297, 0.040761566162109374, 0.04085145568847656, 0.04090265655517578, 0.04053606414794922, 0.040716289520263675, 0.04140351867675781, 0.040711040496826174, 0.04049452972412109, 0.040878654479980465, 0.04050515365600586, 0.04068694305419922, 0.04121481704711914, 0.04067907333374023, 0.04095564651489258, 0.04065955352783203, 0.04048191833496094, 0.04035625457763672, 0.040112865447998046, 0.04045529556274414, 0.04029481506347656, 0.0401901741027832, 0.040120319366455076, 0.042261695861816405, 0.04118815994262695, 0.040703998565673825, 0.04083011245727539, 0.040866207122802735, 0.04109151840209961, 0.040927230834960936, 0.04061363220214844, 0.04043955230712891, 0.040600223541259764, 0.04149059295654297, 0.04192540740966797, 0.040659358978271484, 0.04067103958129883, 0.0404568977355957, 0.04040723037719727, 0.041459518432617186, 0.040417278289794925, 0.04211036682128906, 0.04025324630737305, 0.040453983306884767, 0.040584129333496095, 0.040279998779296874, 0.04032928085327148, 0.041011199951171876, 0.04057292938232422, 0.04053401565551758, 0.04046233749389649, 0.040513439178466795, 0.04066624069213867, 0.04059849548339844, 0.04085139083862305, 0.040888031005859374, 0.04066361618041992, 0.0406976318359375, 0.04059340667724609, 0.040785152435302736, 0.04108774566650391, 0.040959999084472655, 0.040720382690429685, 0.04087398529052735, 0.04064166259765625, 0.04088102340698242, 0.04099689483642578, 0.04111289596557617, 
0.04089267349243164, 0.0409194564819336, 0.04091289520263672, 0.04089414215087891, 0.04086201477050781, 0.041003009796142575, 0.04095974349975586, 0.0408328971862793, 0.04053833770751953, 0.04146777725219727, 0.041299968719482424, 0.040859935760498046, 0.040642177581787106, 0.04096160125732422, 0.04098239898681641, 0.04087612915039063, 0.040901214599609374, 0.04087628936767578, 0.04067440032958984, 0.04102159881591797, 0.04100787353515625, 0.040682880401611325, 0.0411440315246582, 0.0408353271484375, 0.04090275192260742, 0.04055507278442383, 0.040607135772705076, 0.040664833068847654, 0.04050825500488281, 0.040757438659667966, 0.040637889862060544, 0.040431297302246094, 0.040766239166259766, 0.04056054306030273, 0.040521728515625, 0.040687168121337894, 0.040656768798828125, 0.04076192092895508, 0.040777057647705076, 0.04083164978027344, 0.04072857666015625, 0.04076275253295898, 0.0411800651550293, 0.04095180892944336, 0.041164798736572264, 0.040827999114990236, 0.04074383926391602, 0.04084870529174805, 0.040936126708984374, 0.0413076171875, 0.04063286590576172, 0.04082688140869141, 0.04075843048095703, 0.0408009262084961, 0.04059126281738281, 0.040449951171875, 0.040524158477783206, 0.04086579132080078, 0.04072412872314453, 0.04056860733032226, 0.040434238433837894, 0.040584224700927735, 0.04021343994140625, 0.04022012710571289, 0.04013520050048828, 0.04042345428466797, 0.04028985595703125, 0.04036447906494141, 0.041060352325439455, 0.04061798477172852, 0.040787967681884765, 0.04058854293823242, 0.040395519256591794, 0.04040438461303711, 0.04085411071777344, 0.04084659194946289, 0.040604415893554686, 0.040648704528808595, 0.040959648132324215, 0.04114182281494141, 0.040632705688476566, 0.04064457702636719, 0.040542655944824216, 0.04064371109008789, 0.04067007827758789, 0.04115184020996094, 0.04079478454589844, 0.04066838455200195, 0.040527713775634765, 0.04026179122924805, 0.040007774353027346, 0.04040547180175781, 0.040089824676513675, 0.04039475250244141, 0.040493057250976565, 0.04032441711425781, 0.04032166290283203, 0.0404337272644043, 0.041748191833496096, 0.04050582504272461, 0.04025904083251953, 0.04022716903686523, 0.040578784942626955, 0.04053942489624023, 0.040575424194335935, 0.04448969650268555, 0.040820735931396485, 0.040941505432128905, 0.040975807189941406, 0.040683231353759765, 0.040810657501220704, 0.0406638069152832, 0.040771808624267575, 0.04049795150756836, 0.04012134552001953, 0.040037887573242184, 0.04006067276000977, 0.040174335479736326, 0.04319350433349609, 0.04014166259765625, 0.040654399871826175, 0.04043001556396485, 0.04048896026611328, 0.04030668640136719, 0.040785472869873045, 0.039983329772949217, 0.04008940887451172, 0.040527584075927735, 0.04015897750854492, 0.040331199645996095, 0.04055955123901367, 0.04012777709960937, 0.040497791290283205, 0.04046454238891602, 0.0405948486328125, 0.04027862548828125, 0.04098867034912109, 0.040966304779052734, 0.040610782623291014, 0.0405533447265625, 0.040818496704101564, 0.0406447982788086, 0.04087603378295898, 0.040656192779541016, 0.04070060729980469, 0.04071353530883789, 0.04074367904663086, 0.040642143249511715, 0.04065734481811523, 0.04063449478149414, 0.04049059295654297, 0.04028435134887695, 0.04091494369506836, 0.04066918563842774, 0.04058236694335938, 0.040618751525878904, 0.040654880523681644, 0.04057088088989258, 0.040591358184814456, 0.04057907104492187, 0.040509441375732425, 0.04055859375, 0.040499198913574216, 0.04066918563842774, 0.040648414611816404, 0.0406715202331543, 0.04090060806274414, 
0.04107468795776367]",tokens/s,24.69232477035801,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,2043.994112,2084.438016,0.0,1698.69312,1578.029056,s,1,9.0598056640625,9.0598056640625,0.0,9.0598056640625,9.0598056640625,9.0598056640625,9.0598056640625,[9.0598056640625],,kWh,4.774328895002782e-05,5.2541440592774355e-06,1.5968623886003508e-05,6.896605689530875e-05,,MB,1559.560192,2199.781376,0.0,1784.676352,1744.498688,s,10,1.3894958648681641,0.1389495864868164,0.0020308874650182386,0.13906698608398438,0.14082433013916015,0.1413388542175293,0.14175047348022463,"[0.1337958068847656, 0.13924552917480468, 0.13840390014648438, 0.14185337829589845, 0.14070999145507812, 0.13972726440429686, 0.13888844299316405, 0.13815449523925782, 0.14013055419921874, 0.13858650207519532]",tokens/s,1842.3948316268604,kWh,4.075704879572321e-06,4.4947643227721546e-07,2.704832410777631e-06,7.230013722627167e-06,tokens/kWh,35407954.9252885,MB,1569.435648,2409.496576,0.0,1994.391552,1852.698624,s,10,41.52539892578125,4.152539892578124,0.029639461292232716,4.137546630859376,4.18505068359375,4.207651318359376,4.2257318261718755,"[4.230251953125, 4.1800283203125, 4.15973095703125, 4.12930908203125, 4.13408056640625, 4.13739599609375, 4.1348798828125, 4.137697265625, 4.14493603515625, 4.1370888671875]",tokens/s,15.171437633290537,kWh,0.00012088944776667803,1.3334380350051296e-05,8.043176187622285e-05,0.0002146555899929521,tokens/kWh,293493.40495660284,,s,630,41.521001930236835,0.06590635227021717,0.0010394954462310262,0.06572310256958008,0.06707007522583008,0.06723046150207519,0.07094651573181153,"[0.07145881652832031, 0.06719692993164063, 0.0670711669921875, 0.06679401397705079, 0.06694316864013672, 0.06691571044921875, 0.06682288360595703, 0.06689756774902343, 
0.06672569274902344, 0.0670307846069336, 0.06697859191894531, 0.06690815734863281, 0.06678451538085937, 0.06698470306396484, 0.06710886383056641, 0.06732390594482422, 0.06707609558105469, 0.06720921325683593, 0.06707753753662109, 0.06723673248291015, 0.0671864013671875, 0.06699132537841797, 0.0672627182006836, 0.06689846038818359, 0.06724361419677734, 0.06725635528564453, 0.06704370880126953, 0.06700220489501953, 0.06708470153808593, 0.06703488159179688, 0.06711705780029296, 0.06722764587402344, 0.06708838653564453, 0.0671982421875, 0.0671280288696289, 0.06711090850830079, 0.06700428771972657, 0.06699343872070312, 0.06712611389160156, 0.06716336059570313, 0.06735906982421876, 0.0669610595703125, 0.06711929321289062, 0.06701023864746093, 0.06716304016113281, 0.06700259399414063, 0.06702877044677734, 0.06700252532958985, 0.06718777465820312, 0.06720387268066406, 0.0670044174194336, 0.06702915191650391, 0.06694895935058594, 0.06716729736328125, 0.06700947570800782, 0.06720716857910156, 0.06694461059570313, 0.0671297607421875, 0.0672194595336914, 0.06727254486083985, 0.06703324890136719, 0.06698371124267578, 0.06718486022949219, 0.07271907043457031, 0.06723276519775391, 0.06736406707763672, 0.0668710708618164, 0.06695356750488281, 0.06692009735107422, 0.06676620483398438, 0.06656269073486328, 0.06706995391845703, 0.06665618896484375, 0.06681401824951172, 0.06690367889404297, 0.0667610855102539, 0.06683846282958984, 0.066744384765625, 0.06688153839111328, 0.06682803344726562, 0.06686720275878906, 0.06700672149658203, 0.06691948699951172, 0.06769526672363281, 0.06706204986572266, 0.06684620666503906, 0.06716365051269531, 0.06528431701660156, 0.06481517028808594, 0.06519625854492188, 0.06498707580566407, 0.0651962890625, 0.0662547836303711, 0.0667303695678711, 0.06641868591308593, 0.06649446105957031, 0.06616409301757813, 0.06760921478271484, 0.06613935852050781, 0.06482204437255859, 0.06494771575927734, 0.06526361846923828, 0.06657276916503906, 0.06654569244384766, 0.06631759643554687, 0.06653001403808594, 0.06668720245361329, 0.0662976303100586, 0.06602931213378906, 0.06603801727294922, 0.06550355529785157, 0.06515446472167968, 0.06533760070800781, 0.06603984069824219, 0.06685935974121093, 0.06512118530273438, 0.06594432067871094, 0.06547865295410156, 0.06503014373779296, 0.06487245178222656, 0.06512137603759766, 0.06608927917480469, 0.06603427124023438, 0.0661605453491211, 0.06552790069580078, 0.06557081604003906, 0.07139260864257813, 0.06570665740966797, 0.06664543914794922, 0.06524781036376953, 0.06636319732666016, 0.06489734649658203, 0.06473241424560547, 0.0644288330078125, 0.06484156799316407, 0.0656174087524414, 0.06650931549072266, 0.06643456268310546, 0.06678339385986329, 0.06690364837646484, 0.06727455902099609, 0.06776326751708985, 0.06809996795654297, 0.068615234375, 0.0664513931274414, 0.06669593811035156, 0.06509113311767578, 0.06475395202636719, 0.06505705261230468, 0.06503206634521484, 0.06484716796875, 0.06516422271728516, 0.06603366088867188, 0.06579747009277344, 0.06612649536132813, 0.06625807952880859, 0.0668803482055664, 0.06651084899902343, 0.06591270446777343, 0.06718067169189453, 0.06575513458251953, 0.06548889923095703, 0.06584729766845702, 0.06544207763671875, 0.06540431976318359, 0.06579232025146485, 0.06596966552734375, 0.06553231811523437, 0.06604195404052735, 0.0658958740234375, 0.06612230682373046, 0.06600428771972656, 0.06593917083740235, 0.0654830093383789, 0.06563292694091796, 0.06701062774658204, 0.065455810546875, 0.06529875183105469, 0.06529638671875, 
0.06535372924804687, 0.06524915313720703, 0.06566925048828125, 0.0656240005493164, 0.06579840087890625, 0.06599990081787109, 0.06602397155761719, 0.06626534271240235, 0.06592636871337891, 0.0659566421508789, 0.07118134307861328, 0.0654448013305664, 0.06503241729736328, 0.06503810882568359, 0.0650955810546875, 0.06476640319824219, 0.06498883056640625, 0.06481305694580078, 0.06475116729736329, 0.06466515350341796, 0.06488768005371094, 0.0648826904296875, 0.06478438568115234, 0.06546998596191406, 0.06704093170166016, 0.06773843383789062, 0.06592320251464844, 0.06520409393310547, 0.06483353424072266, 0.0648755874633789, 0.06487324523925782, 0.06488285064697266, 0.0648089599609375, 0.06492569732666016, 0.06500297546386719, 0.06500393676757812, 0.0651095962524414, 0.06500406646728515, 0.06568550109863282, 0.0661445083618164, 0.06637337493896485, 0.06719078063964844, 0.06544384002685547, 0.06530374145507813, 0.06516819000244141, 0.06492915344238281, 0.06506118774414063, 0.06510009765625, 0.06486329650878907, 0.06486086273193359, 0.06477426910400391, 0.0663115234375, 0.06612866973876953, 0.06664396667480468, 0.06614835357666016, 0.06610726165771484, 0.06582694244384765, 0.06656409454345703, 0.06536348724365235, 0.06530044555664062, 0.06513043212890625, 0.06503043365478516, 0.0652720947265625, 0.06546454620361328, 0.06503363037109375, 0.06526604461669921, 0.0661277084350586, 0.06532316589355469, 0.0658493423461914, 0.06571974182128906, 0.06638569641113282, 0.06596028900146485, 0.06570006561279297, 0.07098169708251953, 0.0652410888671875, 0.06476595306396485, 0.0646041259765625, 0.06483971405029297, 0.06479462432861328, 0.0646126708984375, 0.06468370819091797, 0.06506495666503906, 0.06545801544189453, 0.06534508514404297, 0.06516182708740234, 0.06487654113769531, 0.06723993682861328, 0.06624050903320312, 0.06643516540527344, 0.06630521392822265, 0.06557564544677734, 0.06477107238769532, 0.06471753692626953, 0.06495260620117188, 0.0648622055053711, 0.06475714874267578, 0.06481571197509765, 0.06502976226806641, 0.06521075439453125, 0.06500969696044921, 0.06514236450195313, 0.06619379425048828, 0.06744271850585938, 0.06563533020019531, 0.0670893783569336, 0.06576742553710938, 0.0653128662109375, 0.06493920135498046, 0.06487120056152344, 0.06493382263183593, 0.06566604614257812, 0.06623538970947265, 0.0649175033569336, 0.0650624008178711, 0.06519564819335938, 0.06503091430664062, 0.06588428497314452, 0.06601113891601562, 0.06710384368896484, 0.06606307220458985, 0.066119873046875, 0.06547277069091798, 0.06564198303222656, 0.06525961303710938, 0.06545008087158204, 0.06555449676513672, 0.06561984252929688, 0.06541939544677734, 0.06516941070556641, 0.06522470092773437, 0.0658892822265625, 0.06573772430419922, 0.06628352355957032, 0.06622185516357422, 0.06759037017822266, 0.06617279815673828, 0.07085075378417968, 0.06532073974609375, 0.06494195556640625, 0.06467190551757812, 0.06474137878417968, 0.06507453155517579, 0.06503075408935546, 0.06489094543457032, 0.06486134338378906, 0.06531772613525391, 0.0653404769897461, 0.0669767074584961, 0.06550681304931641, 0.06528627014160156, 0.06614649963378906, 0.06614240264892578, 0.06599382019042968, 0.06542438507080078, 0.06495426940917969, 0.06478137969970703, 0.06482742309570312, 0.06508022308349609, 0.06495619201660156, 0.06498713684082032, 0.06506018829345703, 0.0656314239501953, 0.06580095672607422, 0.06706658935546875, 0.06553008270263672, 0.0659947509765625, 0.06603981018066406, 0.06653270721435547, 0.06599852752685546, 0.0653463363647461, 
0.06498841857910156, 0.06554048156738282, 0.06497542572021485, 0.06538384246826172, 0.06501952362060547, 0.06520905303955078, 0.06534579467773438, 0.06560768127441406, 0.06522230529785156, 0.06735318756103516, 0.06591871643066406, 0.0662194595336914, 0.06646793365478515, 0.06626060485839844, 0.06582284545898437, 0.0651123504638672, 0.0650346908569336, 0.06515507507324218, 0.06503174591064453, 0.06505516815185547, 0.06587548828125, 0.06585215759277344, 0.06603302764892578, 0.0656135711669922, 0.0658784942626953, 0.06733344268798828, 0.06601606750488281, 0.06637075042724609, 0.06619564819335938, 0.07086038208007812, 0.06534844970703126, 0.06514892578125, 0.06478230285644532, 0.06464825439453124, 0.06479894256591796, 0.06513740539550782, 0.06472499084472656, 0.06487452697753907, 0.06628963470458984, 0.06512844848632812, 0.06549081420898438, 0.06486982727050782, 0.06547936248779297, 0.06807558441162109, 0.06619497680664063, 0.06581654357910156, 0.06503660583496093, 0.06478771209716797, 0.0647487335205078, 0.06499030303955078, 0.06478899383544921, 0.06537593841552734, 0.06539234924316406, 0.06501651000976562, 0.06660095977783204, 0.0649912338256836, 0.06515440368652343, 0.06604806518554687, 0.06595849609375, 0.06596812438964844, 0.06603123474121093, 0.0659664306640625, 0.06542243194580079, 0.06511302185058594, 0.06492124938964844, 0.06547926330566406, 0.06507833862304688, 0.06518854522705078, 0.06516941070556641, 0.06539036560058593, 0.06679519653320312, 0.06519609832763672, 0.06597475433349609, 0.06616806030273438, 0.06605225372314454, 0.06602812957763672, 0.06575513458251953, 0.06520614624023438, 0.06492908477783203, 0.06527053070068359, 0.0653592300415039, 0.06519789123535157, 0.06672207641601563, 0.06528374481201171, 0.06514112091064453, 0.06543212890625, 0.06711705780029296, 0.06568716430664062, 0.06639449310302735, 0.06673999786376954, 0.06576751708984375, 0.06594982147216796, 0.07074976348876953, 0.0656584014892578, 0.06496553802490235, 0.06484786987304687, 0.06470400238037109, 0.06478489685058594, 0.0649543685913086, 0.06634444427490234, 0.06510361480712891, 0.06532921600341797, 0.06574697875976562, 0.06548550415039063, 0.06493385314941406, 0.06526105499267579, 0.06688819122314453, 0.06625472259521484, 0.06593958282470704, 0.06562179565429688, 0.06522412872314454, 0.06484620666503907, 0.06473356628417969, 0.06489910125732422, 0.06514399719238281, 0.06670214080810546, 0.06520368194580078, 0.06578963470458984, 0.06531158447265625, 0.06529964447021484, 0.06588499450683594, 0.06643014526367187, 0.06633257293701172, 0.06595814514160156, 0.06572713470458984, 0.06542527770996094, 0.06525350189208984, 0.06489702606201173, 0.0649318389892578, 0.0655453109741211, 0.06558403015136718, 0.06679551696777344, 0.06500761413574219, 0.06532300567626953, 0.06560562896728515, 0.06615177917480469, 0.06591964721679687, 0.06611923217773437, 0.066019775390625, 0.06557491302490234, 0.06576691436767577, 0.0653013153076172, 0.0650351333618164, 0.06497974395751953, 0.06610128021240234, 0.06582067108154296, 0.06526156616210937, 0.06652105712890626, 0.0652984619140625, 0.06594739532470703, 0.06604342651367187, 0.06629449462890626, 0.06614585876464844, 0.06582927703857422, 0.06562723541259766, 0.07104214477539063, 0.06561270141601562, 0.06488626861572265, 0.06489139556884765, 0.0651546859741211, 0.06658905792236328, 0.06475791931152344, 0.06481600189208984, 0.06520527648925781, 0.0659161605834961, 0.06566889953613281, 0.06496963500976563, 0.06498886108398437, 0.06568787384033203, 0.06674227142333984, 
0.06693068695068359, 0.06625484466552735, 0.06558070373535156, 0.06522675323486328, 0.06540937805175781, 0.06558499145507812, 0.06554169464111329, 0.0651608657836914, 0.06578816223144532, 0.06557523345947265, 0.06544563293457031, 0.06544857788085938, 0.06567961883544922, 0.066107421875, 0.06623436737060547, 0.06634665679931641, 0.06664198303222656, 0.06588355255126953, 0.06523878479003906, 0.06519999694824219, 0.06504956817626953, 0.06641190338134766, 0.06546038055419921, 0.06576166534423829, 0.06547449493408203, 0.0651429443359375, 0.06525965118408203, 0.06634278106689454, 0.06624050903320312, 0.06631423950195313, 0.06611779022216797, 0.06621734619140625, 0.06570626831054688, 0.06521446228027344, 0.06506944274902343, 0.06510777282714844, 0.06636851501464844, 0.06516429138183594, 0.06584320068359376, 0.06574809265136719, 0.06549625396728516, 0.0653631362915039, 0.06552963256835938, 0.06609081268310547, 0.06616566467285157, 0.06627286529541015, 0.06653379058837891, 0.066868896484375, 0.07119878387451171, 0.06566937255859374, 0.06494617462158203, 0.06506281280517578, 0.06480086517333984, 0.06489491271972657, 0.0647998046875, 0.06482998657226563, 0.06543612670898437, 0.06622412872314454, 0.06617049407958984, 0.06571968078613281, 0.06484457397460938, 0.06525974273681641, 0.06659442901611329, 0.06671603393554687, 0.06641426849365234, 0.06580009460449218, 0.06494249725341797, 0.0647886734008789, 0.06476576232910156, 0.06510514831542968, 0.0650165786743164, 0.06520861053466796, 0.06506877136230468, 0.0656527328491211, 0.0658042221069336, 0.06554220581054687, 0.06582681274414062, 0.06611353302001953, 0.06636748504638672, 0.06642483520507812, 0.06612691497802735, 0.06548569488525391, 0.06525062561035157, 0.06495708465576172, 0.06494627380371094, 0.06494950103759765, 0.06524185943603515, 0.06502194976806641, 0.06525897979736328, 0.06579273223876952, 0.06599456024169922, 0.06608873748779297, 0.06611580657958985, 0.06591487884521484, 0.06604185485839843, 0.06594355010986327, 0.06592025756835937, 0.06521539306640625, 0.06545990753173828, 0.06529424285888671, 0.0650140151977539, 0.06525958251953125, 0.06573369598388672, 0.0652314910888672, 0.06592127990722656, 0.06572646331787109, 0.0659947509765625, 0.0660904312133789, 0.06624009704589844, 0.0664393310546875, 0.06602035522460938]",tokens/s,15.173044259830721,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1859.751936,2812.14976,0.0,2428.502016,2401.330688,s,1,8.4876806640625,8.4876806640625,0.0,8.4876806640625,8.4876806640625,8.4876806640625,8.4876806640625,[8.4876806640625],,kWh,4.621206245830459e-05,5.0902597788769525e-06,1.5129734326019273e-05,6.643205656320082e-05,,MB,1874.497536,3864.920064,0.0,3445.620736,2993.465344,s,10,5.526091491699219,0.5526091491699219,0.002300170049142373,0.55202001953125,0.5537782958984375,0.5564201232910156,0.5585335852050781,"[0.5520280151367187, 0.5531912231445313, 0.5518743896484375, 0.55192333984375, 0.5500921630859374, 0.5590619506835938, 0.5520120239257813, 0.552649169921875, 0.5509331665039062, 0.5523260498046875]",tokens/s,463.25689754601314,kWh,1.123319801378306e-05,1.2385072190119475e-06,6.1956994010002275e-06,1.8667404633795233e-05,tokens/kWh,13713743.555788191,MB,1881.800704,3867.017216,0.0,3447.717888,2993.467904,s,10,36.68996948242187,3.6689969482421874,0.009059822855344207,3.66531640625,3.6807741455078125,3.6836700317382816,3.6859867407226563,"[3.665924072265625, 3.68656591796875, 3.6595478515625, 3.663341064453125, 3.659964599609375, 3.675495361328125, 3.6745078125, 3.659783447265625, 3.680130615234375, 3.664708740234375]",tokens/s,17.170905533236606,kWh,8.572915743537752e-05,9.4564389074066e-06,4.172528338020193e-05,0.00013691087972298607,tokens/kWh,460153.3503215295,,s,630,36.68829464721683,0.05823538832891555,0.0007414233367921718,0.0580358715057373,0.058749253845214844,0.05951336364746093,0.0615607272720337,"[0.05853155136108398, 0.0579463996887207, 0.05793791961669922, 0.058085376739501954, 0.05808127975463867, 0.05809766387939453, 0.05791084671020508, 0.05824966430664062, 0.05798700714111328, 0.05790636825561524, 0.05788351821899414, 
0.06236774444580078, 0.06264627075195313, 0.05842534255981445, 0.057697566986083984, 0.058014240264892575, 0.05784409713745117, 0.057638721466064455, 0.05785971069335937, 0.05781132888793945, 0.057885921478271485, 0.05790390396118164, 0.05783552169799805, 0.0579788818359375, 0.05789081573486328, 0.05801907348632813, 0.05766332626342773, 0.05773814392089844, 0.058035648345947266, 0.057551422119140626, 0.05824467086791992, 0.05761478424072266, 0.05813417434692383, 0.05786249542236328, 0.05774748611450195, 0.057716705322265624, 0.057731071472167966, 0.05796044921875, 0.05804592132568359, 0.05783606338500977, 0.057614334106445314, 0.05775155258178711, 0.05806892776489258, 0.05786016082763672, 0.0577988166809082, 0.0576448974609375, 0.05900406265258789, 0.05791420745849609, 0.05826710510253906, 0.05796918487548828, 0.05819596862792969, 0.058316001892089846, 0.05795920181274414, 0.05788671875, 0.05908908843994141, 0.05912351989746094, 0.059187198638916014, 0.05862332916259766, 0.05796726226806641, 0.057828895568847655, 0.058458656311035154, 0.05904582214355469, 0.0578600959777832, 0.060233280181884764, 0.060197311401367186, 0.05837004852294922, 0.05815849685668945, 0.05796300888061524, 0.058456321716308594, 0.05854323196411133, 0.05815305709838867, 0.061972511291503905, 0.05866105651855469, 0.058157310485839844, 0.059609249114990236, 0.05877062225341797, 0.05864316940307617, 0.05825750350952148, 0.058036224365234375, 0.05849087905883789, 0.05809532928466797, 0.059194686889648435, 0.058159934997558595, 0.05797196960449219, 0.058147232055664064, 0.0580195198059082, 0.05836403274536133, 0.059052734375, 0.059189342498779295, 0.05906143951416016, 0.05813638305664062, 0.05798390579223633, 0.05808127975463867, 0.05810496139526367, 0.05803507232666016, 0.05789215850830078, 0.05789766311645508, 0.05792153549194336, 0.061601215362548825, 0.05813103866577148, 0.057818817138671874, 0.05812393569946289, 0.05805641555786133, 0.05870070266723633, 0.058218494415283206, 0.058175487518310545, 0.058331134796142575, 0.05809971237182617, 0.0579317741394043, 0.05800921630859375, 0.05815260696411133, 0.05898108673095703, 0.05831894302368164, 0.05931407928466797, 0.05912985610961914, 0.05926448059082031, 0.058709918975830076, 0.058366561889648436, 0.05879548645019531, 0.05802355194091797, 0.05795449447631836, 0.05837622451782227, 0.058081729888916016, 0.05814249420166016, 0.057821407318115234, 0.057788894653320315, 0.058435039520263674, 0.05823651123046875, 0.05801670455932617, 0.05781094360351562, 0.05807440185546875, 0.05787939071655274, 0.05765337753295898, 0.05771180725097656, 0.05804521560668945, 0.05786537551879883, 0.057950496673583984, 0.057864513397216794, 0.05781097412109375, 0.05769395065307617, 0.05800886535644531, 0.05789782333374023, 0.05784998321533203, 0.05818150329589844, 0.05786841583251953, 0.05769596862792969, 0.0583551025390625, 0.05809036636352539, 0.05874687957763672, 0.05792956924438476, 0.0579351692199707, 0.057856575012207034, 0.058065185546875, 0.05816227340698242, 0.05774361419677734, 0.05790531158447266, 0.057816928863525394, 0.05773401641845703, 0.05766940689086914, 0.05776569747924805, 0.05779804611206055, 0.057681793212890624, 0.05766223907470703, 0.05791756820678711, 0.06384204864501954, 0.05851776123046875, 0.05891481781005859, 0.05783871841430664, 0.05776995086669922, 0.05790390396118164, 0.05817142486572266, 0.057839935302734374, 0.05807900619506836, 0.05817958450317383, 0.057898368835449215, 0.05865536117553711, 0.058001407623291014, 0.05800755310058594, 0.057820384979248046, 
0.05793667221069336, 0.0584884147644043, 0.05829059219360352, 0.057903102874755856, 0.05803363037109375, 0.0578785285949707, 0.05782172775268555, 0.05807923126220703, 0.05813446426391602, 0.05799459075927734, 0.05816608047485351, 0.05818163299560547, 0.05824431991577148, 0.05795510482788086, 0.05778841781616211, 0.057938976287841795, 0.058616798400878904, 0.05800527954101563, 0.057716960906982424, 0.05793791961669922, 0.057675777435302736, 0.05786576080322266, 0.05770083236694336, 0.05790243148803711, 0.05785603332519531, 0.0578771858215332, 0.057753536224365236, 0.05823283386230469, 0.05781094360351562, 0.058186878204345704, 0.059179424285888675, 0.0581698226928711, 0.057839614868164066, 0.058044414520263675, 0.05804796981811523, 0.05785427093505859, 0.05781078338623047, 0.0595807991027832, 0.05852979278564453, 0.057837024688720706, 0.05803676986694336, 0.057862014770507814, 0.05788611221313476, 0.057764575958251956, 0.058009632110595705, 0.059805374145507816, 0.058396961212158205, 0.058687488555908204, 0.058079071044921875, 0.05822684860229492, 0.05814204788208008, 0.058575519561767576, 0.05832908630371094, 0.05783475112915039, 0.05804092788696289, 0.05815516662597656, 0.058038272857666016, 0.05770425415039063, 0.058332576751708984, 0.057831615447998044, 0.05770710372924805, 0.05827174377441406, 0.05791129684448242, 0.0581069450378418, 0.05786515045166016, 0.05785971069335937, 0.0580714225769043, 0.058068992614746094, 0.05816524887084961, 0.05850899124145508, 0.05773344039916992, 0.05792496109008789, 0.0609304313659668, 0.05794889450073242, 0.0580425910949707, 0.058183582305908206, 0.05811561584472656, 0.05822832107543945, 0.05812828826904297, 0.05798604965209961, 0.05828537750244141, 0.05770684814453125, 0.057703777313232424, 0.057723583221435545, 0.0576445426940918, 0.057964958190917966, 0.05805507278442383, 0.05766144180297852, 0.05774335861206055, 0.05772083282470703, 0.05762662506103516, 0.05773721694946289, 0.058025886535644534, 0.05813676834106445, 0.05792870330810547, 0.06123408126831055, 0.05828208160400391, 0.05815836715698242, 0.05825961685180664, 0.057625152587890624, 0.0579071044921875, 0.05775769424438477, 0.057864192962646485, 0.057990848541259764, 0.05778668975830078, 0.058226688385009766, 0.05771468734741211, 0.057870334625244144, 0.05814838409423828, 0.057737152099609376, 0.058409503936767575, 0.05804851150512695, 0.05795635223388672, 0.057831424713134766, 0.05780915069580078, 0.058611583709716794, 0.05775347137451172, 0.058388416290283206, 0.05785196685791016, 0.05804646301269531, 0.0585005111694336, 0.05851107025146484, 0.05891161727905273, 0.058277599334716795, 0.05888848114013672, 0.05802924728393555, 0.05817427062988281, 0.05831439971923828, 0.05855401611328125, 0.058437664031982424, 0.05808601760864258, 0.05786140823364258, 0.05794803237915039, 0.05790147018432617, 0.05791580963134765, 0.05792160034179687, 0.059312126159667966, 0.05837619018554688, 0.0581058578491211, 0.05825503921508789, 0.05797510528564453, 0.05794793701171875, 0.05828220748901367, 0.058224639892578124, 0.05869158554077149, 0.06146160125732422, 0.0596407356262207, 0.058666942596435546, 0.058560577392578125, 0.05797683334350586, 0.0579317741394043, 0.05785740661621094, 0.05798361587524414, 0.05827993774414063, 0.0583454704284668, 0.05826764678955078, 0.05766902542114258, 0.05836064147949219, 0.0586965446472168, 0.05855039978027344, 0.05836268615722656, 0.05780419158935547, 0.05801033782958984, 0.057839488983154295, 0.05793302536010742, 0.057815841674804684, 0.057970687866210936, 
0.05791267013549805, 0.057821056365966794, 0.05800940704345703, 0.05823321533203125, 0.0585195198059082, 0.06046700668334961, 0.058393310546875, 0.05835945510864258, 0.058079681396484374, 0.05773516845703125, 0.05797411346435547, 0.0577127685546875, 0.05806473541259766, 0.05885203170776367, 0.057915393829345706, 0.05816320037841797, 0.05936124801635742, 0.0576962890625, 0.05780275344848633, 0.057755294799804686, 0.05784793472290039, 0.057905376434326174, 0.058150337219238284, 0.060303936004638674, 0.05889023971557617, 0.05848867034912109, 0.057824512481689454, 0.05795449447631836, 0.05794041442871094, 0.05779244613647461, 0.058425697326660156, 0.05981184005737305, 0.05855526351928711, 0.05791129684448242, 0.05782931137084961, 0.05788220977783203, 0.0579343032836914, 0.05829193496704101, 0.05823311996459961, 0.05763068771362305, 0.05784988784790039, 0.05784700775146484, 0.057895168304443356, 0.05799580764770508, 0.057939327239990235, 0.057976478576660155, 0.05831731033325195, 0.05874121475219726, 0.058324993133544924, 0.06423551940917968, 0.05820211029052735, 0.058023265838623043, 0.058399391174316403, 0.05803212738037109, 0.058044639587402344, 0.057904895782470704, 0.05781097412109375, 0.058049793243408206, 0.058032894134521486, 0.05774335861206055, 0.05780438232421875, 0.05798748779296875, 0.05791692733764649, 0.0586163215637207, 0.058940513610839844, 0.058342304229736325, 0.059140094757080076, 0.05805382537841797, 0.05786092758178711, 0.05778636932373047, 0.05809952163696289, 0.057847999572753904, 0.058380287170410154, 0.05805875015258789, 0.057919486999511716, 0.05780684661865235, 0.05805062484741211, 0.05822777557373047, 0.05826374435424805, 0.0578853759765625, 0.058241024017333984, 0.058732513427734376, 0.05801558303833008, 0.057993408203125, 0.05817139053344727, 0.05813452911376953, 0.05824499130249024, 0.058151039123535156, 0.05873619079589844, 0.05843603134155274, 0.05937267303466797, 0.05881126403808594, 0.059714622497558593, 0.06043539047241211, 0.058517505645751956, 0.05873593521118164, 0.05833388900756836, 0.0578331184387207, 0.057829727172851564, 0.05797014236450195, 0.05773571014404297, 0.057815040588378906, 0.05780918502807617, 0.057812286376953126, 0.05758095932006836, 0.05772127914428711, 0.05808297729492187, 0.05764371109008789, 0.05784598541259765, 0.05764505767822266, 0.05812144088745117, 0.05763356781005859, 0.05760409545898437, 0.05766758346557617, 0.05975244903564453, 0.058176544189453124, 0.0596346549987793, 0.058017791748046874, 0.0578067512512207, 0.05781475067138672, 0.05796448135375976, 0.0578232650756836, 0.057968673706054685, 0.05846054458618164, 0.05787356948852539, 0.05814963150024414, 0.05781414413452148, 0.05803046417236328, 0.0582042236328125, 0.05782992172241211, 0.057853214263916014, 0.058224510192871094, 0.05796745681762695, 0.05821440124511719, 0.057622528076171874, 0.05775299072265625, 0.05777673721313477, 0.057614334106445314, 0.05781856155395508, 0.05970748901367187, 0.05848863983154297, 0.058677024841308593, 0.057754497528076175, 0.05780070495605469, 0.058044414520263675, 0.058087265014648434, 0.05811215972900391, 0.057879840850830075, 0.05779235076904297, 0.05838937759399414, 0.057979137420654296, 0.05827967834472656, 0.05809561538696289, 0.057968639373779295, 0.05785203170776367, 0.058036094665527345, 0.05788655853271484, 0.06119200134277344, 0.05842256164550781, 0.05922889709472656, 0.057828800201416015, 0.05789676666259766, 0.057998046875, 0.05807120132446289, 0.05794976043701172, 0.06025804901123047, 0.06041657638549805, 0.05903923034667969, 
0.05840908813476563, 0.059718017578125, 0.06132262420654297, 0.05906639862060547, 0.05849353790283203, 0.05809561538696289, 0.06017638397216797, 0.058407966613769534, 0.058641502380371094, 0.058066272735595705, 0.058081790924072264, 0.05792156982421875, 0.05820975875854492, 0.05801001739501953, 0.05822000122070312, 0.05845673751831055, 0.05943094253540039, 0.058123966217041016, 0.05796278381347656, 0.058142719268798826, 0.058060798645019535, 0.05806489562988281, 0.058547393798828125, 0.05818576049804688, 0.058475296020507814, 0.05803033447265625, 0.05799708938598633, 0.0578515510559082, 0.05789932632446289, 0.05779455947875976, 0.05838169479370117, 0.057944671630859375, 0.0583616943359375, 0.05795772933959961, 0.060728382110595704, 0.05811996841430664, 0.05824512100219727, 0.0578600959777832, 0.05814476776123047, 0.05828607940673828, 0.05786617660522461, 0.05772499084472656, 0.05796454238891602, 0.05791241455078125, 0.05791836929321289, 0.05790719985961914, 0.058144287109375, 0.05801571273803711, 0.05815961456298828, 0.058334369659423825, 0.05870678329467773, 0.058292224884033204, 0.058015422821044924, 0.05847148895263672, 0.058366912841796875, 0.0584007682800293, 0.058480640411376954, 0.05842256164550781, 0.05819276809692383, 0.05800252914428711, 0.058045024871826174, 0.05780223846435547, 0.058081214904785156, 0.05778684616088867, 0.05805491256713867, 0.05827993774414063, 0.057987071990966796, 0.05837987136840821, 0.057909664154052735, 0.058017791748046874, 0.058130176544189456, 0.0581080322265625, 0.05812406539916992, 0.058111743927001955, 0.05817171096801758, 0.058173694610595704, 0.057984352111816406, 0.058012351989746094, 0.05781628799438476, 0.057815135955810545, 0.05781734466552734, 0.05816320037841797, 0.0579279670715332, 0.05820841598510742, 0.0580423698425293, 0.05791049575805664, 0.058049312591552736, 0.05799840164184571, 0.058038463592529295, 0.05796121597290039, 0.05794351959228516, 0.05799132919311523, 0.058146526336669925, 0.05834524917602539, 0.05799411010742188, 0.057927711486816406, 0.05805286407470703, 0.05805411148071289, 0.057871902465820316, 0.06064316940307617, 0.05819635009765625, 0.06309840011596679, 0.057912288665771486, 0.05797238540649414, 0.057853950500488284, 0.057780574798583985, 0.057736415863037106, 0.05797353744506836, 0.05794611358642578, 0.05811199951171875, 0.05839225769042969, 0.057949886322021485, 0.057686656951904294, 0.05798201751708985, 0.05771974563598633, 0.058015422821044924]",tokens/s,17.171689391885984,,, 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,3812.012032,4371.447808,0.0,3992.977408,3875.045888,s,1,9.7830205078125,9.7830205078125,0.0,9.7830205078125,9.7830205078125,9.7830205078125,9.7830205078125,[9.7830205078125],,kWh,7.739799573333338e-05,8.521289569338107e-06,2.5187797927997413e-05,0.0001111070832306689,,MB,2050.576384,4595.843072,0.0,4188.012544,4099.587072,s,10,3.3127397155761718,0.3312739715576172,0.0017208316625747086,0.3311283721923828,0.3333113311767578,0.3334079696655273,0.33348528045654297,"[0.3335046081542969, 0.3300729675292969, 0.329398193359375, 0.3296027526855469, 0.32876229858398437, 0.3304183654785156, 0.33183837890625, 0.3329078369140625, 0.3329444580078125, 0.33328985595703126]",tokens/s,772.7742653499565,kWh,1.0025084371944438e-05,1.1055919539487317e-06,6.651486802666575e-06,1.7782163128559746e-05,tokens/kWh,14396448.741876688,MB,2059.534336,4700.700672,0.0,4292.870144,4197.814272,s,10,27.489537109375,2.7489537109375,0.017626090166925304,2.743663330078125,2.7678792480468752,2.78010087890625,2.78987818359375,"[2.755270263671875, 2.74456005859375, 2.739586669921875, 2.731625244140625, 2.7427666015625, 2.7393564453125, 2.765163330078125, 2.74955908203125, 2.729326904296875, 2.792322509765625]",tokens/s,22.917810419774057,kWh,8.129318500138914e-05,8.966749398900194e-06,5.0396086613134033e-05,0.00014065602101342336,tokens/kWh,447901.19574040605,,s,630,27.48669079208376,0.04362966792394245,0.0006390068181520789,0.04353998374938965,0.04422013168334961,0.04451613445281982,0.04659644077301026,"[0.043960319519042966, 0.04343603134155274, 0.04382009506225586, 0.04642649459838867, 0.043063072204589846, 0.04311721420288086, 0.04314739227294922, 0.04322937774658203, 0.042990753173828125, 0.04321062469482422, 0.04330310440063476, 0.04361801528930664, 0.043721790313720706, 0.04364582443237305, 0.04357830429077148, 0.043684993743896484, 0.043581920623779295, 0.04368172836303711, 0.04380720138549805, 0.043620128631591794, 0.043534561157226564, 0.04345158386230469, 0.04359609603881836, 0.04347055816650391, 0.043428638458251956, 0.043756576538085935, 0.04377289581298828, 0.04364191818237305, 0.04446201705932617, 0.04382646560668945, 0.043779041290283205, 0.04377657699584961, 0.04396428680419922, 0.04352851104736328, 0.044257278442382815, 0.04351619338989258, 0.043714176177978514, 0.0436014404296875, 0.04376633453369141, 0.043802623748779294, 0.043665374755859375, 0.043687488555908205, 0.0439136962890625, 0.04392460632324219, 0.04402995300292969, 0.04397116851806641, 0.04393308639526367, 0.04408204650878906, 0.044014816284179685, 0.04406480026245117, 0.04415155029296875, 0.04397830581665039, 0.04410806274414063, 0.04427132797241211, 0.04377347183227539, 0.043616798400878905, 0.04358614349365234, 0.04340671920776367, 0.043520416259765625, 0.043747329711914064, 0.04352614212036133, 0.043345569610595706, 0.04338723373413086, 0.043905025482177736, 0.04396569442749024, 0.04389555358886719, 0.044041664123535156, 0.043870174407958984, 0.04398271942138672, 0.043746017456054685, 0.043796478271484376, 0.04339712142944336, 0.0433963851928711, 0.043450942993164064, 0.04341708755493164, 0.04315203094482422, 0.04307558441162109, 0.04315135955810547, 0.043278335571289066, 0.043153408050537106, 0.04376166534423828, 0.04363468933105469, 0.04377190399169922, 0.04357324981689453, 0.04360988616943359, 0.04352550506591797, 0.04377401733398437, 0.044357856750488284, 0.04352057647705078, 0.04375961685180664, 0.04628889465332031, 0.043824352264404294, 0.04346345520019531, 
0.0431383056640625, 0.04344294357299805, 0.04330214309692383, 0.04356739044189453, 0.04343036651611328, 0.043786239624023435, 0.04363600158691406, 0.04362745666503906, 0.043757183074951175, 0.04372662353515625, 0.04365350341796875, 0.04384902572631836, 0.043383487701416014, 0.04331257629394531, 0.043493087768554685, 0.04338979339599609, 0.04314931106567383, 0.043617408752441404, 0.04298640060424805, 0.04428086471557617, 0.043129791259765626, 0.04372889709472656, 0.043524097442626954, 0.04437926483154297, 0.042997760772705076, 0.04302499389648438, 0.043039169311523434, 0.04321644973754883, 0.043103809356689456, 0.043356094360351566, 0.04269750213623047, 0.04318371200561524, 0.04286233520507812, 0.04375551986694336, 0.04331315231323242, 0.04337670516967773, 0.04347385787963867, 0.04353126525878906, 0.04367055892944336, 0.043581439971923826, 0.043592575073242185, 0.04350268936157226, 0.043463680267333986, 0.043093791961669924, 0.04362982559204102, 0.04345289611816406, 0.04329318237304688, 0.043044193267822266, 0.042869407653808596, 0.04293840026855469, 0.042933280944824216, 0.04319942474365234, 0.043730945587158204, 0.043905120849609375, 0.04302998352050781, 0.043382911682128905, 0.04292256164550781, 0.042897151947021483, 0.04376166534423828, 0.04305500793457031, 0.043634529113769534, 0.04368368148803711, 0.04383580780029297, 0.04393292617797852, 0.04371513748168945, 0.04372636795043945, 0.04355718231201172, 0.043649375915527346, 0.043676895141601564, 0.04375632095336914, 0.043649024963378906, 0.04383119964599609, 0.04352355194091797, 0.04321958541870117, 0.04323328018188476, 0.04321279907226563, 0.04320569610595703, 0.043576416015625, 0.04412188720703125, 0.04381827163696289, 0.04354300689697266, 0.04367366409301758, 0.04356451034545898, 0.04385871887207031, 0.04357120132446289, 0.04337417602539063, 0.0432193603515625, 0.04349737548828125, 0.04418569564819336, 0.04431824111938477, 0.0433732795715332, 0.04309695816040039, 0.04302115249633789, 0.04303292846679688, 0.043294654846191404, 0.04369382476806641, 0.04511510467529297, 0.04321548843383789, 0.04317219161987305, 0.042929824829101564, 0.042950721740722654, 0.042983360290527343, 0.043012096405029294, 0.04297552108764648, 0.04274288177490235, 0.042770496368408205, 0.042743423461914065, 0.042756927490234374, 0.04298144149780273, 0.04356313705444336, 0.04340694427490235, 0.04312303924560547, 0.042999519348144534, 0.0429071044921875, 0.04290233612060547, 0.04293632125854492, 0.043084030151367185, 0.04690713500976563, 0.04466281509399414, 0.04363827133178711, 0.04368841552734375, 0.043558399200439454, 0.04380108642578125, 0.0433328971862793, 0.0432663688659668, 0.04350611114501953, 0.04318819046020508, 0.043065055847167966, 0.04294655990600586, 0.04293247985839844, 0.042799457550048825, 0.04296044921875, 0.04326822280883789, 0.043558048248291015, 0.043514976501464846, 0.04353200149536133, 0.043369632720947265, 0.04335036849975586, 0.043393566131591794, 0.04361830520629883, 0.0435847053527832, 0.04315404891967774, 0.043794559478759765, 0.04336569595336914, 0.0431778564453125, 0.04320105743408203, 0.04328278350830078, 0.04301824188232422, 0.04297318267822266, 0.042962944030761716, 0.04329644775390625, 0.043374752044677736, 0.043964160919189456, 0.04342211151123047, 0.043741024017333985, 0.04365558242797852, 0.043361183166503905, 0.04355158233642578, 0.04331836700439453, 0.04393574523925781, 0.04398262405395508, 0.04390729522705078, 0.04304883193969727, 0.04317971038818359, 0.04315180969238281, 0.04320870590209961, 0.04324761581420898, 
0.04292550277709961, 0.04287577438354492, 0.04276192092895508, 0.042649856567382814, 0.042925697326660156, 0.04310822296142578, 0.0433175048828125, 0.043524097442626954, 0.04353567886352539, 0.043456897735595704, 0.044017982482910153, 0.04381647872924805, 0.04358396911621094, 0.04343132781982422, 0.04456745529174805, 0.043455360412597656, 0.04327302551269531, 0.043522209167480466, 0.042942527770996095, 0.043269920349121097, 0.04310198211669922, 0.043176158905029294, 0.04347488021850586, 0.04341542434692383, 0.04363455963134766, 0.043619998931884764, 0.043597694396972655, 0.043682590484619144, 0.0436096305847168, 0.04352774429321289, 0.04358224105834961, 0.043409534454345707, 0.043490943908691404, 0.04353267288208008, 0.04335184097290039, 0.04352854537963867, 0.043449760437011715, 0.04330169677734375, 0.04328569412231445, 0.04341715240478516, 0.04401379013061523, 0.043880382537841794, 0.043980735778808594, 0.04396966552734375, 0.04452729415893555, 0.043911167144775394, 0.04455014419555664, 0.044025856018066405, 0.04396156692504883, 0.04404643249511719, 0.043767711639404294, 0.04385385513305664, 0.04374556732177735, 0.043420063018798825, 0.043039905548095704, 0.0436940803527832, 0.04530172729492187, 0.04351513671875, 0.043313953399658205, 0.043218944549560545, 0.04302643203735351, 0.04287078475952148, 0.04318627166748047, 0.04309187316894531, 0.043253761291503906, 0.043286529541015625, 0.0430398063659668, 0.044209087371826175, 0.04473251342773438, 0.04459881591796875, 0.04361648178100586, 0.043819393157958984, 0.04365599822998047, 0.04372579193115234, 0.04353868865966797, 0.043481952667236326, 0.04335094451904297, 0.04340038299560547, 0.04332204818725586, 0.04338035202026367, 0.04348396682739258, 0.043636417388916014, 0.04310835266113281, 0.042971134185791016, 0.04331135940551758, 0.043181217193603516, 0.043151905059814456, 0.04345657730102539, 0.043466751098632815, 0.043046432495117186, 0.043032638549804686, 0.04299817657470703, 0.043194366455078126, 0.043259902954101564, 0.04289292907714844, 0.04280105590820313, 0.04287126541137695, 0.042831871032714845, 0.043442176818847655, 0.0437022705078125, 0.0429936637878418, 0.04322099304199219, 0.04436195373535156, 0.043818782806396485, 0.04395212936401367, 0.044183551788330076, 0.04368105697631836, 0.04350844955444336, 0.043574432373046874, 0.043661376953125, 0.04366175842285156, 0.04393340682983399, 0.04352473449707031, 0.044678302764892576, 0.04341609573364258, 0.04316543960571289, 0.04325555038452149, 0.04302102279663086, 0.04402252960205078, 0.0438455696105957, 0.043910335540771485, 0.04900908660888672, 0.04666585540771485, 0.04391142272949219, 0.04383327865600586, 0.04349753570556641, 0.04351523208618164, 0.04366118240356445, 0.04331372833251953, 0.043122913360595705, 0.043142848968505856, 0.0459062385559082, 0.04357551956176758, 0.043589408874511716, 0.04370822525024414, 0.04341350555419922, 0.04353414535522461, 0.04328691101074219, 0.04753952026367188, 0.04396819305419922, 0.04406310272216797, 0.04390531158447265, 0.04386608123779297, 0.043792766571044923, 0.04346879959106445, 0.043380897521972654, 0.04351270294189453, 0.043888641357421876, 0.04342473602294922, 0.043370113372802735, 0.04319798278808594, 0.04325462341308594, 0.04317948913574219, 0.04323916625976563, 0.043934497833251956, 0.04370841598510742, 0.0436861457824707, 0.04391692733764648, 0.046976318359375, 0.04406175994873047, 0.04359382247924805, 0.04351264190673828, 0.04347574234008789, 0.04363270568847656, 0.04364006423950195, 0.04353305435180664, 0.04365046310424805, 
0.04334441757202148, 0.043280448913574215, 0.04341356658935547, 0.04359980773925781, 0.04380387115478516, 0.043813663482666014, 0.04370431900024414, 0.04356300735473633, 0.04375551986694336, 0.04361625671386719, 0.043687934875488284, 0.04353843307495117, 0.04331257629394531, 0.04359142303466797, 0.04429865646362305, 0.043526336669921874, 0.04316156768798828, 0.043228191375732423, 0.042988704681396483, 0.043490463256835935, 0.042997856140136716, 0.04326031875610352, 0.04296518325805664, 0.04317177581787109, 0.04294278335571289, 0.043635902404785154, 0.043432449340820314, 0.04309811019897461, 0.043116127014160156, 0.04338294219970703, 0.0434158706665039, 0.04400300979614258, 0.043888702392578124, 0.04373302459716797, 0.04367177581787109, 0.04426777648925781, 0.04381875228881836, 0.044056159973144535, 0.04364944076538086, 0.043463775634765625, 0.043404193878173826, 0.04366854476928711, 0.043533470153808596, 0.04350278472900391, 0.04341411209106445, 0.04351100921630859, 0.04359862518310547, 0.043826366424560545, 0.044165950775146484, 0.044154815673828125, 0.04395219039916992, 0.04435385513305664, 0.044220096588134764, 0.04474998474121094, 0.044190559387207035, 0.04446966552734375, 0.04418518447875976, 0.04420083236694336, 0.044324993133544925, 0.04388454437255859, 0.04349542236328125, 0.04314726257324219, 0.04336975860595703, 0.04337276840209961, 0.04329318237304688, 0.04315264129638672, 0.04337740707397461, 0.043663360595703124, 0.04383667373657227, 0.04388227081298828, 0.043768798828125, 0.04361593627929688, 0.04352143859863281, 0.043458782196044925, 0.04354323196411133, 0.04348748779296875, 0.04333132934570313, 0.04378617477416992, 0.043361087799072266, 0.043046913146972655, 0.04306534576416016, 0.04280115127563477, 0.04283391952514649, 0.042880382537841794, 0.04301427078247071, 0.042969470977783204, 0.04307366561889649, 0.04291491317749024, 0.04291990280151367, 0.042828510284423825, 0.04400559997558594, 0.04293427276611328, 0.04327529525756836, 0.04288972854614258, 0.04291433715820313, 0.043794368743896486, 0.04292812728881836, 0.04310966491699219, 0.0433076171875, 0.04283772659301758, 0.043063358306884764, 0.04307388687133789, 0.04302643203735351, 0.04285590362548828, 0.04293891143798828, 0.042848255157470705, 0.04289251327514648, 0.04315177536010742, 0.04710435104370117, 0.04351798248291015, 0.04332038497924805, 0.04309907150268555, 0.04323123168945313, 0.04350566482543945, 0.04315071868896484, 0.04340364837646484, 0.04325174331665039, 0.04298921585083008, 0.04298553466796875, 0.04289353561401367, 0.04303862380981445, 0.04321260833740234, 0.04417593765258789, 0.04333100891113281, 0.043507457733154294, 0.04323001480102539, 0.043665409088134766, 0.043655166625976564, 0.04370140838623047, 0.04367257690429688, 0.04384553527832031, 0.04385580825805664, 0.04365107345581055, 0.043319297790527345, 0.0433704948425293, 0.04346579360961914, 0.043541278839111325, 0.04361846542358398, 0.043883838653564454, 0.04350588989257813, 0.043829280853271486, 0.04364582443237305, 0.043601024627685545, 0.04357004928588867, 0.04378534317016602, 0.04382400131225586, 0.04366070556640625, 0.04395257568359375, 0.04406697463989258, 0.04422969436645508, 0.044157920837402345, 0.04424448013305664, 0.04415686416625977, 0.044184288024902346, 0.04402473449707031, 0.044219295501708986, 0.04415081787109375, 0.04445532989501953, 0.043890975952148435, 0.043912513732910156, 0.044552352905273436, 0.044813182830810545, 0.04421638488769531, 0.044211105346679686, 0.046255264282226566, 0.04423011016845703, 0.04457302474975586, 
0.04452761459350586, 0.044406398773193356, 0.044405120849609375, 0.045647422790527345, 0.04502732849121094, 0.04425881576538086, 0.04445087814331055, 0.04473990249633789, 0.045109825134277345, 0.04464652633666992, 0.04450249481201172, 0.044529182434082035, 0.04434009552001953, 0.04438355255126953, 0.0442204475402832, 0.04476176071166992, 0.044480159759521486, 0.044376033782958985, 0.04449955368041992, 0.0443021125793457, 0.044050079345703125, 0.044138622283935544, 0.044226783752441406, 0.04387145614624023, 0.0440239372253418, 0.0438895378112793, 0.044262176513671876, 0.043864959716796874, 0.043935073852539065, 0.04415180969238281, 0.0473210563659668, 0.044031806945800785, 0.044066814422607424, 0.043998336791992186, 0.04411481475830078, 0.043991073608398434]",tokens/s,22.920183617790837,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", 
line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7172.022272,10246.5536,0.0,9860.808704,9797.323264,s,1,13.2211689453125,13.2211689453125,0.0,13.2211689453125,13.2211689453125,13.2211689453125,13.2211689453125,[13.2211689453125],,kWh,0.00016171045942918073,1.782812838696042e-05,5.287448674400064e-05,0.00023241307456014178,,MB,3114.426368,10626.138112,0.0,10211.033088,10096.966144,s,10,7.538050537109375,0.7538050537109375,0.00793216900750495,0.7540419616699219,0.7613850219726562,0.7631519531250001,0.764565498046875,"[0.7330369873046875, 0.7515281982421875, 0.7533756713867188, 0.7528856201171875, 0.7609923706054688, 0.75665185546875, 0.7649188842773438, 0.7565770263671875, 0.7534981079101563, 0.7545858154296875]",tokens/s,339.61035249064355,kWh,2.1851729629166053e-05,2.4097313488246706e-06,1.442136074342826e-05,3.8682821721418984e-05,tokens/kWh,6617924.665465931,MB,3130.580992,10630.332416,0.0,10213.13024,10096.968704,s,10,37.540674560546876,3.7540674560546874,0.010010934544368294,3.7587991943359373,3.762990649414063,3.7641482788085936,3.7650743823242188,"[3.7627333984375, 3.765305908203125, 3.742988525390625, 3.737947265625, 3.760489013671875, 3.737583984375, 3.754548828125, 3.757109375, 3.7611796875, 3.76078857421875]",tokens/s,16.781797540263018,kWh,0.0001096356925441675,1.2092824354417807e-05,7.289840355677225e-05,0.00019462692045535754,tokens/kWh,323696.2279041485,,s,630,37.537324813842794,0.059583055260067894,0.0007962526358102235,0.05942601585388184,0.060120168304443354,0.06054462490081787,0.0634981608581543,"[0.06065737533569336, 0.05999235153198242, 0.06016950225830078, 0.059509471893310545, 0.05974371337890625, 0.059731998443603516, 0.05959084701538086, 0.059748863220214846, 0.059141056060791015, 0.059362144470214845, 
0.05948624038696289, 0.06010675048828125, 0.06020249557495117, 0.05968134307861328, 0.05916393661499023, 0.05951308822631836, 0.05976473617553711, 0.05929616165161133, 0.05954307174682617, 0.059138526916503904, 0.06032588958740234, 0.059428863525390625, 0.0596049919128418, 0.05930188751220703, 0.05929779052734375, 0.05939795303344726, 0.06055039978027344, 0.059765697479248044, 0.05965619277954102, 0.05993881607055664, 0.05990371322631836, 0.0595843505859375, 0.06011910247802734, 0.059646015167236326, 0.05935340881347656, 0.05989699172973633, 0.060920673370361327, 0.05990316772460937, 0.05910611343383789, 0.059703296661376956, 0.059396095275878906, 0.05964799880981445, 0.05927731323242187, 0.06034377670288086, 0.06092854309082031, 0.05942272186279297, 0.059994304656982425, 0.05944268798828125, 0.05943340682983399, 0.059470878601074216, 0.05944924926757812, 0.05960800170898437, 0.05956809616088867, 0.059598880767822264, 0.061886463165283206, 0.059338752746582034, 0.05937561416625976, 0.05897216033935547, 0.06000147247314453, 0.05987356948852539, 0.059550048828125, 0.05963983917236328, 0.05924844741821289, 0.06027465438842773, 0.059658271789550785, 0.0598364143371582, 0.0593422737121582, 0.05936595153808594, 0.059348033905029296, 0.05915948867797852, 0.05963919830322266, 0.05900348663330078, 0.059205631256103515, 0.0592097282409668, 0.05945548629760742, 0.05922169494628906, 0.059257152557373044, 0.05928889465332031, 0.05931488037109375, 0.059598209381103516, 0.05979199981689453, 0.05959267044067383, 0.06006377410888672, 0.059719680786132816, 0.059379711151123046, 0.059501983642578124, 0.059622047424316406, 0.05987526321411133, 0.059332447052001955, 0.05968707275390625, 0.05975376129150391, 0.06466223907470703, 0.05996495819091797, 0.05947216033935547, 0.059944766998291016, 0.059656574249267576, 0.05967424011230469, 0.05964617538452149, 0.059491775512695313, 0.06014771270751953, 0.060201694488525394, 0.05989580917358398, 0.059754432678222655, 0.05946988677978516, 0.05955744171142578, 0.059625247955322265, 0.0598125114440918, 0.060471454620361326, 0.05979119873046875, 0.05977468872070312, 0.05982441711425781, 0.06040576171875, 0.06021231842041016, 0.05975958251953125, 0.06008211135864258, 0.05959823989868164, 0.05996604919433594, 0.059778881072998044, 0.05972377777099609, 0.05985670471191406, 0.060039295196533206, 0.05957043075561524, 0.059641857147216794, 0.059668479919433595, 0.059580352783203124, 0.05973408126831055, 0.060549247741699216, 0.059433761596679686, 0.059285663604736326, 0.05965119934082031, 0.0597081298828125, 0.05965331268310547, 0.05891113662719726, 0.059912479400634766, 0.060211326599121096, 0.059666431427001954, 0.059799072265625, 0.05957270431518555, 0.05981388854980469, 0.059117408752441404, 0.05931955337524414, 0.06079171371459961, 0.05928345489501953, 0.059665855407714845, 0.0590239372253418, 0.05951631927490234, 0.05900348663330078, 0.05893033599853516, 0.05871529769897461, 0.05912905502319336, 0.059093441009521484, 0.05924192047119141, 0.05907721710205078, 0.05889023971557617, 0.059898113250732424, 0.05943014526367187, 0.05941708755493164, 0.0590799674987793, 0.05978595352172852, 0.05892300796508789, 0.059410079956054684, 0.05939849472045899, 0.05871206283569336, 0.05964384078979492, 0.05950239944458008, 0.05909939193725586, 0.059114944458007815, 0.05909561538696289, 0.05929369735717773, 0.058832225799560545, 0.05865539169311523, 0.05965187072753906, 0.059205856323242184, 0.05894553756713867, 0.05885337448120117, 0.059875328063964846, 0.05867504119873047, 
0.0591769905090332, 0.059824256896972655, 0.06074758529663086, 0.059596992492675784, 0.05933603286743164, 0.059139873504638674, 0.05909183883666992, 0.05931827163696289, 0.059006271362304685, 0.060917919158935546, 0.05938623809814453, 0.05965430450439453, 0.060598785400390626, 0.058934593200683595, 0.05892121505737305, 0.05927756881713867, 0.05965238571166992, 0.05945487976074219, 0.059171329498291014, 0.05897011184692383, 0.05870143890380859, 0.05887392044067383, 0.058915199279785155, 0.059517921447753905, 0.058966144561767575, 0.059197887420654294, 0.059300254821777344, 0.05932137680053711, 0.05938275146484375, 0.05953126525878906, 0.05938380813598633, 0.05894259262084961, 0.059160545349121095, 0.05980220794677735, 0.05927052688598633, 0.059345375061035155, 0.05957475280761719, 0.059324127197265625, 0.059845024108886716, 0.059120735168457034, 0.0601297607421875, 0.05925680160522461, 0.05932271957397461, 0.05933055877685547, 0.05943910217285156, 0.05935078430175781, 0.05956771087646484, 0.05927388763427734, 0.059161918640136715, 0.059283935546875, 0.059864574432373044, 0.059943328857421874, 0.059223934173583986, 0.05905657577514648, 0.059150337219238285, 0.05926812744140625, 0.06005382537841797, 0.05954012680053711, 0.06006505584716797, 0.0591756477355957, 0.05900912094116211, 0.05956361770629883, 0.059341121673583984, 0.059448448181152344, 0.05909718322753906, 0.05909932708740234, 0.05918780899047851, 0.06002035140991211, 0.05912409591674805, 0.05906022262573242, 0.05907455825805664, 0.05886918258666992, 0.05877612686157226, 0.05870796966552734, 0.05931827163696289, 0.059991649627685543, 0.05885788726806641, 0.05880595016479492, 0.058954048156738284, 0.05905433654785156, 0.05926886367797852, 0.06720719909667969, 0.05939606475830078, 0.05950054550170898, 0.06342646408081054, 0.059868766784667966, 0.05981622314453125, 0.05944956970214844, 0.0600186882019043, 0.0592619857788086, 0.058806686401367186, 0.05891142272949219, 0.05886348724365234, 0.05893081665039063, 0.058982784271240235, 0.060063743591308595, 0.05917484664916992, 0.0588862075805664, 0.058990367889404295, 0.05953926467895508, 0.05989827346801758, 0.05978889465332031, 0.05937184143066406, 0.0592446403503418, 0.058887294769287106, 0.05934908676147461, 0.05970819091796875, 0.06125360107421875, 0.059493824005126955, 0.0592239990234375, 0.05892572784423828, 0.05893545532226562, 0.05884297561645508, 0.061052928924560546, 0.05901676940917969, 0.05939859390258789, 0.05932015991210938, 0.05948172760009766, 0.060166782379150394, 0.05970524978637695, 0.059840511322021485, 0.05881468963623047, 0.05936912155151367, 0.059206016540527345, 0.05907004928588867, 0.060158111572265624, 0.06347289657592774, 0.05959347152709961, 0.05978275299072266, 0.05898591995239258, 0.0594106559753418, 0.05904019165039062, 0.05991455841064453, 0.059006912231445316, 0.0607314567565918, 0.059709312438964844, 0.05957440185546875, 0.05938336181640625, 0.059996990203857424, 0.06092620849609375, 0.05948390579223633, 0.05916377639770508, 0.05927411270141601, 0.06024367904663086, 0.05946531295776367, 0.059224769592285155, 0.059305984497070315, 0.059080513000488284, 0.05919353485107422, 0.05934080123901367, 0.05897145462036133, 0.059090686798095704, 0.05925766372680664, 0.05960512161254883, 0.05939603042602539, 0.05932243347167969, 0.059361438751220706, 0.059350879669189456, 0.05957868957519531, 0.059094718933105465, 0.05942012786865234, 0.058999168395996095, 0.058826366424560544, 0.059951648712158204, 0.05927526473999024, 0.06005904006958008, 0.05950729751586914, 
0.05878988647460937, 0.059172863006591796, 0.05946726226806641, 0.059419136047363284, 0.05921996688842773, 0.059262977600097654, 0.05913385772705078, 0.058977439880371095, 0.05926598358154297, 0.05888614273071289, 0.059169921875, 0.05911859130859375, 0.05915430450439453, 0.05898137664794922, 0.05903462219238281, 0.05977299118041992, 0.05940339279174805, 0.05931500625610352, 0.059188766479492186, 0.05964438247680664, 0.059369377136230465, 0.05934499359130859, 0.05920767974853516, 0.05933465576171875, 0.058865215301513674, 0.059728321075439454, 0.05918310546875, 0.05930527877807617, 0.059099838256835936, 0.05911347198486328, 0.05917718505859375, 0.0592852783203125, 0.05903974533081055, 0.05902950286865234, 0.059873630523681644, 0.05905408096313477, 0.059033599853515625, 0.058824222564697264, 0.05927779388427734, 0.059184864044189454, 0.0586794548034668, 0.058843265533447264, 0.05961423873901367, 0.059234977722167965, 0.05969891357421875, 0.05905059051513672, 0.058984447479248046, 0.05943910217285156, 0.059098751068115234, 0.05941856002807617, 0.05881625747680664, 0.05908755111694336, 0.05909292984008789, 0.05906438446044922, 0.05897177505493164, 0.05897663879394531, 0.05928345489501953, 0.06350848007202148, 0.05941843032836914, 0.05980707168579102, 0.06319558334350586, 0.05925724792480469, 0.06087200164794922, 0.059326400756835936, 0.05906614303588867, 0.05982905578613281, 0.06003235244750976, 0.0589238395690918, 0.05914214324951172, 0.06002454376220703, 0.059351329803466794, 0.059510784149169924, 0.059931968688964846, 0.05960559844970703, 0.05965628814697266, 0.059907455444335934, 0.06048012924194336, 0.05916831970214844, 0.059859390258789065, 0.05943910217285156, 0.05955136108398437, 0.05923417663574219, 0.059867008209228516, 0.06016883087158203, 0.05998387145996094, 0.060088321685791014, 0.059579391479492184, 0.059251712799072265, 0.05935500717163086, 0.0596890869140625, 0.05970105743408203, 0.059813438415527345, 0.05938035202026367, 0.059254783630371094, 0.05909612655639648, 0.06053897476196289, 0.05974921417236328, 0.06115913772583008, 0.05965238571166992, 0.059588191986083984, 0.059515296936035154, 0.05980364990234375, 0.05924863815307617, 0.05929312133789062, 0.06048006439208985, 0.05937881469726562, 0.05932121658325195, 0.06419865417480469, 0.059346240997314455, 0.06007593536376953, 0.059447391510009766, 0.059620033264160155, 0.059710590362548825, 0.05988399887084961, 0.059277694702148435, 0.05994704055786133, 0.059170814514160154, 0.05922719955444336, 0.0589683837890625, 0.059625537872314456, 0.05897808074951172, 0.06016899108886719, 0.05961497497558594, 0.059232513427734376, 0.05921551895141602, 0.059133567810058595, 0.059049758911132816, 0.05893331146240234, 0.05912051010131836, 0.06044672012329102, 0.06020044708251953, 0.059294208526611325, 0.059511966705322265, 0.05964009475708008, 0.05924448013305664, 0.059273311614990234, 0.059394046783447264, 0.059140224456787106, 0.05961078262329102, 0.05983868789672851, 0.05978140640258789, 0.059830047607421874, 0.05968073654174805, 0.059256927490234375, 0.059275680541992185, 0.05908415985107422, 0.06061270523071289, 0.05947856140136719, 0.05959993743896484, 0.05981209564208984, 0.05940646362304688, 0.05925881576538086, 0.05889257431030273, 0.059009376525878905, 0.05912371063232422, 0.06032793426513672, 0.060297183990478516, 0.05949161529541016, 0.059965663909912106, 0.05959939193725586, 0.061423583984375, 0.06417359924316406, 0.05934486389160156, 0.05937203216552735, 0.058928672790527346, 0.05921548843383789, 0.05917776107788086, 
0.05969107055664063, 0.05965024185180664, 0.05952902221679687, 0.059289600372314455, 0.05993267059326172, 0.05960908889770508, 0.059312126159667966, 0.0592911376953125, 0.059095455169677735, 0.05997129440307617, 0.059940673828125, 0.05964857482910156, 0.059703296661376956, 0.05944297790527344, 0.059197662353515625, 0.05924249649047852, 0.05918105697631836, 0.05900697708129883, 0.06000230407714844, 0.05940963363647461, 0.059593406677246094, 0.0598590087890625, 0.05984195327758789, 0.05939878463745117, 0.05907030487060547, 0.05950419235229492, 0.06024790573120117, 0.05985667037963867, 0.059229152679443356, 0.059184478759765624, 0.059312736511230466, 0.05911529541015625, 0.06037478256225586, 0.06271235275268555, 0.05909711837768555, 0.06417404937744141, 0.05943471908569336, 0.060436767578125, 0.05953740692138672, 0.05896806335449219, 0.05966377639770508, 0.059265087127685544, 0.059781665802001956, 0.060118144989013675, 0.05900979232788086, 0.05896764755249023, 0.05927936172485351, 0.059234367370605466, 0.05964540863037109, 0.059358207702636716, 0.05917283248901367, 0.05966019058227539, 0.05919961547851563, 0.05894508743286133, 0.05974879837036133, 0.05907046508789063, 0.06613801574707032, 0.05981811141967774, 0.058851329803466794, 0.05878524780273438, 0.0587264633178711, 0.05893286514282227, 0.059065185546875, 0.058746177673339846, 0.0597982063293457, 0.05965548706054687, 0.05988832092285156, 0.059205631256103515, 0.05921811294555664, 0.05923001480102539, 0.0591278076171875, 0.05905340957641601, 0.0595851821899414, 0.059663455963134764, 0.05916969680786133, 0.05974016189575195, 0.059140094757080076, 0.05904793548583984, 0.05907660675048828, 0.0603658561706543, 0.059460575103759766, 0.06002880096435547, 0.06129471969604492, 0.060112895965576174, 0.05948543930053711, 0.059290176391601564, 0.059224254608154295, 0.05907251358032226, 0.05915036773681641, 0.05968278503417969, 0.06175350570678711, 0.05981100845336914, 0.06014028930664062, 0.05951059341430664, 0.05972956848144531, 0.059423168182373046, 0.05941862487792969, 0.059756542205810545, 0.06046281433105469, 0.06015327835083008, 0.06096080017089844, 0.05932505416870117, 0.05932582473754883, 0.05950099182128906, 0.05949683380126953, 0.05960639953613281, 0.06008623886108398, 0.059787071228027344, 0.05948825454711914, 0.059582496643066404, 0.059379936218261715, 0.059429473876953125, 0.05952716827392578, 0.05977907180786133, 0.05992038345336914, 0.059940864562988284, 0.059815937042236325, 0.05924454498291016, 0.059254783630371094]",tokens/s,16.78329511024911,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 109955 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,872.189952,556.72832,0.0,178.25792,176.190464,s,1,7.68558203125,7.68558203125,0.0,7.68558203125,7.68558203125,7.68558203125,7.68558203125,[7.68558203125],,kWh,2.0834734899998844e-05,2.2905349177520978e-06,6.4622273920067475e-06,2.958749720975769e-05,,MB,1168.748544,661.58592,0.0,253.755392,220.750336,s,12,0.183656286239624,0.01530469051996867,0.00019884486284842023,0.015267151832580567,0.015476128292083741,0.015652350568771362,0.015812454061508177,"[0.015237055778503417, 0.015360063552856445, 0.015313632011413574, 0.01585247993469238, 0.015135199546813964, 0.015297247886657714, 0.015488608360290528, 0.015225055694580079, 0.015363807678222657, 0.015139295578002929, 0.01512771224975586, 0.015116127967834472]",tokens/s,16726.8981797434,kWh,4.6199605640822427e-07,5.0950060289420976e-08,3.079489981170813e-07,8.208951148147265e-07,tokens/kWh,311854700.2899127,MB,1202.958336,676.265984,0.0,268.435456,220.752896,s,12,10.052539978027344,0.837711664835612,0.008306164220725462,0.8395189514160156,0.8449926452636718,0.8473671356201171,0.8496844134521484,"[0.8449517211914063, 0.8366310424804687, 0.84122119140625, 0.8502637329101562, 0.8379326782226563, 0.841105224609375, 0.829469482421875, 0.8449971923828125, 0.8243952026367187, 0.8356583251953125, 0.821886474609375, 0.8440277099609375]",tokens/s,75.20487375851783,kWh,2.4577269508523106e-05,2.7104509462638155e-06,9.10293816021661e-06,3.6390658615003534e-05,tokens/kWh,1731213.514614041,,s,756,10.046389430999769,0.013288874908729837,0.0003385731659422532,0.013247408390045165,0.01360647964477539,0.01367526388168335,0.014316770839691167,"[0.012775808334350587, 0.01313587188720703, 0.013276576042175293, 0.013025823593139649, 0.013002816200256348, 0.012983519554138184, 0.012978976249694824, 0.013080351829528809, 0.013222111701965331, 0.013008895874023438, 0.01298588752746582, 0.012978655815124512, 0.012959744453430176, 0.012958815574645996, 0.013015968322753906, 0.013074432373046875, 0.013033727645874023, 0.013094464302062988, 0.013131967544555664, 0.013303808212280274, 0.013328384399414063, 0.013404159545898438, 0.013449024200439454, 0.013556991577148438, 0.01385478401184082, 0.013627424240112304, 0.013599424362182618, 0.013629311561584473, 0.013701343536376953, 0.013651424407958985, 0.013652576446533202, 0.013563936233520507, 0.01362723159790039, 0.013964703559875488, 0.013586688041687011, 0.013632160186767578, 0.013633343696594238, 0.013619199752807617, 0.013561856269836426, 0.013598719596862792, 0.013543231964111328, 0.013719167709350586, 0.0142741756439209, 0.013514880180358887, 0.013534015655517578, 0.01349555206298828, 0.013529855728149414, 0.01353932762145996, 0.013518848419189454, 0.013431072235107422, 0.013487199783325195, 0.013378175735473632, 
0.013453311920166015, 0.013424639701843261, 0.01339187240600586, 0.013451264381408692, 0.013471903800964356, 0.01358240032196045, 0.013481760025024414, 0.013641728401184081, 0.013444735527038574, 0.013451647758483887, 0.013399231910705566, 0.013090944290161133, 0.013522687911987305, 0.013469696044921875, 0.013444671630859375, 0.013418944358825683, 0.013432479858398438, 0.014053728103637695, 0.014391039848327637, 0.013562111854553223, 0.013399200439453125, 0.013398112297058106, 0.013384063720703126, 0.013654175758361817, 0.013381535530090333, 0.013387999534606933, 0.013353055953979492, 0.013238271713256837, 0.01323209571838379, 0.013024479866027831, 0.013020064353942871, 0.01313980770111084, 0.013219903945922852, 0.01313587188720703, 0.013254015922546387, 0.01299078369140625, 0.013058367729187012, 0.01305190372467041, 0.013246015548706054, 0.013338784217834472, 0.014383392333984375, 0.013547616004943848, 0.0130600004196167, 0.013125727653503418, 0.012982175827026368, 0.013030688285827636, 0.01408073616027832, 0.01304371166229248, 0.013029024124145508, 0.013369695663452148, 0.013107168197631835, 0.013086784362792968, 0.013041631698608398, 0.013000639915466308, 0.013066304206848145, 0.013372703552246094, 0.013110239982604981, 0.013144864082336425, 0.012975071907043457, 0.01293875217437744, 0.0129684476852417, 0.012979455947875976, 0.012888992309570312, 0.012938816070556641, 0.013052191734313965, 0.013059616088867188, 0.013057696342468261, 0.01307875156402588, 0.013123744010925293, 0.013519295692443848, 0.013383071899414062, 0.013422623634338379, 0.013400927543640136, 0.013414143562316894, 0.013178879737854005, 0.013540351867675781, 0.013551520347595216, 0.013635680198669434, 0.013719008445739747, 0.01344313621520996, 0.013518655776977539, 0.01355020809173584, 0.013637120246887208, 0.0135316801071167, 0.013554816246032714, 0.013605759620666505, 0.013637439727783203, 0.01346726417541504, 0.013435680389404297, 0.01341004753112793, 0.013452447891235352, 0.013441408157348633, 0.013347264289855956, 0.013545536041259765, 0.013455360412597657, 0.013506239891052246, 0.013443391799926757, 0.013434816360473632, 0.013482048034667968, 0.013410304069519043, 0.013249919891357423, 0.013355392456054688, 0.013310208320617675, 0.013357376098632813, 0.013292863845825195, 0.013318528175354004, 0.01324614429473877, 0.013295743942260742, 0.01365830421447754, 0.013312000274658203, 0.013303808212280274, 0.013228032112121582, 0.013467647552490235, 0.013735263824462891, 0.013191840171813966, 0.013254783630371093, 0.013221504211425782, 0.013078656196594238, 0.013119615554809571, 0.01309017562866211, 0.01309887981414795, 0.013019904136657715, 0.013038880348205566, 0.012986592292785645, 0.013083135604858399, 0.013008607864379882, 0.01300211238861084, 0.013060959815979004, 0.013028639793395996, 0.013062687873840332, 0.013109567642211915, 0.013148287773132324, 0.013252351760864258, 0.013328351974487306, 0.013588576316833497, 0.013392928123474122, 0.013422911643981933, 0.013099616050720214, 0.01356060791015625, 0.013556639671325683, 0.013639552116394043, 0.013594752311706542, 0.013704575538635255, 0.013659839630126952, 0.013688960075378419, 0.013689727783203124, 0.013621536254882813, 0.013638943672180175, 0.01359648036956787, 0.013672351837158203, 0.013607680320739746, 0.013729727745056153, 0.013684000015258789, 0.013652000427246094, 0.013623680114746094, 0.013607199668884278, 0.013639039993286133, 0.013522848129272461, 0.013559871673583985, 0.01358505630493164, 0.013529088020324707, 0.013469696044921875, 
0.01357430362701416, 0.013541215896606446, 0.013555328369140624, 0.01348646354675293, 0.013510656356811524, 0.013590527534484862, 0.013602144241333007, 0.013531807899475098, 0.013479552268981933, 0.013541343688964844, 0.013576383590698243, 0.013537504196166993, 0.01350169563293457, 0.013512479782104491, 0.013497311592102051, 0.0135863037109375, 0.013506015777587891, 0.013515423774719239, 0.013438752174377441, 0.013316351890563965, 0.014213088035583497, 0.01339187240600586, 0.013337984085083007, 0.013292415618896485, 0.013389887809753418, 0.01361411190032959, 0.013421567916870117, 0.013330080032348634, 0.013252608299255371, 0.013444512367248536, 0.013367136001586915, 0.013283295631408691, 0.0131878719329834, 0.0130600004196167, 0.01296985626220703, 0.01298044776916504, 0.012924032211303711, 0.01299135971069336, 0.013043775558471679, 0.01372332763671875, 0.013128543853759765, 0.013276896476745605, 0.013135744094848632, 0.013136320114135742, 0.013123519897460937, 0.013042079925537109, 0.013112607955932618, 0.013081151962280274, 0.014055328369140625, 0.014036543846130371, 0.01401039981842041, 0.01385923194885254, 0.013256640434265136, 0.013236288070678712, 0.013432607650756836, 0.013301759719848634, 0.013058143615722656, 0.013056127548217774, 0.013057760238647461, 0.01303171157836914, 0.012904255867004395, 0.013010463714599609, 0.013003423690795898, 0.013154399871826173, 0.013152159690856934, 0.013103296279907226, 0.01310700798034668, 0.013109248161315918, 0.013091103553771972, 0.013102815628051758, 0.013098272323608398, 0.012952287673950196, 0.013042943954467773, 0.012874496459960938, 0.012936223983764648, 0.012901344299316407, 0.012996512413024902, 0.013021280288696289, 0.013238143920898438, 0.013226112365722657, 0.013223039627075195, 0.013339455604553224, 0.014368831634521484, 0.01600307273864746, 0.013165984153747558, 0.014186431884765625, 0.013122048377990723, 0.013144224166870118, 0.013037599563598633, 0.01313100814819336, 0.013218624114990234, 0.013565855979919434, 0.013295904159545898, 0.013117152214050293, 0.01318057632446289, 0.013017375946044922, 0.013080351829528809, 0.01295798397064209, 0.013065279960632325, 0.013257823944091796, 0.014761183738708496, 0.012696864128112793, 0.013700063705444337, 0.01628927993774414, 0.014596416473388671, 0.013072447776794434, 0.013100864410400391, 0.013045887947082519, 0.01311574363708496, 0.013001952171325683, 0.012971776008605957, 0.012985055923461913, 0.012948703765869141, 0.012935263633728027, 0.012966303825378419, 0.01298185634613037, 0.013535008430480958, 0.013042559623718262, 0.012975584030151368, 0.013168224334716797, 0.01330681610107422, 0.013307904243469238, 0.013342720031738281, 0.013447168350219727, 0.013436351776123047, 0.013498751640319825, 0.013446784019470215, 0.013426495552062987, 0.013443424224853515, 0.013433279991149903, 0.013428640365600587, 0.013531200408935547, 0.013430879592895508, 0.013437855720520019, 0.013576640129089355, 0.013459936141967773, 0.013510751724243163, 0.013418496131896973, 0.013414719581604003, 0.013301440238952637, 0.013277503967285156, 0.01336473560333252, 0.013508864402770996, 0.013315327644348144, 0.013300415992736816, 0.013370656013488769, 0.013508576393127442, 0.013384448051452637, 0.013393695831298829, 0.0133820161819458, 0.013213312149047852, 0.013289695739746093, 0.013191167831420898, 0.01313372802734375, 0.013123680114746093, 0.013141311645507813, 0.013171392440795899, 0.013220159530639649, 0.013184351921081543, 0.013175168037414552, 0.013191136360168457, 0.01344921588897705, 0.013359359741210938, 
0.013215680122375489, 0.0129518404006958, 0.013181280136108399, 0.013251839637756347, 0.013316512107849121, 0.013230463981628419, 0.013196576118469238, 0.013205408096313476, 0.013213536262512208, 0.0132424955368042, 0.01334768009185791, 0.013469696044921875, 0.013423999786376954, 0.013202207565307617, 0.013109087944030762, 0.013391263961791992, 0.01301302433013916, 0.013046336174011231, 0.013072640419006347, 0.013067839622497558, 0.013266847610473633, 0.013244064331054687, 0.013224224090576171, 0.013160896301269532, 0.013161760330200195, 0.013095552444458008, 0.013076031684875488, 0.01302950382232666, 0.013029696464538575, 0.012976127624511719, 0.013025152206420898, 0.013187199592590333, 0.013217791557312012, 0.013191167831420898, 0.01315782356262207, 0.01318057632446289, 0.013073311805725098, 0.013010944366455078, 0.012978400230407715, 0.01306595230102539, 0.013051039695739746, 0.013203680038452148, 0.013238975524902344, 0.013189120292663574, 0.013148127555847168, 0.0132892484664917, 0.013407839775085448, 0.01337388801574707, 0.013316320419311524, 0.013172736167907715, 0.013091936111450195, 0.013142815589904785, 0.013054047584533691, 0.013008288383483887, 0.01303382396697998, 0.013017375946044922, 0.013047807693481446, 0.0131146240234375, 0.013152064323425293, 0.013123871803283692, 0.013154815673828125, 0.013125791549682617, 0.013078720092773437, 0.013147616386413574, 0.01319974422454834, 0.01366972827911377, 0.013640064239501953, 0.013629728317260743, 0.01358233642578125, 0.013567551612854004, 0.013612480163574218, 0.013717696189880372, 0.013714048385620117, 0.013611200332641602, 0.013584159851074218, 0.01359488010406494, 0.013660223960876464, 0.013567904472351074, 0.013658111572265624, 0.013539232254028321, 0.0135578556060791, 0.01349955177307129, 0.013773344039916991, 0.013476384162902833, 0.013501728057861328, 0.013446751594543458, 0.013444095611572266, 0.01346127986907959, 0.013399359703063964, 0.01347049617767334, 0.013471776008605958, 0.013428640365600587, 0.013381695747375489, 0.013285759925842286, 0.01363491153717041, 0.01330735969543457, 0.01323209571838379, 0.013169792175292969, 0.013254176139831543, 0.013246432304382324, 0.013162752151489258, 0.013115391731262208, 0.013130784034729004, 0.013097951889038086, 0.013119615554809571, 0.013100768089294433, 0.013135711669921874, 0.013054047584533691, 0.013061440467834473, 0.013092896461486817, 0.013046976089477538, 0.012936800003051759, 0.013055904388427735, 0.01332038402557373, 0.013328384399414063, 0.013315296173095703, 0.0133721923828125, 0.013420063972473145, 0.0133853120803833, 0.013626239776611327, 0.013479935646057128, 0.013478976249694824, 0.013825087547302247, 0.013451295852661133, 0.013438816070556641, 0.013457375526428223, 0.013441151618957519, 0.013008864402770996, 0.013377568244934083, 0.01333801555633545, 0.013374048233032227, 0.013344896316528321, 0.013323583602905274, 0.013248384475708008, 0.013365951538085938, 0.013294624328613282, 0.013278240203857422, 0.013303359985351563, 0.013391231536865235, 0.013433088302612304, 0.013424960136413575, 0.013269375801086426, 0.013115391731262208, 0.013107263565063476, 0.013172032356262207, 0.013052096366882324, 0.013165056228637695, 0.013103103637695313, 0.01307852840423584, 0.013033760070800782, 0.01305519962310791, 0.013036031723022461, 0.013031328201293945, 0.012977663993835448, 0.012951807975769043, 0.012978528022766113, 0.012940735816955566, 0.012908384323120118, 0.01291324806213379, 0.012900639533996582, 0.012933247566223145, 0.013004511833190917, 0.012951423645019531, 
0.013006560325622559, 0.012990880012512206, 0.012972031593322754, 0.012965696334838867, 0.012949503898620606, 0.012906304359436035, 0.01294979190826416, 0.012903743743896485, 0.012951807975769043, 0.012976672172546387, 0.012953599929809571, 0.012982272148132324, 0.01315225601196289, 0.012998656272888183, 0.013061951637268067, 0.013019328117370605, 0.013032671928405761, 0.013007648468017578, 0.013020544052124024, 0.013020992279052734, 0.01300153636932373, 0.012976127624511719, 0.01303337574005127, 0.012959839820861817, 0.012939040184020995, 0.012947456359863281, 0.013062687873840332, 0.012665087699890137, 0.013041152000427245, 0.012989695549011231, 0.013134847640991211, 0.013049023628234863, 0.013032256126403808, 0.012985952377319336, 0.012988096237182617, 0.013060383796691895, 0.01297862434387207, 0.012959839820861817, 0.012944767951965332, 0.012968640327453613, 0.012922719955444336, 0.012992511749267579, 0.01290060806274414, 0.01308249568939209, 0.012953472137451173, 0.012959039688110352, 0.013012672424316406, 0.012915712356567383, 0.013067584037780761, 0.013324992179870605, 0.013424768447875976, 0.01343609619140625, 0.013506367683410644, 0.013501343727111816, 0.01358028793334961, 0.013457056045532226, 0.013500384330749512, 0.01356326389312744, 0.013537823677062988, 0.013566240310668946, 0.01350496006011963, 0.013650912284851074, 0.013570624351501465, 0.013552127838134765, 0.013460607528686523, 0.013437503814697266, 0.013432127952575684, 0.013512543678283691, 0.013541631698608399, 0.013657983779907226, 0.013447392463684081, 0.013396479606628419, 0.013478079795837402, 0.013446975708007812, 0.013500415802001953, 0.013493824005126952, 0.013725664138793945, 0.013451807975769043, 0.0133602237701416, 0.01326576042175293, 0.013274368286132812, 0.013194144248962402, 0.013201248168945313, 0.013153440475463867, 0.013146976470947265, 0.013091872215270996, 0.013138912200927734, 0.013082431793212891, 0.012992704391479492, 0.013029376029968261, 0.012685312271118163, 0.012984319686889649, 0.012913760185241699, 0.012929951667785645, 0.01289241600036621, 0.012965215682983398, 0.012930784225463867, 0.012958047866821289, 0.012879743576049804, 0.01287548828125, 0.01296678352355957, 0.012991968154907227, 0.012927136421203612, 0.012953824043273925, 0.013031359672546386, 0.012949567794799804, 0.012954784393310546, 0.01296895980834961, 0.013039487838745117, 0.012909695625305176, 0.012917183876037597, 0.01309331226348877, 0.012857343673706055, 0.01287168025970459, 0.012928031921386718, 0.013063136100769044, 0.012881024360656738, 0.012860447883605956, 0.012859552383422852, 0.012958880424499511, 0.012847647666931153, 0.012918784141540527, 0.012890144348144532, 0.01291808032989502, 0.012965727806091308, 0.01294159984588623, 0.012964127540588379, 0.013041919708251953, 0.012985631942749024, 0.012945728302001953, 0.012939680099487304, 0.012907903671264648, 0.01315062427520752, 0.016723712921142577, 0.013594304084777832, 0.013100000381469726, 0.013029151916503907, 0.013027711868286133, 0.012992159843444824, 0.012984064102172852, 0.013037376403808594, 0.012985856056213378, 0.012948415756225585, 0.013020511627197265, 0.012959648132324218, 0.01297596836090088, 0.012946335792541504, 0.012969792366027832, 0.012975647926330566, 0.013073408126831054, 0.01328707218170166, 0.013286815643310548, 0.01304035186767578, 0.01278771209716797, 0.013139967918395995, 0.013100607872009278, 0.013107711791992188, 0.01305299186706543, 0.013032320022583008, 0.013086112022399902, 0.013121824264526367, 0.013062463760375977, 0.013013312339782715, 
0.012998335838317871, 0.013000191688537598, 0.01297049617767334, 0.0129617919921875, 0.013021183967590331, 0.01306601619720459, 0.013153504371643067, 0.013424960136413575, 0.0134519681930542, 0.013462719917297363, 0.013384511947631836, 0.01349836826324463, 0.013414400100708008, 0.013413439750671387, 0.013446047782897949, 0.013586496353149414, 0.013549535751342774, 0.01358243179321289, 0.013538623809814453, 0.013546303749084472, 0.013778719902038574, 0.013981696128845214, 0.013533184051513672, 0.013594688415527344, 0.013522879600524902, 0.013528512001037598, 0.013468416213989257, 0.013442879676818847, 0.013491264343261719, 0.013448127746582031, 0.0134782075881958, 0.013442272186279297, 0.013449695587158203, 0.013514431953430175, 0.013476160049438477, 0.013610527992248536, 0.013522720336914063, 0.013611488342285157, 0.013588704109191895, 0.013389823913574218, 0.01338368034362793, 0.013334527969360351, 0.013316096305847168, 0.01351593589782715, 0.013978495597839355, 0.013553631782531739, 0.013499391555786134, 0.013511679649353027, 0.013540512084960937, 0.013478752136230468, 0.013484031677246093, 0.013532608032226563, 0.013480031967163086]",tokens/s,75.25091528576823,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1138.520064,1096.679424,0.0,710.934528,686.03904,s,1,8.581947265625,8.581947265625,0.0,8.581947265625,8.581947265625,8.581947265625,8.581947265625,[8.581947265625],,kWh,3.028218689998236e-05,3.3329752637880467e-06,9.33750747000861e-06,4.2952669633779014e-05,,MB,1498.996736,1436.418048,0.0,1021.313024,985.00096,s,10,0.3625167045593263,0.03625167045593262,0.00021326722920203412,0.036177934646606444,0.03636599769592285,0.03661200733184815,0.03680881504058838,"[0.03685801696777344, 0.036311328887939455, 0.036157150268554684, 0.0361987190246582, 0.036135711669921876, 0.036092254638671876, 0.03629014587402344, 0.036156257629394534, 0.03610809707641602, 0.03620902252197265]",tokens/s,7061.743549478431,kWh,1.181011162987339e-06,1.3024354844658021e-07,7.819809123226835e-07,2.0932356237566027e-06,tokens/kWh,122298702.11198315,MB,1531.551744,1486.749696,0.0,1071.644672,985.00352,s,10,16.115920166015627,1.6115920166015623,0.00446747942641449,1.6106911010742189,1.6192678588867186,1.6195976135253904,1.6198614172363281,"[1.619194580078125, 1.6054296875, 1.60847412109375, 1.6074649658203124, 1.6096724853515625, 1.610689697265625, 1.6199273681640625, 1.613071044921875, 1.6106925048828125, 1.6113037109375]",tokens/s,39.091779650814466,kWh,5.74596066828467e-05,6.33701027013548e-06,2.3074621506275535e-05,8.687123845925773e-05,tokens/kWh,725211.2565374183,,s,630,16.112701669693006,0.025575716936020624,0.0007607594888319328,0.025405376434326173,0.025761101150512694,0.026643320751190185,0.0302019294166565,"[0.02538960075378418, 0.025730976104736326, 0.025276544570922852, 0.025794559478759766, 0.025511232376098633, 0.025393440246582032, 0.025514400482177735, 
0.026634239196777345, 0.02555084800720215, 0.02524083137512207, 0.025289472579956056, 0.025196544647216795, 0.02536396789550781, 0.0252359676361084, 0.025220928192138673, 0.025292640686035157, 0.025471328735351562, 0.025306848526000975, 0.025440351486206055, 0.03029216003417969, 0.025413631439208984, 0.02549519920349121, 0.02528291130065918, 0.025307455062866212, 0.02519321632385254, 0.02547808074951172, 0.025497055053710936, 0.025245920181274414, 0.025414207458496093, 0.02529199981689453, 0.025639455795288087, 0.02540675163269043, 0.025299007415771485, 0.02538470458984375, 0.025348320007324218, 0.025520544052124023, 0.025368799209594728, 0.02537478446960449, 0.02556643295288086, 0.025448768615722657, 0.025782751083374023, 0.025386367797851563, 0.025471616744995117, 0.027412479400634765, 0.026916864395141602, 0.02597887992858887, 0.025438207626342774, 0.025609823226928712, 0.02721014404296875, 0.0257126407623291, 0.025362239837646485, 0.025362335205078124, 0.025332000732421874, 0.02527427291870117, 0.025331039428710938, 0.025301984786987305, 0.02539289665222168, 0.025303071975708007, 0.02540060806274414, 0.027908832550048827, 0.02774630355834961, 0.02668070411682129, 0.025434112548828124, 0.02535433578491211, 0.02567804718017578, 0.025534208297729493, 0.02559391975402832, 0.025491552352905275, 0.025431968688964843, 0.02539948844909668, 0.025487360000610353, 0.025214208602905273, 0.025329631805419923, 0.02540764808654785, 0.0252926082611084, 0.02530339241027832, 0.02569264030456543, 0.025440095901489258, 0.025319263458251952, 0.025362688064575194, 0.025689216613769533, 0.02531219291687012, 0.02531328010559082, 0.02550579261779785, 0.025380767822265626, 0.025145439147949217, 0.025302047729492187, 0.025350303649902345, 0.025172800064086915, 0.025401344299316408, 0.025210880279541017, 0.025195648193359375, 0.025213632583618164, 0.026295551300048826, 0.02550409507751465, 0.02531337547302246, 0.025892959594726563, 0.025688127517700197, 0.027909536361694336, 0.02751584053039551, 0.025171743392944337, 0.02533193588256836, 0.025304895401000976, 0.025526464462280272, 0.025379903793334962, 0.02546988868713379, 0.025462783813476563, 0.025290752410888673, 0.02546281623840332, 0.025398784637451172, 0.02544483184814453, 0.02535321617126465, 0.025225311279296874, 0.025440448760986327, 0.02539388847351074, 0.025182111740112305, 0.02535638427734375, 0.025546432495117188, 0.025293119430541994, 0.025454591751098633, 0.02535974311828613, 0.02533030319213867, 0.025386592864990235, 0.025343711853027345, 0.025288639068603517, 0.02528108787536621, 0.025408384323120117, 0.025372064590454102, 0.025352800369262695, 0.02537811279296875, 0.02522118377685547, 0.02523814392089844, 0.025349920272827148, 0.0252677116394043, 0.025416416168212892, 0.025409536361694338, 0.025450496673583983, 0.025663488388061522, 0.028508031845092773, 0.025587839126586916, 0.0252105598449707, 0.025438528060913086, 0.02531532859802246, 0.025529600143432616, 0.025281280517578126, 0.02535424041748047, 0.025281919479370116, 0.025287296295166014, 0.025366527557373047, 0.025281696319580077, 0.025281375885009765, 0.02530303955078125, 0.02536038398742676, 0.025288703918457032, 0.025264127731323242, 0.02551945686340332, 0.0254366397857666, 0.025458879470825195, 0.025231359481811523, 0.02556492805480957, 0.02575334358215332, 0.02545270347595215, 0.025280704498291017, 0.025349599838256836, 0.02534809684753418, 0.02524188804626465, 0.02541200065612793, 0.025476896286010742, 0.025266399383544923, 0.0252391357421875, 0.025342367172241212, 
0.025416799545288086, 0.0253155517578125, 0.025778528213500976, 0.025570816040039062, 0.02578054428100586, 0.025524511337280273, 0.02563088035583496, 0.029537439346313477, 0.025364992141723632, 0.025409311294555665, 0.025418399810791015, 0.025441919326782227, 0.02560652732849121, 0.025523872375488282, 0.025405792236328124, 0.02565017509460449, 0.02545257568359375, 0.02546575927734375, 0.025385536193847657, 0.0254421443939209, 0.02552841567993164, 0.025569183349609375, 0.025397151947021485, 0.02555683135986328, 0.025735231399536134, 0.02561859130859375, 0.025413759231567384, 0.025194175720214845, 0.025392799377441405, 0.02530534362792969, 0.025546367645263673, 0.025434944152832033, 0.02535580825805664, 0.025342496871948242, 0.025495296478271486, 0.025353919982910155, 0.02528927993774414, 0.025196544647216795, 0.025337087631225587, 0.0253570556640625, 0.02531123161315918, 0.02550409507751465, 0.02537763214111328, 0.02542207908630371, 0.02539571189880371, 0.025407392501831053, 0.027948671340942383, 0.02761782455444336, 0.02531491279602051, 0.025541023254394533, 0.02553856086730957, 0.025231359481811523, 0.025183551788330077, 0.02545254325866699, 0.025297599792480467, 0.025464704513549805, 0.0266507511138916, 0.02547302436828613, 0.025288415908813477, 0.02520012855529785, 0.025419967651367188, 0.02562886428833008, 0.025334175109863282, 0.02552217674255371, 0.025450496673583983, 0.026025312423706055, 0.02556716728210449, 0.02543280029296875, 0.025460416793823243, 0.02541804885864258, 0.025463968276977538, 0.02545136070251465, 0.02540889549255371, 0.025479808807373047, 0.025394847869873047, 0.025141599655151368, 0.025483135223388673, 0.025386175155639647, 0.025127872467041016, 0.02537676811218262, 0.025304319381713868, 0.02531670379638672, 0.02520521545410156, 0.02542425537109375, 0.02523750305175781, 0.02783807945251465, 0.027811328887939454, 0.025944799423217774, 0.02514672088623047, 0.026006336212158202, 0.025183551788330077, 0.025217727661132814, 0.02530031967163086, 0.025270912170410158, 0.02525187110900879, 0.025227264404296876, 0.02518556785583496, 0.025291488647460936, 0.025268224716186522, 0.025300479888916014, 0.025467391967773437, 0.025161727905273438, 0.0252620792388916, 0.025362432479858397, 0.025286048889160157, 0.02522534370422363, 0.025208927154541014, 0.025313983917236327, 0.02534163284301758, 0.025212928771972655, 0.025425247192382813, 0.02536057662963867, 0.025188159942626954, 0.025268543243408204, 0.02541971206665039, 0.025332128524780274, 0.025239551544189453, 0.025903104782104492, 0.02650931167602539, 0.02569011116027832, 0.025616384506225585, 0.025377119064331054, 0.025843360900878905, 0.025769983291625977, 0.02536038398742676, 0.025591808319091795, 0.030459903717041017, 0.025247488021850586, 0.02508201599121094, 0.025597248077392578, 0.02534684753417969, 0.0251429443359375, 0.025416032791137695, 0.025210880279541017, 0.025407487869262696, 0.02543180847167969, 0.025245504379272463, 0.025300575256347657, 0.025439071655273437, 0.025260128021240235, 0.025284191131591797, 0.025444671630859374, 0.025540607452392578, 0.025335615158081054, 0.02518239974975586, 0.025341951370239257, 0.025235328674316406, 0.025323616027832032, 0.025159135818481445, 0.02513523292541504, 0.02510310363769531, 0.025378143310546875, 0.02508755111694336, 0.025105119705200196, 0.025688383102416994, 0.025321151733398436, 0.025161920547485353, 0.025194368362426757, 0.025081247329711915, 0.025164384841918946, 0.02548240089416504, 0.025305376052856446, 0.025278432846069336, 0.025746015548706053, 
0.025386016845703126, 0.027921375274658204, 0.02730188751220703, 0.025401023864746092, 0.025133279800415038, 0.025473119735717774, 0.025249792098999024, 0.025212928771972655, 0.025404960632324218, 0.02528713607788086, 0.025163360595703125, 0.025511775970458984, 0.025342111587524415, 0.02523360061645508, 0.025956575393676757, 0.025406911849975587, 0.025569856643676756, 0.02537833595275879, 0.02545097541809082, 0.025242879867553712, 0.02536729621887207, 0.025538368225097655, 0.02534623908996582, 0.025640960693359374, 0.025345407485961913, 0.025422464370727538, 0.02532863998413086, 0.025267200469970705, 0.025358335494995117, 0.0251146240234375, 0.025198591232299804, 0.026243072509765625, 0.025500703811645507, 0.02541257667541504, 0.025650848388671876, 0.028608543395996094, 0.02541804885864258, 0.025437503814697265, 0.025251935958862305, 0.02519273567199707, 0.025276735305786134, 0.025595903396606445, 0.030346431732177735, 0.025577472686767577, 0.025585023880004883, 0.025344640731811523, 0.025487360000610353, 0.025417119979858398, 0.025382688522338867, 0.025299264907836915, 0.025807039260864258, 0.025463104248046875, 0.027776800155639648, 0.02593302345275879, 0.027532512664794923, 0.02529020881652832, 0.025411775588989258, 0.025167999267578126, 0.025511936187744142, 0.025337568283081056, 0.025194528579711915, 0.025522432327270507, 0.025347360610961912, 0.025332128524780274, 0.025379039764404296, 0.02523129653930664, 0.025394336700439453, 0.025523199081420898, 0.025323200225830077, 0.025588031768798827, 0.02529596710205078, 0.025303968429565428, 0.025577152252197265, 0.025424192428588867, 0.025366336822509765, 0.025256128311157228, 0.02527027130126953, 0.02527129554748535, 0.026149728775024413, 0.025479328155517577, 0.025640960693359374, 0.030248960494995116, 0.025398399353027342, 0.025406335830688475, 0.025413408279418945, 0.025520351409912108, 0.02532307243347168, 0.025465280532836913, 0.025845760345458983, 0.025409536361694338, 0.02572083282470703, 0.031352640151977536, 0.02562886428833008, 0.02582304000854492, 0.025897151947021486, 0.02532124710083008, 0.02537196731567383, 0.025594783782958985, 0.02533340835571289, 0.0252890567779541, 0.025447872161865233, 0.025508415222167968, 0.025251840591430662, 0.025761024475097656, 0.025565183639526368, 0.025510656356811524, 0.02567945671081543, 0.0277390079498291, 0.026003103256225586, 0.025495904922485352, 0.02527027130126953, 0.02574745559692383, 0.025384000778198242, 0.025477184295654296, 0.025410367965698243, 0.0254683837890625, 0.02564156723022461, 0.025297056198120116, 0.025476959228515624, 0.028081888198852538, 0.027920671463012695, 0.025290719985961913, 0.02528873634338379, 0.025487360000610353, 0.02537478446960449, 0.0254749755859375, 0.025354272842407228, 0.025257568359375, 0.025463199615478514, 0.02553788757324219, 0.02551465606689453, 0.02536038398742676, 0.025440576553344727, 0.025402528762817383, 0.02546463966369629, 0.025112928390502928, 0.025345727920532225, 0.025254112243652344, 0.02510691261291504, 0.02532275199890137, 0.025277183532714843, 0.02555084800720215, 0.025411584854125976, 0.025608192443847655, 0.02554265594482422, 0.025268224716186522, 0.02542521667480469, 0.02523014450073242, 0.025171232223510743, 0.02544291114807129, 0.025372352600097656, 0.025114912033081055, 0.02567967987060547, 0.025434335708618163, 0.02530940818786621, 0.025200416564941406, 0.025380863189697265, 0.025253023147583008, 0.025316192626953126, 0.02555084800720215, 0.030373888015747072, 0.025361919403076173, 0.025575551986694336, 0.025179712295532227, 
0.025387775421142577, 0.025473087310791016, 0.025223167419433593, 0.02535580825805664, 0.02533030319213867, 0.025432416915893555, 0.025470239639282227, 0.025266912460327147, 0.025417728424072264, 0.025430015563964844, 0.025289951324462892, 0.02526902389526367, 0.025333759307861328, 0.025347295761108397, 0.02535094451904297, 0.025173759460449217, 0.025352575302124022, 0.02525347137451172, 0.025487199783325195, 0.025288671493530274, 0.025298944473266603, 0.025326047897338867, 0.02528665542602539, 0.025409568786621095, 0.025442272186279296, 0.02574505615234375, 0.025536863327026368, 0.025393152236938478, 0.025417728424072264, 0.026308671951293945, 0.025382848739624025, 0.025550432205200195, 0.02537104034423828, 0.025503744125366212, 0.02562156867980957, 0.030008256912231444, 0.025561088562011718, 0.02537788772583008, 0.02550876808166504, 0.025382911682128906, 0.02551379203796387, 0.02575971221923828, 0.0254998722076416, 0.02583750343322754, 0.02571494483947754, 0.025597759246826172, 0.02553446388244629, 0.025437280654907225, 0.025624704360961915, 0.025444799423217774, 0.025852256774902344, 0.025374719619750977, 0.025540416717529296, 0.025479360580444334, 0.025600000381469725, 0.025450496673583983, 0.026195167541503906, 0.025283327102661134, 0.025448480606079103, 0.025275968551635743, 0.025868736267089843, 0.025341951370239257, 0.025475072860717773, 0.025483264923095703, 0.02568191909790039, 0.025495424270629882, 0.025395328521728516, 0.025552192687988282, 0.02534844779968262, 0.025626752853393556, 0.025604095458984375, 0.02553241539001465, 0.025456640243530275, 0.025683231353759765, 0.030489120483398437, 0.025712831497192383, 0.025455839157104494, 0.025510688781738282, 0.025388383865356447, 0.025655967712402344, 0.025282560348510744, 0.02541312026977539, 0.025307647705078123, 0.02525951957702637, 0.025516544342041016, 0.025839391708374022, 0.02547443199157715, 0.025465696334838868, 0.02540947151184082, 0.025419839859008787, 0.02548940849304199, 0.025456640243530275, 0.025430015563964844, 0.02541263961791992, 0.025293567657470702, 0.025355680465698242, 0.025372575759887696, 0.02520284843444824, 0.02537548828125, 0.025298944473266603, 0.025455936431884766, 0.025494047164916992, 0.025413791656494142, 0.02531532859802246, 0.025761791229248047, 0.025411584854125976, 0.025202688217163087, 0.02533785629272461, 0.025253599166870117, 0.025149728775024412, 0.02527663993835449, 0.025347936630249025, 0.025321184158325197, 0.0253603515625, 0.02545894432067871, 0.030086784362792968, 0.025512287139892578, 0.02519862365722656, 0.025621503829956056, 0.025455392837524415, 0.025453983306884767, 0.02525254440307617, 0.02539491271972656, 0.02517852783203125, 0.025536096572875977, 0.02521913528442383, 0.025432096481323243, 0.025421567916870117, 0.02524012756347656, 0.025370624542236327, 0.025503103256225585]",tokens/s,39.09958819537952,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch 
raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1479.483392,1324.285952,0.0,945.815552,943.480832,s,1,8.6265927734375,8.6265927734375,0.0,8.6265927734375,8.6265927734375,8.6265927734375,8.6265927734375,[8.6265927734375],,kWh,3.7683497074990176e-05,4.149340682252034e-06,1.2175009740003628e-05,5.4007847497245837e-05,,MB,1564.012544,1487.863808,0.0,1080.03328,1046.51776,s,10,0.8680324630737304,0.08680324630737304,0.0006278083832273644,0.08660831832885743,0.08742984313964844,0.0878890167236328,0.08825635559082032,"[0.08834819030761719, 0.08587276458740234, 0.08732780456542968, 0.08653225708007813, 0.08689542388916016, 0.08658422088623047, 0.08647846221923829, 0.08643663787841797, 0.08663241577148438, 0.08692428588867188]",tokens/s,2949.1984561671334,kWh,2.7090827212576394e-06,2.987616894364072e-07,1.794060077222214e-06,4.801904487916261e-06,tokens/kWh,53312180.749161184,MB,1567.834112,1573.84704,0.0,1166.016512,1082.822656,s,10,15.914975585937501,1.5914975585937499,0.01135133220021555,1.58663525390625,1.6098780395507812,1.611726983642578,1.6132061389160157,"[1.5799921875, 1.613575927734375, 1.58172900390625, 1.5866124267578126, 1.59975390625, 1.6094671630859374, 1.5866580810546875, 1.582243408203125, 1.5903753662109374, 1.584568115234375]",tokens/s,39.58535761479076,kWh,4.6502290346242e-05,5.128885789732272e-06,2.1754403205977757e-05,7.338557934195202e-05,tokens/kWh,858479.2893225149,,s,630,15.912596687316864,0.025258089979868087,0.00046688073172256815,0.02511844730377197,0.025812412071228027,0.02593710994720459,0.02718702049255372,"[0.025034719467163086, 0.024907808303833007, 0.024838144302368165, 0.025008127212524413, 0.02488035202026367, 0.0248306884765625, 0.02478291130065918, 0.025324607849121095, 0.025113536834716798, 0.024991743087768553, 
0.02496905517578125, 0.025039007186889648, 0.025044767379760743, 0.025059551239013673, 0.025648128509521483, 0.025131776809692384, 0.02503638458251953, 0.024948543548583984, 0.02489036750793457, 0.02489139175415039, 0.025034400939941408, 0.02510812759399414, 0.025182367324829102, 0.025321855545043945, 0.025200639724731445, 0.025067359924316406, 0.024954944610595702, 0.025072895050048827, 0.02503971290588379, 0.025040895462036132, 0.024928255081176756, 0.024930240631103516, 0.025098304748535156, 0.024954879760742187, 0.024938495635986328, 0.02514364814758301, 0.025018016815185548, 0.02494259262084961, 0.024954879760742187, 0.024958240509033204, 0.024918752670288084, 0.025057279586791992, 0.024899423599243162, 0.025041023254394532, 0.0249866886138916, 0.025082048416137696, 0.024988319396972655, 0.02512499237060547, 0.025110080718994142, 0.027017120361328126, 0.02555926322937012, 0.02500377655029297, 0.024928352355957032, 0.02496928024291992, 0.025163679122924804, 0.025143808364868164, 0.025118719100952147, 0.025092096328735353, 0.024989280700683594, 0.025563552856445314, 0.02497331237792969, 0.024944799423217773, 0.024831071853637695, 0.02504515266418457, 0.02492985534667969, 0.02506604766845703, 0.02511359977722168, 0.024988447189331055, 0.025075807571411132, 0.024991424560546874, 0.02512928009033203, 0.025149440765380858, 0.025065599441528322, 0.025064863204956055, 0.025038368225097658, 0.02518111991882324, 0.025296672821044922, 0.025329023361206054, 0.025458560943603516, 0.02570953559875488, 0.025644319534301758, 0.02564579200744629, 0.025659391403198242, 0.025868064880371095, 0.025870559692382812, 0.02560646438598633, 0.025517087936401367, 0.025543327331542968, 0.02554265594482422, 0.025857791900634766, 0.02614076805114746, 0.025762176513671874, 0.025982240676879882, 0.026845312118530272, 0.025885055541992188, 0.025980928421020507, 0.025977855682373048, 0.026009952545166016, 0.02598979187011719, 0.02595840072631836, 0.026200063705444337, 0.025925792694091798, 0.025945375442504883, 0.025894720077514647, 0.025886463165283202, 0.02568294334411621, 0.025758016586303712, 0.025810176849365235, 0.02565190315246582, 0.025452287673950195, 0.0257126407623291, 0.029024255752563476, 0.025606143951416017, 0.025350112915039063, 0.02531059265136719, 0.025352863311767577, 0.025362432479858397, 0.025382495880126952, 0.025399711608886717, 0.02550579261779785, 0.02539241600036621, 0.02544476890563965, 0.025430303573608398, 0.025444095611572265, 0.025325855255126952, 0.025179647445678712, 0.02539084815979004, 0.025233087539672853, 0.02516169548034668, 0.025106752395629883, 0.02521072006225586, 0.025090240478515626, 0.024993791580200195, 0.024963071823120117, 0.024869056701660158, 0.024831520080566407, 0.0249715518951416, 0.02495065689086914, 0.025054336547851563, 0.025242528915405273, 0.025345535278320314, 0.024920223236083984, 0.025040895462036132, 0.024726015090942383, 0.02497488021850586, 0.02477712059020996, 0.02488319969177246, 0.024841567993164063, 0.025072288513183594, 0.024809471130371095, 0.02491596794128418, 0.024825344085693358, 0.024951295852661134, 0.02479292869567871, 0.02481577682495117, 0.024796735763549804, 0.024842079162597657, 0.02481939125061035, 0.024941471099853514, 0.025402463912963868, 0.025666463851928712, 0.02488684844970703, 0.025008352279663085, 0.02474367904663086, 0.02497177505493164, 0.024999519348144532, 0.02503718376159668, 0.02476995277404785, 0.025167871475219726, 0.025025119781494142, 0.025328927993774415, 0.025344991683959962, 0.025577215194702147, 0.025436159133911132, 
0.02553446388244629, 0.025482240676879882, 0.025600608825683595, 0.02544476890563965, 0.02550783920288086, 0.025241600036621094, 0.02554252815246582, 0.02533788871765137, 0.02530518341064453, 0.025028608322143556, 0.025152896881103514, 0.024932992935180663, 0.025255935668945313, 0.02512073516845703, 0.025476640701293945, 0.025128608703613282, 0.025056991577148437, 0.0250118408203125, 0.024964384078979492, 0.024946367263793946, 0.02500521659851074, 0.02510220718383789, 0.025455615997314454, 0.02520377540588379, 0.02523027229309082, 0.02487500762939453, 0.02509823989868164, 0.024999359130859374, 0.02502876853942871, 0.02515190315246582, 0.02488319969177246, 0.024844608306884765, 0.024989248275756836, 0.025072799682617188, 0.026908992767333984, 0.025842336654663085, 0.025084928512573244, 0.024902111053466798, 0.024891839981079102, 0.024878623962402344, 0.024973888397216797, 0.024964832305908204, 0.02484662437438965, 0.024862016677856445, 0.024950815200805665, 0.024883392333984376, 0.024816095352172853, 0.02498150444030762, 0.024954879760742187, 0.024877056121826172, 0.025038848876953124, 0.02485215950012207, 0.02921504020690918, 0.025169919967651368, 0.025318559646606446, 0.025064287185668947, 0.025574623107910158, 0.02497817611694336, 0.02527440071105957, 0.02488319969177246, 0.025444351196289062, 0.025257823944091796, 0.02506675148010254, 0.025123136520385742, 0.025166400909423826, 0.025045024871826173, 0.024977407455444335, 0.025083904266357423, 0.025174016952514647, 0.02500320053100586, 0.024950944900512695, 0.024939168930053712, 0.02521513557434082, 0.025284671783447267, 0.025455615997314454, 0.02527027130126953, 0.025545503616333006, 0.02532681655883789, 0.025987071990966795, 0.027271167755126953, 0.025520128250122072, 0.02533580780029297, 0.02533580780029297, 0.025298688888549806, 0.025176319122314453, 0.02519196891784668, 0.025247871398925783, 0.02591155242919922, 0.025763999938964843, 0.025853023529052735, 0.02573766326904297, 0.02585775947570801, 0.025801824569702148, 0.02584307289123535, 0.025835615158081054, 0.02587455940246582, 0.025843679428100588, 0.025784351348876952, 0.025806367874145506, 0.02584828758239746, 0.025843584060668945, 0.025843839645385742, 0.02559916877746582, 0.025612447738647463, 0.02540595245361328, 0.025315488815307617, 0.025293983459472657, 0.02538582420349121, 0.025254240036010744, 0.025355199813842773, 0.025237375259399415, 0.02511296081542969, 0.024868448257446288, 0.02510732841491699, 0.02513100814819336, 0.025051136016845704, 0.025049087524414062, 0.024958976745605467, 0.02503628730773926, 0.024993791580200195, 0.024918527603149415, 0.024922111511230468, 0.024900928497314453, 0.024953535079956055, 0.024901216506958007, 0.02500783920288086, 0.024883712768554687, 0.024952255249023437, 0.024791744232177733, 0.024956256866455077, 0.024902368545532228, 0.025132095336914063, 0.025125215530395508, 0.025335615158081054, 0.025115392684936524, 0.025878559112548827, 0.02568191909790039, 0.025593856811523437, 0.025266176223754884, 0.025395200729370116, 0.025288896560668947, 0.025277952194213867, 0.025433984756469727, 0.025473535537719725, 0.025283967971801758, 0.02520537567138672, 0.025049087524414062, 0.02510233688354492, 0.024973119735717773, 0.024968608856201172, 0.024916383743286134, 0.02497983932495117, 0.024987167358398437, 0.025274208068847656, 0.02499612808227539, 0.02508220863342285, 0.025595903396606445, 0.025273408889770508, 0.02525279998779297, 0.025016319274902343, 0.025122207641601564, 0.02485103988647461, 0.024952096939086912, 0.02486502456665039, 
0.024994239807128907, 0.025068767547607423, 0.025146240234375, 0.025216896057128905, 0.025699392318725586, 0.025668607711791993, 0.025471071243286132, 0.025800607681274415, 0.025632768630981444, 0.02570035171508789, 0.025544704437255858, 0.02552217674255371, 0.025413856506347657, 0.02531011199951172, 0.025415775299072265, 0.026667808532714842, 0.025905311584472655, 0.028718847274780274, 0.026013792037963866, 0.025927007675170897, 0.026058496475219725, 0.025862367630004882, 0.025950624465942384, 0.02586038398742676, 0.025952255249023438, 0.025790239334106447, 0.025864416122436524, 0.025897151947021486, 0.025986112594604493, 0.025973440170288086, 0.0259748477935791, 0.025820735931396485, 0.02582271957397461, 0.025811487197875977, 0.02584003257751465, 0.025825279235839844, 0.025705663681030274, 0.025468896865844727, 0.025496416091918946, 0.025460960388183594, 0.025593568801879883, 0.025286943435668945, 0.02511257553100586, 0.02510438346862793, 0.025519807815551757, 0.025184095382690428, 0.025186784744262697, 0.025030399322509767, 0.024968992233276366, 0.024956512451171874, 0.024942943572998047, 0.025133600234985353, 0.02489753532409668, 0.02493235206604004, 0.024889055252075194, 0.02497532844543457, 0.024912448883056642, 0.025267967224121092, 0.025432064056396485, 0.02546892738342285, 0.025269823074340822, 0.025422271728515626, 0.025399007797241212, 0.025540895462036133, 0.025484928131103514, 0.025510271072387694, 0.025446399688720703, 0.02547235107421875, 0.025473279953002928, 0.02543657684326172, 0.025413631439208984, 0.02534726333618164, 0.025170751571655273, 0.024986656188964843, 0.025094432830810545, 0.025237279891967772, 0.025238079071044923, 0.025215328216552733, 0.024983552932739257, 0.025252960205078126, 0.025133983612060547, 0.025067520141601563, 0.024956384658813478, 0.025049631118774413, 0.024913919448852538, 0.025012224197387696, 0.024966495513916016, 0.025251935958862305, 0.025432640075683594, 0.025341951370239257, 0.02555904006958008, 0.0252192325592041, 0.025057024002075195, 0.025023679733276367, 0.024959903717041015, 0.024975360870361327, 0.02502364730834961, 0.025166688919067384, 0.024993791580200195, 0.024978752136230468, 0.025032928466796875, 0.025118175506591796, 0.025002143859863282, 0.025002464294433594, 0.02494803237915039, 0.025016191482543946, 0.02490665626525879, 0.02505904006958008, 0.025028736114501952, 0.0248240966796875, 0.024945632934570312, 0.02497737693786621, 0.024879903793334962, 0.025087871551513673, 0.025245824813842774, 0.025284608840942382, 0.024879007339477538, 0.025077407836914062, 0.02486534309387207, 0.025294527053833008, 0.025136480331420897, 0.02502454376220703, 0.02484876823425293, 0.024973760604858397, 0.02485862350463867, 0.024930303573608398, 0.024904767990112306, 0.02552707290649414, 0.024989280700683594, 0.024999807357788086, 0.026184640884399413, 0.02498739242553711, 0.0249007682800293, 0.025129215240478515, 0.02500003242492676, 0.025012319564819335, 0.02501696014404297, 0.025196319580078126, 0.02490777587890625, 0.025202688217163087, 0.024927295684814454, 0.025918239593505858, 0.024882720947265624, 0.02519126319885254, 0.024849216461181642, 0.02496406364440918, 0.02489753532409668, 0.02513715171813965, 0.027610271453857423, 0.025205120086669922, 0.025077407836914062, 0.025137983322143554, 0.024983104705810548, 0.02550239944458008, 0.02491916847229004, 0.025215103149414064, 0.02491993522644043, 0.025080448150634767, 0.024995840072631836, 0.025292448043823242, 0.02506515121459961, 0.02504361534118652, 0.024991743087768553, 
0.025200895309448242, 0.024874752044677734, 0.02507776069641113, 0.02506435203552246, 0.025008127212524413, 0.024863935470581053, 0.024953664779663084, 0.025200639724731445, 0.02560153579711914, 0.024908287048339844, 0.025113983154296873, 0.02520083236694336, 0.02570230484008789, 0.02519094467163086, 0.025116895675659178, 0.02490083122253418, 0.025108863830566406, 0.02497926330566406, 0.025082239151000975, 0.024936704635620116, 0.025229055404663085, 0.025057024002075195, 0.02509644889831543, 0.024872671127319335, 0.025034624099731444, 0.024891807556152345, 0.024989696502685548, 0.025092063903808595, 0.02535958480834961, 0.025359167098999023, 0.025411584854125976, 0.025417472839355467, 0.025909503936767577, 0.02604412841796875, 0.02563039970397949, 0.02549177551269531, 0.02575103950500488, 0.025547552108764648, 0.025618080139160157, 0.025317216873168947, 0.02571683120727539, 0.025082271575927736, 0.0255729923248291, 0.02520307159423828, 0.025501407623291016, 0.02523311996459961, 0.025352640151977537, 0.025105600357055665, 0.025228096008300782, 0.025045120239257812, 0.025218496322631834, 0.025078399658203125, 0.02546886444091797, 0.025231327056884766, 0.02531724739074707, 0.025186752319335936, 0.025519840240478514, 0.025464832305908205, 0.025769983291625977, 0.025044544219970703, 0.025201087951660157, 0.024848384857177733, 0.024971263885498047, 0.02481295967102051, 0.02505379295349121, 0.0248668155670166, 0.025025535583496093, 0.025011167526245118, 0.024815616607666017, 0.024932319641113282, 0.024842336654663087, 0.024940479278564454, 0.024895488739013674, 0.024915552139282225, 0.024816032409667968, 0.02481705665588379, 0.02507632064819336, 0.024964927673339844, 0.024950944900512695, 0.024994911193847655, 0.024959936141967773, 0.025161727905273438, 0.02789990425109863, 0.025964544296264647, 0.02520579147338867, 0.02514633560180664, 0.025165824890136718, 0.024997888565063478, 0.025156639099121095, 0.025123807907104494, 0.02503014373779297, 0.025022111892700195, 0.0248919677734375, 0.02502176094055176, 0.024839136123657228, 0.025034751892089844, 0.02495052719116211, 0.025131263732910157, 0.024845951080322264, 0.025371007919311524, 0.026218496322631835, 0.025219072341918947, 0.025310880661010744, 0.024999935150146483, 0.02489904022216797, 0.025031391143798827, 0.024898015975952148, 0.024921791076660156, 0.025159679412841796, 0.02530873680114746, 0.02725641632080078, 0.025234272003173828, 0.024887584686279298, 0.025093215942382813, 0.024875648498535158, 0.025057279586791992, 0.02513043212890625, 0.024945215225219728, 0.024856512069702148, 0.02499180793762207, 0.024862592697143554, 0.025022592544555664, 0.025220191955566407, 0.02549852752685547, 0.02510643196105957, 0.025133056640625, 0.02519478416442871, 0.0249946231842041, 0.02509212875366211]",tokens/s,39.59127553972007,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File 
""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file 
requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply 
module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 96.12 MiB is free. Process 290232 has 14.64 GiB memory in use. Of the allocated memory 14.23 GiB is allocated by PyTorch, and 323.24 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3894, in from_pretrained hf_quantizer.preprocess_model( File 
""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 85, in _process_model_before_weight_loading model, has_been_replaced = replace_with_awq_linear( File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear _, has_been_replaced = replace_with_awq_linear( File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear _, has_been_replaced = replace_with_awq_linear( File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear _, has_been_replaced = replace_with_awq_linear( [Previous line repeated 1 more time] File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 166, in replace_with_awq_linear model._modules[name] = target_cls( File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemm.py"", line 131, in __init__ assert out_features % (32 // self.w_bit) == 0 AssertionError " 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1550.06976,1546.584064,0.0,1168.113664,1154.613248,s,1,8.5274755859375,8.5274755859375,0.0,8.5274755859375,8.5274755859375,8.5274755859375,8.5274755859375,[8.5274755859375],,kWh,4.070899408332024e-05,4.479947221899825e-06,1.3120843829997342e-05,5.8309785135217407e-05,,MB,1664.581632,1796.145152,0.0,1388.314624,1334.065152,s,10,0.8429779586791992,0.08429779586791993,0.0010402176392791894,0.08405886459350587,0.08557998428344726,0.08595285682678223,0.0862511548614502,"[0.08632572937011719, 0.08418966674804687, 0.08447596740722656, 0.08356633758544922, 0.08264189147949219, 0.08356393432617187, 0.08392806243896485, 0.08549712371826172, 0.08518681335449219, 0.08360243225097656]",tokens/s,3036.8528306612875,kWh,2.654149214393838e-06,2.9270417739078424e-07,1.7587312049636856e-06,4.705584596748308e-06,tokens/kWh,54403442.27939356,MB,1672.728576,1796.145152,0.0,1388.314624,1372.847616,s,10,17.796296264648436,1.7796296264648437,0.018806836818460466,1.7843126831054688,1.7998743530273436,1.8018918273925781,1.8035058068847656,"[1.754376953125, 1.8039093017578125, 1.797359130859375, 1.775194580078125, 1.799426025390625, 1.780257568359375, 1.790001953125, 1.7883677978515624, 1.7484576416015625, 1.7589453125]",tokens/s,35.40062441259013,kWh,5.1194525785188586e-05,5.646437273394922e-06,2.3576205729637518e-05,8.041716878822102e-05,tokens/kWh,783414.7974782697,,s,630,17.79308543968202,0.02824299276140001,0.0011451475570666266,0.028156975746154785,0.028709516906738283,0.02893239507675171,0.03039027721405029,"[0.028347967147827148, 0.028173887252807617, 0.02804630470275879, 0.027898815155029295, 0.027630207061767578, 
0.027489696502685547, 0.02750262451171875, 0.02818307113647461, 0.028131744384765626, 0.027820287704467775, 0.027647743225097655, 0.02754150390625, 0.027928640365600586, 0.02751055908203125, 0.027539520263671874, 0.027938079833984376, 0.032262687683105466, 0.02765622329711914, 0.02770966339111328, 0.027470111846923828, 0.02747737693786621, 0.027402624130249024, 0.027320320129394532, 0.027471168518066406, 0.02734355163574219, 0.0273242244720459, 0.027297760009765627, 0.0273670711517334, 0.0274192008972168, 0.027426816940307616, 0.02728550338745117, 0.02736947250366211, 0.027922431945800782, 0.027507871627807618, 0.027493215560913085, 0.027508544921875, 0.02774822425842285, 0.02799238395690918, 0.02780364799499512, 0.027418464660644532, 0.027539615631103517, 0.027536415100097657, 0.027580671310424805, 0.02775321578979492, 0.02748752021789551, 0.027631776809692383, 0.027656192779541015, 0.027713247299194336, 0.02773801612854004, 0.02775721549987793, 0.02753971290588379, 0.02813747215270996, 0.02847123146057129, 0.02823583984375, 0.02827894401550293, 0.028329824447631834, 0.028383232116699218, 0.02840131187438965, 0.028323360443115234, 0.02848031997680664, 0.028266239166259765, 0.02809881591796875, 0.02840985679626465, 0.028321184158325196, 0.027980480194091797, 0.02810259246826172, 0.02776470375061035, 0.027598751068115233, 0.02824140739440918, 0.027593311309814454, 0.02793408012390137, 0.027883552551269532, 0.027746719360351564, 0.0280347843170166, 0.02834899139404297, 0.0281343994140625, 0.028369823455810548, 0.028210432052612304, 0.028240447998046876, 0.02841779136657715, 0.02820243263244629, 0.028291456222534178, 0.028301952362060546, 0.028539199829101563, 0.02896988868713379, 0.028889888763427733, 0.02857779121398926, 0.02877644729614258, 0.028463104248046874, 0.028487327575683594, 0.02857401657104492, 0.028485376358032225, 0.028525920867919923, 0.028576704025268556, 0.02860032081604004, 0.028614912033081055, 0.028430080413818358, 0.02853865623474121, 0.02829737663269043, 0.02852022361755371, 0.028561279296875, 0.028684703826904297, 0.028667552947998047, 0.029266271591186523, 0.028671871185302733, 0.028827775955200197, 0.02849577522277832, 0.02862886428833008, 0.02839369583129883, 0.028612607955932616, 0.028532224655151366, 0.02891811180114746, 0.028677919387817382, 0.028694623947143554, 0.02841539192199707, 0.028382080078125, 0.02847100830078125, 0.028913375854492187, 0.03040108871459961, 0.038317310333251954, 0.028854560852050782, 0.028921951293945314, 0.028577152252197265, 0.028563840866088867, 0.028265056610107423, 0.02826857566833496, 0.028184576034545897, 0.028231264114379883, 0.028139936447143556, 0.027901952743530273, 0.02787708854675293, 0.028129568099975587, 0.028105056762695313, 0.028063392639160155, 0.027867136001586915, 0.027824127197265625, 0.028065792083740236, 0.028045312881469726, 0.028097568511962892, 0.027932863235473632, 0.028128000259399415, 0.028043296813964842, 0.027934879302978517, 0.02798316764831543, 0.028115488052368163, 0.027918048858642578, 0.028054880142211913, 0.02830636787414551, 0.02913385581970215, 0.028821983337402345, 0.029135360717773437, 0.028641279220581056, 0.028837888717651368, 0.02853388786315918, 0.028982431411743163, 0.0286778564453125, 0.028540191650390626, 0.028574432373046875, 0.028938112258911134, 0.028708255767822266, 0.028738271713256835, 0.028656896591186524, 0.028739904403686522, 0.028539295196533202, 0.03204691314697266, 0.0287227840423584, 0.028695167541503905, 0.028473344802856446, 0.028641183853149413, 0.028676416397094725, 
0.02856723213195801, 0.02895462417602539, 0.02892540740966797, 0.029159839630126954, 0.031244640350341798, 0.028682111740112304, 0.028450815200805665, 0.02853424072265625, 0.028308000564575195, 0.028440576553344726, 0.028318912506103515, 0.02851308822631836, 0.028380704879760743, 0.028062175750732422, 0.028473663330078124, 0.02842505645751953, 0.028358879089355468, 0.02838796806335449, 0.028399072647094726, 0.028412864685058593, 0.028499008178710938, 0.028568063735961914, 0.02851980781555176, 0.028526496887207032, 0.02850934410095215, 0.028517663955688475, 0.02850774383544922, 0.028530847549438475, 0.02863577651977539, 0.028563167572021483, 0.028403743743896485, 0.02866828727722168, 0.028665855407714845, 0.02854707145690918, 0.0284649600982666, 0.02846473693847656, 0.028299583435058593, 0.0282708797454834, 0.029572256088256837, 0.028367712020874025, 0.02833203125, 0.028167327880859374, 0.028121952056884766, 0.02811404800415039, 0.02822028732299805, 0.028296319961547852, 0.0281298885345459, 0.028228000640869142, 0.02823360061645508, 0.028096511840820314, 0.028269920349121094, 0.028242591857910157, 0.027910144805908203, 0.027777023315429687, 0.028016639709472657, 0.028052543640136717, 0.028124095916748047, 0.027918336868286132, 0.027760255813598634, 0.02764633560180664, 0.027873279571533204, 0.02807734489440918, 0.0278822078704834, 0.027623424530029295, 0.02753126335144043, 0.027678720474243163, 0.027975391387939454, 0.02793017578125, 0.02766659164428711, 0.027761215209960936, 0.02747542381286621, 0.027478559494018555, 0.027429920196533203, 0.027726816177368163, 0.027803199768066406, 0.028300031661987305, 0.0281146240234375, 0.028085695266723634, 0.028172672271728514, 0.0284136962890625, 0.02830175971984863, 0.028376768112182617, 0.028276895523071287, 0.028364383697509765, 0.028458879470825194, 0.028431327819824218, 0.03004729652404785, 0.05117843246459961, 0.02872118377685547, 0.02840982437133789, 0.028112895965576173, 0.028033023834228517, 0.027996160507202147, 0.027813888549804686, 0.027606624603271485, 0.02763408088684082, 0.028135744094848633, 0.02817513656616211, 0.028349344253540038, 0.028194816589355468, 0.02814300727844238, 0.02824457550048828, 0.02813862419128418, 0.028111743927001952, 0.02821686363220215, 0.02834480094909668, 0.02835446357727051, 0.02835465621948242, 0.028280832290649413, 0.028285152435302736, 0.028450592041015625, 0.028362207412719727, 0.028424032211303712, 0.028492128372192383, 0.02845280075073242, 0.028489248275756836, 0.028382080078125, 0.028366559982299804, 0.028551136016845703, 0.02860678482055664, 0.028708864212036132, 0.028308736801147462, 0.02853923225402832, 0.028408159255981447, 0.028465215682983398, 0.028446720123291015, 0.028237152099609374, 0.028225311279296873, 0.028033279418945314, 0.027943552017211912, 0.027768863677978515, 0.027643871307373048, 0.027741344451904296, 0.02765500831604004, 0.027676671981811524, 0.027699199676513672, 0.028555328369140626, 0.028042879104614258, 0.027953472137451172, 0.027792640686035156, 0.027548063278198243, 0.027640159606933595, 0.02766758346557617, 0.027653343200683595, 0.027841760635375978, 0.02763795280456543, 0.027631423950195313, 0.027664384841918944, 0.027641855239868163, 0.027744255065917968, 0.027840448379516602, 0.027870975494384765, 0.02790777587890625, 0.027751039505004883, 0.02790809631347656, 0.02817024040222168, 0.028057855606079103, 0.027937728881835936, 0.027847423553466796, 0.02787743949890137, 0.028012287139892577, 0.02787353515625, 0.028089696884155274, 0.02827552032470703, 0.028347808837890624, 
0.028282720565795897, 0.02845756721496582, 0.028661855697631834, 0.02875775909423828, 0.028553375244140623, 0.02830326461791992, 0.02851878356933594, 0.028472511291503907, 0.02827248001098633, 0.028547136306762696, 0.02882009506225586, 0.02853446388244629, 0.028469568252563478, 0.028459007263183594, 0.028580863952636718, 0.028834815979003905, 0.029111648559570314, 0.028807840347290038, 0.028478975296020507, 0.028272607803344726, 0.028276960372924806, 0.028380928039550782, 0.02832441520690918, 0.028604415893554686, 0.02862598419189453, 0.02816819190979004, 0.027999103546142576, 0.02792246437072754, 0.02786511993408203, 0.027631616592407225, 0.027643903732299805, 0.02770652770996094, 0.027667295455932616, 0.027684864044189454, 0.028004352569580077, 0.02868409538269043, 0.028317888259887694, 0.028375040054321288, 0.028550752639770506, 0.029904287338256837, 0.028767232894897462, 0.02860851287841797, 0.028777503967285158, 0.02865190315246582, 0.028595487594604493, 0.028899904251098632, 0.029198495864868165, 0.0286167049407959, 0.028628992080688476, 0.028804096221923828, 0.028681215286254884, 0.028921344757080077, 0.028273151397705077, 0.02795315170288086, 0.028103679656982423, 0.028205440521240233, 0.02824425506591797, 0.028066144943237305, 0.027867136001586915, 0.02795302391052246, 0.028059776306152345, 0.02956492805480957, 0.02929360008239746, 0.028584831237792967, 0.028504159927368163, 0.02852681541442871, 0.028417503356933594, 0.028531007766723633, 0.029417472839355467, 0.03166435241699219, 0.02990582466125488, 0.028686496734619142, 0.028770111083984376, 0.02883260726928711, 0.028548959732055665, 0.028539264678955078, 0.028515968322753906, 0.02854729652404785, 0.02856243133544922, 0.028275808334350585, 0.029476768493652345, 0.02832399940490723, 0.02798896026611328, 0.028429183959960937, 0.027692895889282226, 0.027678911209106444, 0.02820627212524414, 0.027910400390625, 0.027757087707519532, 0.02759884834289551, 0.02773526382446289, 0.02769817543029785, 0.027866912841796876, 0.027611135482788086, 0.027463903427124025, 0.02823516845703125, 0.027617664337158204, 0.02793878364562988, 0.028162080764770506, 0.0280927677154541, 0.028001951217651366, 0.028057727813720703, 0.0281814079284668, 0.02803990364074707, 0.028211456298828125, 0.028301151275634765, 0.028246751785278322, 0.027991968154907225, 0.027998207092285156, 0.028185951232910157, 0.028715391159057618, 0.02825449562072754, 0.02813337516784668, 0.027995168685913088, 0.027949920654296877, 0.027926336288452147, 0.027947328567504884, 0.02791753578186035, 0.027996959686279296, 0.028116159439086914, 0.028574527740478514, 0.02867100715637207, 0.028666847229003905, 0.028634944915771485, 0.02863644790649414, 0.028533151626586914, 0.029053440093994142, 0.028706016540527343, 0.028749664306640624, 0.028744640350341796, 0.028702720642089844, 0.028688383102416993, 0.028667903900146483, 0.02878268814086914, 0.028759231567382814, 0.02867635154724121, 0.028561248779296874, 0.02840025520324707, 0.028188671112060547, 0.02792038345336914, 0.028002368927001954, 0.02809878349304199, 0.027899423599243165, 0.02772812843322754, 0.027600160598754885, 0.027589536666870116, 0.028364608764648438, 0.027756479263305663, 0.027684864044189454, 0.027747392654418945, 0.027784128189086914, 0.029300512313842772, 0.029427936553955078, 0.02851430320739746, 0.028366847991943358, 0.028307136535644532, 0.028549440383911134, 0.028876800537109375, 0.02838105583190918, 0.028481599807739257, 0.028417728424072267, 0.02840025520324707, 0.028271808624267578, 0.028291648864746093, 
0.028431840896606445, 0.027945344924926757, 0.030363807678222655, 0.029683616638183592, 0.028149856567382812, 0.02797964859008789, 0.028027904510498046, 0.028443647384643556, 0.0277708797454834, 0.027559232711791993, 0.027859615325927733, 0.02813465690612793, 0.028023584365844727, 0.02804908752441406, 0.028262367248535158, 0.028155296325683594, 0.028170656204223633, 0.02801308822631836, 0.028020736694335937, 0.027997215270996093, 0.02822857666015625, 0.027950687408447264, 0.028074016571044923, 0.027637151718139647, 0.027653087615966798, 0.027926336288452147, 0.02780588722229004, 0.027891712188720705, 0.028100608825683594, 0.02775359916687012, 0.02756857681274414, 0.027524927139282226, 0.027510591506958008, 0.02752582359313965, 0.027549407958984376, 0.0274619197845459, 0.02741036796569824, 0.0277956485748291, 0.027865087509155274, 0.027749568939208984, 0.027547679901123046, 0.027495168685913087, 0.027539424896240235, 0.027529151916503906, 0.027419967651367186, 0.02747065544128418, 0.02749849510192871, 0.02758844757080078, 0.02748355293273926, 0.029563648223876953, 0.027705184936523436, 0.027649248123168945, 0.027410400390625, 0.027398752212524413, 0.027502975463867187, 0.027469823837280274, 0.02746918487548828, 0.027447328567504883, 0.027451999664306642, 0.027376991271972656, 0.027402912139892578, 0.027688159942626953, 0.027606143951416015, 0.027440832138061522, 0.02783228874206543, 0.02764419174194336, 0.02765180778503418, 0.027490495681762695, 0.027822080612182616, 0.027830272674560546, 0.02750771141052246, 0.027539968490600586, 0.02776655960083008, 0.02800924873352051, 0.02794879913330078, 0.027959327697753906, 0.02828441619873047, 0.028017311096191405, 0.027958879470825194, 0.027988288879394533, 0.02789596748352051, 0.027592639923095703, 0.027786975860595704, 0.027967775344848633, 0.02780076789855957, 0.027708223342895508, 0.02759884834289551, 0.02754764747619629, 0.02736262321472168, 0.027426816940307616, 0.027738559722900392, 0.027621631622314454, 0.02769273567199707, 0.02750889587402344, 0.02790220832824707, 0.02832988739013672, 0.028059167861938475, 0.029640287399291993, 0.02838969612121582, 0.028481439590454103, 0.02816067123413086, 0.028060735702514647, 0.02800326347351074, 0.02794419288635254, 0.02799827194213867, 0.02790179252624512, 0.027799968719482423, 0.028021120071411134, 0.027707456588745117, 0.02746339225769043, 0.027762271881103515, 0.027985984802246094, 0.02816067123413086, 0.027860960006713866, 0.027838464736938476, 0.02783776092529297, 0.027716287612915037, 0.027561056137084962, 0.027519071578979492, 0.02798409652709961, 0.027565919876098632, 0.027634431838989258, 0.027815168380737304, 0.028237855911254883, 0.028158655166625978, 0.02821651268005371, 0.02811382484436035, 0.02824799919128418, 0.028110336303710938, 0.028142080307006836, 0.028379135131835938]",tokens/s,35.40701258000924,,, 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1550.159872,1546.584064,0.0,1168.113664,1154.613248,s,1,8.27979296875,8.27979296875,0.0,8.27979296875,8.27979296875,8.27979296875,8.27979296875,[8.27979296875],,kWh,3.8902994579211734e-05,4.283699223596933e-06,1.2075565215999284e-05,5.526225901880795e-05,,MB,1575.97696,1796.145152,0.0,1388.314624,1334.065152,s,10,0.8412918395996094,0.08412918395996094,0.0007119479755757377,0.08390304183959961,0.08507656478881835,0.08525823402404785,0.08540356941223144,"[0.08503619384765625, 0.08369420623779297, 0.08369564819335938, 0.08416291046142578, 0.08543990325927735, 0.08339462280273438, 0.08411043548583984, 0.08362022399902344, 0.08324924468994141, 0.08488845062255859]",tokens/s,3042.939298232542,kWh,2.653956018371059e-06,2.9267770883412247e-07,1.7620014096001901e-06,4.708635136805371e-06,tokens/kWh,54368196.42255955,MB,1581.961216,1796.145152,0.0,1388.314624,1372.847616,s,10,17.37643688964844,1.7376436889648434,0.009143634783886863,1.7364484252929686,1.7483337524414062,1.749937811279297,1.7512210583496093,"[1.7515418701171874, 1.7352840576171875, 1.7309111328125, 1.73761279296875, 1.746974609375, 1.7248458251953125, 1.7445201416015625, 1.731179443359375, 1.747977294921875, 1.7255897216796876]",tokens/s,36.25599448269549,kWh,5.164052417496419e-05,5.69565426621506e-06,2.3607491108198663e-05,8.094366954937792e-05,tokens/kWh,778319.0501583107,,s,630,17.37390524673463,0.027577627375769236,0.0005843101071754203,0.027507391929626465,0.027891829681396484,0.02806671571731567,0.0305977998161316,"[0.02744688034057617, 0.027542240142822267, 0.027391679763793947, 0.027338495254516603, 0.027394304275512694, 0.027510143280029296, 0.02774412727355957, 0.028047264099121092, 0.027975744247436523, 0.028023584365844727, 0.02783203125, 0.02764041519165039, 0.027354816436767578, 0.027553056716918944, 0.027392160415649413, 0.027251007080078125, 0.02729350471496582, 0.027663040161132812, 0.027280799865722655, 0.027158464431762695, 0.02718160057067871, 0.02736934471130371, 0.02735875129699707, 0.027222400665283204, 0.02726076889038086, 0.027504255294799804, 0.027748319625854494, 0.02782480049133301, 0.027906047821044923, 0.02817401695251465, 0.02766640090942383, 0.02770569610595703, 0.027873279571533204, 0.027684223175048827, 0.02768739128112793, 0.027562143325805664, 0.027461631774902344, 0.027249727249145508, 0.027368383407592775, 0.027613279342651367, 0.027570079803466797, 0.027463232040405273, 0.030377824783325194, 0.030687648773193358, 0.027514944076538084, 0.02774233627319336, 0.027842048645019532, 0.027688671112060546, 0.027419424057006835, 0.027529119491577148, 0.028219615936279297, 0.02777280044555664, 0.02780771255493164, 0.027934751510620116, 0.02787139129638672, 0.027628639221191405, 0.02781216049194336, 0.028502336502075197, 0.03155548858642578, 0.028169471740722655, 0.028111839294433595, 0.028016639709472657, 0.02780364799499512, 0.028270591735839845, 0.02795635223388672, 0.027689855575561525, 0.027375423431396484, 0.027332000732421875, 0.02770800018310547, 0.02742291259765625, 0.027520959854125976, 0.027319488525390626, 0.027671424865722657, 0.02774393653869629, 0.027729856491088868, 0.027932159423828123, 0.027845279693603515, 0.02774825668334961, 0.027760896682739258, 0.027927711486816408, 0.027953216552734375, 0.027796512603759767, 0.027754304885864257, 0.028010496139526365, 0.027889535903930663, 0.027828351974487305, 0.029272064208984375, 0.02766582489013672, 0.02737609672546387, 0.027611263275146486, 0.027940383911132814, 
0.027848255157470702, 0.027715808868408204, 0.027713632583618163, 0.027318111419677736, 0.027294303894042967, 0.027156639099121093, 0.027294784545898437, 0.027395008087158203, 0.027288896560668945, 0.02716089630126953, 0.027351551055908203, 0.02754547119140625, 0.027701248168945314, 0.02757529640197754, 0.027325439453125, 0.027793407440185547, 0.027276800155639647, 0.027162464141845703, 0.027128480911254884, 0.027149568557739256, 0.027185920715332032, 0.027277151107788087, 0.02727494430541992, 0.02713852882385254, 0.02766339111328125, 0.027360000610351563, 0.027189664840698242, 0.02715452766418457, 0.02749228858947754, 0.027162687301635742, 0.027177888870239256, 0.027146080017089843, 0.027343839645385743, 0.02711347198486328, 0.02713145637512207, 0.027199840545654295, 0.02720796775817871, 0.027146495819091798, 0.02705449676513672, 0.02712892723083496, 0.027619840621948243, 0.027969600677490235, 0.02736742401123047, 0.02759814453125, 0.027533824920654298, 0.027459264755249024, 0.027490816116333007, 0.027940927505493166, 0.02767967987060547, 0.02770227241516113, 0.027672576904296874, 0.027613407135009767, 0.02735424041748047, 0.02735171127319336, 0.027223039627075195, 0.02736387252807617, 0.027382240295410157, 0.027278944015502928, 0.027298208236694335, 0.02753126335144043, 0.0273670711517334, 0.027104864120483397, 0.027136768341064453, 0.02711756706237793, 0.027154560089111327, 0.027324287414550782, 0.027483232498168947, 0.02769977569580078, 0.027646303176879883, 0.027561695098876952, 0.0276911678314209, 0.027713567733764648, 0.027639904022216798, 0.027627519607543945, 0.028248064041137694, 0.027700704574584963, 0.02777756881713867, 0.02773811149597168, 0.027499839782714842, 0.027407039642333986, 0.02740019226074219, 0.027232255935668945, 0.027495967864990235, 0.027400672912597655, 0.027332256317138672, 0.02727462387084961, 0.02724265670776367, 0.027281824111938476, 0.027531328201293944, 0.027557247161865233, 0.02743087959289551, 0.02739852714538574, 0.027291999816894532, 0.027361568450927735, 0.027550975799560548, 0.027820543289184572, 0.027627904891967772, 0.027599903106689454, 0.034813377380371095, 0.030335424423217773, 0.02773414421081543, 0.02781119918823242, 0.027711328506469728, 0.02734160041809082, 0.02754560089111328, 0.02743427276611328, 0.02763644790649414, 0.027543552398681642, 0.027549440383911133, 0.02788582420349121, 0.027933792114257814, 0.02727619171142578, 0.027056127548217773, 0.02718720054626465, 0.027197439193725585, 0.027133504867553712, 0.027103263854980467, 0.027147743225097658, 0.02748044776916504, 0.027572799682617187, 0.02750464057922363, 0.0275599365234375, 0.027695104598999022, 0.027674207687377928, 0.027625759124755858, 0.027618431091308595, 0.027620256423950194, 0.027655679702758788, 0.02792508888244629, 0.027828479766845705, 0.027564863204956054, 0.027343807220458986, 0.02733670425415039, 0.027362943649291992, 0.027525503158569335, 0.027535360336303712, 0.027327999114990235, 0.027181215286254883, 0.0275579833984375, 0.02727961540222168, 0.02723744010925293, 0.02721683120727539, 0.027379711151123046, 0.027395103454589845, 0.02728611183166504, 0.027443584442138673, 0.027606399536132812, 0.027376256942749023, 0.027131904602050783, 0.027125759124755858, 0.027076608657836915, 0.027119583129882812, 0.02709712028503418, 0.0272523193359375, 0.027213472366333008, 0.027144479751586913, 0.02719340705871582, 0.02718351936340332, 0.027492416381835937, 0.027183040618896485, 0.02706211280822754, 0.027336671829223634, 0.027383968353271483, 0.027332447052001954, 
0.027287551879882813, 0.02792428779602051, 0.027515071868896485, 0.027686912536621092, 0.027592704772949218, 0.02753126335144043, 0.027662336349487306, 0.02758438491821289, 0.027588544845581056, 0.02743724822998047, 0.027228160858154295, 0.02727529525756836, 0.027235519409179686, 0.027346847534179687, 0.027366336822509767, 0.027385791778564452, 0.02773302459716797, 0.027433855056762695, 0.027275360107421875, 0.027250688552856447, 0.02734284782409668, 0.0275283203125, 0.027458431243896485, 0.027546720504760744, 0.027558015823364257, 0.027820543289184572, 0.027795743942260743, 0.02776054382324219, 0.027705440521240233, 0.027852800369262694, 0.0276889591217041, 0.02778112030029297, 0.027780351638793947, 0.02780771255493164, 0.02813007926940918, 0.02800041580200195, 0.02783856010437012, 0.028280479431152344, 0.027625312805175783, 0.02767487907409668, 0.02786463928222656, 0.028055328369140625, 0.031793792724609374, 0.02823139190673828, 0.028168415069580077, 0.02781180763244629, 0.027799392700195314, 0.02796076774597168, 0.027663040161132812, 0.027586111068725584, 0.02747078323364258, 0.027651744842529295, 0.027834367752075196, 0.028092159271240234, 0.028800416946411132, 0.027644767761230468, 0.027522592544555663, 0.02751740837097168, 0.02742793655395508, 0.0274564151763916, 0.02793008041381836, 0.02778780746459961, 0.0278220157623291, 0.027701311111450196, 0.02904870414733887, 0.028043392181396485, 0.027730976104736328, 0.027683807373046876, 0.0272936954498291, 0.027297183990478514, 0.027711328506469728, 0.027443103790283203, 0.027470687866210937, 0.027410432815551757, 0.02812518310546875, 0.027258880615234377, 0.027310176849365233, 0.027154207229614258, 0.027158912658691407, 0.027177919387817384, 0.02721705627441406, 0.02722371292114258, 0.02735103988647461, 0.02773196792602539, 0.02755414390563965, 0.028116640090942384, 0.027380863189697267, 0.027170848846435548, 0.027225343704223633, 0.027170303344726563, 0.02710537528991699, 0.027242496490478517, 0.02706572723388672, 0.02694806480407715, 0.027130016326904295, 0.027149568557739256, 0.027129823684692383, 0.027196191787719728, 0.027133024215698243, 0.02716963195800781, 0.027162687301635742, 0.026989919662475586, 0.02699929618835449, 0.027343008041381837, 0.02727731132507324, 0.027391328811645507, 0.027247264862060548, 0.027373472213745118, 0.027136095046997072, 0.0273787841796875, 0.02712668800354004, 0.027084800720214845, 0.02708652877807617, 0.027224384307861327, 0.027248640060424805, 0.02735923194885254, 0.027203359603881837, 0.02729315185546875, 0.027294464111328125, 0.02727244758605957, 0.027327232360839844, 0.027354143142700196, 0.02742927932739258, 0.027840511322021484, 0.027897504806518553, 0.027805152893066405, 0.027687583923339844, 0.027649471282958984, 0.027597856521606446, 0.02747939109802246, 0.027280960083007812, 0.030124319076538085, 0.03112931251525879, 0.02784752082824707, 0.027588703155517577, 0.02722601509094238, 0.027422208786010743, 0.02721628761291504, 0.02721187210083008, 0.027197439193725585, 0.02751283264160156, 0.02720358467102051, 0.027296928405761717, 0.027429727554321288, 0.02767568016052246, 0.027529184341430663, 0.027431711196899414, 0.031690975189208985, 0.02743270492553711, 0.02753500747680664, 0.027303680419921875, 0.02726905632019043, 0.027169696807861327, 0.02729145622253418, 0.027166912078857422, 0.027543552398681642, 0.02718720054626465, 0.028479488372802734, 0.028022655487060545, 0.027353216171264648, 0.027271167755126953, 0.027215871810913086, 0.027328672409057616, 0.02718499183654785, 
0.02714419174194336, 0.027172864913940428, 0.027190528869628906, 0.0272043514251709, 0.027256832122802735, 0.02752921676635742, 0.027655359268188476, 0.02774684715270996, 0.027746591567993164, 0.027996160507202147, 0.02766374397277832, 0.02758924865722656, 0.028657663345336915, 0.028096736907958983, 0.02772559928894043, 0.027613183975219727, 0.02737583923339844, 0.027414304733276367, 0.027465728759765624, 0.0279815673828125, 0.027876928329467775, 0.027451391220092772, 0.02740671920776367, 0.027331008911132812, 0.027336128234863283, 0.02713632011413574, 0.02720191955566406, 0.027546911239624022, 0.027682527542114258, 0.02745142364501953, 0.027564512252807618, 0.027494911193847657, 0.027660287857055665, 0.027553056716918944, 0.027698976516723633, 0.02774726486206055, 0.027615200042724608, 0.027624832153320313, 0.027830400466918946, 0.02755756759643555, 0.027253183364868164, 0.027168895721435545, 0.02723823928833008, 0.02756390380859375, 0.02743328094482422, 0.027273056030273437, 0.027347360610961914, 0.02727231979370117, 0.02729203224182129, 0.027277824401855468, 0.0272957763671875, 0.027232032775878906, 0.027162464141845703, 0.027207839965820314, 0.027607231140136718, 0.027578367233276366, 0.027451583862304688, 0.027394176483154297, 0.027282495498657227, 0.027128448486328127, 0.02734694480895996, 0.02758246421813965, 0.027125280380249025, 0.027115327835083008, 0.027160863876342773, 0.02726335906982422, 0.027624671936035156, 0.027613792419433594, 0.0276297607421875, 0.027611135482788086, 0.027596799850463868, 0.027711488723754882, 0.027881824493408203, 0.027810911178588867, 0.027800128936767577, 0.027838464736938476, 0.027711807250976564, 0.02753299140930176, 0.027613183975219727, 0.027762208938598633, 0.027736127853393553, 0.027658655166625978, 0.027432960510253908, 0.027457279205322267, 0.027402303695678712, 0.027676416397094727, 0.027568639755249022, 0.027568063735961913, 0.0278240966796875, 0.02770844841003418, 0.027755552291870118, 0.027766143798828125, 0.02780985641479492, 0.027813568115234374, 0.02781648063659668, 0.027860799789428712, 0.027939327239990236, 0.027774335861206055, 0.027816511154174803, 0.02783033561706543, 0.02767692756652832, 0.02761292839050293, 0.027522464752197266, 0.02746940803527832, 0.027827135086059572, 0.02785446357727051, 0.02772982406616211, 0.027635936737060548, 0.02741689682006836, 0.027635711669921875, 0.02748441505432129, 0.027465471267700194, 0.027414527893066407, 0.027418367385864256, 0.027381887435913087, 0.027361408233642578, 0.02761238479614258, 0.027884319305419923, 0.027799232482910156, 0.03082271957397461, 0.02790777587890625, 0.027733919143676757, 0.027654111862182616, 0.02759702491760254, 0.02754787254333496, 0.027490304946899413, 0.027479232788085936, 0.027601760864257814, 0.02737353515625, 0.02737766456604004, 0.028261856079101564, 0.027679136276245117, 0.027515008926391603, 0.027674591064453125, 0.027639423370361328, 0.027693632125854493, 0.027464960098266603, 0.02768070411682129, 0.027873952865600585, 0.02785420799255371, 0.027816576004028322, 0.027821855545043947, 0.02787743949890137, 0.027764896392822265, 0.027983871459960938, 0.028076032638549804, 0.027930784225463866, 0.027755680084228514, 0.028053951263427735, 0.02767638397216797, 0.02727507209777832, 0.027118047714233397, 0.027181055068969725, 0.027445119857788088, 0.02712384033203125, 0.027043840408325196, 0.027038751602172853, 0.0271759033203125, 0.02707571220397949, 0.02731916809082031, 0.027675872802734376, 0.027575071334838868, 0.027387136459350585, 0.027534080505371095, 
0.027679840087890626, 0.02746460723876953, 0.02756233596801758, 0.027547296524047853, 0.027494400024414063, 0.027551744461059572, 0.027420352935791016, 0.027719999313354494, 0.027483455657958983, 0.02741881561279297, 0.027261215209960936, 0.0271889591217041, 0.027181568145751952, 0.02738092803955078, 0.027329599380493164, 0.027523839950561523, 0.027302783966064455, 0.027183040618896485, 0.02720502471923828, 0.027013919830322267, 0.027051584243774414, 0.027575904846191407, 0.02701705551147461, 0.027087936401367186, 0.027398080825805665, 0.02733670425415039, 0.027347232818603514, 0.027090015411376952, 0.02702195167541504, 0.027150655746459963, 0.027204832077026366, 0.027399967193603516, 0.02741881561279297, 0.027542015075683594, 0.02748579216003418, 0.027676864624023436, 0.027891199111938478, 0.027728607177734375, 0.027584512710571288, 0.027729471206665038, 0.027791807174682617, 0.027590112686157228, 0.0273306884765625, 0.027281824111938476, 0.027371519088745116, 0.027414751052856446, 0.027215072631835938]",tokens/s,36.26127753392732,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 25201 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,6655.082496,7525.564416,0.0,7147.094016,7138.9184,s,1,11.4468486328125,11.4468486328125,0.0,11.4468486328125,11.4468486328125,11.4468486328125,11.4468486328125,[11.4468486328125],,kWh,0.00013003639331662573,1.4336647016102484e-05,4.1124477344001154e-05,0.00018549751767672935,,MB,1625.870336,8226.013184,0.0,7818.182656,7724.300288,s,10,7.336798767089843,0.7336798767089843,0.0035879936953573557,0.7331307373046876,0.738969970703125,0.7389776611328125,0.7389838134765625,"[0.7282850341796875, 0.7327047729492188, 0.7337333984375, 0.7297399291992187, 0.7321773071289063, 0.7389853515625, 0.7308643798828125, 0.7335567016601563, 0.7377836303710937, 0.73896826171875]",tokens/s,348.92602090753945,kWh,2.140277468333428e-05,2.3603525715006946e-06,1.4255666166428291e-05,3.801879342126327e-05,tokens/kWh,6733511.954559388,MB,1630.072832,8372.813824,0.0,7964.983296,7904.605696,s,10,33.90838354492187,3.3908383544921876,0.007679516201068551,3.3904208984375,3.3990643310546873,3.403143005371094,3.4064059448242188,"[3.3854130859375, 3.38075048828125, 3.381290283203125, 3.38770849609375, 3.39313330078125, 3.387211181640625, 
3.393403564453125, 3.4072216796875, 3.398157958984375, 3.394093505859375]",tokens/s,18.579476050970552,kWh,9.931091062333278e-05,1.0954522001515274e-05,6.582875901217209e-05,0.00017609419163702012,tokens/kWh,357763.0778978832,,s,630,33.90527875900272,0.05381790279206775,0.0005654044508975231,0.05381379127502441,0.05440257415771484,0.05455391273498535,0.05594764579772949,"[0.05611299133300781, 0.053880992889404296, 0.05342822265625, 0.05351820755004883, 0.05314691162109375, 0.05317647933959961, 0.053160640716552736, 0.053153377532958984, 0.053032993316650394, 0.05321094512939453, 0.053161663055419923, 0.05333638381958008, 0.053318046569824216, 0.05323996734619141, 0.05317529678344726, 0.053384193420410155, 0.05373302459716797, 0.053844032287597654, 0.05401833724975586, 0.05368832015991211, 0.05341593551635742, 0.05325743865966797, 0.053365184783935544, 0.05333555221557617, 0.05295600128173828, 0.052822017669677736, 0.05313299179077149, 0.0533837776184082, 0.05339263916015625, 0.05397318267822265, 0.05323775863647461, 0.05382992172241211, 0.05375897598266602, 0.05351663970947266, 0.05375151824951172, 0.054241249084472656, 0.05422396850585937, 0.0548263053894043, 0.05408396911621094, 0.05415340805053711, 0.054077438354492184, 0.05498275375366211, 0.05415248107910156, 0.05370329666137695, 0.05385420989990235, 0.05383782577514649, 0.05377024078369141, 0.053787742614746094, 0.053738399505615236, 0.05371001434326172, 0.0536748161315918, 0.05411840057373047, 0.05400140762329102, 0.05425791931152344, 0.0541921272277832, 0.05413683319091797, 0.05404796981811524, 0.05425027084350586, 0.054077438354492184, 0.05399087905883789, 0.05386431884765625, 0.05376432037353516, 0.05376396942138672, 0.055836673736572265, 0.05372518539428711, 0.052795391082763675, 0.05276790237426758, 0.05287129592895508, 0.05279305648803711, 0.05271446228027344, 0.05332156753540039, 0.05257247924804687, 0.0529222412109375, 0.05278950500488281, 0.0530296630859375, 0.05296966552734375, 0.05299894332885742, 0.05333401489257812, 0.05296899032592774, 0.05343280029296875, 0.053663326263427735, 0.05406908798217774, 0.05383430480957031, 0.0536473617553711, 0.053155742645263675, 0.05300848007202148, 0.053200897216796876, 0.05327465438842773, 0.05329302215576172, 0.053430015563964844, 0.05338684844970703, 0.05336099243164062, 0.05326812744140625, 0.05316259384155273, 0.05310879898071289, 0.053166305541992184, 0.053227294921875, 0.05355091094970703, 0.05404691314697266, 0.05405491256713867, 0.05412659072875976, 0.053907455444335936, 0.054093822479248044, 0.05402828979492187, 0.05418188858032227, 0.05397734451293945, 0.05406623840332031, 0.05396960067749024, 0.054023361206054686, 0.05376412963867187, 0.05404956817626953, 0.05395014572143555, 0.05394464111328125, 0.05408963012695313, 0.054138271331787106, 0.05435385513305664, 0.05435878372192383, 0.05438259124755859, 0.05442764663696289, 0.054497344970703125, 0.054408321380615236, 0.05416182327270508, 0.05418844985961914, 0.054255615234375, 0.05422396850585937, 0.05409913635253906, 0.055831550598144535, 0.053561599731445315, 0.05307727813720703, 0.054111774444580076, 0.05269583892822265, 0.05293462371826172, 0.05297571182250976, 0.05296752166748047, 0.05295727920532227, 0.053079967498779294, 0.05301808166503906, 0.05309468841552734, 0.05310284805297852, 0.05460297775268555, 0.05323651123046875, 0.05348966217041016, 0.053905406951904294, 0.054212608337402345, 0.054451839447021484, 0.05402377700805664, 0.05388540649414063, 0.05383737564086914, 0.053424896240234374, 0.05322111892700195, 
0.05330051040649414, 0.05329315185546875, 0.0535313606262207, 0.053633182525634766, 0.0534835205078125, 0.05326812744140625, 0.053388927459716795, 0.05364809417724609, 0.053495807647705076, 0.05328076934814453, 0.05363916778564453, 0.05374697494506836, 0.05408358383178711, 0.05425980758666992, 0.05374630355834961, 0.053645057678222655, 0.05356553649902344, 0.053387168884277345, 0.05344076919555664, 0.05347686386108398, 0.05352294540405273, 0.053575328826904293, 0.053776737213134765, 0.053507713317871096, 0.05383411026000977, 0.05371599960327148, 0.05346403121948242, 0.05394847869873047, 0.054136768341064456, 0.054083072662353515, 0.05426764678955078, 0.05389564895629883, 0.05394255828857422, 0.053927776336669925, 0.05400592041015625, 0.053778270721435546, 0.05367004776000977, 0.05450038528442383, 0.05442390441894531, 0.05584681701660156, 0.05393743896484375, 0.053304065704345704, 0.05313328170776367, 0.05301571273803711, 0.052882080078125, 0.05322108840942383, 0.0531578254699707, 0.053430049896240235, 0.05337961578369141, 0.053228897094726564, 0.053484416961669924, 0.053337150573730466, 0.053306304931640625, 0.05360220718383789, 0.053749439239501956, 0.05350883102416992, 0.054189918518066406, 0.0542674560546875, 0.05396918487548828, 0.05364115142822266, 0.05350003051757812, 0.05351212692260742, 0.053386302947998045, 0.05340665435791016, 0.05326633453369141, 0.05356083297729492, 0.05334451293945312, 0.05316198348999023, 0.0533342399597168, 0.05373142242431641, 0.05348150253295898, 0.05347731018066406, 0.053620800018310544, 0.0542658576965332, 0.054178878784179686, 0.05430163192749023, 0.054405311584472656, 0.054198078155517575, 0.05398732757568359, 0.05390335845947265, 0.05380300903320313, 0.05375385665893555, 0.05384969711303711, 0.05397315216064453, 0.05376230239868164, 0.053687328338623046, 0.05358396911621094, 0.05391024017333984, 0.053928001403808594, 0.05402550506591797, 0.05411923217773437, 0.05403363037109375, 0.05426051330566406, 0.05432729721069336, 0.05399552154541016, 0.05423513412475586, 0.05408768081665039, 0.05405641555786133, 0.05410460662841797, 0.05429452896118164, 0.05409584045410156, 0.05392585754394531, 0.05595532989501953, 0.05365760040283203, 0.05325241470336914, 0.05307769775390625, 0.05297119903564453, 0.05309001541137695, 0.05329062271118164, 0.05300118255615234, 0.05289100646972656, 0.05306022262573242, 0.05335039901733398, 0.05313324737548828, 0.053112895965576175, 0.05339302444458008, 0.05358537673950195, 0.05384284973144531, 0.053512191772460936, 0.05361772918701172, 0.05440198516845703, 0.05422444915771484, 0.053959102630615235, 0.05382963180541992, 0.0539521598815918, 0.05363542556762695, 0.053628929138183595, 0.05368012619018555, 0.053567134857177734, 0.05324835205078125, 0.053337791442871096, 0.05337436676025391, 0.05357046508789062, 0.05360416030883789, 0.05379296112060547, 0.053688385009765624, 0.053813182830810546, 0.0538524169921875, 0.05434771347045898, 0.05513353729248047, 0.054058910369873044, 0.05408415985107422, 0.05431267166137695, 0.054265792846679685, 0.05405926513671875, 0.05396694564819336, 0.054195873260498045, 0.054978240966796876, 0.05427676773071289, 0.05438172912597656, 0.05418684768676758, 0.054079486846923826, 0.05403209686279297, 0.054230655670166016, 0.05409804916381836, 0.054419361114501956, 0.05455500793457031, 0.05400191879272461, 0.05412163162231445, 0.05437116622924805, 0.05419375991821289, 0.05435193634033203, 0.053719390869140626, 0.05370032119750977, 0.053782817840576175, 0.055956001281738284, 0.05390739059448242, 
0.05332489776611328, 0.05314236831665039, 0.05325727844238281, 0.052891647338867184, 0.05310976028442383, 0.05306959915161133, 0.05299836730957031, 0.05306163024902344, 0.053174270629882815, 0.05324595260620117, 0.05304441452026367, 0.05298204803466797, 0.05297411346435547, 0.05347430419921875, 0.05337395095825195, 0.05447270584106445, 0.05457715225219727, 0.054042625427246097, 0.053710369110107424, 0.05345283126831055, 0.05335452651977539, 0.053432735443115234, 0.0535327033996582, 0.053563358306884766, 0.053753215789794924, 0.05339152145385742, 0.053553184509277346, 0.05332012939453125, 0.05380028915405274, 0.05324652862548828, 0.05332592010498047, 0.053901409149169924, 0.054029247283935544, 0.054130687713623046, 0.05436038589477539, 0.05435433578491211, 0.05405721664428711, 0.05420851135253906, 0.05392895889282227, 0.05381836700439453, 0.05380201721191406, 0.053691360473632814, 0.053556350708007815, 0.05367184066772461, 0.05392892837524414, 0.05383168029785156, 0.05415321731567383, 0.05424332809448242, 0.05381289672851562, 0.05360838317871094, 0.054429534912109376, 0.05429043197631836, 0.05421318435668945, 0.05398518371582031, 0.054074623107910155, 0.054131553649902346, 0.054091102600097654, 0.054356639862060546, 0.05427571105957031, 0.0542558708190918, 0.054102142333984374, 0.0559288330078125, 0.05385820770263672, 0.053222721099853515, 0.05309491348266602, 0.05277315139770508, 0.052853824615478516, 0.05314646530151367, 0.053607872009277344, 0.05389583969116211, 0.05325551986694336, 0.05311929702758789, 0.052996383666992185, 0.05317228698730469, 0.053359966278076175, 0.053523265838623046, 0.05382332611083984, 0.05400105667114258, 0.0544134407043457, 0.054436321258544924, 0.05422694396972656, 0.05398463821411133, 0.05367078399658203, 0.05357900619506836, 0.053363201141357425, 0.05368368148803711, 0.05378473663330078, 0.053763774871826174, 0.05398835372924805, 0.053657279968261716, 0.053348575592041016, 0.053617984771728515, 0.053827808380126956, 0.053712478637695314, 0.05395654296875, 0.054046497344970704, 0.0542171516418457, 0.054086143493652344, 0.054306880950927734, 0.05415020751953125, 0.05387887954711914, 0.05382774353027344, 0.053666431427001955, 0.053652767181396485, 0.053768287658691405, 0.054088321685791016, 0.05367327880859375, 0.053578304290771483, 0.05386419296264648, 0.05371244812011719, 0.053623809814453124, 0.05387795257568359, 0.0541927375793457, 0.05445840072631836, 0.05429673767089844, 0.05448483276367187, 0.054335487365722655, 0.05440499114990234, 0.05456908798217774, 0.05461196899414063, 0.05432499313354492, 0.05426131057739258, 0.054206336975097656, 0.05426262283325195, 0.05608272171020508, 0.05599296188354492, 0.05338447952270508, 0.05328287887573242, 0.05346345520019531, 0.05339775848388672, 0.05349548721313477, 0.05371526336669922, 0.05325766372680664, 0.05372518539428711, 0.05346566390991211, 0.053300769805908206, 0.05323209762573242, 0.05357920074462891, 0.05373001480102539, 0.05406067276000977, 0.05382908630371094, 0.05459020614624024, 0.05444384002685547, 0.05385388946533203, 0.0535577278137207, 0.053620704650878905, 0.05355116653442383, 0.05370470428466797, 0.053664768218994144, 0.053482177734375, 0.053571903228759765, 0.05354214477539063, 0.05342284774780273, 0.05344412612915039, 0.05364579010009766, 0.053830848693847654, 0.053949249267578124, 0.054046718597412106, 0.05433731079101563, 0.054542560577392575, 0.05435391998291016, 0.05442559814453125, 0.05430025482177735, 0.05418841552734375, 0.05392508697509766, 0.05410079956054688, 0.053975040435791016, 
0.05392998504638672, 0.05402828979492187, 0.054220703125, 0.054114398956298826, 0.05429452896118164, 0.05429270553588867, 0.054335262298583986, 0.05560054397583008, 0.054376609802246095, 0.05448704147338867, 0.05459807968139648, 0.055027233123779294, 0.05473459243774414, 0.05448751831054687, 0.054649089813232424, 0.055657951354980466, 0.05415990447998047, 0.054368160247802735, 0.054275775909423826, 0.054171871185302735, 0.05671475219726563, 0.053736000061035155, 0.05292230224609375, 0.0531701774597168, 0.05304729461669922, 0.053429439544677736, 0.05330940628051758, 0.05310345458984375, 0.053303295135498044, 0.05359779357910156, 0.05357968139648438, 0.053451263427734375, 0.05326182556152344, 0.05356390380859375, 0.054216896057128906, 0.05382486343383789, 0.05388336181640625, 0.054468608856201174, 0.05472489547729492, 0.0543639030456543, 0.05397705459594727, 0.0535654411315918, 0.0543559684753418, 0.053370880126953124, 0.05328464126586914, 0.053714942932128903, 0.05349964904785156, 0.05340822219848633, 0.05331087875366211, 0.0533551025390625, 0.05355491256713867, 0.053575969696044924, 0.05349286270141602, 0.054012767791748045, 0.05397711944580078, 0.05437772750854492, 0.054702049255371095, 0.05416812896728516, 0.05410220718383789, 0.05417372894287109, 0.054091392517089845, 0.05392012786865234, 0.05402995300292969, 0.05370076751708985, 0.05391996765136719, 0.053975040435791016, 0.05414297485351562, 0.053800960540771485, 0.054056129455566405, 0.053881664276123044, 0.05406105422973633, 0.054152671813964846, 0.05448252868652344, 0.05434771347045898, 0.05452012634277344, 0.05445292663574219, 0.054263168334960935, 0.05446025466918945, 0.054399871826171876, 0.05451715087890625, 0.05441558456420898, 0.05436419296264648, 0.05425356674194336, 0.05651865768432617, 0.053999359130859376, 0.053321983337402346, 0.05293670272827149, 0.05309030532836914, 0.05311283111572265, 0.05294694519042969, 0.053191745758056644, 0.052975616455078124, 0.05312156677246094, 0.05312067031860351, 0.05310950469970703, 0.05310796737670898, 0.053063934326171874, 0.053254657745361325, 0.05329084777832031, 0.053418048858642576, 0.0542242546081543, 0.05418819046020508, 0.054059585571289065, 0.054093822479248044, 0.053645313262939455, 0.053399742126464846, 0.053294910430908206, 0.053198848724365234, 0.05324390411376953, 0.05440230560302734, 0.05384236907958984, 0.05368454360961914, 0.05381439971923828, 0.05382819366455078, 0.05372041702270508, 0.0536945915222168, 0.054946624755859375, 0.05453823852539062, 0.054254657745361326, 0.054182910919189455, 0.05414495849609375, 0.05451776123046875, 0.054128929138183596, 0.05431881713867188, 0.05413849639892578, 0.05387712097167969, 0.053833728790283204, 0.053956382751464846, 0.054204193115234375, 0.05396688079833984, 0.05393654251098633, 0.05376409530639648, 0.053700607299804685, 0.05428220748901367, 0.05416988754272461, 0.05454207992553711, 0.05455257415771484, 0.05427609634399414, 0.05433865737915039, 0.054559070587158205, 0.05477024078369141, 0.05437971115112305, 0.053898048400878903, 0.053771488189697264, 0.053992225646972654, 0.053945343017578126]",tokens/s,18.581177417180783,,, 
4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3023.204352,3550.347264,0.0,3164.602368,3152.265216,s,1,10.5781298828125,10.5781298828125,0.0,10.5781298828125,10.5781298828125,10.5781298828125,10.5781298828125,[10.5781298828125],,kWh,7.760402536247663e-05,8.552701088400434e-06,2.4332241687990974e-05,0.00011048896813886804,,MB,2915.753984,4038.98368,0.0,3623.878656,3526.724608,s,10,2.520330810546875,0.25203308105468747,0.0028356776440103225,0.2528389434814453,0.2533525924682617,0.2536972541809082,0.2539729835510254,"[0.2519713592529297, 0.25313055419921876, 0.2527334442138672, 0.25284112548828125, 0.2540419158935547, 0.2532460174560547, 0.2525902099609375, 0.2532760009765625, 0.2528367614746094, 0.24366342163085938]",tokens/s,1015.7396756358812,kWh,7.695758583334584e-06,8.487068026715321e-07,5.088717521263479e-06,1.3633182907269596e-05,tokens/kWh,18777713.300060958,MB,2921.893888,4041.080832,0.0,3625.975808,3526.727168,s,10,30.687174072265623,3.068717407226562,0.006882980420189143,3.068822265625,3.0775981689453125,3.0783585327148435,3.0789668237304686,"[3.073934326171875, 3.059680908203125, 3.057628173828125, 3.07742919921875, 3.06374267578125, 3.06521875, 3.07029443359375, 3.072776611328125, 3.06735009765625, 3.079118896484375]",tokens/s,20.52974961188687,kWh,8.958111448125539e-05,9.880947588129178e-06,4.8584242083736596e-05,0.00014804630415312116,tokens/kWh,425542.53792678565,,s,630,30.68205414581296,0.048701673247322205,0.0006703565267953547,0.04856500816345215,0.04911696548461914,0.049524835777282714,0.051811116600036636,"[0.0489431037902832, 0.048742401123046876, 0.048946815490722655, 0.04887385559082031, 0.048796993255615234, 0.04881657409667969, 0.0485665283203125, 0.048758880615234375, 0.048612350463867186, 0.04922185516357422, 0.04935750579833984, 0.048809825897216795, 0.04852678298950195, 0.048753280639648434, 0.04853974533081055, 0.048858177185058596, 0.04849760055541992, 0.04957388687133789, 0.048809921264648434, 0.0488919677734375, 0.04903449630737305, 0.048747264862060546, 0.049189151763916014, 0.05065289688110351, 0.04875385665893555, 0.04898796844482422, 0.04875775909423828, 0.048535552978515625, 0.04866048049926758, 0.04878131103515625, 0.04843110275268555, 0.048376895904541015, 0.04816582489013672, 0.04815603256225586, 0.04830028915405273, 0.0484865608215332, 0.0484222412109375, 0.048204673767089846, 0.04812524795532227, 0.04839859390258789, 0.048395713806152346, 0.05008281707763672, 0.049456577301025394, 0.04864230346679688, 0.04871200180053711, 0.04832179260253906, 0.05217561721801758, 0.05014672088623047, 0.0482432975769043, 0.04809024047851562, 0.05045337677001953, 0.04857215881347656, 0.048309951782226565, 0.04812217712402344, 0.04830003356933594, 0.04825932693481445, 0.048369312286376955, 0.048174816131591795, 0.04825766372680664, 0.04872540664672852, 0.048682785034179686, 0.048520000457763675, 0.04842291259765625, 0.04844086456298828, 
0.0487083854675293, 0.04844051361083984, 0.04901359939575195, 0.04825465774536133, 0.048486400604248046, 0.04819792175292969, 0.04836249542236328, 0.04827033615112305, 0.048535552978515625, 0.048467967987060545, 0.04878076934814453, 0.048243232727050785, 0.04815407943725586, 0.04926927947998047, 0.04864614486694336, 0.04858060836791992, 0.04914745712280273, 0.048331199645996095, 0.04860847854614258, 0.049084671020507814, 0.048785953521728515, 0.048379905700683595, 0.04848166275024414, 0.04843174362182617, 0.04854374313354492, 0.04837907028198242, 0.04864006423950195, 0.04838886260986328, 0.048573440551757815, 0.04863401412963867, 0.048376670837402345, 0.048731231689453126, 0.04868979263305664, 0.0487342414855957, 0.04871603012084961, 0.04944486236572266, 0.04853964614868164, 0.04907417678833008, 0.04845375823974609, 0.048541118621826175, 0.04901846313476563, 0.048564510345458986, 0.048915008544921874, 0.04850092697143555, 0.04810732650756836, 0.04838576126098633, 0.04852896118164062, 0.049234657287597655, 0.04839833450317383, 0.04842496109008789, 0.048216064453125, 0.04838399887084961, 0.048215198516845706, 0.04879171371459961, 0.04857651138305664, 0.048496574401855466, 0.048218879699707035, 0.048446849822998045, 0.0481839370727539, 0.04818124771118164, 0.048260894775390625, 0.048576736450195314, 0.04821692657470703, 0.048345088958740234, 0.04834672164916992, 0.048286113739013675, 0.0482979850769043, 0.04854579162597656, 0.04820787048339844, 0.04918425750732422, 0.048032257080078126, 0.048791553497314455, 0.048248062133789064, 0.048347232818603515, 0.048363872528076175, 0.04831584167480469, 0.048890750885009764, 0.0487770881652832, 0.048939136505126955, 0.04868220901489258, 0.04845225524902344, 0.0486954231262207, 0.048205825805664064, 0.04877107238769531, 0.04826726531982422, 0.04843689727783203, 0.04832668685913086, 0.04847980880737305, 0.04809545516967773, 0.04856816101074219, 0.048331329345703125, 0.048615550994873045, 0.04808227157592773, 0.04873257446289062, 0.04819731140136719, 0.04856480026245117, 0.048059967041015624, 0.0490371208190918, 0.048787872314453126, 0.05109078216552734, 0.04902931213378906, 0.048446144104003906, 0.04816604614257813, 0.04881615829467773, 0.04846614456176758, 0.04856892776489258, 0.049500160217285157, 0.04843094253540039, 0.048011425018310544, 0.04850825500488281, 0.04805068969726563, 0.048626014709472656, 0.04808454513549805, 0.04862329483032227, 0.04830607986450195, 0.0487143669128418, 0.04828934478759766, 0.04854121780395508, 0.04876582336425781, 0.04908652877807617, 0.048279903411865235, 0.048493537902832035, 0.04811625671386719, 0.04831244659423828, 0.04831235122680664, 0.04807267379760742, 0.04848307037353516, 0.04845363235473633, 0.04820684814453125, 0.048585056304931644, 0.04806927871704102, 0.048242271423339846, 0.04824486541748047, 0.04834489440917969, 0.04865052795410156, 0.049548702239990236, 0.04830035018920899, 0.04853193664550781, 0.048870849609375, 0.04870342254638672, 0.04864838409423828, 0.04835987091064453, 0.04813996887207031, 0.048322528839111326, 0.04850518417358399, 0.05470624160766602, 0.049243297576904294, 0.048423713684082034, 0.04864748764038086, 0.04996780776977539, 0.0483061752319336, 0.0482979850769043, 0.048484481811523435, 0.048379776000976565, 0.04887347030639649, 0.048947200775146485, 0.04809318542480469, 0.04854281616210938, 0.04905868911743164, 0.048233726501464846, 0.04905039978027344, 0.04862070465087891, 0.04854256057739258, 0.04862524795532226, 0.04834345626831055, 0.048307327270507815, 0.04873920059204102, 
0.05058969497680664, 0.049435775756835935, 0.04867359924316406, 0.0484183349609375, 0.04859142303466797, 0.04867478561401367, 0.0522608642578125, 0.04878950500488281, 0.049950752258300785, 0.05109142303466797, 0.04857372665405273, 0.04997836685180664, 0.048373470306396486, 0.048330753326416016, 0.04807475280761719, 0.04831027221679687, 0.04842214584350586, 0.04845235061645508, 0.04829919815063476, 0.04848028945922851, 0.04941299057006836, 0.04806175994873047, 0.04820243072509765, 0.04921343994140625, 0.048721118927001955, 0.04867913436889648, 0.04884108734130859, 0.048535743713378904, 0.04873152160644531, 0.04888195037841797, 0.048576030731201175, 0.04833158493041992, 0.048510974884033206, 0.04847536087036133, 0.04837046432495117, 0.04856614303588867, 0.04876812744140625, 0.04848972702026367, 0.04863961410522461, 0.04862758255004883, 0.04852761459350586, 0.0486379508972168, 0.04859724807739258, 0.04834841537475586, 0.04851337432861328, 0.048705696105957035, 0.04852121734619141, 0.048156383514404294, 0.04845801544189453, 0.04906393432617188, 0.04843215942382813, 0.04860617446899414, 0.048500255584716795, 0.04849507141113281, 0.048674816131591796, 0.048766273498535156, 0.04890268707275391, 0.049545024871826174, 0.04881427383422852, 0.04887363052368164, 0.048541694641113284, 0.049006206512451175, 0.048640384674072265, 0.04862156677246094, 0.04888371276855469, 0.048844799041748044, 0.04872719955444336, 0.04853436660766602, 0.04834064102172852, 0.04829833602905274, 0.04865769577026367, 0.04866940689086914, 0.049790206909179686, 0.04851583862304688, 0.04816896057128906, 0.04836556625366211, 0.048756736755371094, 0.04858060836791992, 0.04857241439819336, 0.048740352630615234, 0.0485560302734375, 0.04840857696533203, 0.048330753326416016, 0.048396289825439455, 0.04793212890625, 0.04836332702636719, 0.04852755355834961, 0.04892454528808594, 0.04855721664428711, 0.04852361679077148, 0.04853401565551758, 0.04852659225463867, 0.04881292724609375, 0.04908809661865234, 0.04849296188354492, 0.0485560302734375, 0.048328960418701175, 0.048568065643310544, 0.04831404876708984, 0.04834467315673828, 0.049142143249511716, 0.04863375854492188, 0.048473758697509764, 0.04843500900268555, 0.048565216064453125, 0.04851948928833008, 0.048414398193359375, 0.04867795181274414, 0.04906192016601563, 0.048718753814697265, 0.04866003036499023, 0.04870409774780273, 0.04902691268920899, 0.04856217575073242, 0.04862771224975586, 0.04832665634155273, 0.048402431488037106, 0.04828966522216797, 0.048353408813476564, 0.04838304138183594, 0.04850352096557617, 0.04992432022094727, 0.05050572967529297, 0.04872345733642578, 0.0495695686340332, 0.048503456115722654, 0.04849593734741211, 0.04829056167602539, 0.04860518264770508, 0.04860502243041992, 0.048599201202392577, 0.0485, 0.04878409576416016, 0.04874649429321289, 0.04893465423583984, 0.04851327896118164, 0.048739776611328126, 0.048593471527099606, 0.04861503982543945, 0.04844134521484375, 0.0482492790222168, 0.048350528717041014, 0.048746177673339844, 0.04882092666625976, 0.04875699234008789, 0.04877926254272461, 0.048524608612060545, 0.04803334426879883, 0.04841721725463867, 0.04852336120605469, 0.04845119857788086, 0.04845391845703125, 0.048624832153320315, 0.04835184097290039, 0.04838217544555664, 0.04823801422119141, 0.048562110900878905, 0.04828019332885742, 0.048455486297607424, 0.04846432113647461, 0.048709377288818356, 0.048484352111816405, 0.048465919494628903, 0.04834304046630859, 0.04841471862792969, 0.04840179061889648, 0.048589439392089845, 0.04830537414550781, 
0.04849129486083984, 0.048555839538574216, 0.0485882568359375, 0.04836959838867187, 0.0499576644897461, 0.048342369079589845, 0.04902537536621094, 0.048134464263916016, 0.04833257675170898, 0.048304351806640625, 0.04859084701538086, 0.04856217575073242, 0.04873830413818359, 0.04842396926879883, 0.04841766357421875, 0.04840995025634766, 0.04858099365234375, 0.04877756881713867, 0.04848015975952148, 0.048593025207519534, 0.048757953643798826, 0.048640830993652344, 0.05323980712890625, 0.04913945770263672, 0.04884915161132813, 0.048726016998291016, 0.04848844909667969, 0.04835532760620117, 0.051212287902832034, 0.04877107238769531, 0.04871120071411133, 0.051468769073486326, 0.048965633392333986, 0.04888780975341797, 0.04867689514160156, 0.04827747344970703, 0.04835488128662109, 0.048355777740478514, 0.04862118530273438, 0.048648574829101565, 0.048858623504638675, 0.04899248123168945, 0.048664127349853516, 0.04870326232910156, 0.049187263488769534, 0.049178848266601564, 0.04880691146850586, 0.04878953552246094, 0.04893407821655273, 0.04864521789550781, 0.048718528747558595, 0.04876287841796875, 0.04890419387817383, 0.04895743942260742, 0.04897091293334961, 0.04988191986083984, 0.049108673095703125, 0.04912700653076172, 0.049185726165771486, 0.049064865112304686, 0.048726943969726565, 0.04874444961547852, 0.04852969741821289, 0.04878639984130859, 0.048473217010498046, 0.048729312896728515, 0.04865468978881836, 0.04881366348266602, 0.04890188980102539, 0.04884143829345703, 0.04869478225708008, 0.04880844879150391, 0.04863180923461914, 0.048815521240234375, 0.04878396987915039, 0.04895129776000977, 0.048770881652832034, 0.04880723190307617, 0.04875718307495117, 0.04898819351196289, 0.048215808868408205, 0.048241310119628907, 0.04847820663452149, 0.04810956954956055, 0.04841267013549805, 0.04820172882080078, 0.04856422424316406, 0.04825632095336914, 0.0486316146850586, 0.048640350341796874, 0.048486942291259764, 0.048348545074462894, 0.048510623931884767, 0.048532222747802736, 0.04852515029907226, 0.04858099365234375, 0.04880588912963867, 0.0515497932434082, 0.04929372787475586, 0.04916428756713867, 0.04849033737182617, 0.04827139282226563, 0.04843715286254883, 0.04844563293457031, 0.04836150360107422, 0.049082080841064454, 0.048685791015625, 0.04902297592163086, 0.04849868774414062, 0.04834304046630859, 0.04849049758911133, 0.048215167999267575, 0.04858355331420899, 0.04920927810668945, 0.04919414520263672, 0.04901359939575195, 0.048595008850097654, 0.04843932723999023, 0.04876643371582031, 0.04838217544555664, 0.048689441680908205, 0.04838505554199219, 0.04912595367431641, 0.048529823303222655, 0.04885299301147461, 0.04830815887451172, 0.04888508987426758, 0.04844003295898437, 0.04850483322143555, 0.04851660919189453, 0.04875110244750976, 0.04865033721923828, 0.04875049591064453, 0.048535552978515625, 0.048809951782226565, 0.04858371353149414, 0.04874303817749023, 0.04827584075927734, 0.04853145599365234, 0.0482911376953125, 0.0481506576538086, 0.04837148666381836, 0.04849856185913086, 0.051122303009033206, 0.04944486236572266, 0.048504894256591796, 0.04851686477661133, 0.04862460708618164, 0.04854553604125977, 0.048414207458496096, 0.04835644912719726, 0.048854686737060546, 0.04855740737915039, 0.04865001678466797, 0.048550273895263674, 0.04860768127441406, 0.04961276626586914, 0.0486824951171875, 0.04872809600830078, 0.04843097686767578, 0.04898271942138672, 0.04853964614868164, 0.04822630310058594, 0.04898147201538086, 0.04861801528930664, 0.04851507186889648, 0.0482529296875, 
0.048840705871582034, 0.04841056060791016, 0.04860105514526367, 0.04850902557373047, 0.04899225616455078, 0.04843267059326172, 0.048626239776611326, 0.0486003189086914, 0.048460479736328124, 0.04845059204101562, 0.048579456329345704, 0.04841068649291992, 0.049388896942138674, 0.048628383636474606, 0.048253055572509765, 0.048336769104003904, 0.04861932754516601, 0.04854393768310547, 0.04828326416015625, 0.048168991088867186, 0.04812015914916992, 0.04823206329345703, 0.048296031951904295, 0.048347232818603515, 0.0486995849609375, 0.049006816864013675, 0.049438495635986325, 0.050199615478515626, 0.049250335693359376, 0.048876449584960936, 0.04921692657470703, 0.048427616119384766, 0.049049217224121096, 0.048430782318115234, 0.048659137725830075, 0.0482529296875, 0.04859904098510742, 0.04837535858154297, 0.048662017822265625, 0.04851808166503906, 0.0485560302734375, 0.04838809585571289, 0.04849635314941406, 0.04847171020507812, 0.048523902893066406, 0.05307699203491211, 0.049115966796875, 0.04871164703369141, 0.04866396713256836, 0.04872627258300781, 0.04869375991821289, 0.04914361572265625, 0.05526144027709961, 0.04833280181884766, 0.05191785430908203, 0.04846281433105469, 0.04872192001342773, 0.04839833450317383, 0.04882636642456055, 0.048486270904541016, 0.04874457550048828, 0.04838604736328125, 0.04864006423950195, 0.04863308715820312]",tokens/s,20.533175419285694,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4254.179328,6123.552768,0.0,5737.807872,5464.489984,s,1,10.5232392578125,10.5232392578125,0.0,10.5232392578125,10.5232392578125,10.5232392578125,10.5232392578125,[10.5232392578125],,kWh,9.51701501916735e-05,1.049069735769898e-05,3.0060857382024908e-05,0.00013572170493139738,,MB,2150.514688,6526.205952,0.0,6111.100928,5872.503808,s,10,3.8765632629394533,0.3876563262939453,0.005135485548349219,0.3884448699951172,0.3922318786621094,0.3927309600830078,0.3931302252197265,"[0.37327679443359374, 0.38801309204101564, 0.38787271118164063, 0.389034423828125, 0.3921209716796875, 0.3891244812011719, 0.3870010070800781, 0.3882716064453125, 0.3932300415039062, 0.38861813354492186]",tokens/s,660.3787495160979,kWh,1.1284771726441132e-05,1.2445105647574532e-06,7.518788066306818e-06,2.0048070357505404e-05,tokens/kWh,12769308.73819291,MB,2163.474432,6528.303104,0.0,6113.19808,5872.506368,s,10,25.61273486328125,2.5612734863281252,0.004598574655440171,2.56261181640625,2.56650576171875,2.5667547119140623,2.5669538720703127,"[2.551423583984375, 2.554859130859375, 2.561478515625, 2.563290283203125, 2.56246533203125, 2.559795654296875, 2.56275830078125, 2.567003662109375, 2.566450439453125, 
2.5632099609375]",tokens/s,24.59713901552841,kWh,7.528619003814142e-05,8.303134904115382e-06,4.9930980115695606e-05,0.00013352030505795235,tokens/kWh,471838.3467792098,,s,630,25.60894274520872,0.0406491154685853,0.0006306182715446964,0.040523166656494136,0.041021643447875976,0.04134299125671386,0.04451439502716065,"[0.042967041015625, 0.04102143859863281, 0.04051968002319336, 0.04025548934936524, 0.040223873138427735, 0.040317726135253903, 0.04019619369506836, 0.04030054473876953, 0.04016857528686523, 0.04023494338989258, 0.04019705581665039, 0.04002947235107422, 0.040136833190917966, 0.04011888122558594, 0.0399317741394043, 0.0401839370727539, 0.04009481430053711, 0.040278942108154296, 0.04037017440795899, 0.040302593231201174, 0.040184894561767576, 0.04022518539428711, 0.040595745086669924, 0.04084761428833008, 0.04056864166259765, 0.040556640625, 0.04063782501220703, 0.04051126480102539, 0.04054726409912109, 0.04033126449584961, 0.04044524765014648, 0.040336063385009766, 0.040275966644287106, 0.04031283187866211, 0.04027177429199219, 0.0403389778137207, 0.0413001594543457, 0.04041542434692383, 0.04025145721435547, 0.0402740478515625, 0.040564735412597655, 0.04030192184448242, 0.04047734451293945, 0.04040499114990234, 0.040514881134033204, 0.04043436813354492, 0.04056665420532227, 0.040485088348388674, 0.040817920684814456, 0.04058179092407226, 0.040537952423095706, 0.04058492660522461, 0.04055436706542969, 0.0408766098022461, 0.04062412643432617, 0.04076544189453125, 0.04063846588134765, 0.04058931350708008, 0.04074009704589844, 0.040666942596435544, 0.04072748947143555, 0.04056655883789063, 0.04079756927490234, 0.04455219268798828, 0.04104195022583008, 0.04042073440551758, 0.04002867126464844, 0.040125663757324216, 0.040199039459228515, 0.04001587295532227, 0.03997699356079101, 0.04011004638671875, 0.04003839874267578, 0.04000515365600586, 0.04001395034790039, 0.04028656005859375, 0.04034668731689453, 0.04022367858886719, 0.040232673645019534, 0.04026537704467773, 0.04056662368774414, 0.040248096466064455, 0.04041113662719727, 0.040226463317871095, 0.04020060729980469, 0.040494430541992185, 0.04075084686279297, 0.04106121444702148, 0.04083465576171875, 0.04070032119750976, 0.040622081756591794, 0.04039398574829101, 0.040248062133789064, 0.04052521514892578, 0.040175296783447265, 0.04038918304443359, 0.04033980941772461, 0.04025958251953125, 0.040162849426269534, 0.0401962890625, 0.04040031814575195, 0.0403480339050293, 0.04024956893920899, 0.04028646469116211, 0.04018380737304687, 0.04324512100219727, 0.04047660827636719, 0.04039731216430664, 0.04025958251953125, 0.04080230331420898, 0.04073267364501953, 0.040753150939941404, 0.04066860961914062, 0.04098105621337891, 0.04080640029907227, 0.04097228622436523, 0.04077363204956055, 0.04046547317504883, 0.04071315383911133, 0.04064255905151367, 0.04056595230102539, 0.04054508972167969, 0.04046847915649414, 0.04072857666015625, 0.04068560028076172, 0.04062547302246094, 0.04436352157592773, 0.041425472259521486, 0.04062160110473633, 0.040276832580566406, 0.0404398078918457, 0.04036403274536133, 0.0402487678527832, 0.04040284729003906, 0.04055033493041992, 0.040540897369384765, 0.040341503143310545, 0.04033740615844727, 0.04062822341918945, 0.04063369750976562, 0.040443744659423825, 0.04065065765380859, 0.040346527099609376, 0.04056668853759766, 0.04035184097290039, 0.04041523361206055, 0.040515583038330076, 0.040630081176757815, 0.040566974639892575, 0.041074016571044925, 0.04115500640869141, 0.04092870330810547, 0.040866592407226565, 
0.04069782257080078, 0.04046031951904297, 0.040509407043457034, 0.040398880004882814, 0.04044499206542969, 0.04032403182983398, 0.040839359283447264, 0.04047647857666015, 0.040681472778320314, 0.04037779235839844, 0.04054687881469726, 0.04066624069213867, 0.04046067047119141, 0.040282623291015625, 0.04047257614135742, 0.040283454895019534, 0.04082044982910156, 0.04030710220336914, 0.04048089599609375, 0.04049760055541992, 0.040983905792236326, 0.04087363052368164, 0.04088729476928711, 0.04082057571411133, 0.040521759033203125, 0.04066016006469726, 0.04067619323730469, 0.040531967163085936, 0.0404562873840332, 0.040476318359375, 0.040627937316894534, 0.04054073715209961, 0.0407940788269043, 0.040976417541503905, 0.04086175918579102, 0.04070604705810547, 0.044421855926513674, 0.041502975463867185, 0.040453983306884767, 0.040274078369140626, 0.040308734893798825, 0.04014678573608398, 0.04046659088134766, 0.040304641723632816, 0.040374271392822264, 0.04058726501464844, 0.04030844879150391, 0.040601886749267575, 0.04039475250244141, 0.040732513427734374, 0.04038671875, 0.040621246337890625, 0.04048160171508789, 0.04052348709106445, 0.040591327667236325, 0.04066131210327149, 0.0405032958984375, 0.040460289001464846, 0.04075030517578125, 0.0408870735168457, 0.04103372955322265, 0.04095590209960937, 0.040699905395507815, 0.040721473693847654, 0.040782752990722655, 0.04047670364379883, 0.040590400695800784, 0.04124972915649414, 0.04055065536499024, 0.040345344543457035, 0.04035174560546875, 0.04046448135375977, 0.04037827301025391, 0.04062617492675781, 0.04050934219360352, 0.040519039154052736, 0.040375007629394534, 0.0405463981628418, 0.04068956756591797, 0.040505599975585935, 0.040533504486083984, 0.040960094451904294, 0.04046441650390625, 0.040484832763671874, 0.04077945709228516, 0.04068399810791016, 0.04052352142333984, 0.04136486434936523, 0.04030348968505859, 0.04044790267944336, 0.04047455978393555, 0.04056489562988281, 0.04174835205078125, 0.04061846542358399, 0.040887454986572265, 0.040686080932617184, 0.040674720764160156, 0.04078243255615235, 0.04081049728393555, 0.04511145782470703, 0.04160905456542969, 0.04058652877807617, 0.04025996780395508, 0.04031935882568359, 0.04018339157104492, 0.04006662368774414, 0.04010604858398437, 0.04025619125366211, 0.041549888610839844, 0.040144287109375, 0.04094831848144531, 0.04043775939941406, 0.0402815055847168, 0.040235393524169924, 0.04031305694580078, 0.04037017440795899, 0.040185855865478515, 0.04035583877563476, 0.04031094360351563, 0.040345439910888674, 0.04047187042236328, 0.04310464096069336, 0.040851615905761716, 0.040830238342285156, 0.04069574356079102, 0.04080940628051758, 0.040695934295654296, 0.04040457534790039, 0.040636703491210936, 0.04056268692016601, 0.04050908660888672, 0.04031523132324219, 0.040400894165039065, 0.04029971313476562, 0.04038435363769531, 0.04040995025634766, 0.04032729721069336, 0.040425472259521485, 0.04049417495727539, 0.04056492614746094, 0.040513473510742186, 0.04052252960205078, 0.04048281478881836, 0.04054544067382813, 0.04063113784790039, 0.04079513549804688, 0.04069683074951172, 0.04070604705810547, 0.04077568054199219, 0.04084307098388672, 0.04083731079101562, 0.04085145568847656, 0.04081868743896484, 0.04060483169555664, 0.04077347183227539, 0.04056156921386719, 0.040588958740234375, 0.04081110382080078, 0.04066902542114258, 0.040580158233642576, 0.04075814437866211, 0.04058323287963867, 0.0450327033996582, 0.04150348663330078, 0.04031283187866211, 0.04006911849975586, 0.040097854614257813, 
0.040263614654541015, 0.040295936584472655, 0.04026166534423828, 0.04014275360107422, 0.04018806457519531, 0.04045865631103516, 0.04033331298828125, 0.04024652862548828, 0.040446720123291015, 0.040235008239746094, 0.040236190795898436, 0.04040176010131836, 0.04055206298828125, 0.040257694244384766, 0.04040521621704102, 0.04034560012817383, 0.0405401611328125, 0.04051148986816406, 0.04074700927734375, 0.040787967681884765, 0.04105215835571289, 0.0407982063293457, 0.04047161483764648, 0.04044486236572266, 0.040513534545898434, 0.04035583877563476, 0.0403732795715332, 0.040420574188232423, 0.04030809783935547, 0.04037260818481445, 0.04055222320556641, 0.040773857116699216, 0.040509441375732425, 0.040382049560546876, 0.040282527923583986, 0.04060598373413086, 0.040462047576904296, 0.040680862426757815, 0.040582912445068356, 0.040478912353515625, 0.040530689239501955, 0.04055244827270508, 0.040957313537597656, 0.04103539276123047, 0.04086646270751953, 0.04065100860595703, 0.04076134490966797, 0.040717662811279295, 0.0412064323425293, 0.04073984146118164, 0.040782848358154294, 0.04109212875366211, 0.04064495849609375, 0.04055855941772461, 0.04096681594848633, 0.04074086380004883, 0.04084275054931641, 0.040670913696289064, 0.04501504135131836, 0.041605121612548826, 0.04045961761474609, 0.0401622085571289, 0.04005862426757813, 0.040171615600585936, 0.04051548767089844, 0.04028416061401367, 0.040199295043945316, 0.04031782531738281, 0.040285823822021484, 0.04031321716308594, 0.04024934387207031, 0.04009308624267578, 0.040233119964599606, 0.04037497711181641, 0.04042291259765625, 0.04041888046264648, 0.040290016174316406, 0.04047561645507813, 0.040220672607421876, 0.04015423965454101, 0.04051238250732422, 0.04154185485839844, 0.041709342956542966, 0.040681472778320314, 0.040497150421142575, 0.04324892807006836, 0.04048355102539063, 0.04049296188354492, 0.04043564987182617, 0.040390079498291015, 0.040351966857910156, 0.04043417739868164, 0.04033740615844727, 0.04043123245239258, 0.04038054275512695, 0.04048713684082031, 0.04047670364379883, 0.040462303161621097, 0.04051091384887695, 0.04034620666503906, 0.040645694732666014, 0.04052678298950195, 0.04046233749389649, 0.04062739181518555, 0.04066579055786133, 0.04070355224609375, 0.04108047866821289, 0.04104476928710937, 0.041068672180175785, 0.04094393539428711, 0.041004638671875, 0.040836929321289066, 0.04062646484375, 0.04059340667724609, 0.04076287841796875, 0.04057958221435547, 0.04071811294555664, 0.04073699188232422, 0.040796161651611325, 0.04078364944458008, 0.04070012664794922, 0.04493724822998047, 0.04141116714477539, 0.04084121704101563, 0.04044800186157226, 0.040322494506835935, 0.040212703704833985, 0.04015894317626953, 0.04023769760131836, 0.04019935989379883, 0.04016316986083984, 0.040110591888427735, 0.040153568267822265, 0.04036403274536133, 0.04043123245239258, 0.040211967468261715, 0.04036083221435547, 0.04060895919799805, 0.040247745513916015, 0.0404562873840332, 0.04027011108398437, 0.041373695373535156, 0.04041523361206055, 0.040521278381347656, 0.04091334533691406, 0.04111750411987305, 0.04078607940673828, 0.040935585021972656, 0.04064483261108399, 0.0404983024597168, 0.04076537704467773, 0.040554527282714845, 0.04047840118408203, 0.040626880645751956, 0.04046457672119141, 0.0404185905456543, 0.04057478332519531, 0.04063654327392578, 0.040510238647460936, 0.04067327880859375, 0.0415184326171875, 0.04064633560180664, 0.04050223922729492, 0.040667137145996096, 0.040959999084472655, 0.04102963256835938, 0.04105215835571289, 
0.04102348709106445, 0.04109059143066406, 0.041025535583496094, 0.04109769439697265, 0.04246323013305664, 0.041109504699707033, 0.040908447265625, 0.040821086883544924, 0.04109689712524414, 0.04047484970092773, 0.04061193466186523, 0.040671230316162106, 0.040441150665283206, 0.04046303939819336, 0.04043161773681641, 0.040912639617919924, 0.040561183929443356, 0.04473344039916992, 0.04152809524536133, 0.040623870849609375, 0.04041366577148438, 0.040516990661621094, 0.04073740768432617, 0.04041318511962891, 0.04037222290039062, 0.04026572799682617, 0.04043571090698242, 0.04036316680908203, 0.040375137329101564, 0.040499198913574216, 0.04047257614135742, 0.0404150390625, 0.04059360122680664, 0.040336734771728514, 0.04053084945678711, 0.04034844970703125, 0.0405432014465332, 0.040400672912597656, 0.04097574234008789, 0.04069366455078125, 0.04083808135986328, 0.040796161651611325, 0.04070195388793945, 0.04076339340209961, 0.041669952392578126, 0.04058492660522461, 0.040424415588378906, 0.04049110412597656, 0.04046387100219727, 0.0404156494140625, 0.040261409759521485, 0.0405302734375, 0.040451969146728516, 0.04051897430419922, 0.040377025604248044, 0.04041932678222656, 0.04044800186157226, 0.04029206466674805, 0.04091030502319336, 0.04061407852172851, 0.04043942260742187, 0.04052284622192383, 0.0405173454284668, 0.04103392028808594, 0.040948734283447266, 0.04101836776733398, 0.04076748657226562, 0.04112303924560547, 0.04082767868041992, 0.04077363204956055, 0.040724353790283205, 0.04092300796508789, 0.0406317138671875, 0.04070896148681641, 0.04080640029907227, 0.04072243118286133, 0.040879745483398434, 0.04065523147583008, 0.04086102294921875, 0.04264003372192383, 0.04495654296875, 0.04131625747680664, 0.040288448333740234, 0.04003219223022461, 0.040159423828125, 0.04019289779663086, 0.040438175201416016, 0.04017110443115234, 0.040282398223876956, 0.04019404983520508, 0.04030121612548828, 0.0401343994140625, 0.040184062957763673, 0.040210430145263674, 0.04034560012817383, 0.04052479934692383, 0.04020675277709961, 0.04027248001098633, 0.0404128303527832, 0.04019030380249024, 0.040062976837158204, 0.04014694213867188, 0.04064665603637695, 0.04081459045410156, 0.040898303985595706, 0.0408201904296875, 0.04092127990722656, 0.04076604843139649, 0.04076134490966797, 0.040253440856933595, 0.04046131134033203, 0.0403691520690918, 0.040273727416992186, 0.04039244842529297, 0.040438209533691406, 0.04033436965942383, 0.04052409744262695, 0.04034832000732422, 0.04053334426879883, 0.04043753433227539, 0.040707103729248045, 0.04063011169433594, 0.04055651092529297, 0.040578750610351565, 0.04054233551025391, 0.0406776008605957, 0.040458240509033204, 0.04070809555053711, 0.041132030487060545, 0.04106444931030274, 0.040920448303222653, 0.04126988983154297, 0.0416890869140625, 0.04092313766479492, 0.04220431900024414, 0.04095593643188476, 0.04113695907592774, 0.040953857421875, 0.04095180892944336, 0.04093952178955078, 0.04093487930297852, 0.041062942504882814, 0.040839168548583986]",tokens/s,24.600781307844844,,, 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,8202.801152,11078.139904,0.0,10699.669504,10468.411392,s,1,13.217875,13.217875,0.0,13.217875,13.217875,13.217875,13.217875,[13.217875],,kWh,0.00018690370244588242,2.0609268657214217e-05,5.958199211003601e-05,0.0002670949632131327,,MB,4041.961472,11570.970624,0.0,11163.140096,10922.852352,s,10,8.030579650878906,0.8030579650878906,0.00806742905327945,0.8072570495605469,0.8102368774414063,0.8103816223144531,0.8104974182128907,"[0.788420654296875, 0.7902467651367188, 0.7945225830078125, 0.8073043823242188, 0.8102047119140625, 0.8064562377929687, 0.807209716796875, 0.8076004028320313, 0.8105263671875, 0.8080878295898437]",tokens/s,318.7814717359563,kWh,2.336502454327157e-05,2.576762183389112e-06,1.549719188492355e-05,4.143897861158423e-05,tokens/kWh,6177758.4433134515,MB,4046.286848,11573.067776,0.0,11165.237248,10922.854912,s,10,37.21511938476563,3.7215119384765627,0.012686653035861332,3.7212767333984376,3.7377667236328125,3.7392240600585938,3.740389929199219,"[3.70255322265625, 3.727070556640625, 3.731174072265625, 3.729641845703125, 3.71548291015625, 3.71462255859375, 3.73744287109375, 3.70910986328125, 3.740681396484375, 3.707340087890625]",tokens/s,16.928603492748614,kWh,0.00010802571604214453,1.1915910298840572e-05,7.187379467847737e-05,0.00019181542101946248,tokens/kWh,328440.7461358789,,s,630,37.211775604248054,0.059066310482933405,0.0008960666131213604,0.05889635276794433,0.05975107345581055,0.06053022289276123,0.06229926246643067,"[0.06148745727539062, 0.05839769744873047, 0.05801062393188477, 0.05809151840209961, 0.057753280639648436, 0.057452064514160156, 0.06068479919433594, 0.05785164642333984, 0.05817164611816406, 0.057743263244628903, 0.058143104553222656, 0.062117889404296876, 0.05817702484130859, 0.05793622589111328, 0.05767375946044922, 0.058060928344726564, 0.05828403091430664, 0.057869503021240234, 0.05786297607421875, 0.05798428726196289, 0.058347423553466796, 0.05810054397583008, 0.058244831085205076, 0.05803023910522461, 0.058396446228027345, 0.05840332794189453, 0.05850812911987305, 0.05859254455566406, 0.0584901123046875, 0.05854051208496094, 0.058431488037109375, 0.05866291046142578, 0.05899257659912109, 0.05898783874511719, 0.05926934432983398, 0.05922243118286133, 0.059224193572998046, 0.05875302505493164, 0.058918880462646483, 0.05870595169067383, 0.05883084869384766, 0.05884486389160156, 0.058886463165283204, 0.0588963851928711, 0.05958412933349609, 0.058941825866699216, 0.060030975341796876, 0.05910704040527344, 0.059033119201660156, 0.05949747085571289, 0.05952486419677734, 0.05921791839599609, 0.05939199829101562, 0.059873279571533204, 0.059002239227294924, 0.058972801208496094, 0.059009025573730466, 0.058865215301513674, 0.05890816116333008, 0.05878879928588867, 0.05885542297363281, 0.05877334213256836, 0.05882015991210938, 0.0621143684387207, 0.06004108810424805, 0.059168224334716794, 0.05882067108154297, 0.05891078567504883, 0.05901980972290039, 0.05900697708129883, 0.05937356948852539, 0.05915852737426758, 0.0599183349609375, 0.05907660675048828, 0.05938751983642578, 0.058794368743896486, 0.058826366424560544, 0.05890403366088867, 0.05902428817749023, 0.05897987365722656, 0.05902998352050781, 0.05956198501586914, 0.05869158554077149, 0.05967248153686523, 0.05871830368041992, 0.05885542297363281, 0.059089950561523434, 0.059513278961181644, 0.05967484664916992, 
0.058891616821289065, 0.05886198425292969, 0.05841936111450195, 0.05863222503662109, 0.05855065536499023, 0.05847379302978516, 0.05850182342529297, 0.05897321701049805, 0.05884822463989258, 0.05939523315429687, 0.05896892929077149, 0.05978678512573242, 0.05867340850830078, 0.059109375, 0.05916694259643555, 0.05859532928466797, 0.05972172927856445, 0.05910454559326172, 0.058858207702636715, 0.05877145767211914, 0.05887295913696289, 0.06152076721191406, 0.05909670257568359, 0.059158145904541014, 0.05913043212890625, 0.060088287353515624, 0.059019489288330076, 0.058439678192138675, 0.05845766448974609, 0.058453601837158205, 0.0587149772644043, 0.058611358642578125, 0.05971798324584961, 0.05981184005737305, 0.059211071014404294, 0.05922067260742187, 0.05956512069702148, 0.06141548919677734, 0.059000831604003906, 0.058933246612548826, 0.05879811096191406, 0.058853343963623045, 0.05904793548583984, 0.05893280029296875, 0.05906259155273438, 0.059022815704345706, 0.059224735260009764, 0.05933260726928711, 0.05895596694946289, 0.058969825744628904, 0.059188896179199216, 0.0590709114074707, 0.061290401458740235, 0.06307171249389648, 0.059205535888671876, 0.060450817108154295, 0.05887583923339844, 0.05871641540527344, 0.058433536529541016, 0.05965641784667969, 0.05984508895874024, 0.05876617431640625, 0.058659839630126956, 0.05981568145751953, 0.059281665802001955, 0.05909708786010742, 0.05883622360229492, 0.058813182830810544, 0.058920673370361325, 0.05911580657958984, 0.059250526428222657, 0.059749790191650394, 0.059178783416748044, 0.06074265670776367, 0.05875299072265625, 0.058916862487792966, 0.05852182388305664, 0.0589453125, 0.059115169525146484, 0.058759105682373046, 0.05914835357666016, 0.05936777496337891, 0.05912371063232422, 0.05884928131103516, 0.05907059097290039, 0.059008895874023436, 0.059009025573730466, 0.059066368103027345, 0.05971318435668945, 0.05937392044067383, 0.059386943817138674, 0.058952640533447266, 0.05868134307861328, 0.058627967834472654, 0.05875711822509765, 0.058676799774169924, 0.05889900970458985, 0.05944729614257813, 0.05862944030761719, 0.05846435165405273, 0.06137913513183594, 0.058831134796142576, 0.058518657684326174, 0.05842380905151367, 0.05861824035644531, 0.05839379119873047, 0.058579551696777345, 0.05876863861083984, 0.05915337753295898, 0.05929759979248047, 0.05909523010253906, 0.05988108825683594, 0.05895750427246094, 0.05969113540649414, 0.059652671813964844, 0.059045345306396484, 0.05915875244140625, 0.059713855743408206, 0.05913727951049805, 0.05888691329956055, 0.05917900848388672, 0.05899468612670898, 0.059385185241699216, 0.05974697494506836, 0.05826544189453125, 0.058966175079345706, 0.05904115295410156, 0.05866969680786133, 0.0590643196105957, 0.05826342391967773, 0.0584664306640625, 0.05865267181396484, 0.05910732650756836, 0.05890867233276367, 0.059920543670654296, 0.05942256164550781, 0.06433702087402343, 0.05947824096679687, 0.05933737564086914, 0.05907251358032226, 0.059504638671875, 0.059979774475097655, 0.05989779281616211, 0.05967059326171875, 0.059504638671875, 0.059186752319335935, 0.05889446258544922, 0.05865875244140625, 0.05887622451782227, 0.05861548614501953, 0.05877577590942383, 0.059228321075439454, 0.05879107284545899, 0.058850143432617186, 0.058850753784179685, 0.05903984069824219, 0.05906832122802735, 0.05890105438232422, 0.05916876983642578, 0.059582462310791014, 0.059463390350341795, 0.05868163299560547, 0.058644222259521483, 0.06125766372680664, 0.059450111389160155, 0.05873459243774414, 0.05852691268920898, 
0.05923667144775391, 0.058511039733886716, 0.05846928024291992, 0.05891635131835937, 0.05924204635620117, 0.05925523376464844, 0.05919171142578125, 0.05929779052734375, 0.05943270492553711, 0.05909529495239258, 0.058687488555908204, 0.05852364730834961, 0.05944934463500977, 0.05906227111816406, 0.058961536407470705, 0.05867507171630859, 0.05802854537963867, 0.057915393829345706, 0.05797846221923828, 0.05810790252685547, 0.058535839080810545, 0.05831219100952149, 0.05841574478149414, 0.05820230484008789, 0.05950073623657227, 0.059246593475341794, 0.0585274543762207, 0.06479293060302735, 0.061724609375, 0.058499073028564455, 0.0591313591003418, 0.0588191032409668, 0.05864243316650391, 0.0583383674621582, 0.060135902404785155, 0.058597694396972655, 0.05850537490844727, 0.05822649765014649, 0.060080257415771485, 0.058770942687988284, 0.058556991577148436, 0.058456062316894535, 0.058494655609130856, 0.058386016845703125, 0.058689376831054685, 0.05837257766723633, 0.058896320343017575, 0.05924256134033203, 0.059244384765625, 0.058618144989013674, 0.058640670776367185, 0.05859692764282227, 0.058898239135742186, 0.05860416030883789, 0.05896806335449219, 0.05874470520019531, 0.058901695251464846, 0.0589813117980957, 0.05886569595336914, 0.061982719421386716, 0.059232257843017576, 0.05973606491088867, 0.05940633773803711, 0.05938796615600586, 0.058988479614257815, 0.060635009765625, 0.05931020736694336, 0.05919948959350586, 0.06010879898071289, 0.0595333137512207, 0.05893734359741211, 0.05946540832519531, 0.059334976196289066, 0.05990195083618164, 0.059099136352539064, 0.05928345489501953, 0.05884438323974609, 0.05868531036376953, 0.0585, 0.05860086441040039, 0.058456672668457034, 0.05858256149291992, 0.058445377349853514, 0.05856694412231445, 0.0584620475769043, 0.05965030288696289, 0.0588719367980957, 0.05871651077270508, 0.05887964630126953, 0.059578334808349606, 0.059254783630371094, 0.05876780700683594, 0.059535358428955076, 0.0590274543762207, 0.05906774520874023, 0.05877622222900391, 0.058910400390625, 0.05836572647094727, 0.05840131378173828, 0.05811977767944336, 0.058458526611328124, 0.05872953414916992, 0.058846336364746094, 0.058482398986816404, 0.05817967987060547, 0.05842124938964844, 0.05864876937866211, 0.0589944953918457, 0.05861785507202148, 0.05851359939575195, 0.05871948623657226, 0.0588304328918457, 0.058948577880859374, 0.05882022476196289, 0.05909952163696289, 0.058520896911621094, 0.05827980804443359, 0.05854864120483398, 0.05842975997924805, 0.0588043212890625, 0.058601280212402344, 0.05820435333251953, 0.06160412979125977, 0.05942937469482422, 0.06014976119995117, 0.0587325439453125, 0.058226688385009766, 0.05843344116210938, 0.05847065734863281, 0.0582562255859375, 0.05818982315063476, 0.05829119873046875, 0.05836387252807617, 0.058342750549316404, 0.058309310913085936, 0.05845811080932617, 0.05847017669677734, 0.05849724960327148, 0.059115520477294924, 0.05895372772216797, 0.059114688873291014, 0.058640480041503906, 0.05905686569213867, 0.0585011215209961, 0.058621952056884766, 0.05861785507202148, 0.059415809631347655, 0.05901798248291015, 0.05874016189575195, 0.059027809143066406, 0.05885507202148438, 0.05876588821411133, 0.058750175476074216, 0.058938144683837894, 0.06077030563354492, 0.0592342414855957, 0.059651966094970706, 0.05981817626953125, 0.059170814514160154, 0.058861568450927736, 0.05856051254272461, 0.05865884780883789, 0.05893423843383789, 0.058668033599853515, 0.059151966094970705, 0.059351455688476565, 0.0596049919128418, 0.059762622833251955, 
0.059522209167480467, 0.06239292907714844, 0.06011897659301758, 0.06503462219238282, 0.05987641525268555, 0.05971039962768555, 0.05979702377319336, 0.05911520004272461, 0.06146476745605469, 0.05952163314819336, 0.05940838241577148, 0.059270721435546875, 0.06237334442138672, 0.06083478546142578, 0.05881241607666016, 0.05853936004638672, 0.05875510406494141, 0.06188246536254883, 0.05904377746582031, 0.05880428695678711, 0.05875302505493164, 0.058883262634277345, 0.05865555191040039, 0.05869881439208984, 0.05910214233398438, 0.059187198638916014, 0.05916876983642578, 0.05933407974243164, 0.05905215835571289, 0.058456321716308594, 0.0587204475402832, 0.058984447479248046, 0.05925033569335938, 0.058769760131835935, 0.05856256103515625, 0.05931008148193359, 0.058744831085205076, 0.05883903884887695, 0.05842057418823242, 0.058263359069824217, 0.05827423858642578, 0.05841961669921875, 0.0583939208984375, 0.0585645751953125, 0.05918406295776367, 0.05863529586791992, 0.05868560028076172, 0.05875772857666016, 0.05863977432250977, 0.05889904022216797, 0.05888175964355469, 0.05942915344238281, 0.05887340927124023, 0.059035743713378906, 0.05888444900512695, 0.059009025573730466, 0.05874687957763672, 0.05894144058227539, 0.05905606460571289, 0.05871760177612305, 0.05906703948974609, 0.05923404693603516, 0.058595680236816404, 0.05845596694946289, 0.05836111831665039, 0.0585489616394043, 0.059541439056396486, 0.05853590393066406, 0.05860905456542969, 0.05859187316894531, 0.05868495941162109, 0.058649120330810545, 0.05881388854980469, 0.05876588821411133, 0.05861904144287109, 0.058815135955810546, 0.05882489776611328, 0.05858508682250976, 0.05955788803100586, 0.05897625732421875, 0.06199705505371094, 0.058982398986816405, 0.05885478210449219, 0.0586901741027832, 0.058672416687011716, 0.058995136260986326, 0.060391105651855466, 0.058505825042724606, 0.058257408142089843, 0.05820006561279297, 0.05845196914672852, 0.058363903045654295, 0.058517505645751956, 0.05855641555786133, 0.05856256103515625, 0.059637599945068356, 0.059130016326904296, 0.05876646423339844, 0.061150081634521486, 0.05935923385620117, 0.05931808090209961, 0.059316417694091794, 0.05909836959838867, 0.05848163223266602, 0.0583818244934082, 0.05815059280395508, 0.058356319427490234, 0.058517505645751956, 0.058498817443847655, 0.0590456657409668, 0.05875763320922851, 0.05862396621704102, 0.05898649597167969, 0.05886361694335938, 0.05864652633666992, 0.05861171340942383, 0.06049577713012695, 0.05860976028442383, 0.058671104431152345, 0.0591646728515625, 0.059351295471191404, 0.05904358291625977, 0.05899468612670898, 0.05973116683959961, 0.05936822509765625, 0.059512832641601565, 0.06052601623535156, 0.06142214584350586, 0.061736831665039064, 0.06036249542236328, 0.060380958557128904, 0.059832542419433594, 0.06110179138183594, 0.06821932983398438, 0.05889766311645508, 0.0588175048828125, 0.05872140884399414, 0.05887788772583008, 0.05913081741333008, 0.0587243537902832, 0.059791358947753906, 0.058616928100585934, 0.06053366470336914, 0.061727550506591795, 0.05840281677246094, 0.058482654571533205, 0.05823855972290039, 0.05853638458251953, 0.058603134155273434, 0.058799583435058596, 0.05871283340454102, 0.058775711059570315, 0.05856665420532227, 0.059114593505859375, 0.05909337615966797, 0.05887030410766601, 0.05895167922973633, 0.05930294418334961, 0.05924272155761719, 0.059127967834472654, 0.05894384002685547, 0.05953971099853515, 0.05897216033935547, 0.05927459335327148, 0.05896259307861328, 0.058907745361328125, 0.05912236785888672, 
0.05907849502563477, 0.05875545501708984, 0.058810367584228515, 0.05933260726928711, 0.059660289764404295, 0.05936947250366211, 0.05878521728515625, 0.05932304000854492, 0.05824812698364258, 0.05862089538574219, 0.05846323013305664, 0.05850009536743164, 0.058267742156982424, 0.058588481903076174, 0.0585643196105957, 0.05836489486694336, 0.058611488342285155, 0.058406944274902346, 0.05826473617553711, 0.05829462432861328, 0.05865900802612305, 0.05884355163574219, 0.058461406707763675, 0.058616798400878904, 0.05870937728881836, 0.05849747085571289, 0.05893734359741211, 0.058686622619628905, 0.05922083282470703, 0.05925299072265625, 0.05899411010742187, 0.058732864379882815, 0.058705982208251954, 0.058576831817626955, 0.05893734359741211, 0.05851107025146484, 0.058321182250976565, 0.05839052963256836, 0.059356544494628904]",tokens/s,16.930124665378237,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU 
@ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 
4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited 
with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,8560.14848,9684.516864,0.0,9298.771968,9263.222784,s,1,12.163900390625,12.163900390625,0.0,12.163900390625,12.163900390625,12.163900390625,12.163900390625,[12.163900390625],,kWh,0.0001517961678916663,1.673694555879856e-05,4.8951428049989754e-05,0.0002174845415004546,,MB,1683.324928,10212.999168,0.0,9795.796992,9630.892032,s,10,9.912059020996093,0.9912059020996093,0.0038006665948832476,0.9922361145019531,0.9953161010742188,0.9953625183105469,0.9953996520996095,"[0.9832613525390625, 0.986759765625, 0.9891617431640625, 0.9897316284179688, 0.991806396484375, 0.9926658325195312, 0.9927808227539062, 0.9951767578125, 0.9953057861328125, 0.995408935546875]",tokens/s,258.27126276965384,kWh,2.9099292183709756e-05,3.2091814290894954e-06,1.9378778129272327e-05,5.1687251742071574e-05,tokens/kWh,4952865.3850176595,MB,1695.014912,10212.999168,0.0,9795.796992,9630.894592,s,10,37.24218530273438,3.724218530273437,0.008930001209085077,3.7244676513671875,3.7364590576171874,3.7371877807617184,3.7377707592773435,"[3.71283203125, 3.714314453125, 3.714672119140625, 3.717202880859375, 3.7206826171875, 3.729893798828125, 3.728252685546875, 3.73012109375, 3.736297119140625, 3.73791650390625]",tokens/s,16.916300557522455,kWh,0.00010920966293045758,1.2046577502382735e-05,7.246279534392661e-05,0.0001937190357767669,tokens/kWh,325213.26439286204,,s,630,37.23965587997444,0.0591105648888482,0.0008224849715064937,0.05888734436035156,0.060195682144165034,0.06050549945831299,0.061831126060485846,"[0.0619582405090332, 0.05828851318359375, 0.057810047149658206, 0.05768486404418945, 0.05785951995849609, 0.05775417709350586, 0.05803417587280273, 0.058087425231933595, 0.05807040023803711, 0.057836158752441406, 0.05778227233886719, 0.058218494415283206, 0.060061248779296875, 0.059869632720947266, 0.058380096435546876, 0.0589027214050293, 0.06051839828491211, 0.05910425567626953, 0.05811062240600586, 0.058030433654785156, 0.05799731063842774, 0.05807513427734375, 0.05800960159301758, 0.058152000427246095, 0.05817440032958984, 0.05889948654174805, 0.05832803344726562, 0.05837366485595703, 0.058687969207763674, 0.05880217742919922, 0.05864038467407227, 0.0586808967590332, 0.0595840950012207, 0.060341087341308594, 0.059830177307128904, 0.05893948745727539, 0.05928345489501953, 0.059221088409423826, 0.05932124710083008, 0.05901107025146484, 0.0582553596496582, 0.05859328079223633, 0.05848883056640625, 0.05851337432861328, 0.05855849456787109, 0.05882166290283203, 0.059594753265380856, 0.059261920928955075, 0.05911705780029297, 0.060499744415283205, 0.060219390869140625, 0.059988704681396485, 0.06020463943481445, 0.060068256378173826, 0.05882230377197266, 0.059695457458496096, 0.05927731323242187, 0.05868854522705078, 
0.05866700744628906, 0.05848982238769531, 0.05984188842773437, 0.05987395095825195, 0.06033942413330078, 0.06173833465576172, 0.058210559844970707, 0.05783798217773437, 0.05820211029052735, 0.05821404647827148, 0.058738304138183595, 0.05843600082397461, 0.05841337585449219, 0.05831679916381836, 0.05843558502197266, 0.058136577606201174, 0.05806480026245117, 0.05803776168823242, 0.05831945419311523, 0.05812819290161133, 0.05925811386108398, 0.060353473663330076, 0.05924454498291016, 0.05845510482788086, 0.05837100982666016, 0.05832444763183594, 0.05864227294921875, 0.05849977493286133, 0.05866713714599609, 0.05879795074462891, 0.0583741455078125, 0.058521598815917966, 0.058509151458740236, 0.05788425445556641, 0.05789548873901367, 0.05875711822509765, 0.05864448165893555, 0.06025151824951172, 0.05949708938598633, 0.059178337097167966, 0.06048425674438476, 0.058692798614501954, 0.05861868667602539, 0.05987936019897461, 0.05994646453857422, 0.0588048324584961, 0.05849497604370117, 0.05885468673706055, 0.05864028930664063, 0.058549057006835936, 0.058499073028564455, 0.058777118682861326, 0.05917497634887695, 0.06012255859375, 0.05996847915649414, 0.0586566390991211, 0.060170368194580076, 0.05967388916015625, 0.05896422576904297, 0.05906480026245117, 0.05867267227172852, 0.05994118499755859, 0.05991030502319336, 0.05897564697265625, 0.059311809539794924, 0.06009145736694336, 0.06015164947509766, 0.059625473022460934, 0.06185369491577149, 0.05834137725830078, 0.058011646270751956, 0.05807308959960938, 0.05811609649658203, 0.05840281677246094, 0.05849836730957031, 0.058247550964355466, 0.05836012649536133, 0.05880627059936523, 0.05808697509765625, 0.05882467269897461, 0.05838896179199219, 0.05899059295654297, 0.05820630264282227, 0.05849625778198242, 0.058823326110839846, 0.058777248382568356, 0.058396961212158205, 0.058501182556152345, 0.058548225402832034, 0.058265598297119144, 0.05967257690429688, 0.058821792602539065, 0.05862227249145508, 0.059621566772460936, 0.059604736328125, 0.05819452667236328, 0.05841715240478516, 0.05869987106323242, 0.058273696899414064, 0.05852137756347656, 0.05899599838256836, 0.06060537719726562, 0.058887775421142576, 0.058544544219970705, 0.05916057586669922, 0.05929983901977539, 0.059104736328125, 0.058685951232910157, 0.05855644989013672, 0.05871014404296875, 0.059025279998779295, 0.059455230712890626, 0.05902937698364258, 0.05986489486694336, 0.05891027069091797, 0.0589219856262207, 0.059047744750976565, 0.0591976318359375, 0.05912099075317383, 0.059713985443115236, 0.06053043365478516, 0.059345375061035155, 0.05980364990234375, 0.05864243316650391, 0.05889228820800781, 0.05898649597167969, 0.059844192504882814, 0.05935760116577148, 0.05881849670410156, 0.059264896392822265, 0.06063894271850586, 0.06281625747680664, 0.058636287689208984, 0.05807513427734375, 0.057923583984375, 0.05764710235595703, 0.05824512100219727, 0.058121471405029296, 0.0584977912902832, 0.05858031845092773, 0.058361888885498044, 0.058627742767333985, 0.05852435302734375, 0.05840924835205078, 0.058171070098876954, 0.05812665557861328, 0.058627201080322267, 0.06017302322387695, 0.059764896392822266, 0.058175487518310545, 0.058413055419921874, 0.058528961181640624, 0.058775936126708984, 0.05848076629638672, 0.05823315048217773, 0.05869120025634766, 0.0584769287109375, 0.05848998260498047, 0.05848972702026367, 0.058605567932128906, 0.05844377517700195, 0.05854592132568359, 0.058636543273925784, 0.058552318572998044, 0.05965590286254883, 0.058947872161865235, 0.05869363021850586, 
0.05884108734130859, 0.05882857513427735, 0.05867747116088867, 0.05891017532348633, 0.06028905487060547, 0.060354366302490234, 0.059265727996826174, 0.058710014343261716, 0.058843135833740234, 0.05859100723266602, 0.05932259368896484, 0.060510208129882816, 0.05993267059326172, 0.06041999816894531, 0.05936051177978516, 0.05949321746826172, 0.059617279052734375, 0.05944492721557617, 0.05885779190063477, 0.058912769317626956, 0.058916862487792966, 0.05953126525878906, 0.06072115325927734, 0.05994611358642578, 0.06032646560668945, 0.059541313171386716, 0.058628608703613284, 0.06257756805419921, 0.05857846450805664, 0.058038753509521486, 0.0578493766784668, 0.05777155303955078, 0.058051166534423826, 0.0582529296875, 0.058130271911621095, 0.058417823791503905, 0.059818206787109376, 0.05990124893188477, 0.059099838256835936, 0.058730464935302734, 0.05823900985717773, 0.05818572616577149, 0.05908396911621094, 0.058610080718994144, 0.05851119995117188, 0.0584351692199707, 0.058428382873535155, 0.058169345855712894, 0.05849087905883789, 0.05867712020874023, 0.05831411361694336, 0.05819878387451172, 0.05887180709838867, 0.058793502807617186, 0.05923612976074219, 0.05851820755004883, 0.06008118438720703, 0.06060310363769531, 0.059334911346435544, 0.05909299087524414, 0.05943091201782227, 0.05930188751220703, 0.058811809539794924, 0.05882735824584961, 0.05884108734130859, 0.058619102478027346, 0.058829601287841794, 0.05856832122802735, 0.05865100860595703, 0.05896806335449219, 0.05943091201782227, 0.059262977600097654, 0.06085126495361328, 0.059939777374267575, 0.059131294250488284, 0.05893795013427734, 0.05931942367553711, 0.06053078460693359, 0.059888256072998046, 0.060434593200683596, 0.059312126159667966, 0.058673152923583986, 0.058985984802246094, 0.05864249420166016, 0.06002220916748047, 0.060042240142822265, 0.059480064392089846, 0.05914774322509766, 0.05901571273803711, 0.05940633773803711, 0.0626439666748047, 0.058603294372558595, 0.05799913787841797, 0.05840361785888672, 0.05840691375732422, 0.05854115295410156, 0.058203041076660154, 0.05811737442016601, 0.0583502082824707, 0.05824729537963867, 0.05805875015258789, 0.059784385681152345, 0.05999216079711914, 0.059195423126220705, 0.05839468765258789, 0.058346015930175785, 0.05921187210083008, 0.058744831085205076, 0.05877884674072266, 0.058501758575439454, 0.058609825134277344, 0.05864857482910156, 0.05967462539672851, 0.05889564895629883, 0.05971795272827148, 0.059467456817626954, 0.05824720001220703, 0.058520225524902346, 0.0586769905090332, 0.05845145416259766, 0.05857974243164062, 0.058589183807373046, 0.060229633331298826, 0.05974425506591797, 0.05867046356201172, 0.058684032440185545, 0.059879009246826174, 0.05901667022705078, 0.05894240188598633, 0.05983027267456055, 0.06018867111206055, 0.05873459243774414, 0.05855964660644531, 0.05960543823242188, 0.060364513397216796, 0.05876351928710938, 0.05924703979492187, 0.05938175964355469, 0.05976678466796875, 0.05891219329833984, 0.0594837760925293, 0.06054159927368164, 0.05982182312011719, 0.06023632049560547, 0.06022143936157227, 0.05907846450805664, 0.05894982528686524, 0.059797664642333985, 0.060351646423339844, 0.05960774230957031, 0.060407806396484375, 0.06014704132080078, 0.05985887908935547, 0.0620052490234375, 0.058411422729492186, 0.05771846389770508, 0.05816912078857422, 0.05824524688720703, 0.058499519348144534, 0.05875913619995117, 0.05870995330810547, 0.0585912971496582, 0.058619903564453124, 0.058152481079101564, 0.0584051513671875, 0.058638526916503904, 0.058439678192138675, 
0.05829632186889649, 0.05885747146606445, 0.058582527160644535, 0.058702335357666016, 0.059701248168945314, 0.060045310974121094, 0.05883465576171875, 0.05844406509399414, 0.05806201553344727, 0.05897299194335937, 0.059782337188720704, 0.06034310531616211, 0.058637504577636716, 0.05865555191040039, 0.05889807891845703, 0.05857519912719727, 0.05854617691040039, 0.05967871856689453, 0.059450462341308595, 0.0584815673828125, 0.058568702697753904, 0.05884688186645508, 0.05940886306762695, 0.05910105514526367, 0.059911777496337894, 0.05993308639526367, 0.05974643325805664, 0.06092787170410156, 0.05934694290161133, 0.05970678329467773, 0.05879788970947265, 0.0596890869140625, 0.05889295959472656, 0.060307262420654296, 0.06006803131103516, 0.060478462219238284, 0.05930054473876953, 0.05838595199584961, 0.05914646530151367, 0.059744033813476566, 0.058616607666015626, 0.06016128158569336, 0.06002764892578125, 0.06019436645507813, 0.06056390380859375, 0.059535358428955076, 0.058662559509277346, 0.06000019073486328, 0.05902377700805664, 0.06153100967407227, 0.058169345855712894, 0.05788671875, 0.05827337646484375, 0.057981342315673826, 0.058313953399658204, 0.058348094940185544, 0.05811017608642578, 0.05830384063720703, 0.059257503509521484, 0.05857622528076172, 0.05866499328613281, 0.058626686096191406, 0.05886975860595703, 0.05850502395629883, 0.05862828826904297, 0.06007756805419922, 0.05958092880249023, 0.058966014862060545, 0.05877350234985351, 0.058234878540039066, 0.05846220779418945, 0.058900478363037106, 0.05852979278564453, 0.05860966491699219, 0.058781089782714846, 0.05984521484375, 0.05968656158447266, 0.060442977905273435, 0.0590643196105957, 0.05936086273193359, 0.06021526336669922, 0.059128257751464845, 0.05951238250732422, 0.06044083023071289, 0.05973196792602539, 0.05933382415771484, 0.058551296234130856, 0.05839257431030274, 0.05881840133666992, 0.05953756713867187, 0.06059801483154297, 0.059898113250732424, 0.05900697708129883, 0.05995315170288086, 0.05900288009643555, 0.058916862487792966, 0.06081126403808594, 0.05974630355834961, 0.0591646728515625, 0.059734016418457034, 0.058843135833740234, 0.059666431427001954, 0.060096511840820314, 0.05891020965576172, 0.05888614273071289, 0.05893580627441406, 0.059627521514892576, 0.06061260986328125, 0.059842529296875, 0.059305248260498045, 0.059417343139648436, 0.05986713409423828, 0.06177587127685547, 0.058535934448242184, 0.05802598571777344, 0.058549503326416015, 0.05878451156616211, 0.05870105743408203, 0.05888691329956055, 0.058592735290527345, 0.05828623962402344, 0.05821683120727539, 0.05854352188110352, 0.05832966232299805, 0.05837417602539063, 0.058439456939697265, 0.05861328125, 0.058552223205566405, 0.05874505615234375, 0.059515457153320316, 0.058930912017822266, 0.05853756713867188, 0.05878239822387695, 0.05879385757446289, 0.058877086639404295, 0.0588359375, 0.05841670227050781, 0.058705406188964845, 0.05961404800415039, 0.060450912475585934, 0.05880390548706055, 0.05896633529663086, 0.05863945770263672, 0.0584815673828125, 0.059703296661376956, 0.05983990478515625, 0.05906902313232422, 0.058692928314208984, 0.05995536041259766, 0.060354175567626955, 0.059464126586914065, 0.06077014541625977, 0.059507328033447264, 0.05905817413330078, 0.059815326690673826, 0.060194686889648436, 0.060119552612304686, 0.060153057098388675, 0.05989068984985352, 0.06011904144287109, 0.0603474235534668, 0.06045552062988281, 0.059738494873046874, 0.05887180709838867, 0.0600002555847168, 0.06006070327758789, 0.05860214233398438, 0.05988179016113281, 
0.06012860870361328, 0.06001651382446289, 0.05913679885864258, 0.059240447998046876, 0.06108160018920898, 0.06011084747314453, 0.05935308837890625, 0.06200380706787109, 0.05826188659667969, 0.05819596862792969, 0.05829833602905273, 0.05813417434692383, 0.05809913635253906, 0.059775936126708985, 0.06001049423217773, 0.05963980865478516, 0.058891807556152344, 0.058358238220214846, 0.058703872680664064, 0.058398239135742186, 0.05859183883666992, 0.05850099182128906, 0.058666015625, 0.058686080932617186, 0.059291648864746097, 0.05966470336914063, 0.060206558227539064, 0.0585098876953125, 0.058533599853515625, 0.058581310272216795, 0.05862192153930664, 0.05882585525512695, 0.058956672668457034, 0.05972582244873047, 0.05903926467895508, 0.058925537109375, 0.05884108734130859, 0.05909711837768555, 0.06019404983520508, 0.05965692901611328, 0.058676223754882816, 0.0588458251953125, 0.059756927490234375, 0.058697696685791015, 0.058630176544189457, 0.0587960319519043, 0.058982398986816405, 0.05952511978149414, 0.05921782302856445, 0.05936313629150391, 0.060553504943847654, 0.06005759811401367, 0.0590561294555664, 0.05884320068359375, 0.06012716674804688, 0.05981798553466797, 0.060816478729248044, 0.05976121520996094, 0.06002256011962891, 0.05980012893676758, 0.059143871307373044, 0.059334625244140626, 0.06125151824951172, 0.060125598907470705, 0.0598930549621582, 0.060759777069091796, 0.060256961822509766, 0.06069436645507813, 0.05971603012084961, 0.05924425506591797]",tokens/s,16.9174495605042,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File 
""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = 
Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4604.7232,7227.768832,0.0,6849.298432,6444.4416,s,1,11.1192958984375,11.1192958984375,0.0,11.1192958984375,11.1192958984375,11.1192958984375,11.1192958984375,[11.1192958984375],,kWh,0.00012169366612085166,1.3416325208518818e-05,3.794947480398325e-05,0.00017305946613335374,,MB,2513.809408,7580.090368,0.0,7172.25984,6822.664192,s,10,3.846915252685547,0.38469152526855466,0.0043924681989724905,0.3848624725341797,0.38978692932128906,0.39062753448486326,0.39130001861572267,"[0.3787657470703125, 0.38294061279296876, 0.3786228332519531, 0.3816611633300781, 0.38704428100585936, 0.3867843322753906, 0.3812321472167969, 0.3896001281738281, 0.3887958679199219, 0.3914681396484375]",tokens/s,665.4682601112291,kWh,1.1370956313141551e-05,1.2540068247944957e-06,7.534941925385192e-06,2.015990506332124e-05,tokens/kWh,12698472.497559736,MB,2518.134784,7582.18752,0.0,7174.356992,6822.666752,s,10,29.13516845703124,2.913516845703125,0.014485912871789908,2.9170203857421875,2.9270207763671876,2.928691906738281,2.9300288110351564,"[2.914953125, 2.9266494140625, 2.926602294921875, 2.919087646484375, 2.924760009765625, 2.899684814453125, 2.901277099609375, 2.9091103515625, 2.930363037109375, 2.8826806640625]",tokens/s,21.62335189271785,kWh,8.464683113810947e-05,9.336741846009291e-06,5.5548302985614514e-05,0.00014953187596973327,tokens/kWh,421314.8507061586,,s,630,29.13214030838015,0.04624149255298433,0.0007635289994446567,0.04621358299255371,0.04679281387329101,0.04714868049621582,0.049186877937316895,"[0.047429630279541016, 0.046777664184570314, 0.046368606567382814, 0.04650419235229492, 0.046519073486328125, 0.04616377639770508, 0.04576847839355469, 0.045975616455078125, 0.04576694488525391, 0.04573974227905273, 0.04565980911254883, 0.04560070419311523, 0.049310272216796874, 0.04598112106323242, 0.0456668815612793, 0.04575775909423828, 0.045660865783691405, 0.04570521545410156, 0.04565929412841797, 0.04563395309448242, 0.04575388717651367, 0.04573072052001953, 0.04565737533569336, 0.045733760833740235, 0.04581046295166016, 0.045792606353759764, 0.04572643280029297, 0.04577859115600586, 0.04562160110473633, 0.04600400161743164, 0.045865184783935545, 0.046085887908935544, 0.04598780822753906, 0.04621855926513672, 0.04605027389526367, 0.046209022521972655, 0.04614908981323242, 0.04626486587524414, 0.04634588623046875, 0.046271041870117186, 0.046976001739501956, 0.04687747192382812, 0.046181632995605466, 0.046363391876220704, 0.0463699836730957, 0.046302017211914064, 0.04765491104125977, 0.046601249694824216, 0.0462628173828125, 0.046614974975585935, 0.046311424255371096, 0.04642969512939453, 0.04631808090209961, 0.04662444686889648, 0.04639081573486328, 
0.04690835189819336, 0.046438465118408205, 0.04665116882324219, 0.04669219207763672, 0.04706841659545898, 0.046582687377929685, 0.04677737426757812, 0.04655203247070312, 0.04749337768554687, 0.047298561096191405, 0.04679065704345703, 0.04670019149780273, 0.04669475173950195, 0.04661804962158203, 0.04663312149047852, 0.049396385192871095, 0.0465401611328125, 0.046370784759521486, 0.04650844955444336, 0.046413822174072264, 0.04860927963256836, 0.04674899291992188, 0.04611894226074219, 0.04630524826049805, 0.04630550384521484, 0.0463296012878418, 0.046305694580078126, 0.04632585525512695, 0.04619900894165039, 0.04653833770751953, 0.04640739059448242, 0.046367424011230465, 0.04645996856689453, 0.04681625747680664, 0.046780033111572264, 0.04620719909667969, 0.04619232177734375, 0.04621958541870117, 0.04602889633178711, 0.046069759368896485, 0.04597516632080078, 0.04617254257202148, 0.046034942626953124, 0.0462479362487793, 0.046075233459472655, 0.04624860763549805, 0.04652236938476562, 0.0462490234375, 0.04656825637817383, 0.04630684661865234, 0.04628335952758789, 0.04614348983764648, 0.046059551239013674, 0.04611641693115234, 0.046001632690429686, 0.04605843353271484, 0.0458526725769043, 0.04612460708618164, 0.04614393615722656, 0.0461578254699707, 0.04609638214111328, 0.04629852676391601, 0.046842239379882813, 0.04693398284912109, 0.04638131332397461, 0.046360576629638675, 0.04637446212768555, 0.04621052932739258, 0.04601897430419922, 0.046172737121582035, 0.046514175415039063, 0.04702022552490234, 0.04620889663696289, 0.04620505523681641, 0.04602291107177734, 0.045954334259033204, 0.0459637451171875, 0.04588329696655274, 0.04594492721557617, 0.045889537811279295, 0.04592745590209961, 0.04614371109008789, 0.04618739318847656, 0.0464505615234375, 0.04627046585083008, 0.04604108810424805, 0.04626988983154297, 0.04640169525146484, 0.04628547286987305, 0.046226497650146484, 0.04632547378540039, 0.046416000366210936, 0.046424190521240236, 0.04614806365966797, 0.04641839981079102, 0.04643344116210937, 0.04639603042602539, 0.04624998474121094, 0.04673292922973633, 0.046313888549804685, 0.046311073303222657, 0.04626985549926758, 0.04629801559448242, 0.04614166259765625, 0.04640444946289062, 0.046861248016357424, 0.04632380676269531, 0.04620278549194336, 0.04652995300292969, 0.04653116989135742, 0.04624803161621094, 0.04615142440795898, 0.046217601776123045, 0.0463779182434082, 0.046430942535400394, 0.046450080871582033, 0.046593856811523435, 0.04658473587036133, 0.046812225341796875, 0.04656428909301758, 0.04682137680053711, 0.04667801666259765, 0.04667327880859375, 0.04664131164550781, 0.04683760070800781, 0.046645889282226564, 0.04738800048828125, 0.046827392578125, 0.04794812774658203, 0.04846412658691406, 0.04683180618286133, 0.04630915069580078, 0.04626467132568359, 0.046517822265625, 0.047301025390625, 0.04690665435791016, 0.046297855377197265, 0.04638883209228516, 0.046400096893310545, 0.04610044860839844, 0.04739599990844726, 0.04614374542236328, 0.046024673461914065, 0.04628937530517578, 0.04613750457763672, 0.046137344360351565, 0.0460120964050293, 0.046287166595458985, 0.04915631866455078, 0.04710089492797852, 0.04686726379394531, 0.04642803192138672, 0.0468645133972168, 0.047253440856933594, 0.04643401718139648, 0.0472737922668457, 0.046574142456054686, 0.046609569549560546, 0.04637984085083008, 0.04722809600830078, 0.04649363327026367, 0.04743824005126953, 0.04623183822631836, 0.04704012680053711, 0.04611916732788086, 0.046313793182373046, 0.0460403823852539, 0.04590252685546875, 
0.04563711929321289, 0.04562201690673828, 0.045774593353271484, 0.04588710403442383, 0.04586841583251953, 0.045780193328857424, 0.045835617065429685, 0.045924415588378904, 0.04614944076538086, 0.04589625549316406, 0.04573388671875, 0.04640972900390625, 0.04580278396606445, 0.04584521484375, 0.04687200164794922, 0.04624236679077148, 0.045894847869873044, 0.04612179183959961, 0.04612505722045898, 0.046077953338623044, 0.04577689743041992, 0.045639678955078124, 0.04572774505615235, 0.04600012969970703, 0.045969406127929685, 0.046171775817871095, 0.045779327392578124, 0.04629913711547851, 0.04633020782470703, 0.04876806259155273, 0.04646393585205078, 0.04632166290283203, 0.05014479827880859, 0.04643065643310547, 0.046186016082763674, 0.04601641464233398, 0.046037601470947265, 0.04595507049560547, 0.05178121566772461, 0.046066078186035156, 0.04599603271484375, 0.04605145645141601, 0.04620889663696289, 0.04614982223510742, 0.04607145690917969, 0.04603670501708984, 0.046249664306640625, 0.04614009475708008, 0.04631868743896484, 0.046340991973876956, 0.0465880012512207, 0.04627865600585938, 0.04634790420532227, 0.04651251220703125, 0.04663065719604492, 0.04627872085571289, 0.046344383239746094, 0.04633190536499023, 0.046236961364746094, 0.04645692825317383, 0.046446273803710934, 0.046072769165039065, 0.04611481475830078, 0.04586905670166016, 0.045932544708251956, 0.045921825408935545, 0.04633190536499023, 0.0462845458984375, 0.04624867248535156, 0.046081184387207035, 0.0463834228515625, 0.04622201538085938, 0.04663075256347656, 0.04654415893554688, 0.04627734375, 0.046448448181152346, 0.04649593734741211, 0.04601446533203125, 0.04598134231567383, 0.04578134536743164, 0.045981056213378904, 0.045689697265625, 0.045905086517333986, 0.04610108947753906, 0.046276607513427735, 0.04619468688964844, 0.04685823822021484, 0.04614963150024414, 0.04619065475463867, 0.04655916976928711, 0.046219264984130856, 0.046516128540039066, 0.04642816162109375, 0.045428737640380856, 0.04506419372558594, 0.04511350250244141, 0.04505174255371094, 0.044943359375, 0.04531824111938477, 0.04537443161010742, 0.04543379211425781, 0.045330432891845705, 0.04518678283691406, 0.045164192199707034, 0.04495219039916992, 0.045123584747314455, 0.045543071746826175, 0.045898078918457035, 0.04568476867675781, 0.045517791748046876, 0.04555263900756836, 0.04604240036010742, 0.045245151519775394, 0.045107200622558595, 0.04487081527709961, 0.04488687896728515, 0.04539801788330078, 0.04500889587402344, 0.04491059112548828, 0.044948799133300785, 0.045185150146484374, 0.04606556701660156, 0.04549903869628906, 0.04587216186523437, 0.0458106575012207, 0.04657379150390625, 0.04640534210205078, 0.046266433715820315, 0.04621327972412109, 0.046616416931152344, 0.04591817474365235, 0.046155231475830075, 0.04635500717163086, 0.04628236770629883, 0.04639180755615235, 0.046387073516845706, 0.04628297424316406, 0.04628799819946289, 0.046213886260986325, 0.046383102416992186, 0.04647315216064453, 0.04676588821411133, 0.04621327972412109, 0.04629491043090821, 0.04629721450805664, 0.05578956985473633, 0.046063617706298826, 0.046212352752685544, 0.046155712127685544, 0.04669494247436524, 0.04644655990600586, 0.046566879272460934, 0.049199359893798825, 0.046520992279052736, 0.04602259063720703, 0.04690713500976563, 0.047386878967285155, 0.04627391815185547, 0.04624867248535156, 0.04603871917724609, 0.04633417510986328, 0.04627167892456055, 0.046494110107421875, 0.046351806640625, 0.04642649459838867, 0.04647091293334961, 0.046521183013916015, 
0.04625612640380859, 0.04637900924682617, 0.04603903961181641, 0.04622857666015625, 0.04966191864013672, 0.04632057571411133, 0.046835712432861325, 0.04589132690429688, 0.0458603515625, 0.045714176177978516, 0.04538777542114258, 0.04511334228515625, 0.04484291076660156, 0.0450683822631836, 0.04547071838378906, 0.04561414337158203, 0.04564940643310547, 0.04569891357421875, 0.045789825439453126, 0.04577059173583985, 0.04567827224731445, 0.04557660675048828, 0.04559408187866211, 0.04560748672485351, 0.045391712188720706, 0.04534860610961914, 0.04506880187988281, 0.04508972930908203, 0.04502422332763672, 0.04551475143432617, 0.04577084732055664, 0.045830047607421875, 0.045946304321289065, 0.046034942626953124, 0.04584096145629883, 0.046063488006591796, 0.04578265762329101, 0.04586102294921875, 0.04581343841552735, 0.04819420623779297, 0.04613119888305664, 0.04617830276489258, 0.046159999847412106, 0.04623654556274414, 0.046048255920410154, 0.046153728485107424, 0.04656742477416992, 0.04622694396972656, 0.046088287353515625, 0.04630979156494141, 0.04651020812988281, 0.046593280792236326, 0.045986560821533205, 0.04842278289794922, 0.04583200073242188, 0.04579359817504883, 0.04574518585205078, 0.0458568000793457, 0.04604121780395508, 0.045705375671386717, 0.045404830932617185, 0.04545510482788086, 0.04591571044921875, 0.04758393478393555, 0.046276256561279296, 0.046091777801513675, 0.046150623321533205, 0.046524288177490235, 0.04637491226196289, 0.04617987060546875, 0.04630499267578125, 0.04633033752441406, 0.046026081085205076, 0.046018943786621094, 0.04617001724243164, 0.04623164749145508, 0.046260833740234375, 0.0459048957824707, 0.046160865783691406, 0.046170112609863284, 0.046249183654785156, 0.045720352172851565, 0.04566185760498047, 0.045698814392089844, 0.045832801818847656, 0.04603606414794922, 0.04626467132568359, 0.04616012954711914, 0.04642969512939453, 0.047033054351806644, 0.046383232116699216, 0.04613321685791016, 0.04621673583984375, 0.04610271835327148, 0.04630374526977539, 0.04612003326416016, 0.04628310394287109, 0.046424415588378905, 0.04645273590087891, 0.0462110710144043, 0.04635660934448242, 0.046229377746582034, 0.04606671905517578, 0.04599292755126953, 0.046048641204833984, 0.045719520568847656, 0.045780895233154296, 0.04696547317504883, 0.04590595245361328, 0.04589977645874024, 0.04622480010986328, 0.04620553588867188, 0.04605763244628906, 0.04610236740112305, 0.04763852691650391, 0.046637054443359374, 0.04628793716430664, 0.04629600143432617, 0.04635062408447266, 0.046419681549072264, 0.046359935760498044, 0.04661635208129883, 0.047059391021728514, 0.04664080047607422, 0.04664499282836914, 0.04649168014526367, 0.0465409927368164, 0.04659827041625977, 0.04730438232421875, 0.046564319610595706, 0.046507614135742184, 0.046491744995117185, 0.04673891067504883, 0.04669731140136719, 0.046473217010498044, 0.04634624099731445, 0.04611427307128906, 0.04608668899536133, 0.04606320190429687, 0.04647305679321289, 0.04650390243530273, 0.04634479904174805, 0.04661782455444336, 0.04633603286743164, 0.046297855377197265, 0.047099903106689454, 0.046266368865966793, 0.04654060745239258, 0.046300479888916016, 0.04623603057861328, 0.046236351013183595, 0.04645001602172852, 0.04610889434814453, 0.046422271728515624, 0.04649369430541992, 0.046354686737060544, 0.046562881469726564, 0.04631478500366211, 0.046359455108642575, 0.046465023040771485, 0.04603289413452148, 0.04630652618408203, 0.046088993072509764, 0.04615724945068359, 0.04618502426147461, 0.048639999389648435, 0.0462330551147461, 
0.04647171020507813, 0.04634624099731445, 0.04638515090942383, 0.04638131332397461, 0.04658560180664063, 0.04639334487915039, 0.04689715194702149, 0.046450687408447267, 0.04696268844604492, 0.04675993728637695, 0.047143489837646484, 0.04653039932250977, 0.046260223388671876, 0.047126529693603515, 0.046095424652099606, 0.046137439727783204, 0.04602761459350586, 0.046179840087890625, 0.04581577682495117, 0.04602320098876953, 0.04544102478027344, 0.04552908706665039, 0.04534799957275391, 0.04582038497924805, 0.045787647247314454, 0.04566944122314453, 0.04551968002319336, 0.04559667205810547, 0.0483061752319336, 0.046098270416259766, 0.046069889068603515, 0.045658145904541016, 0.04560076904296875, 0.04566780853271484, 0.04550915145874023, 0.04569190216064453, 0.045448192596435545, 0.04532428741455078, 0.045287200927734375, 0.045448894500732424, 0.04542108917236328, 0.045534400939941405, 0.04554163360595703, 0.045777473449707035, 0.04561932754516602, 0.045566078186035155, 0.04569903945922851, 0.045622753143310546, 0.045857086181640624, 0.0455819206237793, 0.04558480072021484, 0.045563137054443356, 0.045349632263183594, 0.04560451126098633, 0.04522003173828125, 0.04564092636108399, 0.04611305618286133, 0.0460909423828125, 0.045330398559570315, 0.04532595062255859, 0.045078208923339844, 0.04498303985595703, 0.0449060173034668, 0.04715292739868164, 0.045066497802734376, 0.045377918243408207, 0.045034751892089844, 0.04559539031982422, 0.045864959716796876, 0.045716510772705075, 0.04557104110717773, 0.046080001831054686, 0.04577510452270508]",tokens/s,21.625599538210885,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File 
""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4628.176896,6587.02336,0.0,6201.278464,6094.567936,s,1,10.8204833984375,10.8204833984375,0.0,10.8204833984375,10.8204833984375,10.8204833984375,10.8204833984375,[10.8204833984375],,kWh,0.00010622374539584597,1.171002278867517e-05,3.372086031000476e-05,0.0001516546284945259,,MB,1984.63488,7656.57088,0.0,7237.271552,6740.83328,s,10,6.285500610351562,0.6285500610351562,0.0016798519262076473,0.6281152954101563,0.631590283203125,0.6316754821777344,0.6317436413574219,"[0.6317606811523437, 0.6287974243164063, 0.62806640625, 0.6315713500976563, 0.6284917602539063, 0.6281641845703125, 0.6270883178710938, 0.6275939331054687, 0.6265503540039062, 0.6274161987304687]",tokens/s,407.2865724941539,kWh,2.4094233450001084e-05,2.6571981577322627e-06,1.474400752169083e-05,4.149543912942418e-05,tokens/kWh,6169352.713717202,MB,1988.58752,7658.668032,0.0,7239.368704,6740.83584,s,10,44.0696572265625,4.40696572265625,0.04051366650668356,4.3963349609375,4.45908583984375,4.459823681640625,4.460413955078125,"[4.4377373046875, 4.4605615234375, 4.458921875, 4.4550537109375, 4.35621875, 4.3865625, 4.3886259765625, 4.3635986328125, 4.3583330078125, 4.4040439453125]",tokens/s,14.295550264009643,kWh,0.00016103574602791866,1.7763312511057955e-05,8.96366315383069e-05,0.0002684356900772835,tokens/kWh,234693.08414936217,,s,630,44.06776243591311,0.06994882926335409,0.0010725276021198824,0.0698565902709961,0.07101840744018555,0.07160470161437989,0.0735778109741211,"[0.07092892456054688, 0.07035305786132813, 0.07010099029541016, 0.07027507019042968, 0.0698757095336914, 0.0698564453125, 0.07003218841552734, 0.07083955383300782, 0.07030655670166015, 0.07038285064697265, 0.07017657470703124, 0.0703169937133789, 0.06998537445068359, 0.06940150451660156, 0.06912818908691407, 0.07004080200195313, 0.07034960174560546, 0.07003469085693359, 0.06972499084472657, 0.0696514892578125, 0.07046236419677734, 0.07004473876953125, 0.07048646545410156, 0.07042012786865234, 0.07293971252441406, 0.07048258972167969, 0.07031180572509765, 0.07097913360595703, 0.06993977355957032, 0.06985673522949219, 0.07054390716552734, 0.07040819549560547, 0.07036313629150391, 0.07033036804199219, 0.07041334533691407, 0.07054454040527344, 0.07070492553710937, 0.07084028625488281, 0.07106156921386719, 0.07050240325927734, 0.07052054595947266, 0.0712639389038086, 0.07085321807861328, 0.07050822448730469, 0.0705284194946289, 0.07063625335693359, 0.07058969879150391, 
0.07043373107910156, 0.07035689544677734, 0.07078102111816406, 0.07039520263671875, 0.07065798187255859, 0.07041232299804688, 0.07038745880126954, 0.06960841369628906, 0.07256620788574218, 0.07079334259033203, 0.07044290924072266, 0.07062992095947265, 0.07033446502685547, 0.07048764801025391, 0.07064208221435547, 0.07031337738037109, 0.07136428833007813, 0.07079609680175782, 0.07050444793701172, 0.06990550231933594, 0.07008866882324219, 0.0701879653930664, 0.0706949462890625, 0.07057810974121094, 0.07055363464355469, 0.0709532470703125, 0.07075801849365235, 0.07130941009521484, 0.07115715026855468, 0.07129910278320313, 0.07053481292724609, 0.07050697326660156, 0.07074585723876953, 0.07083487701416015, 0.07350249481201172, 0.07096918487548828, 0.07090569305419922, 0.07072207641601562, 0.07104518127441406, 0.07076377868652343, 0.07062598419189453, 0.07158783721923828, 0.07045542144775391, 0.07074188995361329, 0.07067411041259766, 0.0722906265258789, 0.07068466949462891, 0.07059661102294922, 0.07064102172851562, 0.07066051483154297, 0.07466115570068359, 0.07125910186767578, 0.07042991638183593, 0.07043971252441407, 0.07069283294677735, 0.07080754852294922, 0.07301651000976563, 0.0709783706665039, 0.0711370849609375, 0.0705650863647461, 0.07077155303955078, 0.07048614501953125, 0.07039753723144532, 0.0705478744506836, 0.07073910522460937, 0.07039472198486328, 0.06970162963867188, 0.07001203155517578, 0.07002591705322266, 0.07082617950439453, 0.07065305328369141, 0.07037955474853516, 0.07027913665771485, 0.07049919891357422, 0.07003702545166016, 0.07029007720947265, 0.0702317123413086, 0.07021167755126953, 0.0702485122680664, 0.07132284545898437, 0.0703987808227539, 0.0701398696899414, 0.07048134613037109, 0.07012207794189453, 0.07045529937744141, 0.07023616027832032, 0.07041423797607421, 0.07025212860107422, 0.07001129913330079, 0.07078265380859375, 0.07032835388183593, 0.07035942077636718, 0.07045680236816407, 0.07019744110107422, 0.07066044616699219, 0.07046896362304687, 0.07031670379638671, 0.07120236968994141, 0.07031443023681641, 0.0701904296875, 0.07009270477294922, 0.07389842987060546, 0.07058873748779297, 0.07053311920166015, 0.07050994873046874, 0.07055359649658204, 0.07061094665527344, 0.07136441802978516, 0.07209225463867187, 0.0707995834350586, 0.07061094665527344, 0.07190704345703125, 0.07163113403320312, 0.07085670471191406, 0.07019519805908203, 0.07068672180175781, 0.07040592193603516, 0.07178467559814453, 0.07088240051269531, 0.0707449951171875, 0.07041385650634766, 0.07053097534179688, 0.07090643310546875, 0.07074755096435546, 0.07067298889160156, 0.07055974578857421, 0.07084255981445313, 0.07053907012939453, 0.07050371551513672, 0.07071202850341797, 0.07037670135498048, 0.07129747009277344, 0.07234796905517578, 0.07161849975585938, 0.0707523193359375, 0.07094429016113281, 0.07075888061523437, 0.07131536102294922, 0.07072777557373047, 0.07078092956542968, 0.07065984344482422, 0.07086105346679687, 0.07178348541259766, 0.070783935546875, 0.07089481353759766, 0.07044380950927734, 0.07049337768554688, 0.07053395080566406, 0.07061504364013672, 0.07031398773193359, 0.07065395355224609, 0.0704176025390625, 0.0713408660888672, 0.07103603363037109, 0.07067436981201172, 0.07054383850097656, 0.07116627502441407, 0.0704306869506836, 0.07107174682617187, 0.07047779083251954, 0.07073519897460938, 0.07039663696289063, 0.07043612670898437, 0.07071206665039062, 0.07046975708007812, 0.07132064056396484, 0.07098054504394531, 0.07094611358642579, 0.07076300811767579, 0.07232940673828125, 
0.07128678131103515, 0.07073903656005859, 0.07127158355712891, 0.07280818939208984, 0.07218380737304687, 0.07101644897460938, 0.07085260772705078, 0.07083213043212891, 0.07137391662597656, 0.0718406753540039, 0.07237999725341797, 0.07090627288818359, 0.07116989135742187, 0.07076834869384765, 0.07094947052001953, 0.07079305267333984, 0.07078208160400391, 0.07079971313476563, 0.07144297790527344, 0.07055535888671875, 0.07085494232177734, 0.0704668197631836, 0.07027750396728516, 0.070611328125, 0.06955750274658203, 0.06933580780029297, 0.06887833404541016, 0.06883328247070312, 0.0692326431274414, 0.0697624282836914, 0.07019174194335938, 0.06990614318847656, 0.06995350646972656, 0.06970159912109375, 0.0697787857055664, 0.07104006195068359, 0.07011840057373046, 0.06999660491943359, 0.06997853088378907, 0.06949705505371094, 0.06958281707763672, 0.06931011199951172, 0.06855305480957032, 0.06861318206787109, 0.06831394958496094, 0.06818415832519531, 0.06844608306884765, 0.06891942596435546, 0.07068057250976563, 0.07037312316894531, 0.06967935943603516, 0.07313817596435547, 0.06909324645996094, 0.0687630386352539, 0.06834786987304688, 0.06820035552978515, 0.07051936340332031, 0.06827446746826171, 0.06834381103515624, 0.06807878112792969, 0.0682850570678711, 0.06831238555908203, 0.068508544921875, 0.06853427124023438, 0.06854182434082032, 0.06904828643798828, 0.06860662078857421, 0.06864895629882813, 0.06900633239746094, 0.06898697662353516, 0.06828265380859375, 0.06816627502441407, 0.06870355224609374, 0.06909388732910156, 0.06837471771240235, 0.06822291564941406, 0.06857878112792969, 0.06871100616455078, 0.06872054290771484, 0.06846678161621093, 0.06880025482177735, 0.06828672027587891, 0.06833561706542969, 0.06877593231201172, 0.06851990509033203, 0.06875497436523438, 0.06944735717773437, 0.07237213134765624, 0.06939453125, 0.0693583984375, 0.06951087951660156, 0.06992515563964843, 0.06947020721435547, 0.06927565002441406, 0.07012882995605468, 0.07059539031982422, 0.06982681274414063, 0.06939753723144532, 0.06981922912597656, 0.06959302520751953, 0.06956034851074219, 0.06958681488037109, 0.06914064025878906, 0.06942070770263672, 0.06923094177246093, 0.06909951782226563, 0.06919766235351563, 0.06918962860107422, 0.06943350219726563, 0.06906674957275391, 0.06900531005859376, 0.06883328247070312, 0.0690025634765625, 0.06895686340332031, 0.06901376342773438, 0.0690805435180664, 0.06976541137695312, 0.07002931213378906, 0.06998390197753906, 0.06975523376464844, 0.06956646728515625, 0.07141580963134765, 0.06957260894775391, 0.06923878479003906, 0.07057170867919922, 0.06947052764892578, 0.07003750610351563, 0.06945811462402343, 0.06957622528076172, 0.07377891540527344, 0.06977718353271484, 0.06931330871582031, 0.06974873352050781, 0.06938829040527343, 0.06989619445800781, 0.07070480346679688, 0.06969315338134766, 0.06941555023193359, 0.0693325424194336, 0.06922905731201172, 0.06967699432373047, 0.06936780548095703, 0.06899097442626953, 0.06938569641113282, 0.06945791625976562, 0.06913833618164063, 0.06949533081054687, 0.06928924560546874, 0.07093331146240234, 0.07057612609863281, 0.06962483215332031, 0.06960435485839844, 0.06950704193115234, 0.06911593627929688, 0.06956851196289063, 0.06978662109375, 0.0695305938720703, 0.0694354248046875, 0.06969705963134766, 0.06972463989257813, 0.06951126098632812, 0.07030169677734376, 0.06981427001953125, 0.06987750244140625, 0.07003107452392578, 0.06957520294189454, 0.07039087677001953, 0.06951618957519531, 0.06942851257324219, 0.0695917739868164, 
0.06958207702636719, 0.06963890838623046, 0.06934732818603516, 0.06882911682128906, 0.06889068603515625, 0.06860546875, 0.06840777587890624, 0.068746337890625, 0.06852496337890625, 0.06845030212402343, 0.06841744232177735, 0.06849545288085937, 0.07345970916748047, 0.06873900604248047, 0.06872275543212891, 0.06867356872558594, 0.0695249252319336, 0.06898892974853515, 0.0696028823852539, 0.06950396728515625, 0.06906412506103515, 0.06946614074707032, 0.06910355377197265, 0.06986927795410157, 0.06885833740234375, 0.06933545684814453, 0.06935276794433594, 0.07009519958496094, 0.0695196762084961, 0.07012764739990235, 0.06953907012939453, 0.07032704162597657, 0.07348633575439453, 0.07093852996826172, 0.0696607666015625, 0.06968022155761719, 0.06938511657714844, 0.06913763427734375, 0.069337890625, 0.06910793304443359, 0.06904399871826172, 0.06912204742431641, 0.07488102722167969, 0.07006412506103515, 0.07049830627441406, 0.0695257568359375, 0.06931961822509766, 0.07006086730957031, 0.06967091369628907, 0.06953369903564453, 0.06947756958007813, 0.07061321258544923, 0.0699684829711914, 0.0696075210571289, 0.07321826934814453, 0.06993004608154296, 0.06947321319580078, 0.06899097442626953, 0.06917107391357422, 0.06885523223876953, 0.0686568603515625, 0.06868681335449218, 0.06961468505859375, 0.06869699096679688, 0.06877177429199219, 0.06871660614013672, 0.0688189468383789, 0.06913775634765625, 0.06918112182617188, 0.06922041320800781, 0.07089759826660157, 0.06942829132080078, 0.06908099365234376, 0.07298662567138672, 0.06954188537597657, 0.07020748901367188, 0.0742113265991211, 0.07141295623779297, 0.06960578918457032, 0.06937577819824219, 0.0704722900390625, 0.0686013412475586, 0.06879209899902344, 0.06907730865478516, 0.06978591918945312, 0.07023830413818359, 0.06874877166748047, 0.06873551940917969, 0.06845849609375, 0.06857453155517577, 0.0684668197631836, 0.0704228515625, 0.06908924865722656, 0.06877378845214843, 0.06861248016357421, 0.06851337432861328, 0.06978620910644531, 0.06898873901367188, 0.06870015716552734, 0.06872064208984376, 0.06913330841064454, 0.06850637054443359, 0.06849561309814453, 0.06862582397460938, 0.06860860443115234, 0.06887337493896484, 0.06857405090332032, 0.0683868179321289, 0.06845235443115234, 0.06860934448242187, 0.06843788909912109, 0.06855967712402344, 0.06857721710205078, 0.06855814361572266, 0.06839167785644532, 0.06869324493408203, 0.06847154998779297, 0.06968732452392579, 0.0692343978881836, 0.06895005035400391, 0.0686164779663086, 0.06861833953857421, 0.06873894500732422, 0.06861312103271484, 0.06837289428710938, 0.06847138977050782, 0.06837625885009765, 0.06857350158691407, 0.06892339324951172, 0.06895616149902344, 0.06939427185058594, 0.06931881713867187, 0.06924492645263672, 0.07010284423828125, 0.06901548767089843, 0.06909363555908203, 0.06921625518798828, 0.06932035064697266, 0.06977993774414062, 0.07021916961669922, 0.0697508773803711, 0.06925657653808594, 0.06952444458007813, 0.0699945297241211, 0.06927155303955078, 0.06974259185791015, 0.06929817962646484, 0.06985830688476563, 0.06927667236328125, 0.07028294372558594, 0.06934764862060547, 0.06996896362304687, 0.06959404754638672, 0.07023750305175781, 0.06975558471679688, 0.06912000274658203, 0.06939238739013671, 0.06962713623046875, 0.06911257934570313, 0.06912204742431641, 0.06911795043945312, 0.06893363189697266, 0.06941696166992188, 0.0687322235107422, 0.06862284851074218, 0.06883757019042969, 0.06819142150878907, 0.06923961639404297, 0.07055769348144532, 0.06881072235107422, 0.06867766571044921, 
0.06866534423828125, 0.06880457305908202, 0.06840118408203125, 0.06856294250488282, 0.06880239868164062, 0.06861225891113282, 0.06877196502685547, 0.06924015808105469, 0.06878262329101563, 0.0709327392578125, 0.07048115539550781, 0.0701506576538086, 0.06931788635253906, 0.06950374603271485, 0.07052460479736328, 0.06904377746582031, 0.06861695861816407, 0.06851993560791016, 0.06865424346923828, 0.0694710693359375, 0.0697364501953125, 0.07053478240966797, 0.06983309173583985, 0.07024976348876953, 0.07032643127441406, 0.07008477020263672, 0.06976895904541015, 0.06929388427734375, 0.07016738891601562, 0.06998000335693359, 0.07009910583496094, 0.07079116821289062, 0.07335935974121094, 0.07143218994140625, 0.07360857391357421, 0.07396403503417968, 0.07145894622802734, 0.07016563415527344, 0.0694181137084961, 0.06998365020751954, 0.07133424377441407, 0.06955958557128906, 0.06933372497558593, 0.06949858856201171, 0.06941110229492188, 0.06943129730224609, 0.07172300720214844, 0.06971392059326172, 0.0693285140991211, 0.0692647705078125, 0.0691599349975586, 0.069697021484375, 0.06937446594238281, 0.06935903930664063, 0.06976914978027343, 0.06945836639404297, 0.06925936126708984, 0.06918358612060548, 0.06941439819335937, 0.06950950622558594, 0.06950310516357422, 0.06922998046875, 0.06934178924560547, 0.06932275390625, 0.06923987579345703, 0.06924998474121094, 0.06923468780517578, 0.06916710662841796, 0.06933017730712891, 0.06951974487304688, 0.06920787048339844, 0.06925920104980468]",tokens/s,14.296164932725985,,, 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,11033.808896,12225.282048,0.0,11846.811648,11814.785024,s,1,14.5446103515625,14.5446103515625,0.0,14.5446103515625,14.5446103515625,14.5446103515625,14.5446103515625,[14.5446103515625],,kWh,0.00021487755945832606,2.3695210668930488e-05,6.834561023200125e-05,0.0003069183803592578,,MB,2022.076416,13158.514688,0.0,12750.68416,12641.86368,s,10,13.2707529296875,1.32707529296875,0.004613391175216123,1.3276134033203126,1.3325015625,1.3327134887695313,1.3328830297851564,"[1.3182591552734375, 1.320686279296875, 1.3242823486328126, 1.3263507080078125, 1.327857666015625, 1.327369140625, 1.3311546630859374, 1.3294130859375, 1.3324544677734376, 1.3329254150390626]",tokens/s,192.90540736939806,kWh,3.879431871291899e-05,4.277520867026141e-06,2.583313177760005e-05,6.890497135754518e-05,tokens/kWh,3715261.6851348225,MB,2031.058944,13452.115968,0.0,13044.28544,12933.698048,s,10,51.7908515625,5.17908515625,0.0033790050210793367,5.179365478515625,5.182355322265625,5.183464282226563,5.1843514501953125,"[5.17328857421875, 5.17350244140625, 5.180556640625, 5.18149169921875, 5.17875244140625, 5.1845732421875, 5.17892431640625, 5.179806640625, 5.18210888671875, 
5.1778466796875]",tokens/s,12.164310510317263,kWh,0.00015175458123291267,1.6740703577205495e-05,0.00010082966399700065,0.0002693249488071188,tokens/kWh,233918.17311777684,,s,630,51.7864017944336,0.08220063776894221,0.0006728557495673679,0.08218268966674805,0.0829502426147461,0.08315069580078124,0.08478962242126466,"[0.08456169891357422, 0.08118694305419921, 0.0809335708618164, 0.08112547302246094, 0.081331298828125, 0.08113040161132813, 0.08157324981689452, 0.08140009307861328, 0.08107599639892578, 0.08123206329345703, 0.08104338836669922, 0.08238925170898438, 0.08226630401611328, 0.08149759674072266, 0.08128543853759766, 0.08151007843017578, 0.08170700836181641, 0.08183654022216796, 0.08158793640136719, 0.08168067169189454, 0.0815288314819336, 0.08181542205810546, 0.08185049438476563, 0.08251590728759765, 0.08246041870117188, 0.08226956939697265, 0.0819352035522461, 0.0818644790649414, 0.08150169372558594, 0.08195273590087891, 0.08182870483398437, 0.08192924499511718, 0.08211759948730468, 0.08198143768310547, 0.08286790466308594, 0.0826289291381836, 0.08233369445800781, 0.08279654693603515, 0.08241356658935547, 0.08198934173583984, 0.0816701431274414, 0.08215580749511718, 0.08205913543701172, 0.08215347290039063, 0.08213497924804687, 0.0822458267211914, 0.08282112121582032, 0.08289222717285157, 0.08270697784423828, 0.08245629119873046, 0.08238111877441406, 0.08225993347167969, 0.08217552185058594, 0.08244271850585938, 0.08250736236572266, 0.08216006469726563, 0.0827734375, 0.08324147033691406, 0.08261449432373047, 0.08304009246826172, 0.08304029083251953, 0.08295011138916016, 0.08299343872070312, 0.08484451293945312, 0.08114192199707031, 0.08077705383300782, 0.08111411285400391, 0.08118521881103516, 0.0814986572265625, 0.08141417694091797, 0.0813786849975586, 0.08126643371582032, 0.08130035400390626, 0.08130355072021485, 0.08237670135498047, 0.08220262145996093, 0.08201824188232422, 0.081417724609375, 0.0818304672241211, 0.08153199768066406, 0.08151952362060547, 0.08189734649658204, 0.0816388168334961, 0.08149884796142579, 0.08167628479003906, 0.08162051391601563, 0.0826024627685547, 0.08236838531494141, 0.08186688232421875, 0.08182988739013672, 0.08182755279541015, 0.081580322265625, 0.08187699127197266, 0.08182374572753906, 0.08189520263671875, 0.08232777404785156, 0.08200396728515626, 0.08189542388916016, 0.08255795288085938, 0.08274813079833984, 0.08241728210449219, 0.08252073669433593, 0.08244429016113282, 0.08223305511474609, 0.08203907012939453, 0.08229478454589843, 0.08244124603271484, 0.08242063903808594, 0.08230944061279297, 0.08251980590820313, 0.08273824310302734, 0.08283232116699218, 0.08245862579345703, 0.08251779174804688, 0.08242825317382813, 0.08244937896728516, 0.08250790405273438, 0.0821987533569336, 0.08248582458496094, 0.08269414520263672, 0.08270662689208984, 0.08253187561035157, 0.08252444458007813, 0.08313037109375, 0.08298249816894532, 0.08253817749023437, 0.08465523529052735, 0.08204377746582031, 0.08077507019042969, 0.08093705749511719, 0.08088124847412109, 0.08112374114990234, 0.08086287689208985, 0.08139571380615235, 0.08155990600585937, 0.08210431671142578, 0.08147353363037109, 0.08223856353759766, 0.08271878051757812, 0.08185123443603516, 0.08148588562011719, 0.08150579071044922, 0.08186720275878906, 0.08174387359619141, 0.08148377227783203, 0.08144895935058594, 0.08198054504394531, 0.08347843170166015, 0.08198652648925782, 0.08210173034667968, 0.08278275299072266, 0.08242176055908203, 0.08164886474609374, 0.08170909118652343, 0.08212274932861328, 
0.08173158264160156, 0.0817174072265625, 0.08179158020019531, 0.08216575622558593, 0.08243814086914063, 0.08205020904541016, 0.08352239990234375, 0.0828579864501953, 0.08253798675537109, 0.0835076446533203, 0.08181929779052734, 0.0818017578125, 0.08212057495117188, 0.08244217681884766, 0.08219638061523438, 0.0827455062866211, 0.08250342559814453, 0.0825631332397461, 0.08255302429199218, 0.08273625946044921, 0.08281568145751952, 0.08270246124267579, 0.08237267303466797, 0.08246886444091797, 0.08223334503173828, 0.0822518081665039, 0.08298697662353516, 0.08249523162841797, 0.08284515380859375, 0.08301849365234375, 0.08254847717285156, 0.08295862579345703, 0.08314675140380859, 0.08303945922851562, 0.08564070129394531, 0.0817197723388672, 0.08130928039550782, 0.08110041809082032, 0.08120531463623047, 0.08152451324462891, 0.08156047821044922, 0.08136252593994141, 0.08132653045654296, 0.08151449584960938, 0.08196915435791016, 0.08253440093994141, 0.08262860870361328, 0.0821363525390625, 0.08163555145263672, 0.08159190368652344, 0.08138758087158203, 0.08176726531982421, 0.0819603500366211, 0.08150486755371093, 0.08162268829345704, 0.08195516967773438, 0.08213206481933594, 0.08279952239990235, 0.08348262023925782, 0.08202854156494141, 0.08175926208496094, 0.081602783203125, 0.08274969482421875, 0.08165769958496094, 0.08208860778808594, 0.081829345703125, 0.08170317077636718, 0.08169296264648437, 0.08224358367919922, 0.08291709136962891, 0.08272697448730469, 0.08272509002685546, 0.08242166137695313, 0.08204502105712891, 0.08237260437011719, 0.08187289428710938, 0.08218214416503906, 0.08293990325927734, 0.08205862426757812, 0.08225247955322265, 0.08280262756347656, 0.08320800018310547, 0.08283891296386718, 0.08276051330566406, 0.08260995483398438, 0.08249366760253907, 0.08250163269042969, 0.08198925018310547, 0.08207807922363282, 0.08246451568603516, 0.0825483169555664, 0.08261698913574218, 0.08277986907958984, 0.08313065338134766, 0.08315494537353516, 0.08300051116943359, 0.08282943725585938, 0.08430976104736328, 0.08145331573486328, 0.08131075286865234, 0.08116937255859374, 0.08137145233154297, 0.08132563018798829, 0.08124758148193359, 0.08143456268310546, 0.08127369689941406, 0.08173363494873047, 0.08184422302246094, 0.08260403442382812, 0.08267916870117188, 0.08202454376220703, 0.08160924530029297, 0.08175564575195313, 0.08200863647460938, 0.08173152160644531, 0.0817623062133789, 0.08203395080566406, 0.08158806610107422, 0.08210521697998047, 0.08193638610839844, 0.08247673797607422, 0.08239750671386718, 0.08206540679931641, 0.08244818878173828, 0.08202873229980469, 0.08204902648925781, 0.08206902313232421, 0.08169500732421875, 0.0817476806640625, 0.08174639892578126, 0.08196505737304688, 0.08234544372558594, 0.08310838317871094, 0.08235401916503907, 0.0821126708984375, 0.08207360076904296, 0.08216925048828125, 0.08241359710693359, 0.08243462371826171, 0.08224931335449219, 0.08221737670898438, 0.08222310638427735, 0.08236441802978516, 0.08252210998535156, 0.08313037109375, 0.08240850830078125, 0.08220767974853516, 0.08265318298339844, 0.08250572967529297, 0.08241356658935547, 0.08283545684814453, 0.08260179138183593, 0.0823154525756836, 0.08251126098632812, 0.08239369964599609, 0.08302950286865235, 0.08290560150146484, 0.08275750732421874, 0.0829024658203125, 0.08312818908691406, 0.08488285064697265, 0.08145967864990235, 0.08114112091064453, 0.08113228607177735, 0.08113740539550782, 0.08137583923339843, 0.08147727966308593, 0.08218624114990235, 0.08142633819580078, 0.08161698913574218, 
0.08174092864990234, 0.08263542175292969, 0.08244175720214844, 0.08150640106201172, 0.08192060852050781, 0.08129955291748046, 0.08168418884277344, 0.08160176086425781, 0.08171209716796875, 0.0817786865234375, 0.08165769958496094, 0.08188915252685547, 0.08218038177490235, 0.08271257781982422, 0.08263680267333984, 0.08208726501464844, 0.0823340835571289, 0.08169049835205078, 0.08273347473144531, 0.08163123321533203, 0.08192819213867188, 0.08212070465087891, 0.08179507446289062, 0.08164761352539063, 0.08220877075195313, 0.08296428680419922, 0.08287660980224609, 0.08283055877685547, 0.08254544067382813, 0.08378982543945312, 0.08177664184570313, 0.08222310638427735, 0.0823334732055664, 0.08236377716064452, 0.0832807388305664, 0.08240284729003906, 0.08249945831298829, 0.08280534362792968, 0.08390799713134765, 0.08256540679931641, 0.08284976196289062, 0.08248953247070312, 0.08232489776611328, 0.08253311920166016, 0.08247500610351563, 0.08255257415771484, 0.08242201232910157, 0.08299110412597656, 0.0827166748046875, 0.08308700561523437, 0.08287785339355469, 0.08322870635986328, 0.08301046752929687, 0.08494649505615234, 0.08111558532714844, 0.08106393432617187, 0.08129535675048828, 0.08134630584716797, 0.08140585327148438, 0.08137503814697265, 0.08191849517822265, 0.08171520233154297, 0.08159366607666016, 0.08136978912353515, 0.082766845703125, 0.08230726623535156, 0.08142665863037109, 0.0815785903930664, 0.08183715057373046, 0.08228752136230469, 0.08149811553955078, 0.08172748565673828, 0.08190086364746094, 0.08189730834960937, 0.08181539154052735, 0.08202957153320313, 0.08205449676513672, 0.0823539810180664, 0.08214611053466797, 0.08145513916015625, 0.08202649688720703, 0.08159404754638672, 0.08161721801757812, 0.08208589172363281, 0.08199922943115234, 0.08182848358154297, 0.08205721282958985, 0.0823595199584961, 0.08224553680419922, 0.08246975708007813, 0.08216371154785156, 0.08232959747314453, 0.08229388427734376, 0.08196153259277343, 0.08201859283447266, 0.08204070281982422, 0.0823493423461914, 0.0825107192993164, 0.08253833770751953, 0.08293007659912109, 0.08293555450439453, 0.08276188659667968, 0.08380598449707032, 0.08232262420654297, 0.08286707305908203, 0.08302591705322265, 0.08254054260253907, 0.08250367736816407, 0.0827448959350586, 0.08258019256591796, 0.08300240325927734, 0.08283837127685546, 0.0827042236328125, 0.08281855773925781, 0.08268569946289063, 0.08271334075927735, 0.08514598083496094, 0.08145766448974609, 0.08181705474853515, 0.08107881927490235, 0.0809881591796875, 0.08148953247070312, 0.08115238189697266, 0.08198553466796875, 0.08171056365966797, 0.08157647705078125, 0.08152012634277343, 0.08245024108886718, 0.08248390197753906, 0.08180518341064454, 0.08127910614013673, 0.0811167984008789, 0.081795166015625, 0.08169606781005859, 0.08203772735595703, 0.08164886474609374, 0.08187369537353516, 0.08174787139892578, 0.08213085174560547, 0.08208198547363281, 0.08276985931396484, 0.0824730224609375, 0.08204287719726562, 0.08161849975585937, 0.08148831939697265, 0.08260835266113281, 0.08229456329345704, 0.0817413101196289, 0.08151062774658203, 0.08187522888183593, 0.08218550109863282, 0.08298159790039063, 0.08280678558349609, 0.0828960952758789, 0.08226448059082031, 0.08251216125488281, 0.08187709045410156, 0.0819768295288086, 0.08228486633300781, 0.08198572540283203, 0.08177565002441406, 0.08192921447753906, 0.08257532501220703, 0.08283340454101562, 0.08325939178466797, 0.08313446044921875, 0.08275353240966797, 0.08253254699707031, 0.08250962829589843, 0.08279417419433593, 
0.0825367660522461, 0.08274739074707031, 0.08273715209960937, 0.08243814086914063, 0.08271459197998046, 0.08297270202636718, 0.08304434967041016, 0.08290509033203125, 0.08289881896972656, 0.08501558685302735, 0.08155235290527343, 0.08131123352050781, 0.08128972625732422, 0.08149942779541015, 0.08159101104736328, 0.08147763061523437, 0.08116944122314453, 0.08162989044189453, 0.08223977661132813, 0.08157727813720703, 0.08225247955322265, 0.08317942047119141, 0.08195696258544923, 0.08169446563720703, 0.08156963348388672, 0.08179737854003906, 0.08172354888916016, 0.0816332778930664, 0.08131584167480468, 0.08195481872558594, 0.08195891571044922, 0.08179859161376953, 0.08230729675292969, 0.08319747161865235, 0.08211743927001953, 0.08221638488769531, 0.08191238403320313, 0.08189952087402344, 0.08166339111328125, 0.08199858856201171, 0.08172322845458985, 0.08180531311035157, 0.08196812438964844, 0.08308617401123047, 0.08262159729003907, 0.08290201568603515, 0.08241129302978516, 0.08182009887695313, 0.08171907043457032, 0.0817305908203125, 0.08220976257324218, 0.08226988983154297, 0.08207520294189453, 0.0821456298828125, 0.08252867126464844, 0.08285779571533203, 0.08265734100341797, 0.08322265625, 0.08275107574462891, 0.0833653793334961, 0.08231005096435547, 0.08253145599365234, 0.08270323181152343, 0.08246476745605469, 0.08242585754394531, 0.08281702423095703, 0.08272589111328126, 0.08315392303466797, 0.08317250823974609, 0.08328278350830078, 0.08295142364501953, 0.08283417510986328, 0.08503270721435546, 0.08157587432861328, 0.08065376281738282, 0.08089485168457031, 0.08105939483642578, 0.08132621002197266, 0.081498046875, 0.08180770874023438, 0.08136502075195312, 0.08162918090820312, 0.08191795349121093, 0.08256092834472656, 0.08274931335449219, 0.08222509002685546, 0.08142671966552735, 0.08163533020019531, 0.08132608032226563, 0.08155296325683593, 0.08157341003417969, 0.08151856231689453, 0.08160966491699219, 0.08197119903564454, 0.08218323516845703, 0.08241862487792968, 0.08223919677734375, 0.08195475006103516, 0.08160460662841797, 0.08144512176513671, 0.08178083038330078, 0.08185855865478515, 0.08167584228515624, 0.08186515045166015, 0.08194595336914062, 0.08188118743896484, 0.08228691101074219, 0.08284540557861328, 0.08266973114013672, 0.08301376342773438, 0.08234175872802735, 0.08224601745605468, 0.08197119903564454, 0.08232575988769532, 0.0823325424194336, 0.08237667083740234, 0.08240630340576172, 0.08217362976074219, 0.08256336212158204, 0.08278828430175782, 0.08294111633300781, 0.08277289581298829, 0.08269004821777344, 0.0824393310546875, 0.08260694122314453, 0.08225318145751953, 0.08226265716552734, 0.08320956420898437, 0.08234358215332031, 0.08263577270507813, 0.08284569549560547, 0.08293593597412109, 0.08332028961181641, 0.08305500793457031, 0.08299858856201171]",tokens/s,12.165355733746255,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.747328,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,3546.959872,4490.985472,0.0,4112.515072,3975.832064,s,1,9.7631337890625,9.7631337890625,0.0,9.7631337890625,9.7631337890625,9.7631337890625,9.7631337890625,[9.7631337890625],,kWh,8.129022722916588e-05,8.9596226684968e-06,2.642113224800009e-05,0.00011667098214566276,,MB,1479.933952,4688.11776,0.0,4280.287232,4101.544448,s,10,3.15926431274414,0.315926431274414,0.0010109459895497204,0.31538465881347655,0.31761447753906247,0.3176444427490234,0.3176684149169922,"[0.315052734375, 0.3151170043945313, 0.3151600952148437, 0.315346923828125, 0.317018798828125, 0.3176744079589844, 0.3156051330566406, 0.31542239379882814, 0.31525900268554685, 0.3176078186035156]",tokens/s,810.3152337312296,kWh,9.566036410752691e-06,1.0549664938000431e-06,6.345775685935489e-06,1.696677859048822e-05,tokens/kWh,15088309.111519653,MB,1483.948032,4698.60352,0.0,4290.772992,4101.547008,s,10,26.534383544921877,2.6534383544921876,0.00979076944631834,2.656810791015625,2.6639314453125,2.6643930908203126,2.6647624072265628,"[2.65593896484375, 2.64122021484375, 2.6576826171875, 2.63843017578125, 2.662216064453125, 2.653052978515625, 2.664854736328125, 2.638652587890625, 2.663828857421875, 
2.65850634765625]",tokens/s,23.742778833863987,kWh,7.725948908341414e-05,8.521821049239766e-06,4.6889766902464504e-05,0.0001326710770351184,tokens/kWh,474858.58566840255,,s,630,26.531919879913332,0.04211415853954497,0.0007032065951850768,0.0420089111328125,0.04267241172790527,0.043027694702148435,0.045596519966125494,"[0.04278915023803711, 0.04206681442260742, 0.04215881729125977, 0.041765056610107425, 0.04200646209716797, 0.041768959045410156, 0.04157968139648437, 0.04172854232788086, 0.04170783996582031, 0.042733089447021484, 0.042060062408447264, 0.04209683227539063, 0.04245651245117187, 0.04193337631225586, 0.04279417419433594, 0.0428337287902832, 0.04258838272094727, 0.04172470474243164, 0.041697025299072266, 0.041691390991210935, 0.04191823959350586, 0.04171798324584961, 0.041616737365722654, 0.04158531188964844, 0.041543678283691404, 0.041744384765625, 0.041984001159667966, 0.04177920150756836, 0.04248076629638672, 0.04215283203125, 0.04303833770751953, 0.042307647705078125, 0.04219731140136719, 0.04278451156616211, 0.0425945930480957, 0.04255292892456055, 0.04247916793823242, 0.04233299255371094, 0.04220927810668945, 0.04241804885864258, 0.04294259262084961, 0.042123264312744144, 0.04210073471069336, 0.04207382583618164, 0.042313823699951174, 0.042172672271728516, 0.041960609436035155, 0.042229759216308595, 0.04238620758056641, 0.04293632125854492, 0.04228300857543945, 0.04245491027832031, 0.04188787078857422, 0.041836544036865236, 0.04183244705200195, 0.042196990966796875, 0.04196761703491211, 0.042493694305419924, 0.041865470886230466, 0.04200815963745117, 0.042098209381103514, 0.04193574523925781, 0.041981952667236325, 0.043318336486816406, 0.04226144027709961, 0.042842113494873046, 0.04219084930419922, 0.04202905654907227, 0.041850879669189454, 0.04170751953125, 0.041883647918701174, 0.042004417419433594, 0.04399929428100586, 0.04259340667724609, 0.04164412689208984, 0.04230390548706055, 0.042022911071777344, 0.04160345458984375, 0.04134108734130859, 0.04156156921386719, 0.04254348754882813, 0.04160038375854492, 0.04141120147705078, 0.041560062408447264, 0.04135696029663086, 0.04141091156005859, 0.041312255859375, 0.04151910400390625, 0.041272544860839845, 0.041685791015625, 0.04166656112670898, 0.041419998168945316, 0.04153408050537109, 0.041429153442382814, 0.04143299102783203, 0.04235651016235351, 0.04188726425170899, 0.04252751922607422, 0.041404415130615234, 0.041670654296875, 0.04206585693359375, 0.04196307373046875, 0.04193468856811523, 0.04189977645874023, 0.042740638732910154, 0.041957279205322266, 0.042109024047851565, 0.04217647933959961, 0.041842655181884764, 0.041682048797607424, 0.041683902740478514, 0.04154560089111328, 0.04179776000976562, 0.04188147354125977, 0.0419117431640625, 0.042050113677978514, 0.041807456970214846, 0.04187804794311523, 0.042205184936523435, 0.041928382873535154, 0.04147004699707031, 0.042215648651123046, 0.041711616516113284, 0.042032192230224606, 0.04232233428955078, 0.04202665710449219, 0.04261273574829102, 0.04176668930053711, 0.04155619049072266, 0.041472000122070314, 0.04195052719116211, 0.04135977554321289, 0.04144319915771484, 0.04129548645019531, 0.04130607986450195, 0.041997150421142576, 0.04146726226806641, 0.04385830307006836, 0.04354790496826172, 0.04158768081665039, 0.04199161529541016, 0.041551422119140625, 0.04158566284179688, 0.0418485107421875, 0.04220489501953125, 0.042151519775390625, 0.042038272857666016, 0.04165222549438476, 0.041688095092773436, 0.041618400573730466, 0.04149452972412109, 0.04158591842651367, 
0.0415030403137207, 0.04190579223632813, 0.041874496459960935, 0.04324121475219726, 0.04225843048095703, 0.04231167984008789, 0.0451932144165039, 0.044146015167236326, 0.04240991973876953, 0.04321148681640625, 0.04193075180053711, 0.04225228881835937, 0.04235171127319336, 0.04194393539428711, 0.04182422256469726, 0.04172969436645508, 0.04223235321044922, 0.04216726303100586, 0.041847713470458986, 0.041799678802490234, 0.0425984001159668, 0.04170492935180664, 0.04182684707641601, 0.04183564758300781, 0.042019710540771485, 0.042712158203125, 0.042062366485595706, 0.041997726440429685, 0.04227139282226562, 0.04191648101806641, 0.041974014282226565, 0.04225609588623047, 0.04277008056640625, 0.04225497436523438, 0.04223385620117188, 0.045540447235107424, 0.04266662216186524, 0.04313587188720703, 0.04221952056884765, 0.041777153015136716, 0.041404415130615234, 0.04160710525512695, 0.0414035530090332, 0.041522079467773435, 0.04151295852661133, 0.041809921264648435, 0.04171139144897461, 0.04170774459838867, 0.04159718322753906, 0.04234419250488281, 0.04123209762573242, 0.04177129745483398, 0.041828353881835936, 0.04153097534179687, 0.04159280014038086, 0.04189046478271485, 0.04231964874267578, 0.0415968017578125, 0.04160019302368164, 0.041807903289794925, 0.041540512084960936, 0.0418939208984375, 0.04157350540161133, 0.04244259262084961, 0.04160742568969727, 0.04168576049804688, 0.04177305603027344, 0.04160841751098633, 0.041812767028808595, 0.04204339218139649, 0.04207846450805664, 0.04221011352539063, 0.04168307113647461, 0.04265011215209961, 0.04200019073486328, 0.042106624603271484, 0.04224691009521484, 0.042430526733398435, 0.04336172866821289, 0.04214822387695313, 0.04281692886352539, 0.04170576095581055, 0.04202950286865234, 0.04162355041503906, 0.04156620788574219, 0.04196147155761719, 0.04227811050415039, 0.04228931045532226, 0.04173068618774414, 0.04155596923828125, 0.04144057464599609, 0.04128780746459961, 0.041476032257080075, 0.04126076889038086, 0.0418436164855957, 0.041842369079589846, 0.042420543670654294, 0.04168035125732422, 0.041490974426269533, 0.04207001495361328, 0.04563478469848633, 0.04276035308837891, 0.04221401596069336, 0.04200966262817383, 0.04215478515625, 0.04230364990234375, 0.042633216857910154, 0.04267212677001953, 0.04249190521240234, 0.042055137634277345, 0.0422116813659668, 0.04330720138549805, 0.04294655990600586, 0.04249302291870117, 0.042042270660400394, 0.042780670166015625, 0.04268185424804687, 0.042608158111572265, 0.042574817657470704, 0.04175667190551758, 0.04232806396484375, 0.04200147247314453, 0.042557472229003905, 0.04238748931884766, 0.04212428665161133, 0.04202073669433594, 0.041647838592529296, 0.0417239990234375, 0.041888031005859375, 0.042127201080322266, 0.04255136108398438, 0.04272332763671875, 0.042570785522460936, 0.042183361053466796, 0.04266214370727539, 0.043644161224365235, 0.04295872116088867, 0.04245967864990234, 0.04193852615356446, 0.04267497634887695, 0.041783199310302735, 0.0422213134765625, 0.04228335952758789, 0.04266131210327148, 0.04202348709106445, 0.04144332885742188, 0.04207206344604492, 0.041605121612548826, 0.04159436798095703, 0.04171404647827148, 0.041729248046875, 0.041781536102294924, 0.04173068618774414, 0.04179753494262695, 0.041439327239990234, 0.041702816009521484, 0.041644416809082034, 0.04145993423461914, 0.0423436164855957, 0.04179167938232422, 0.042496448516845704, 0.041597057342529296, 0.041549888610839844, 0.04286054229736328, 0.04206796646118164, 0.04148588943481445, 0.041080638885498046, 
0.041500831604003904, 0.04118985748291016, 0.041613311767578126, 0.041475807189941406, 0.04125699234008789, 0.04136166381835937, 0.04112998580932617, 0.04170326232910156, 0.04156947326660156, 0.042218463897705075, 0.04132767868041992, 0.04133369445800781, 0.0417628173828125, 0.04204339218139649, 0.04165017700195312, 0.04196352005004883, 0.04196352005004883, 0.04230963134765625, 0.042040863037109376, 0.041827838897705076, 0.041947166442871095, 0.04191638565063477, 0.04197679901123047, 0.0419060173034668, 0.04190326309204102, 0.04208051300048828, 0.04199523162841797, 0.04195100784301758, 0.04193280029296875, 0.04269465637207031, 0.04222512054443359, 0.04252316665649414, 0.04202880096435547, 0.04279439926147461, 0.0419951057434082, 0.04220716857910156, 0.04246739196777344, 0.04220064163208008, 0.04223980712890625, 0.04296384048461914, 0.04259404754638672, 0.04207603073120117, 0.042411678314208986, 0.04245142364501953, 0.042299198150634765, 0.04219513702392578, 0.042246143341064454, 0.0420404167175293, 0.041984928131103515, 0.042028736114501954, 0.042221729278564456, 0.042053791046142576, 0.042119102478027345, 0.04257555389404297, 0.045690975189208984, 0.044897857666015624, 0.04198873519897461, 0.042649375915527345, 0.041605438232421875, 0.043014686584472654, 0.042325729370117186, 0.04187750244140625, 0.0413298225402832, 0.04159695816040039, 0.04188857650756836, 0.04153897476196289, 0.041810527801513675, 0.0413647346496582, 0.04138265609741211, 0.04143247985839844, 0.04145414352416992, 0.04227484893798828, 0.04209235382080078, 0.04239993667602539, 0.04154937744140625, 0.04187180709838867, 0.04561942291259766, 0.049512222290039064, 0.04216579055786133, 0.04204576110839844, 0.04276035308837891, 0.04219087982177734, 0.04220451354980469, 0.04197644805908203, 0.04211097717285156, 0.04208025741577148, 0.042090496063232424, 0.041985313415527345, 0.04196220779418945, 0.04204748916625976, 0.042178558349609374, 0.04192182540893555, 0.042003360748291016, 0.042071361541748044, 0.04213593673706055, 0.04214182281494141, 0.04290691375732422, 0.0420107536315918, 0.042048095703125, 0.04223347091674805, 0.04249385452270508, 0.042015201568603514, 0.041987422943115235, 0.042201503753662106, 0.04271974563598633, 0.04233599853515625, 0.04227686309814453, 0.04223299026489258, 0.04222652816772461, 0.04246323013305664, 0.04239769744873047, 0.042221569061279295, 0.042338081359863285, 0.04232828903198242, 0.04233011245727539, 0.04236220932006836, 0.042100704193115235, 0.04197036743164063, 0.04238934326171875, 0.042374462127685544, 0.04314595031738281, 0.04208652877807617, 0.042969024658203125, 0.04216451263427735, 0.04223376083374023, 0.042218910217285154, 0.042539295196533204, 0.042307998657226564, 0.041966655731201175, 0.041603073120117184, 0.04179040145874023, 0.04154777526855469, 0.041488384246826174, 0.04158464050292969, 0.042061630249023436, 0.04203673553466797, 0.04263974380493164, 0.04173459243774414, 0.04170051193237305, 0.0416712646484375, 0.04219097518920899, 0.04255123138427734, 0.042242111206054686, 0.04192665481567383, 0.042485889434814454, 0.04183347320556641, 0.04157734298706055, 0.041338878631591795, 0.04136671829223633, 0.04133135986328125, 0.04134092712402344, 0.04135523223876953, 0.041669822692871096, 0.04183552169799805, 0.04195942306518555, 0.042489696502685546, 0.04157455825805664, 0.04161027145385742, 0.041642753601074216, 0.041467838287353516, 0.04237551879882812, 0.04159686279296875, 0.041417984008789065, 0.041343742370605466, 0.04171366500854492, 0.04166041564941406, 0.04156800079345703, 
0.04142630386352539, 0.042251136779785155, 0.042049537658691405, 0.04198390579223633, 0.041910369873046874, 0.04177638244628906, 0.04165036773681641, 0.041468448638916015, 0.04138995361328125, 0.041535648345947265, 0.0416071662902832, 0.04180537414550781, 0.041763263702392577, 0.04143718338012695, 0.04174028778076172, 0.042053089141845704, 0.0433230094909668, 0.043496353149414066, 0.04261996841430664, 0.041974720001220704, 0.04211916732788086, 0.04206387329101562, 0.04197990417480469, 0.04181305694580078, 0.04188051223754883, 0.0416255989074707, 0.04186531066894531, 0.04224524688720703, 0.04215270233154297, 0.04208233642578125, 0.04301193618774414, 0.04189353561401367, 0.042735103607177735, 0.04647417449951172, 0.04221139144897461, 0.04232396697998047, 0.0421212158203125, 0.04203958511352539, 0.0420533447265625, 0.04211097717285156, 0.042051200866699216, 0.042484096527099606, 0.04224204635620117, 0.04243382263183594, 0.04240867233276367, 0.04237107086181641, 0.04254719924926758, 0.042404991149902344, 0.04232278442382813, 0.042256351470947265, 0.04245017623901367, 0.042519359588623046, 0.042390846252441404, 0.04210335922241211, 0.041992321014404296, 0.042549278259277346, 0.042108863830566404, 0.04344623947143555, 0.04219295883178711, 0.04193017578125, 0.0424288330078125, 0.04170563125610351, 0.0421396484375, 0.04192563247680664, 0.04254617691040039, 0.04182015991210938, 0.04186444854736328, 0.04205849456787109, 0.042092544555664066, 0.042180606842041016, 0.04247347259521484, 0.04207206344604492, 0.04195452880859375, 0.041863937377929684, 0.042539039611816404, 0.04204489517211914, 0.04193539047241211, 0.04182246398925781, 0.04182195281982422, 0.04362575912475586, 0.042031711578369144, 0.04241356658935547, 0.041701889038085936, 0.04174848175048828, 0.04589363098144531, 0.04178348922729492, 0.04148515319824219, 0.04243952178955078, 0.04195094299316406, 0.04182611083984375, 0.042385631561279294, 0.042199359893798825, 0.04191007995605469, 0.04213942337036133, 0.04408163070678711, 0.044423168182373046, 0.04185651016235352, 0.04317440032958984, 0.04155187225341797, 0.0416437759399414, 0.042463489532470707, 0.041912254333496095, 0.04154374313354492, 0.041692222595214844, 0.042148799896240235, 0.04151500701904297, 0.04151295852661133, 0.041801727294921875, 0.04165631866455078, 0.04184617614746094, 0.04312124633789063, 0.04183990478515625, 0.041810657501220705, 0.0417628173828125, 0.04188911819458008, 0.04166108703613281, 0.04165836715698242, 0.04170137786865234, 0.041693088531494144, 0.04232611083984375, 0.042352638244628905, 0.042866687774658206, 0.04184796905517578, 0.04169606399536133, 0.04252675247192383, 0.04571340942382812, 0.042031105041503904, 0.041940990447998046, 0.043872257232666016, 0.0416409912109375, 0.04217667388916015, 0.04214566421508789, 0.04210073471069336, 0.04203411102294922, 0.042014495849609375, 0.04169136047363281, 0.04188131332397461, 0.041642143249511716, 0.041844352722167966, 0.04177766418457031, 0.04187516784667969, 0.04239798355102539, 0.04245471954345703, 0.04158700942993164]",tokens/s,23.744983508598548,,, 
4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,5172.03968,5444.07552,0.0,5058.330624,5057.441792,s,1,10.30113671875,10.30113671875,0.0,10.30113671875,10.30113671875,10.30113671875,10.30113671875,[10.30113671875],,kWh,9.681012048748318e-05,1.0671838020907615e-05,3.1041691500005575e-05,0.00013852365000839638,,MB,1773.330432,5630.722048,0.0,5215.617024,5189.834752,s,10,5.009014770507813,0.5009014770507813,0.002396586407554497,0.5010776977539062,0.503737875366211,0.5039901077270508,0.5041918936157226,"[0.5036818237304688, 0.49741973876953127, 0.4983232421875, 0.4985906982421875, 0.4986664123535156, 0.5005232238769531, 0.5016321716308594, 0.5026320190429687, 0.5042423400878906, 0.5033031005859375]",tokens/s,511.0785488341588,kWh,1.520404260520877e-05,1.6767636336101721e-06,1.009114696179994e-05,2.6971953200618886e-05,tokens/kWh,9491340.804867107,MB,1778.802688,5651.693568,0.0,5234.491392,5189.837312,s,10,36.79085546875001,3.6790855468749997,0.009391955265390793,3.6792579345703125,3.6912801025390625,3.6926877807617187,3.6938139233398437,"[3.67508837890625, 3.668557861328125, 3.684935302734375, 3.66446435546875, 3.69096728515625, 3.674944580078125, 3.683427490234375, 3.684173095703125, 3.67020166015625, 3.694095458984375]",tokens/s,17.123820361696115,kWh,0.00010697945141479031,1.1800293453767187e-05,6.642994203279947e-05,0.00018520968690135696,tokens/kWh,340154.9943419208,,s,630,36.788637306213374,0.058394662390814894,0.0005951117955180048,0.0582541446685791,0.058867712783813475,0.05927761402130127,0.06118899085998536,"[0.05982278442382812, 0.05923535919189453, 0.0581363525390625, 0.05908163070678711, 0.059377662658691405, 0.05826540756225586, 0.05810172653198242, 0.058326751708984374, 0.058670719146728514, 0.05851635360717773, 0.05854412841796875, 0.05858428955078125, 0.058476608276367185, 0.05883363342285156, 0.058283424377441405, 0.05821484756469727, 0.05829033660888672, 0.058277889251708986, 0.05831174468994141, 0.058787841796875, 0.05881747055053711, 0.058199840545654295, 0.05799462509155273, 0.0581640625, 0.058146270751953125, 0.05849657440185547, 0.058352767944335936, 0.058267486572265624, 0.058621952056884766, 0.05820211029052735, 0.05810176086425781, 0.058054656982421876, 0.058219551086425785, 0.05813542556762695, 0.05816684722900391, 0.058200767517089844, 0.05812575912475586, 0.05813190460205078, 0.058048961639404296, 0.05803007888793945, 0.0580687026977539, 0.05807392120361328, 0.057966049194335935, 0.05851728057861328, 0.058337440490722654, 0.05803273773193359, 0.05807718276977539, 0.05796361541748047, 0.05812627029418945, 0.05821503829956055, 0.05800995254516601, 0.058220703125, 0.05833878326416016, 0.058356094360351565, 0.058463905334472654, 0.05810211181640625, 0.05809324645996094, 0.05810617446899414, 0.05788467025756836, 0.05808947372436524, 0.05854003143310547, 0.05828515243530273, 0.05837887954711914, 0.05857904052734375, 
0.05797916793823242, 0.057896961212158204, 0.05783078384399414, 0.0579381103515625, 0.057987167358398435, 0.05863776016235352, 0.05860847854614258, 0.05812956619262695, 0.05847132873535156, 0.058033729553222654, 0.05938204956054687, 0.05833324813842773, 0.05805446243286133, 0.05818531036376953, 0.05805926513671875, 0.05825155258178711, 0.05840911865234375, 0.058095359802246095, 0.05794569778442383, 0.05794367980957031, 0.058087905883789065, 0.05805836868286133, 0.05843833541870117, 0.058506847381591794, 0.05788415908813477, 0.057904319763183595, 0.0579370231628418, 0.058066753387451174, 0.058211105346679684, 0.057960094451904295, 0.057938270568847657, 0.057933280944824216, 0.057891361236572264, 0.058636287689208984, 0.059642112731933594, 0.058216190338134764, 0.05819004821777344, 0.05801919937133789, 0.05793219375610351, 0.05829033660888672, 0.05854601669311523, 0.058458206176757815, 0.05819968032836914, 0.05810956954956055, 0.058122913360595704, 0.05825126266479492, 0.05829804611206055, 0.05808566284179688, 0.05807516860961914, 0.05858256149291992, 0.0588702392578125, 0.058560672760009765, 0.05831459045410156, 0.05830640029907227, 0.05819203186035156, 0.05819343948364258, 0.05809609603881836, 0.058238048553466794, 0.05816131210327148, 0.058057567596435544, 0.05809740829467774, 0.05802204895019531, 0.058810558319091794, 0.05822659301757813, 0.05783363342285156, 0.06180764770507813, 0.058514240264892575, 0.05790630340576172, 0.05896492767333984, 0.058227745056152344, 0.058595584869384765, 0.05817129516601562, 0.05804729461669922, 0.05810160064697266, 0.05802102279663086, 0.05816553497314453, 0.058466976165771484, 0.058167457580566406, 0.05819964981079102, 0.05799452972412109, 0.05839971160888672, 0.05879808044433594, 0.058012702941894534, 0.05883798217773437, 0.06133059310913086, 0.060310367584228516, 0.061034496307373044, 0.060176288604736325, 0.05819347381591797, 0.05793772888183594, 0.05797763061523437, 0.057855934143066404, 0.057987071990966796, 0.057968318939208986, 0.060295486450195314, 0.05857279968261719, 0.05854617691040039, 0.05793561553955078, 0.05806924819946289, 0.05786352157592774, 0.0581280632019043, 0.05840569686889648, 0.05873408126831055, 0.058004161834716794, 0.05803593444824219, 0.05811430358886719, 0.05811948776245117, 0.057772510528564455, 0.057782497406005856, 0.05799219131469727, 0.058352638244628906, 0.05845708847045898, 0.05829679870605469, 0.05857952117919922, 0.05822665786743164, 0.058492767333984376, 0.058116256713867186, 0.058245407104492185, 0.058638046264648434, 0.05829632186889649, 0.058791934967041014, 0.05825126266479492, 0.05828185653686523, 0.05827151870727539, 0.058001407623291014, 0.05854281616210937, 0.05848099136352539, 0.05835961532592773, 0.05880752182006836, 0.058052833557128904, 0.05814422225952148, 0.05799417495727539, 0.05791769790649414, 0.05797043228149414, 0.05800755310058594, 0.057947200775146486, 0.05828214263916016, 0.058012256622314455, 0.058073280334472656, 0.05787968063354492, 0.05808832168579101, 0.05851980972290039, 0.057844608306884766, 0.058203006744384764, 0.05859692764282227, 0.058665374755859374, 0.05910262298583984, 0.0581984977722168, 0.057985118865966793, 0.057958465576171875, 0.05798905563354492, 0.05809267044067383, 0.05808969497680664, 0.05787107086181641, 0.05803007888793945, 0.057855262756347656, 0.059205406188964846, 0.058198974609375, 0.05841696166992188, 0.058111297607421876, 0.05790576171875, 0.05802627182006836, 0.05824470520019531, 0.05860598373413086, 0.05821868896484375, 0.05806041717529297, 0.057931968688964844, 
0.05768627166748047, 0.057865985870361326, 0.05819596862792969, 0.05845123291015625, 0.05799939346313476, 0.057846462249755856, 0.05797280120849609, 0.057933761596679685, 0.05813043212890625, 0.058029823303222657, 0.05794636917114258, 0.058438751220703126, 0.05839904022216797, 0.058776161193847654, 0.057968639373779295, 0.05804038238525391, 0.05803168106079101, 0.05804067230224609, 0.05815321731567383, 0.05807247924804688, 0.05778195190429687, 0.059635711669921876, 0.05908684921264649, 0.05816115188598633, 0.05810358428955078, 0.058028350830078124, 0.05829996871948242, 0.058912960052490235, 0.05792720031738281, 0.05827443313598633, 0.05811209487915039, 0.05878467178344727, 0.0581212158203125, 0.05801161575317383, 0.05819564819335937, 0.05828870391845703, 0.05823158264160156, 0.05812531280517578, 0.058517505645751956, 0.05870796966552734, 0.05803596878051758, 0.05801395034790039, 0.05811814498901367, 0.058257408142089843, 0.05946739196777344, 0.05824911880493164, 0.05812579345703125, 0.05809254455566406, 0.05849702453613281, 0.05838979339599609, 0.05811228942871094, 0.058066753387451174, 0.05813107299804687, 0.0618779182434082, 0.05882668685913086, 0.05878006362915039, 0.0590623664855957, 0.0586341438293457, 0.05787823867797852, 0.05888399887084961, 0.05820240020751953, 0.0580302734375, 0.05815081787109375, 0.05807308959960938, 0.05826764678955078, 0.05829017639160156, 0.05826259231567383, 0.05816972732543945, 0.058027679443359376, 0.05835561752319336, 0.05825843048095703, 0.05846243286132812, 0.05899446487426758, 0.05925846481323242, 0.06327977752685547, 0.059174655914306644, 0.05856665420532227, 0.0583454704284668, 0.05822054290771484, 0.05882611083984375, 0.05838092803955078, 0.06084787368774414, 0.05888051223754883, 0.058383201599121096, 0.05901017761230469, 0.057949089050292966, 0.05793920135498047, 0.05801171112060547, 0.05801030349731445, 0.0583251838684082, 0.05835257720947266, 0.058905376434326175, 0.058068801879882816, 0.05834124755859375, 0.05812374496459961, 0.05795238494873047, 0.058477310180664065, 0.058732608795166015, 0.05831814575195313, 0.05830329513549805, 0.05808224105834961, 0.057985374450683594, 0.058179519653320313, 0.05791775894165039, 0.05811552047729492, 0.05776678466796875, 0.057869537353515625, 0.05807593536376953, 0.057968673706054685, 0.058117919921875, 0.05792585754394531, 0.05796822357177735, 0.05807753753662109, 0.058014976501464845, 0.05814128112792969, 0.059293281555175784, 0.05839632034301758, 0.0586409912109375, 0.05810406494140625, 0.058269054412841796, 0.058162113189697266, 0.05823673629760742, 0.058142017364501954, 0.05809945678710938, 0.05824124908447265, 0.05783011245727539, 0.058039424896240234, 0.058029087066650394, 0.05812790298461914, 0.05805420684814453, 0.058364383697509764, 0.05840259170532226, 0.058433761596679686, 0.05889987182617187, 0.05886764907836914, 0.058897441864013675, 0.058654624938964846, 0.058398975372314456, 0.05828694534301758, 0.05846239852905273, 0.0580689582824707, 0.05842406463623047, 0.05816320037841797, 0.058380287170410154, 0.0582369270324707, 0.058570751190185545, 0.06251260757446289, 0.05947596740722656, 0.05872422409057617, 0.058888385772705075, 0.058353599548339845, 0.05854937744140625, 0.05850406265258789, 0.058471744537353515, 0.05858303833007812, 0.05819599914550781, 0.05836185455322265, 0.0582017936706543, 0.05810006332397461, 0.058533950805664064, 0.05836243057250977, 0.058070945739746097, 0.05827132797241211, 0.05818777465820312, 0.05804492950439453, 0.05806883239746094, 0.05981801605224609, 0.0591905288696289, 
0.05829516983032226, 0.058325214385986326, 0.0580544319152832, 0.05810790252685547, 0.05798297500610351, 0.05814051055908203, 0.05824323272705078, 0.05824716949462891, 0.05830246353149414, 0.05863980865478516, 0.05827657699584961, 0.05835081481933594, 0.05830105590820313, 0.05826969528198242, 0.0582553596496582, 0.05863123321533203, 0.058364639282226564, 0.05853961563110351, 0.05836822509765625, 0.05835148620605469, 0.059062816619873046, 0.05831884765625, 0.05805065536499023, 0.05858294296264648, 0.058453441619873044, 0.058485313415527346, 0.05837209701538086, 0.05849318313598633, 0.059031360626220705, 0.05855635070800781, 0.05905408096313477, 0.05822566223144531, 0.058192222595214844, 0.05825603103637695, 0.05807718276977539, 0.058322975158691406, 0.05822870254516602, 0.05816348648071289, 0.058087135314941404, 0.06068819046020508, 0.05887171173095703, 0.058624286651611325, 0.05875360107421875, 0.058386497497558594, 0.05798297500610351, 0.05809971237182617, 0.05808486557006836, 0.0581288948059082, 0.05836211013793945, 0.05826268768310547, 0.05841775894165039, 0.06065091323852539, 0.05956464004516602, 0.05862400054931641, 0.0587011833190918, 0.058344352722167966, 0.0583719367980957, 0.05836172866821289, 0.05885491180419922, 0.06125209426879883, 0.05841468811035156, 0.05840224075317383, 0.058330078125, 0.05834979248046875, 0.058256256103515626, 0.058366622924804684, 0.058292705535888674, 0.05821737670898437, 0.05834627151489258, 0.05848892974853516, 0.058167072296142576, 0.05839187240600586, 0.05854076766967774, 0.05851126480102539, 0.05862591934204102, 0.058783424377441405, 0.05811273574829102, 0.05836579132080078, 0.05837424087524414, 0.05831846237182617, 0.05829676818847656, 0.05817161560058594, 0.05812361526489258, 0.058311103820800785, 0.05817459106445313, 0.058347553253173826, 0.05834428787231445, 0.05824486541748047, 0.05841945648193359, 0.058605567932128906, 0.058521598815917966, 0.05851340866088867, 0.05848614501953125, 0.0584114875793457, 0.058304672241210935, 0.058246177673339845, 0.058134880065917965, 0.058329376220703125, 0.058390880584716795, 0.0581429443359375, 0.058545120239257814, 0.058314655303955076, 0.05849590301513672, 0.05874236679077149, 0.058469024658203125, 0.05849116897583008, 0.05823603057861328, 0.05809609603881836, 0.05812262344360351, 0.05806422424316406, 0.05800537490844727, 0.05823683166503906, 0.05908160018920899, 0.05834486389160156, 0.058406654357910155, 0.05808736038208008, 0.05829504013061523, 0.05809571075439453, 0.05822265625, 0.05797455978393555, 0.05808560180664062, 0.058011646270751956, 0.05793574523925781, 0.0581321907043457, 0.05802230453491211, 0.058044448852539066, 0.058062816619873045, 0.05809344100952148, 0.058021728515625, 0.05858332824707031, 0.05807324981689453, 0.05837398529052734, 0.05812131118774414, 0.05831568145751953, 0.058382335662841796, 0.058824737548828124, 0.05795945739746094, 0.05815087890625, 0.058092033386230466, 0.05799913787841797, 0.05798067092895508, 0.05794611358642578, 0.057860958099365235, 0.05810966491699219, 0.058120033264160154, 0.05807772827148437, 0.05797040176391602, 0.05791171264648438, 0.05794803237915039, 0.058281185150146485, 0.05814511871337891, 0.05827635192871094, 0.0608787841796875, 0.059428863525390625, 0.05841075134277344, 0.0582658576965332, 0.05812838363647461, 0.05808329772949219, 0.05822671890258789, 0.05819801712036133, 0.058241024017333984, 0.05821366500854492, 0.05830428695678711, 0.05856966400146484, 0.05845756912231445, 0.05844390487670898, 0.05827791976928711, 0.05817792129516602, 
0.0588682861328125, 0.061377662658691406, 0.05829833602905273, 0.05827062225341797, 0.0582309455871582, 0.05817660903930664, 0.058108638763427735, 0.05816700744628906, 0.0593636474609375, 0.058388065338134766, 0.05919171142578125, 0.0583372802734375, 0.05826172637939453, 0.05826128005981445, 0.05818185424804687, 0.05829199981689453, 0.05820140838623047, 0.06025270462036133, 0.05851766586303711, 0.05868134307861328, 0.05826105499267578, 0.05859702301025391, 0.058319648742675784, 0.05811759948730469, 0.05815766525268555, 0.05805075073242188, 0.05814374542236328, 0.058218368530273436, 0.060909599304199216, 0.05883580780029297, 0.05835776138305664, 0.05841715240478516, 0.05835696029663086, 0.058490753173828125, 0.058347518920898435, 0.05921392059326172, 0.058586078643798827, 0.06030723190307617, 0.059008766174316406, 0.058804542541503906, 0.05886566543579102, 0.05838643264770508, 0.05821817779541016, 0.058267967224121094, 0.06042828750610352, 0.060174144744873044, 0.058931392669677736, 0.05836825561523438, 0.05828799819946289, 0.05824288177490235, 0.05821180725097656, 0.058247806549072266, 0.05859324645996094, 0.05828992080688476, 0.058198047637939454, 0.0591190071105957, 0.05829305648803711, 0.058266815185546876, 0.05832992172241211, 0.05826150512695313, 0.0582529296875, 0.05859801483154297, 0.058114849090576175]",tokens/s,17.1248528385583,,, 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,890.884096,638.517248,0.0,260.046848,253.520896,s,1,8.0936240234375,8.0936240234375,0.0,8.0936240234375,8.0936240234375,8.0936240234375,8.0936240234375,[8.0936240234375],,kWh,2.178515275418249e-05,2.395556827885202e-06,6.556116355999064e-06,3.073682593806676e-05,,MB,1186.992128,743.374848,0.0,335.54432,312.653824,s,11,0.17854700851440433,0.016231546228582208,0.000630683680541129,0.016063615798950194,0.016344608306884764,0.017258992195129393,0.017990499305725098,"[0.016101696014404296, 0.018173376083374024, 0.015791199684143067, 0.01610633659362793, 0.01587868785858154, 0.01605958366394043, 0.016063615798950194, 0.01605830383300781, 0.016344608306884764, 0.01611008071899414, 0.015859519958496094]",tokens/s,15771.75682432573,kWh,4.6838706919478506e-07,5.165474401098827e-08,3.0948883328730977e-07,8.29530646493083e-07,tokens/kWh,308608248.6310343,MB,1220.722688,768.540672,0.0,360.710144,313.442816,s,11,10.188017028808595,0.9261833662553268,0.012551883267073127,0.9244647827148438,0.94057861328125,0.9455420227050781,0.9495127502441406,"[0.9354699096679687, 0.9142109375, 0.9175198364257813, 0.9177374877929687, 0.910325439453125, 0.9118135375976563, 0.94057861328125, 0.9352012329101562, 0.9505054321289063, 0.9301898193359375, 
0.9244647827148438]",tokens/s,68.02108771907311,kWh,2.6536873391409425e-05,2.9263875942598974e-06,1.000289315398753e-05,3.946615413965684e-05,tokens/kWh,1596304.5139150159,,s,693,10.182142732620244,0.014692846656017661,0.0003464994459911771,0.014606592178344726,0.015085388946533204,0.015232236480712891,0.015802830352783206,"[0.014614239692687988, 0.01470902442932129, 0.014744992256164552, 0.014729344367980957, 0.014690784454345704, 0.014905344009399414, 0.014589183807373047, 0.014713824272155762, 0.01480454444885254, 0.014604512214660645, 0.014548224449157715, 0.014502655982971192, 0.014434304237365723, 0.014461312294006348, 0.01456009578704834, 0.01461958408355713, 0.01464031982421875, 0.014836383819580077, 0.015117664337158204, 0.01471555233001709, 0.015302463531494141, 0.015609663963317871, 0.015290656089782716, 0.014775775909423828, 0.014844544410705566, 0.014679072380065918, 0.014588895797729493, 0.014684063911437988, 0.014686304092407227, 0.014724960327148437, 0.014532959938049317, 0.01443827247619629, 0.014497664451599121, 0.014600255966186523, 0.01458348846435547, 0.014523967742919922, 0.014648063659667969, 0.014520319938659668, 0.015224831581115723, 0.015095071792602539, 0.014969599723815919, 0.014935551643371582, 0.014897120475769044, 0.01520400047302246, 0.01493238353729248, 0.014997407913208008, 0.014850591659545899, 0.014904576301574707, 0.014811296463012696, 0.014827584266662598, 0.014745823860168457, 0.014808799743652344, 0.014698335647583008, 0.0147009916305542, 0.014768447875976563, 0.014739456176757813, 0.014978400230407715, 0.01481385612487793, 0.014935135841369629, 0.014817952156066894, 0.014854399681091309, 0.015026176452636719, 0.01817728042602539, 0.014867391586303711, 0.01584323215484619, 0.014706784248352051, 0.014812191963195801, 0.014917920112609864, 0.014889663696289062, 0.01480835247039795, 0.014758624076843262, 0.014587743759155274, 0.014481568336486816, 0.014499679565429687, 0.014436511993408204, 0.01445683193206787, 0.014389247894287109, 0.014480671882629394, 0.014519136428833007, 0.01451580810546875, 0.014390912055969238, 0.014434975624084473, 0.014417920112609863, 0.014432095527648925, 0.01487433624267578, 0.014385120391845703, 0.014516703605651856, 0.014431488037109376, 0.014664447784423829, 0.01471232032775879, 0.01463321590423584, 0.01460431957244873, 0.014417695999145507, 0.014420607566833495, 0.014417856216430665, 0.014481568336486816, 0.01434934425354004, 0.014382816314697266, 0.014328800201416015, 0.01439305591583252, 0.01442966365814209, 0.014330495834350586, 0.014367103576660156, 0.01438918399810791, 0.014426336288452149, 0.01446828842163086, 0.014430047988891602, 0.014381695747375488, 0.014360575675964356, 0.014430208206176758, 0.014317567825317384, 0.014369855880737305, 0.014385855674743653, 0.014327232360839843, 0.014361408233642578, 0.014321727752685547, 0.014345439910888672, 0.014293727874755859, 0.014419103622436523, 0.014627679824829102, 0.014380319595336914, 0.014381792068481445, 0.014395615577697755, 0.014350111961364747, 0.014395392417907715, 0.014369119644165039, 0.014117183685302735, 0.014398112297058105, 0.014665727615356445, 0.014528512001037597, 0.014538847923278808, 0.014790559768676758, 0.01469161605834961, 0.014564064025878906, 0.014527935981750489, 0.014588607788085937, 0.014555232048034668, 0.014437664031982421, 0.014676480293273926, 0.014425439834594727, 0.014437024116516112, 0.014375967979431152, 0.014429408073425293, 0.014372639656066895, 0.01439072036743164, 0.014428832054138184, 0.014454976081848144, 0.014473983764648438, 
0.01443068790435791, 0.0144071683883667, 0.014535264015197754, 0.014464384078979492, 0.014821663856506347, 0.014568511962890624, 0.01455235195159912, 0.01445241641998291, 0.014437055587768555, 0.014471199989318848, 0.014391263961791993, 0.014435680389404296, 0.014373536109924317, 0.014475263595581055, 0.014367839813232423, 0.014383616447448731, 0.014451328277587891, 0.014391072273254394, 0.014424287796020507, 0.014352160453796387, 0.014342016220092773, 0.014331168174743652, 0.014411680221557617, 0.0145863676071167, 0.014642880439758301, 0.014778112411499024, 0.014664704322814942, 0.014690303802490234, 0.014650976181030274, 0.01466204833984375, 0.014618271827697754, 0.014717280387878418, 0.015019840240478515, 0.014759584426879883, 0.014907391548156738, 0.014824064254760742, 0.014813311576843262, 0.014983967781066894, 0.014774335861206054, 0.014844863891601563, 0.01488486385345459, 0.014479552268981934, 0.014705727577209473, 0.01476848030090332, 0.014633376121520996, 0.014566720008850098, 0.014535264015197754, 0.014512384414672851, 0.014499360084533692, 0.014529024124145508, 0.014560480117797852, 0.014809727668762207, 0.014993311882019043, 0.014733375549316407, 0.014892928123474121, 0.014780223846435547, 0.014715392112731934, 0.01469545555114746, 0.01475436782836914, 0.014534879684448242, 0.01453273582458496, 0.014486880302429199, 0.01444819164276123, 0.014502880096435546, 0.014491968154907227, 0.014456512451171875, 0.014628864288330079, 0.014548992156982422, 0.014505023956298829, 0.014558143615722656, 0.014520319938659668, 0.014714112281799316, 0.014648063659667969, 0.014716416358947755, 0.014848095893859863, 0.014899328231811523, 0.014731552124023438, 0.014530783653259277, 0.014485280036926269, 0.014374784469604492, 0.014373056411743164, 0.014350272178649903, 0.014413567543029785, 0.014388959884643554, 0.014424608230590821, 0.014438400268554688, 0.014392800331115722, 0.014416416168212891, 0.014585856437683106, 0.01444377613067627, 0.014432000160217286, 0.014425087928771972, 0.014475104331970214, 0.014553088188171387, 0.014522047996520996, 0.014737759590148926, 0.014430335998535156, 0.01452006435394287, 0.014571359634399414, 0.01448185634613037, 0.014391391754150391, 0.014407456398010254, 0.014351648330688476, 0.014405728340148926, 0.01460870361328125, 0.014989631652832031, 0.014610207557678223, 0.014567328453063964, 0.014427583694458007, 0.014369440078735352, 0.014406944274902344, 0.014377344131469726, 0.014529184341430664, 0.014462143898010253, 0.014496255874633789, 0.014506336212158202, 0.014372832298278808, 0.014397055625915527, 0.014415936470031738, 0.014475263595581055, 0.014882207870483399, 0.014498399734497071, 0.014442239761352539, 0.014440287590026855, 0.014360544204711913, 0.014375359535217284, 0.014340096473693848, 0.014364992141723633, 0.014370495796203614, 0.014323264122009278, 0.014354880332946778, 0.01438640022277832, 0.014323776245117187, 0.014392352104187012, 0.014356351852416993, 0.014393247604370118, 0.0144301118850708, 0.014428352355957031, 0.014393152236938477, 0.014565535545349121, 0.014665568351745605, 0.01444812774658203, 0.014350496292114257, 0.014416223526000976, 0.014366720199584961, 0.014358592033386231, 0.014393280029296875, 0.014359775543212891, 0.014422271728515625, 0.014377504348754884, 0.014375007629394532, 0.014453824043273927, 0.014449503898620606, 0.014454784393310547, 0.014462976455688477, 0.01437228775024414, 0.014409503936767577, 0.014327808380126953, 0.01437507152557373, 0.014299776077270508, 0.014430208206176758, 0.014557184219360352, 
0.01450598430633545, 0.014397279739379883, 0.014460960388183593, 0.014378496170043945, 0.014529055595397949, 0.014119520187377929, 0.014393376350402833, 0.014448543548583985, 0.014396832466125489, 0.014375455856323242, 0.01447766399383545, 0.014409472465515136, 0.014401472091674805, 0.014417568206787109, 0.014398880004882812, 0.014375904083251952, 0.014432255744934081, 0.014567423820495605, 0.014413824081420898, 0.014524415969848633, 0.01459609603881836, 0.014657695770263672, 0.014692192077636718, 0.01463651180267334, 0.0146212158203125, 0.014690400123596192, 0.014626015663146972, 0.014977631568908691, 0.014694016456604004, 0.014780896186828613, 0.014547200202941894, 0.014524160385131836, 0.014519840240478516, 0.01439311981201172, 0.014412480354309082, 0.014397024154663086, 0.014307583808898926, 0.014373023986816407, 0.014307647705078126, 0.014395423889160156, 0.014458304405212402, 0.014535008430480957, 0.014538623809814454, 0.014443936347961426, 0.014395999908447265, 0.014384544372558594, 0.014768735885620117, 0.01437491226196289, 0.014316703796386719, 0.014332192420959473, 0.014332480430603027, 0.014397439956665039, 0.014442432403564453, 0.014370400428771973, 0.014315008163452148, 0.014290911674499512, 0.01438003158569336, 0.014419424057006835, 0.014352928161621094, 0.01450979232788086, 0.01436451244354248, 0.014465472221374513, 0.014389311790466309, 0.01438304042816162, 0.014411775588989258, 0.014348287582397461, 0.014509696006774903, 0.014784159660339356, 0.014319616317749024, 0.014532768249511718, 0.014601663589477539, 0.014612671852111816, 0.014545120239257813, 0.01455907154083252, 0.014571680068969726, 0.01458140754699707, 0.014508543968200683, 0.014475359916687011, 0.015687423706054686, 0.015167296409606934, 0.01461676788330078, 0.014574751853942872, 0.014708864212036132, 0.014679072380065918, 0.014587615966796875, 0.014855968475341796, 0.014998720169067383, 0.014881600379943847, 0.015017888069152833, 0.014896672248840332, 0.015026944160461426, 0.01489635181427002, 0.015038463592529297, 0.014926336288452148, 0.015067328453063964, 0.01544540786743164, 0.01521123218536377, 0.015208415985107422, 0.015228639602661133, 0.015042719841003417, 0.014979328155517578, 0.01500054359436035, 0.015006624221801757, 0.015005696296691894, 0.015031871795654296, 0.015147456169128418, 0.015120384216308593, 0.015183327674865722, 0.015212736129760741, 0.015103712081909179, 0.015173664093017578, 0.015030783653259277, 0.015546463966369628, 0.014925919532775878, 0.014909343719482422, 0.014907391548156738, 0.01490124797821045, 0.014796064376831055, 0.014870880126953125, 0.014865023612976074, 0.014750816345214844, 0.014678463935852051, 0.014590175628662109, 0.014512224197387695, 0.014552096366882324, 0.014738400459289551, 0.01580022430419922, 0.016285696029663087, 0.014831744194030761, 0.014757760047912597, 0.014772031784057617, 0.014481023788452148, 0.014837887763977051, 0.014665984153747558, 0.01463644790649414, 0.014535327911376954, 0.014540960311889648, 0.014528448104858399, 0.014579551696777344, 0.014470560073852539, 0.014491488456726074, 0.014545087814331054, 0.014586432456970215, 0.014566656112670898, 0.01471673583984375, 0.014826432228088379, 0.014934016227722167, 0.014888959884643555, 0.014899200439453125, 0.014970879554748535, 0.015467743873596191, 0.015020832061767578, 0.014929920196533204, 0.014934240341186523, 0.015047743797302246, 0.015020095825195312, 0.014965503692626954, 0.01500489616394043, 0.01503916835784912, 0.014921440124511718, 0.014979359626770019, 0.015069503784179688, 
0.015308480262756347, 0.014982720375061036, 0.014976799964904785, 0.015966943740844727, 0.01477830410003662, 0.014673919677734374, 0.014792703628540039, 0.015450336456298829, 0.014694175720214843, 0.01466915225982666, 0.01478927993774414, 0.01482755184173584, 0.014856160163879395, 0.014761823654174805, 0.014881952285766602, 0.014806015968322754, 0.014869600296020509, 0.015147808074951172, 0.014712960243225097, 0.01466982364654541, 0.014606559753417969, 0.014619903564453125, 0.0146746244430542, 0.014758848190307617, 0.014719903945922852, 0.01472822380065918, 0.014679007530212403, 0.0147807035446167, 0.014726880073547364, 0.014744768142700196, 0.01479967975616455, 0.015048735618591309, 0.014773951530456544, 0.014869855880737306, 0.014902527809143067, 0.014991071701049805, 0.015122431755065918, 0.01496678352355957, 0.014996512413024903, 0.014933152198791504, 0.014984383583068847, 0.014866720199584961, 0.014836064338684082, 0.014826623916625977, 0.014895999908447265, 0.014890496253967284, 0.014895135879516602, 0.014989215850830078, 0.01519699192047119, 0.015832799911499024, 0.015226911544799805, 0.015153152465820312, 0.015089664459228515, 0.015126527786254883, 0.015046655654907226, 0.015157471656799317, 0.015032095909118652, 0.015071231842041016, 0.015036416053771973, 0.015095808029174805, 0.015102047920227051, 0.01518172836303711, 0.01519001579284668, 0.01508892822265625, 0.015329279899597169, 0.01512112045288086, 0.015469568252563477, 0.015430111885070801, 0.015186464309692383, 0.015420736312866211, 0.015291168212890624, 0.015312800407409668, 0.015278016090393066, 0.015239232063293457, 0.015313983917236328, 0.015227840423583985, 0.015194304466247558, 0.015224639892578124, 0.015208127975463868, 0.015093215942382813, 0.015339424133300781, 0.015237631797790528, 0.01574678421020508, 0.015131360054016113, 0.014887167930603027, 0.014800992012023926, 0.014777664184570313, 0.014836064338684082, 0.014712832450866698, 0.014816351890563965, 0.015000479698181152, 0.01466163158416748, 0.014682111740112304, 0.014724543571472168, 0.014832192420959473, 0.014684160232543946, 0.014946559906005859, 0.014970751762390136, 0.014952320098876953, 0.014841535568237304, 0.014899359703063966, 0.015005632400512696, 0.014923775672912597, 0.015030495643615723, 0.014972576141357422, 0.01497270393371582, 0.014933695793151855, 0.014852992057800293, 0.014999135971069336, 0.014901856422424316, 0.014874431610107422, 0.014974944114685059, 0.014817440032958984, 0.014624608039855958, 0.014554176330566406, 0.014545439720153808, 0.014555583953857422, 0.014718976020812988, 0.014593855857849121, 0.014660863876342774, 0.01458886432647705, 0.014499839782714843, 0.014533632278442383, 0.014533632278442383, 0.014589952468872071, 0.014518272399902344, 0.01459404754638672, 0.01458892822265625, 0.014652640342712402, 0.014548768043518066, 0.014442496299743653, 0.014552479743957519, 0.014517919540405273, 0.014930879592895508, 0.014667776107788086, 0.014800704002380372, 0.015275584220886231, 0.016126592636108397, 0.01489305591583252, 0.015364128112792969, 0.014784735679626465, 0.014690048217773437, 0.01478438377380371, 0.01480947208404541, 0.014774016380310059, 0.014694399833679199, 0.014593695640563965, 0.014720640182495116, 0.014914272308349609, 0.015392607688903808, 0.014518048286437988, 0.014575167655944824, 0.014459712028503418, 0.014483455657958985, 0.014360223770141602, 0.014389599800109863, 0.014359807968139649, 0.014379872322082519, 0.01410483169555664, 0.014473119735717773, 0.014403327941894532, 0.01431497573852539, 
0.014499072074890137, 0.014427935600280762, 0.014423711776733399, 0.014729472160339355, 0.014969152450561523, 0.015613280296325683, 0.014866815567016602, 0.014525823593139649, 0.014502528190612794, 0.014456607818603516, 0.014381471633911133, 0.014397248268127442, 0.014507519721984862, 0.014579968452453613, 0.01440118408203125, 0.014354847908020019, 0.014507648468017578, 0.014489439964294434, 0.014462847709655762, 0.014944671630859375, 0.015681983947753907, 0.01439948844909668, 0.014361791610717774, 0.01451296043395996, 0.017330080032348632, 0.015478879928588866, 0.014782079696655273, 0.014641471862792969, 0.014608063697814942, 0.014493439674377441, 0.014694304466247558, 0.014639616012573242, 0.014704544067382813, 0.014704416275024414, 0.014606592178344726, 0.014633248329162598, 0.014667776107788086, 0.014657088279724121, 0.014944704055786132, 0.01485807991027832, 0.01493008041381836, 0.014620672225952149, 0.014595999717712402, 0.01454089641571045, 0.01454905605316162, 0.014630463600158692, 0.014600576400756836, 0.014684191703796387, 0.014694368362426758, 0.014485504150390625, 0.014575615882873535, 0.014544896125793457, 0.014522656440734864, 0.014476256370544434, 0.014951168060302734, 0.014427904129028321, 0.014528767585754394, 0.014419360160827637, 0.014467231750488281]",tokens/s,68.06033054121858,,, 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4244.074496,4725.866496,0.0,4347.396096,4328.833024,s,1,10.40515625,10.40515625,0.0,10.40515625,10.40515625,10.40515625,10.40515625,[10.40515625],,kWh,9.143228809164157e-05,1.0074670626374124e-05,2.9218912263984098e-05,0.00013072587098199979,,MB,1385.684992,5220.794368,0.0,4812.96384,4756.928512,s,10,3.9759581909179684,0.39759581909179686,0.004339503763737649,0.39798226928710934,0.40114043884277345,0.4012296890258789,0.4013010891723633,"[0.3852103271484375, 0.39959088134765625, 0.3977213439941406, 0.3977471923828125, 0.39811508178710936, 0.3976213684082031, 0.3996629943847656, 0.39784945678710937, 0.40112060546875, 0.40131893920898437]",tokens/s,643.869949600488,kWh,1.1604514901762577e-05,1.2797745318105492e-06,7.739621576307716e-06,2.0623911009880844e-05,tokens/kWh,12412776.60077913,MB,1390.194688,5325.651968,0.0,4917.82144,4876.091904,s,10,24.099277099609377,2.4099277099609377,0.016611488617423925,2.406385498046875,2.435732788085937,2.436072814941406,2.436344836425781,"[2.4356572265625, 2.4194453125, 2.400362548828125, 2.38584423828125, 2.391546875, 2.4111103515625, 2.419173828125, 2.40166064453125, 2.398063232421875, 2.436412841796875]",tokens/s,26.141862986015113,kWh,7.10939256199046e-05,7.841736990095445e-06,4.7047033364089756e-05,0.0001259826959740898,tokens/kWh,500068.67620103067,,s,630,24.096367031097394,0.038248201636662565,0.0005840847493881605,0.03814447975158691,0.03880594062805176,0.03910825805664063,0.04053592147827149,"[0.03988630294799805, 0.03840668869018555, 0.03834470367431641, 
0.038246654510498045, 0.03829939270019531, 0.038160385131835936, 0.03832012939453125, 0.038454814910888674, 0.038746593475341796, 0.03876444625854492, 0.038707168579101565, 0.03896742248535156, 0.03877478408813476, 0.0388087043762207, 0.03926105499267578, 0.03917532730102539, 0.0384048957824707, 0.0386519660949707, 0.038948863983154294, 0.0385546875, 0.03867292785644531, 0.03873628616333008, 0.03865804672241211, 0.038645759582519534, 0.038594558715820314, 0.03981052780151367, 0.03885939025878906, 0.038725440979003906, 0.038705249786376954, 0.03861503982543945, 0.0384634895324707, 0.03856700897216797, 0.038832511901855465, 0.03856643295288086, 0.038632705688476564, 0.03870934295654297, 0.03874374389648438, 0.0384950065612793, 0.03887635040283203, 0.03868707275390625, 0.03877750396728516, 0.03865190505981445, 0.03861913681030273, 0.03878297424316406, 0.038648033142089845, 0.03893430328369141, 0.038563838958740236, 0.03856588745117188, 0.03850649642944336, 0.0385904655456543, 0.03837673568725586, 0.03853385543823242, 0.03856304168701172, 0.0385208969116211, 0.038505184173583985, 0.038292545318603516, 0.0384439697265625, 0.03852627182006836, 0.03840480041503906, 0.03856588745117188, 0.038313312530517576, 0.03844339370727539, 0.03874425506591797, 0.04010860824584961, 0.0384571533203125, 0.038365345001220706, 0.038363166809082035, 0.038400001525878906, 0.038043071746826175, 0.03810569763183594, 0.03804313659667969, 0.03813833618164063, 0.03805539321899414, 0.03809302520751953, 0.038185279846191404, 0.03826224136352539, 0.0385497932434082, 0.03872998428344727, 0.0388403205871582, 0.03875632095336914, 0.03999081420898438, 0.03880563354492188, 0.038694782257080076, 0.03879100799560547, 0.03895158386230469, 0.038798721313476565, 0.038906494140625, 0.0387665901184082, 0.03841740798950195, 0.03860329437255859, 0.0385192642211914, 0.03833184051513672, 0.03807289505004883, 0.038085697174072265, 0.03816255950927734, 0.037921600341796875, 0.03835903930664063, 0.038258689880371094, 0.0384901123046875, 0.03983564758300781, 0.03822182464599609, 0.03834265518188477, 0.03842031860351563, 0.03839619064331055, 0.0386712646484375, 0.03830172729492187, 0.03850131225585938, 0.03855513763427734, 0.0381671028137207, 0.03808777618408203, 0.03811568069458008, 0.03807897567749023, 0.038282527923583984, 0.038144577026367185, 0.03824025726318359, 0.037840991973876956, 0.038113601684570314, 0.037668609619140626, 0.03790028762817383, 0.03850569534301758, 0.038466335296630856, 0.03792086410522461, 0.037881343841552735, 0.03870105743408203, 0.03762422561645508, 0.03777667236328125, 0.03981721496582031, 0.038437950134277345, 0.03827727890014648, 0.038071071624755856, 0.03789020919799805, 0.03792844772338867, 0.03787139129638672, 0.03806675338745117, 0.038136863708496095, 0.038061119079589846, 0.037924766540527344, 0.03784198379516602, 0.037829566955566406, 0.03788576126098633, 0.037676288604736326, 0.03774105453491211, 0.0376583023071289, 0.037730175018310545, 0.04069055938720703, 0.03803750228881836, 0.03792822265625, 0.03773430252075195, 0.03785766220092773, 0.03785551834106445, 0.0376668815612793, 0.03769747161865234, 0.03771171188354492, 0.03800428771972656, 0.03832406234741211, 0.03832310485839844, 0.03865766525268555, 0.03998963165283203, 0.0388485107421875, 0.03868822479248047, 0.03837996673583984, 0.038370880126953125, 0.03806057739257813, 0.03812694549560547, 0.0381300163269043, 0.03788627243041992, 0.03785113525390625, 0.037756671905517576, 0.039511489868164065, 0.03910031890869141, 0.03817155075073242, 
0.03819456100463867, 0.03834044647216797, 0.037929759979248044, 0.0385986557006836, 0.03774700927734375, 0.03783545684814453, 0.03778867340087891, 0.03760323333740234, 0.03770582580566406, 0.037506462097167965, 0.037450336456298826, 0.03768230438232422, 0.037553024291992185, 0.03746723175048828, 0.03767520141601562, 0.03746480178833008, 0.03764835357666016, 0.037700927734375, 0.03943695831298828, 0.03844927978515625, 0.03794521713256836, 0.03787926483154297, 0.037873985290527344, 0.03775929641723633, 0.03782236862182617, 0.037574142456054685, 0.03777177429199219, 0.037570110321044924, 0.03757686233520508, 0.037717952728271484, 0.03769507217407227, 0.03777817535400391, 0.03784479904174805, 0.037689342498779296, 0.037816513061523435, 0.03791462326049805, 0.0380682258605957, 0.03775692749023438, 0.037760128021240236, 0.03775084686279297, 0.03784377670288086, 0.03768025588989258, 0.03773952102661133, 0.03770259094238281, 0.03772649765014648, 0.03822444915771484, 0.03783292770385742, 0.037830528259277345, 0.037705726623535156, 0.03797196960449219, 0.03779379272460937, 0.0378853759765625, 0.037494495391845704, 0.03773436737060547, 0.03770457458496094, 0.03772825622558594, 0.037654655456542965, 0.03780799865722656, 0.03799039840698242, 0.037820415496826174, 0.03802259063720703, 0.03791452789306641, 0.03785897445678711, 0.03775129699707031, 0.03765913772583008, 0.03763529586791992, 0.038010913848876955, 0.03769184112548828, 0.03780588912963867, 0.03779372787475586, 0.0377960319519043, 0.038187713623046876, 0.03863062286376953, 0.03785776138305664, 0.037992095947265624, 0.03793340682983398, 0.03781964874267578, 0.037904735565185546, 0.03770937728881836, 0.03805987167358398, 0.03819996643066406, 0.04021148681640625, 0.03832924652099609, 0.038699073791503905, 0.03825836944580078, 0.03815667343139648, 0.03794326400756836, 0.03829555130004883, 0.038362430572509765, 0.03782457733154297, 0.03789507293701172, 0.03777097702026367, 0.03764223861694336, 0.03788822555541992, 0.037700927734375, 0.037638622283935545, 0.03766796875, 0.03770185470581055, 0.037595294952392576, 0.03768534469604492, 0.03786383819580078, 0.03818905639648437, 0.03787081527709961, 0.03784991836547852, 0.03778713607788086, 0.03807484817504883, 0.03786956787109375, 0.03766681671142578, 0.03788800048828125, 0.037743614196777346, 0.03772927856445313, 0.03771596908569336, 0.03785881423950195, 0.0384640007019043, 0.038537216186523435, 0.038020896911621097, 0.03811555099487305, 0.03781836700439453, 0.03777347183227539, 0.038047584533691406, 0.03799792098999023, 0.037843616485595706, 0.03809894561767578, 0.03787161636352539, 0.03801200103759766, 0.03791584014892578, 0.03806588745117188, 0.03821529769897461, 0.03797772979736328, 0.037817089080810544, 0.037797889709472655, 0.03780198287963867, 0.03774259185791016, 0.037617664337158206, 0.037566463470458986, 0.03775660705566406, 0.037746593475341796, 0.037681537628173827, 0.03757206344604492, 0.03765919876098633, 0.038626495361328124, 0.03801948928833008, 0.037843456268310545, 0.03787926483154297, 0.039798465728759766, 0.038054206848144534, 0.037996543884277346, 0.03787571334838867, 0.03911475372314453, 0.04053753662109375, 0.03810396957397461, 0.03764188766479492, 0.037623329162597655, 0.03759939193725586, 0.03758931350708008, 0.03749884796142578, 0.03788803100585938, 0.03773974227905273, 0.03757136154174805, 0.03773807907104492, 0.03775539016723633, 0.03795452880859375, 0.038351806640625, 0.03828940963745117, 0.03853107070922852, 0.03836108779907227, 0.0383631362915039, 0.03847782516479492, 
0.038090431213378906, 0.03827948760986328, 0.03820451354980469, 0.03813849639892578, 0.03820982360839844, 0.038966815948486326, 0.03832835388183594, 0.03845084762573242, 0.03829993438720703, 0.03825644683837891, 0.03824828720092773, 0.03837628936767578, 0.03817798233032227, 0.03817158508300781, 0.03898518371582031, 0.03894723129272461, 0.038508544921875, 0.03822963333129883, 0.03832617568969727, 0.038614879608154296, 0.038594497680664065, 0.038267936706542965, 0.038340255737304686, 0.03799385452270508, 0.03803798294067383, 0.03807043075561523, 0.03779971313476563, 0.03841215896606445, 0.03844745635986328, 0.038219745635986326, 0.038139232635498045, 0.0380382080078125, 0.037929183959960935, 0.03762969589233398, 0.037812255859375, 0.03777526473999023, 0.0375048942565918, 0.041302433013916014, 0.03823193740844726, 0.04130393600463867, 0.03825017547607422, 0.038338623046875, 0.03788838577270508, 0.03812934494018555, 0.037859649658203126, 0.037795841217041014, 0.03792835235595703, 0.037845600128173826, 0.03811875152587891, 0.03835318374633789, 0.04014899063110351, 0.038982017517089844, 0.03846553421020508, 0.03836435317993164, 0.03831276702880859, 0.03852447891235351, 0.039739585876464846, 0.03888102340698242, 0.042092479705810544, 0.03893731307983399, 0.038783840179443356, 0.0387217903137207, 0.038727455139160157, 0.03894704055786133, 0.03883491134643555, 0.03866812896728516, 0.03850259017944336, 0.038526657104492185, 0.038314048767089846, 0.03835315322875977, 0.03828867340087891, 0.037996543884277346, 0.03847446441650391, 0.03823622512817383, 0.03853113555908203, 0.03848537445068359, 0.038362945556640625, 0.03821779251098633, 0.03850454330444336, 0.03844723129272461, 0.03806864166259766, 0.038118465423583985, 0.03853952026367188, 0.03877503967285156, 0.03843100738525391, 0.037851200103759766, 0.03785481643676758, 0.03780454254150391, 0.037840511322021486, 0.03774006271362305, 0.03775174331665039, 0.03763596725463867, 0.037621055603027344, 0.0377371826171875, 0.03761356735229492, 0.03811894226074219, 0.03829782485961914, 0.03799020767211914, 0.03773894500732422, 0.03769452667236328, 0.03765055847167969, 0.0378171501159668, 0.04027865600585938, 0.03822796630859375, 0.038106590270996096, 0.03833091354370117, 0.03809689712524414, 0.03810508728027344, 0.03808857727050781, 0.03826451110839844, 0.03812524795532227, 0.03823795318603516, 0.03814438247680664, 0.03816716766357422, 0.038061790466308594, 0.03793948745727539, 0.03801497650146484, 0.03798387145996094, 0.03818534469604492, 0.03800038528442383, 0.038000415802001954, 0.037927391052246094, 0.03798339080810547, 0.03793596649169922, 0.03781612777709961, 0.03841046524047852, 0.03817878341674805, 0.03800678253173828, 0.03772787094116211, 0.03782905578613281, 0.03805996704101562, 0.03775078582763672, 0.03801497650146484, 0.037869823455810546, 0.037827903747558594, 0.0376693115234375, 0.03751935958862305, 0.03756851196289063, 0.03771603012084961, 0.03783878326416015, 0.03796131134033203, 0.038262462615966795, 0.0380681266784668, 0.037955902099609376, 0.03813836669921875, 0.03823740768432617, 0.03843939208984375, 0.03843932723999023, 0.038466590881347656, 0.03827382278442383, 0.038983776092529294, 0.03838521575927734, 0.038814144134521486, 0.038449153900146485, 0.038178398132324216, 0.03815190505981445, 0.03814595031738281, 0.038195999145507815, 0.038144001007080076, 0.038202880859375, 0.03798681640625, 0.0378616943359375, 0.037957054138183594, 0.037918785095214846, 0.03773574447631836, 0.04011990356445312, 0.0381192626953125, 0.03823878479003906, 
0.03851468658447266, 0.038152065277099606, 0.0378966064453125, 0.038001888275146486, 0.03797804641723633, 0.037839424133300784, 0.03799859237670898, 0.037607425689697264, 0.03766204833984375, 0.03761996841430664, 0.037714305877685546, 0.03780521774291992, 0.03830435180664062, 0.0377305908203125, 0.037806079864501956, 0.03760947036743164, 0.037713920593261716, 0.037736480712890624, 0.037966976165771486, 0.037808544158935545, 0.0377592658996582, 0.03776528167724609, 0.037774463653564454, 0.0377127685546875, 0.03815411376953125, 0.03797846221923828, 0.03833343887329101, 0.038305728912353516, 0.03793094253540039, 0.03780857467651367, 0.037781505584716796, 0.037738239288330075, 0.03768726348876953, 0.037722625732421876, 0.03758720016479492, 0.03773235321044922, 0.03796899032592774, 0.03801603317260742, 0.03845312118530273, 0.03800892639160156, 0.037981182098388674, 0.03815526580810547, 0.03800259017944336, 0.03988838577270508, 0.03797452926635742, 0.038051136016845705, 0.03795833587646484, 0.038109184265136715, 0.03817609786987305, 0.03806684875488281, 0.03793100738525391, 0.03782355117797852, 0.03766982269287109, 0.03876979064941406, 0.03907843017578125, 0.03795929718017578, 0.038951488494873045, 0.03811529541015625, 0.03792300796508789, 0.03901203155517578, 0.03984979248046875, 0.038410430908203126, 0.03827916717529297, 0.03800678253173828, 0.03833651351928711, 0.038473567962646484, 0.03889705657958984, 0.03856870269775391, 0.038585697174072266, 0.03876931381225586, 0.03855081558227539, 0.038214111328125, 0.038356639862060546, 0.038476192474365234, 0.03833875274658203, 0.03835087966918945, 0.03806560134887695, 0.038263294219970705, 0.03831609725952148, 0.03829961776733398, 0.039018497467041016, 0.038407585144042966, 0.03843913650512695, 0.03872943878173828, 0.03829520034790039, 0.03838431930541992, 0.038502464294433596, 0.03838518524169922, 0.03849084854125977, 0.04077363204956055, 0.039215103149414066, 0.03843100738525391, 0.03852873611450195, 0.03837952041625976, 0.03852848052978516, 0.03890230560302734, 0.0385846061706543, 0.03908512115478516, 0.03842838287353516, 0.0384071044921875, 0.03875430297851563, 0.040531967163085936, 0.038850208282470704, 0.038349151611328125, 0.03836723327636719, 0.038381568908691405, 0.038242305755615234, 0.039354366302490236, 0.03835324859619141, 0.03831772613525391, 0.038274368286132815, 0.03828547286987305, 0.04205145645141602, 0.03860137557983399, 0.03838304138183594, 0.03832883071899414, 0.038240478515625, 0.03950732803344727, 0.038975967407226565, 0.03836044692993164, 0.038675071716308594, 0.0391162223815918, 0.03843948745727539]",tokens/s,26.145020084851687,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7116.754944,7715.291136,0.0,7329.54624,7078.776832,s,1,11.5598271484375,11.5598271484375,0.0,11.5598271484375,11.5598271484375,11.5598271484375,11.5598271484375,[11.5598271484375],,kWh,0.00012647673733329158,1.3943334059055151e-05,4.063947695595593e-05,0.00018105954834830265,,MB,3070.066688,8197.636096,0.0,7782.531072,7530.786816,s,10,5.666936645507811,0.5666936645507812,0.0033865288167490887,0.5676229553222656,0.5688596862792968,0.5700053131103515,0.5709218145751953,"[0.5575468139648437, 0.5658836059570312, 0.5662919921875, 0.5659325561523437, 0.5684188842773438, 0.5686051025390625, 0.567486328125, 0.56786083984375, 0.5677595825195313, 0.5711509399414062]",tokens/s,451.74318333509433,kWh,1.6493821429167432e-05,1.8189875070062638e-06,1.0989576692887718e-05,2.9302385629061414e-05,tokens/kWh,8736490.033292895,MB,3083.399168,8617.066496,0.0,8201.961472,7832.204288,s,10,191.84869335937498,19.1848693359375,0.07669714734742085,19.21312890625,19.244614453125,19.24499765625,19.24530421875,"[18.982322265625, 19.130607421875, 19.178095703125, 19.2036640625, 19.22259375, 19.23775, 19.23678125, 19.244529296875, 19.245380859375, 19.16696875]",tokens/s,3.283837846212853,kWh,0.0005597796766129139,6.17476097960822e-05,0.00037226417435431454,0.0009937914607633105,tokens/kWh,63393.58153833503,,s,630,191.8441319274901,0.3045144951230004,0.0018295786301799277,0.3049390563964844,0.30636752014160157,0.3067393585205078,0.3077840478515625,"[0.30390267944335936, 0.2958315124511719, 0.298835693359375, 0.3057823181152344, 0.29859921264648437, 0.29764984130859373, 0.3040333251953125, 0.299404052734375, 0.2983417663574219, 0.30091937255859375, 0.3021312561035156, 0.29884197998046874, 0.30034866333007815, 0.30317861938476565, 0.29948928833007815, 0.29901123046875, 0.302272705078125, 0.30175094604492186, 0.2988289489746094, 0.30023062133789064, 0.30378277587890623, 0.29770343017578127, 0.29927587890625, 0.30154574584960936, 0.3008510437011719, 0.2995072021484375, 0.3011161804199219, 0.3023257751464844, 0.3001436462402344, 0.30043234252929685, 0.30260208129882815, 0.3000400695800781, 0.3003310852050781, 0.30185430908203126, 0.30130853271484376, 0.3006238708496094, 0.30097323608398435, 0.3023921203613281, 0.29991448974609375, 0.3006860656738281, 0.3025285034179688, 0.29966131591796874, 0.2995561828613281, 0.30243035888671876, 0.29920028686523437, 0.30087445068359375, 0.30167648315429685, 0.3018502197265625, 0.2998358764648438, 0.30361395263671875, 0.3042972412109375, 0.30374166870117186, 0.30188134765625, 0.3040153503417969, 0.30313882446289064, 0.3025899658203125, 0.30409722900390623, 0.3043016662597656, 0.30240118408203126, 0.30311712646484373, 0.3043614807128906, 0.3025162353515625, 0.3033837890625, 0.30535317993164063, 0.2994851989746094, 0.30476602172851563, 0.30554397583007814, 0.29918020629882813, 0.30422244262695314, 0.30534213256835935, 0.29998089599609373, 0.3025897216796875, 0.30467092895507814, 0.30383053588867187, 0.30142520141601564, 0.30371627807617185, 0.3050309448242188, 0.29987454223632815, 0.30139495849609377, 0.3072389831542969, 0.30346527099609377, 0.3026515197753906, 0.3052154846191406, 0.303415283203125, 0.2999234619140625, 0.30395721435546874, 0.30463671875, 0.30088626098632815, 0.30322201538085936, 0.304918212890625, 0.3032271728515625, 0.3032028503417969, 0.3045684814453125, 0.3038165588378906, 0.3014448852539062, 0.30524847412109374, 0.3055042419433594, 0.30127102661132815, 0.303914794921875, 0.30433303833007813, 
0.30367742919921875, 0.302906494140625, 0.3037663879394531, 0.30476025390625, 0.30294686889648437, 0.3043223876953125, 0.30369192504882814, 0.30298025512695315, 0.3044248046875, 0.3046246337890625, 0.3037406311035156, 0.3034991455078125, 0.3054261779785156, 0.30463394165039065, 0.3024402465820312, 0.3040234680175781, 0.3051483154296875, 0.3031290283203125, 0.30497586059570314, 0.3051990966796875, 0.30501068115234375, 0.30438516235351565, 0.30469830322265623, 0.30466983032226563, 0.30267904663085937, 0.30592926025390627, 0.30499560546875, 0.3027867431640625, 0.30443344116210935, 0.3055334777832031, 0.3022735595703125, 0.30390167236328125, 0.30646682739257813, 0.3014656066894531, 0.30413003540039063, 0.3054468994140625, 0.3034349060058594, 0.3042557373046875, 0.304646240234375, 0.30447207641601565, 0.3032324523925781, 0.30399136352539063, 0.30478131103515627, 0.3038489990234375, 0.30362466430664065, 0.3052503051757812, 0.30431027221679685, 0.30315280151367185, 0.3049040832519531, 0.30394207763671877, 0.30316650390625, 0.3047760009765625, 0.30372470092773435, 0.30462362670898435, 0.3047833557128906, 0.30420172119140626, 0.3038392333984375, 0.30503732299804687, 0.30454693603515626, 0.3042757568359375, 0.30383984375, 0.3040516357421875, 0.30444601440429686, 0.30400677490234373, 0.30440692138671877, 0.3048304748535156, 0.30389862060546874, 0.3048974609375, 0.30527545166015624, 0.3033026428222656, 0.3031019592285156, 0.3057492980957031, 0.30431280517578124, 0.3039346923828125, 0.305259521484375, 0.3045448608398437, 0.3045360107421875, 0.3049619140625, 0.30549002075195314, 0.30540185546875, 0.3042508850097656, 0.306332763671875, 0.3050480651855469, 0.3040950012207031, 0.3055683898925781, 0.30511309814453125, 0.3044536437988281, 0.305022705078125, 0.30527923583984373, 0.3053858642578125, 0.301557373046875, 0.3052154846191406, 0.3066553649902344, 0.30141986083984373, 0.3042391357421875, 0.307111572265625, 0.30189398193359374, 0.30365899658203127, 0.3069275817871094, 0.3038372192382813, 0.3021905212402344, 0.30630303955078125, 0.30547705078125, 0.30246151733398435, 0.30413003540039063, 0.30648934936523436, 0.30338253784179686, 0.3035606994628906, 0.30641546630859373, 0.30447528076171876, 0.30342166137695314, 0.305859375, 0.30466867065429687, 0.3036528625488281, 0.3049349060058594, 0.30532571411132814, 0.3039516296386719, 0.3045845031738281, 0.30547625732421874, 0.30436187744140625, 0.30533392333984377, 0.30487149047851564, 0.3052705993652344, 0.305203125, 0.30540570068359374, 0.3037331237792969, 0.30605938720703124, 0.30457650756835936, 0.30358731079101564, 0.3052030944824219, 0.30499432373046875, 0.3047568359375, 0.30582168579101565, 0.30535269165039064, 0.3033641052246094, 0.3050516357421875, 0.3057172546386719, 0.30471966552734375, 0.30448858642578125, 0.3059954833984375, 0.30530734252929687, 0.30520977783203124, 0.30478280639648436, 0.3052285461425781, 0.3052462158203125, 0.3053957214355469, 0.3067369079589844, 0.30507852172851563, 0.3058067016601563, 0.3052815856933594, 0.3049432067871094, 0.3056693115234375, 0.306166015625, 0.3023705139160156, 0.30419342041015623, 0.30791635131835937, 0.3019620666503906, 0.3044452819824219, 0.30752105712890626, 0.30064007568359374, 0.3048345947265625, 0.3081858825683594, 0.30508148193359375, 0.30163677978515624, 0.30641326904296873, 0.30527487182617186, 0.303508544921875, 0.30686505126953123, 0.3066632080078125, 0.30283584594726565, 0.3038454895019531, 0.30687435913085936, 0.3039047546386719, 0.3042344970703125, 0.3061923828125, 0.3056210021972656, 
0.30365060424804685, 0.30556591796875, 0.306366455078125, 0.3044313049316406, 0.3068680419921875, 0.30708941650390625, 0.30330230712890627, 0.30567626953125, 0.3065511779785156, 0.303810546875, 0.30509664916992185, 0.30529132080078125, 0.3048529968261719, 0.3051520080566406, 0.3049060363769531, 0.3059263305664062, 0.3046461181640625, 0.30521890258789064, 0.30501565551757814, 0.3056858215332031, 0.3044582824707031, 0.304168212890625, 0.30596783447265624, 0.3050250244140625, 0.3049549560546875, 0.3045503234863281, 0.3066961975097656, 0.30503646850585936, 0.30509445190429685, 0.306258544921875, 0.30524453735351564, 0.30453924560546874, 0.30623086547851563, 0.3058307189941406, 0.304867431640625, 0.30531552124023437, 0.3064609069824219, 0.3041177673339844, 0.3049979858398438, 0.30551397705078126, 0.30342367553710936, 0.305829345703125, 0.30618710327148435, 0.302740966796875, 0.30495309448242186, 0.30790530395507815, 0.3033985290527344, 0.30540838623046873, 0.3066510009765625, 0.3040147399902344, 0.3043489990234375, 0.30661520385742186, 0.305104736328125, 0.30396575927734376, 0.30605374145507813, 0.3057397766113281, 0.303847412109375, 0.3052523498535156, 0.3066338806152344, 0.30384625244140623, 0.30489599609375, 0.30636358642578126, 0.30490090942382814, 0.3046678466796875, 0.306052978515625, 0.3059570922851563, 0.3043786315917969, 0.30483590698242186, 0.305824462890625, 0.30502520751953127, 0.3041954040527344, 0.3067166748046875, 0.30506320190429687, 0.304970458984375, 0.30547967529296877, 0.306054931640625, 0.30370989990234376, 0.3060434265136719, 0.3056148376464844, 0.30464614868164064, 0.30577621459960935, 0.305883544921875, 0.3046830139160156, 0.30488986206054686, 0.30649160766601563, 0.3051292419433594, 0.3050039672851563, 0.30724563598632815, 0.3056595153808594, 0.30444357299804686, 0.3065854797363281, 0.3063134765625, 0.3041833801269531, 0.30663882446289065, 0.3061586608886719, 0.3050526123046875, 0.30582562255859375, 0.30670855712890627, 0.3041260375976563, 0.3057582092285156, 0.30567742919921875, 0.3061973876953125, 0.30835443115234373, 0.3028601379394531, 0.30600811767578123, 0.307744873046875, 0.3016177978515625, 0.30431845092773435, 0.307800048828125, 0.3021290893554687, 0.30497735595703124, 0.3070204162597656, 0.305046875, 0.302884765625, 0.3059429931640625, 0.30550250244140625, 0.3034704895019531, 0.30584228515625, 0.3055616149902344, 0.30346240234375, 0.30563088989257814, 0.3060350341796875, 0.30427545166015624, 0.30547540283203123, 0.30594195556640624, 0.3044298095703125, 0.3052027587890625, 0.30518930053710935, 0.3047647705078125, 0.30485928344726565, 0.30640740966796876, 0.30541607666015624, 0.30445376586914064, 0.3048258361816406, 0.3053532409667969, 0.30536016845703123, 0.3053861999511719, 0.3056291809082031, 0.30479360961914065, 0.3042154541015625, 0.3054204406738281, 0.3061272888183594, 0.30529693603515623, 0.3056047058105469, 0.30732122802734374, 0.3053834228515625, 0.3053465576171875, 0.3063028564453125, 0.3051825256347656, 0.30539712524414064, 0.3061150207519531, 0.30629324340820313, 0.3048346252441406, 0.3059056091308594, 0.30551040649414063, 0.3052523498535156, 0.30584832763671876, 0.3062415466308594, 0.3045945739746094, 0.3063004455566406, 0.30528802490234375, 0.30516757202148437, 0.3067413635253906, 0.30525201416015624, 0.30538421630859375, 0.3063576354980469, 0.301951416015625, 0.30575430297851564, 0.308021240234375, 0.3014401550292969, 0.30578366088867187, 0.30833255004882815, 0.3020491027832031, 0.3047641296386719, 0.30711859130859376, 0.3052746887207031, 
0.3044170532226563, 0.30588760375976565, 0.3055923156738281, 0.3041154174804688, 0.30521783447265627, 0.30668798828125, 0.3045039672851563, 0.30506478881835936, 0.30732696533203124, 0.3044945983886719, 0.30427749633789064, 0.30691085815429686, 0.3048206176757812, 0.30411294555664065, 0.3057525634765625, 0.30571334838867187, 0.3043322448730469, 0.30653289794921873, 0.30564739990234374, 0.3050887451171875, 0.30514169311523437, 0.30629074096679687, 0.30507211303710935, 0.30619033813476565, 0.30618194580078123, 0.3046783752441406, 0.3048600769042969, 0.3062015380859375, 0.3051774597167969, 0.30514996337890626, 0.30668389892578124, 0.30523721313476565, 0.3057119140625, 0.30528717041015624, 0.3058045043945313, 0.3057221374511719, 0.3055513610839844, 0.3053580627441406, 0.30651470947265624, 0.305006591796875, 0.3053179016113281, 0.30582168579101565, 0.30540594482421873, 0.30600787353515624, 0.3053468017578125, 0.305626220703125, 0.30596084594726564, 0.30563360595703126, 0.3057210693359375, 0.30569073486328124, 0.306103271484375, 0.3062864685058594, 0.3069050903320312, 0.3033333740234375, 0.30636618041992186, 0.30697232055664064, 0.3021993408203125, 0.30520944213867185, 0.30740631103515625, 0.3033072814941406, 0.30474444580078125, 0.3072122802734375, 0.3046964111328125, 0.3038992614746094, 0.3076112976074219, 0.3047450866699219, 0.3029236145019531, 0.3059998779296875, 0.3062780456542969, 0.30378036499023436, 0.30576007080078127, 0.3066060485839844, 0.3041441650390625, 0.3060330810546875, 0.30628256225585937, 0.3035667724609375, 0.3052707824707031, 0.3061842041015625, 0.304272705078125, 0.30593707275390625, 0.30521139526367186, 0.30576956176757814, 0.3061380310058594, 0.3041812438964844, 0.305860595703125, 0.3050145874023438, 0.3057761840820313, 0.3060058288574219, 0.30534738159179686, 0.3052093505859375, 0.3060694580078125, 0.3062108459472656, 0.3052359619140625, 0.3056905517578125, 0.3054408264160156, 0.3053174743652344, 0.306104736328125, 0.30526873779296876, 0.30577664184570313, 0.3058477783203125, 0.30539627075195314, 0.30606472778320315, 0.30595245361328127, 0.30556463623046876, 0.30558822631835936, 0.30604043579101564, 0.30658578491210936, 0.3043288879394531, 0.3060323486328125, 0.3064239807128906, 0.3039560852050781, 0.30640948486328123, 0.3069030456542969, 0.30553292846679686, 0.30501593017578127, 0.3052108154296875, 0.30338104248046877, 0.30708941650390625, 0.3067716369628906, 0.30079605102539064, 0.3040926818847656, 0.3045580139160156, 0.30071832275390625, 0.30369210815429687, 0.3044036560058594, 0.30321258544921875, 0.301300537109375, 0.3043370361328125, 0.30369790649414063, 0.30073037719726564, 0.3038676452636719, 0.30348016357421875, 0.3022830200195312, 0.30316717529296877, 0.30397772216796876, 0.30273712158203125, 0.3020001220703125, 0.30499020385742187, 0.3034747009277344, 0.30260348510742185, 0.304153076171875, 0.3035015869140625, 0.3024097290039062, 0.30349313354492186, 0.3038003234863281, 0.303177734375, 0.30311013793945313, 0.3033313293457031, 0.30320654296875, 0.30374899291992186, 0.30361184692382814, 0.3038001708984375, 0.30326995849609373, 0.304328857421875, 0.3032227783203125, 0.3035257568359375, 0.30393276977539063, 0.303716552734375, 0.30663705444335937, 0.30543701171875, 0.3056763000488281, 0.30543463134765625, 0.3055421447753906, 0.30534713745117187, 0.3070427551269531, 0.30533566284179686, 0.3063771057128906, 0.3056683654785156, 0.3054111633300781, 0.306484130859375, 0.3053076477050781, 0.3061939697265625, 0.30564398193359377, 0.30643405151367187, 0.3052503051757812, 
0.30523577880859376, 0.3058382263183594, 0.30632757568359376]",tokens/s,3.283915925237244,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4021.9648,4435.345408,0.0,4051.697664,3779.100672,s,1,10.2070712890625,10.2070712890625,0.0,10.2070712890625,10.2070712890625,10.2070712890625,10.2070712890625,[10.2070712890625],,kWh,7.715204198331473e-05,8.502961038291074e-06,2.447251957798935e-05,0.00011012752259959515,,MB,2012.377088,4640.866304,0.0,4225.76128,4075.06944,s,10,2.7818113098144535,0.27818113098144537,0.0028002965807720974,0.2787837829589844,0.28046252746582034,0.28060687408447266,0.28072235137939455,"[0.2706141967773438, 0.27662493896484375, 0.27833132934570315, 0.27807131958007814, 0.28043045043945314, 0.27802703857421873, 0.27923623657226565, 0.2795105895996094, 0.280751220703125, 0.2802139892578125]",tokens/s,920.2637112618366,kWh,8.129749220022067e-06,8.9655983702765e-07,5.419417915777935e-06,1.4445726972827652e-05,tokens/kWh,17721503.423229225,MB,2025.979904,4909.30176,0.0,4494.196736,4256.850944,s,10,92.37178320312499,9.237178320312498,0.006351546332044287,9.23700634765625,9.24316787109375,9.245747021484375,9.247810341796875,"[9.2315048828125, 9.248326171875, 9.2403583984375, 9.22358984375, 9.2356162109375, 9.2357255859375, 9.2350556640625, 9.240724609375, 9.2425947265625, 9.238287109375]",tokens/s,6.820264567315258,kWh,0.0002739971094970614,3.022344969272627e-05,0.00018209792654242106,0.00048631848573220876,tokens/kWh,129544.73631646186,,s,630,92.36785641479504,0.14661564510284908,0.0003885311741968371,0.1465548858642578,0.14709242706298828,0.14736105880737305,0.14789856201171875,"[0.1478621826171875, 0.14613912963867187, 0.14608998107910157, 0.14662655639648436, 0.14628594970703124, 0.1459718475341797, 0.14628659057617188, 0.14630239868164063, 0.1458891143798828, 0.14601699829101564, 0.1463152618408203, 0.14641497802734374, 0.14634048461914062, 0.1466078338623047, 0.1473885192871094, 0.14627655029296874, 0.14650778198242187, 0.14616152954101563, 0.14607168579101562, 0.14630268859863282, 0.14650192260742187, 0.1464832000732422, 0.14622105407714844, 0.1461923828125, 0.14767893981933594, 0.14645277404785156, 0.14634815979003907, 0.14645030212402343, 0.14663066101074218, 0.14626937866210937, 0.14622511291503906, 0.14620553588867188, 0.146735107421875, 0.14651766967773439, 0.14637049865722657, 0.14663491821289062, 0.1464959716796875, 0.14665203857421874, 0.14659881591796875, 0.146734375, 0.14660887145996093, 0.14661209106445314, 0.14645074462890625, 0.14634188842773438, 0.14678807067871094, 0.1467147216796875, 0.1467516174316406, 0.1463256378173828, 0.14635392761230467, 0.14640467834472656, 0.14632415771484375, 0.146370361328125, 0.1467311706542969, 0.1468063659667969, 0.1466262664794922, 0.14656358337402345, 0.1463501739501953, 0.1469370880126953, 0.14645132446289064, 0.14697471618652344, 0.1469706268310547, 0.1474846649169922, 0.14693376159667967, 
0.1478639373779297, 0.14599510192871093, 0.14595578002929688, 0.14644645690917968, 0.14641343688964845, 0.14620848083496094, 0.14700367736816405, 0.14676377868652343, 0.1463357391357422, 0.1461473846435547, 0.14624957275390624, 0.1461636505126953, 0.14665948486328126, 0.14637382507324218, 0.1465100555419922, 0.1463548126220703, 0.14623699951171876, 0.1465771484375, 0.14671353149414063, 0.14697853088378907, 0.14691087341308592, 0.14686029052734376, 0.14694613647460938, 0.14698291015625, 0.14676377868652343, 0.14656022644042968, 0.14689564514160156, 0.14668360900878907, 0.14699781799316405, 0.14646800231933593, 0.14671125793457032, 0.14640921020507813, 0.14639744567871094, 0.14688035583496092, 0.14710719299316405, 0.14640786743164064, 0.14637689208984375, 0.14638316345214844, 0.1461676483154297, 0.14617379760742188, 0.14700749206542968, 0.14653439331054688, 0.14673715209960939, 0.1470382080078125, 0.14701542663574219, 0.1470909423828125, 0.1472089538574219, 0.1473632354736328, 0.1471494140625, 0.14700749206542968, 0.14711532592773438, 0.14674806213378908, 0.14697474670410157, 0.14714207458496092, 0.14713912963867187, 0.1475070037841797, 0.14740614318847656, 0.1472973175048828, 0.14766677856445312, 0.147378173828125, 0.14793728637695314, 0.1470054473876953, 0.14742527770996094, 0.148451904296875, 0.1466138916015625, 0.1477966766357422, 0.1467752685546875, 0.14671133422851562, 0.14637370300292968, 0.14771827697753906, 0.14676419067382812, 0.14633155822753907, 0.14636918640136717, 0.1464443817138672, 0.14596844482421875, 0.14639149475097657, 0.1466201934814453, 0.14660179138183593, 0.14655894470214845, 0.1470529022216797, 0.1465973815917969, 0.14680706787109374, 0.14684153747558593, 0.14728640747070312, 0.14723196411132813, 0.14682322692871094, 0.14609458923339844, 0.1464793243408203, 0.14653990173339843, 0.14680128479003907, 0.14698086547851563, 0.14684774780273438, 0.14675935363769532, 0.14638914489746094, 0.14628627014160156, 0.14638121032714843, 0.14651158142089843, 0.1465040283203125, 0.14635836791992188, 0.14625782775878907, 0.146155517578125, 0.1461279754638672, 0.14642268371582032, 0.1466153564453125, 0.14655789184570311, 0.14640931701660156, 0.1472659454345703, 0.1468966064453125, 0.14654261779785155, 0.14741526794433593, 0.14724073791503905, 0.14684915161132814, 0.14645925903320312, 0.14687846374511718, 0.1463541717529297, 0.14612240600585938, 0.14695657348632812, 0.1466549072265625, 0.1463422088623047, 0.1465467529296875, 0.1464006652832031, 0.146606689453125, 0.14647821044921874, 0.14656118774414062, 0.14652252197265625, 0.1462906951904297, 0.14744621276855469, 0.1459773406982422, 0.14601171875, 0.14662815856933595, 0.1460579833984375, 0.1461658935546875, 0.14755859375, 0.14649037170410156, 0.14614630126953124, 0.14605087280273438, 0.14642381286621095, 0.14622642517089843, 0.14662733459472657, 0.14659175109863282, 0.14639129638671874, 0.14628428649902345, 0.14592819213867186, 0.1462057647705078, 0.14632847595214843, 0.1463954315185547, 0.14630281066894532, 0.14637046813964844, 0.14627212524414063, 0.1460880584716797, 0.14619853210449218, 0.14628997802734375, 0.1465924530029297, 0.14650163269042968, 0.14643795776367188, 0.14650775146484374, 0.14634573364257814, 0.1462337646484375, 0.14628582763671874, 0.14633059692382813, 0.14648655700683594, 0.14705235290527344, 0.14620748901367187, 0.14611001586914063, 0.14611279296875, 0.1464198760986328, 0.14626109313964844, 0.14623193359375, 0.1466383056640625, 0.14632655334472655, 0.14621673583984374, 0.14628250122070313, 0.14655247497558593, 
0.14684194946289061, 0.14640333557128907, 0.1463905029296875, 0.1465148162841797, 0.146405029296875, 0.14638490295410156, 0.14647879028320313, 0.1464894714355469, 0.14626007080078124, 0.14648739624023438, 0.14630047607421875, 0.14623114013671876, 0.14657746887207032, 0.14648136901855469, 0.14661459350585937, 0.14676377868652343, 0.14759117126464844, 0.14581350708007812, 0.14615962219238282, 0.14692095947265624, 0.14605792236328125, 0.14624111938476564, 0.14765898132324218, 0.14681613159179688, 0.14610316467285156, 0.14651084899902345, 0.1462159423828125, 0.1459603271484375, 0.14657373046875, 0.1469522247314453, 0.14648544311523437, 0.1462733154296875, 0.14613917541503907, 0.14635414123535156, 0.14636131286621093, 0.14670828247070314, 0.14649363708496094, 0.1463723907470703, 0.14599395751953126, 0.1459928894042969, 0.14616864013671876, 0.14629580688476562, 0.14656410217285157, 0.14676991271972656, 0.14638092041015624, 0.1463356170654297, 0.14631117248535155, 0.14618949890136718, 0.14639187622070313, 0.14659584045410157, 0.14673216247558593, 0.14663360595703126, 0.14643353271484374, 0.14611917114257814, 0.14651565551757811, 0.1466160888671875, 0.14645068359375, 0.14657078552246094, 0.1470015411376953, 0.14679916381835936, 0.14659175109863282, 0.14693785095214842, 0.14725631713867188, 0.1470371856689453, 0.14708326721191406, 0.14688665771484374, 0.14723861694335938, 0.14653797912597658, 0.1469120330810547, 0.14677548217773437, 0.14726345825195314, 0.14692198181152344, 0.1471448059082031, 0.14637794494628906, 0.14639984130859374, 0.1466554260253906, 0.14669978332519532, 0.14686668395996094, 0.14704025268554688, 0.14775296020507814, 0.14604566955566406, 0.14659584045410157, 0.1467166748046875, 0.14644645690917968, 0.14670835876464844, 0.1480089569091797, 0.1474700469970703, 0.14605162048339843, 0.14647474670410157, 0.146235107421875, 0.14652239990234375, 0.14708476257324218, 0.14650003051757812, 0.1463841552734375, 0.14620323181152345, 0.1461393585205078, 0.14628790283203125, 0.14669888305664064, 0.14679833984375, 0.14673341369628906, 0.1467310028076172, 0.14652995300292967, 0.14619850158691405, 0.14636674499511718, 0.14651548767089845, 0.14642233276367186, 0.1466727294921875, 0.14656130981445312, 0.1463978271484375, 0.14618214416503905, 0.14625382995605468, 0.14713258361816406, 0.14666940307617188, 0.14647654724121092, 0.14639974975585937, 0.14648725891113282, 0.14613664245605468, 0.14627043151855468, 0.146518310546875, 0.14673312377929687, 0.14659510803222656, 0.14663539123535158, 0.14654022216796875, 0.14621504211425781, 0.1464283447265625, 0.14677171325683594, 0.14655427551269531, 0.1465321350097656, 0.14638960266113282, 0.14643423461914062, 0.14631324768066406, 0.14630697631835937, 0.14715225219726563, 0.14716796875, 0.14694717407226562, 0.14681552124023436, 0.14657369995117187, 0.14644837951660156, 0.14674534606933592, 0.1469993896484375, 0.1466243133544922, 0.1466178283691406, 0.14827728271484375, 0.1461677703857422, 0.14634201049804688, 0.14629209899902343, 0.1461920928955078, 0.14631964111328125, 0.14689036560058594, 0.1463468780517578, 0.1461944580078125, 0.14626608276367187, 0.1465384979248047, 0.14640861511230469, 0.14640847778320312, 0.14730592346191407, 0.1467088623046875, 0.14636642456054688, 0.14655007934570313, 0.14653514099121093, 0.146845703125, 0.14693939208984375, 0.14671504211425782, 0.14658755493164063, 0.1462640380859375, 0.1461291198730469, 0.1463152618408203, 0.14669815063476563, 0.14650787353515626, 0.14660202026367186, 0.1462989501953125, 0.14610421752929686, 
0.14631936645507812, 0.1462804412841797, 0.14641766357421876, 0.14663066101074218, 0.14660403442382813, 0.14652621459960938, 0.14620860290527343, 0.1464034881591797, 0.14639231872558595, 0.14638362121582033, 0.1465548858642578, 0.14674534606933592, 0.14643405151367186, 0.1465548858642578, 0.14654188537597657, 0.14645718383789064, 0.14737417602539063, 0.14709078979492188, 0.1466571502685547, 0.1466071319580078, 0.1469291229248047, 0.14639683532714845, 0.14659677124023437, 0.14658335876464842, 0.14699885559082032, 0.14664151000976564, 0.14698060607910157, 0.1468620147705078, 0.14638905334472657, 0.14657289123535155, 0.14703817749023437, 0.1466741485595703, 0.14670025634765624, 0.14766114807128905, 0.14602444458007813, 0.1462845458984375, 0.1462988739013672, 0.14613037109375, 0.14661203002929687, 0.14715367126464843, 0.14688050842285155, 0.14628886413574219, 0.1463224639892578, 0.14615132141113282, 0.1462236785888672, 0.14662998962402343, 0.1474193878173828, 0.14650009155273438, 0.14654815673828125, 0.14676658630371095, 0.14622105407714844, 0.14625296020507814, 0.14728207397460938, 0.1473583984375, 0.14688665771484374, 0.14663475036621093, 0.14630867004394532, 0.14615391540527345, 0.146440185546875, 0.14672691345214844, 0.14662838745117188, 0.14692311096191407, 0.14663743591308595, 0.14634512329101562, 0.14625404357910157, 0.14641600036621094, 0.1468376922607422, 0.1465975341796875, 0.14650218200683593, 0.146765625, 0.14674949645996094, 0.1461370849609375, 0.14663626098632812, 0.1469199676513672, 0.14656521606445314, 0.1467923583984375, 0.14662042236328124, 0.14634393310546875, 0.14610226440429688, 0.1463357391357422, 0.14666163635253907, 0.1467860870361328, 0.14663267517089842, 0.14666342163085938, 0.14662156677246094, 0.1464963836669922, 0.1475966339111328, 0.14708607482910158, 0.14672233581542968, 0.14681304931640626, 0.1467476501464844, 0.14654464721679689, 0.14673513793945311, 0.1481992950439453, 0.14791270446777344, 0.14684783935546875, 0.14823695373535156, 0.14652415466308594, 0.14624359130859374, 0.14635189819335936, 0.14626179504394532, 0.14663862609863282, 0.14695913696289062, 0.1468240966796875, 0.14634605407714843, 0.14665843200683593, 0.14660585021972655, 0.14647926330566408, 0.1469561309814453, 0.14725753784179688, 0.1470543975830078, 0.14649139404296874, 0.14715011596679686, 0.14656790161132813, 0.1462906951904297, 0.14734646606445312, 0.14712535095214843, 0.14668978881835937, 0.14683718872070312, 0.14654508972167968, 0.14655453491210937, 0.14710818481445312, 0.14678016662597657, 0.14672709655761718, 0.14663848876953126, 0.14645263671875, 0.14636483764648436, 0.14650717163085938, 0.14683576965332032, 0.14688397216796875, 0.14665560913085937, 0.14662022399902344, 0.1464647674560547, 0.146501953125, 0.14653645324707032, 0.1469484100341797, 0.14700306701660157, 0.14666307067871093, 0.14691494750976564, 0.14638922119140624, 0.14665983581542968, 0.14671157836914062, 0.14674838256835937, 0.14665744018554688, 0.14656221008300782, 0.14674412536621093, 0.14655445861816407, 0.14637286376953124, 0.1464279327392578, 0.14670771789550782, 0.1467788848876953, 0.1467038116455078, 0.14690162658691405, 0.14648550415039063, 0.14632546997070311, 0.1464722900390625, 0.14691571044921875, 0.1467845458984375, 0.14671958923339845, 0.1478038787841797, 0.1464119110107422, 0.14608908081054686, 0.14619500732421875, 0.14592031860351562, 0.1463582763671875, 0.14710578918457032, 0.14657125854492187, 0.1463009338378906, 0.14608082580566406, 0.1462158660888672, 0.14628582763671874, 0.1465269775390625, 
0.14701289367675782, 0.14687446594238282, 0.14624819946289064, 0.14657958984375, 0.14633267211914064, 0.14642892456054687, 0.1466940460205078, 0.14728793334960938, 0.1466590118408203, 0.1468646697998047, 0.1468345947265625, 0.14670431518554689, 0.1470145568847656, 0.14703433227539062, 0.14674227905273438, 0.14656320190429686, 0.1464797821044922, 0.14622105407714844, 0.1465232391357422, 0.14661672973632814, 0.1468074188232422, 0.1467393035888672, 0.14659561157226564, 0.14646885681152344, 0.14621490478515625, 0.1465814971923828, 0.146451904296875, 0.14670008850097657, 0.14665126037597656, 0.14647120666503907, 0.14638739013671875, 0.14642198181152344, 0.14656069946289063, 0.14712422180175783, 0.14704573059082032, 0.14671676635742187, 0.1466373748779297, 0.14638284301757812, 0.1464832305908203, 0.14652729797363281, 0.14722525024414063, 0.14693801879882812, 0.14667990112304688, 0.1468415985107422, 0.146845703125, 0.14647027587890624, 0.14668045043945313, 0.14706013488769532, 0.1467684783935547, 0.14670848083496094]",tokens/s,6.8205545138004355,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,2193.03936,2479.816704,0.0,2101.346304,1978.345472,s,1,9.1729736328125,9.1729736328125,0.0,9.1729736328125,9.1729736328125,9.1729736328125,9.1729736328125,[9.1729736328125],,kWh,5.379501379585842e-05,5.926686920874674e-06,1.6791680099992323e-05,7.651338081672542e-05,,MB,2226.229248,2817.458176,0.0,2409.627648,2250.334208,s,10,1.6971580505371096,0.16971580505371092,0.0006403378374378439,0.16959552001953127,0.17055413208007814,0.1706893051147461,0.17079744354248047,"[0.16978909301757814, 0.16916297912597655, 0.1688795166015625, 0.169161376953125, 0.16909584045410156, 0.17082447814941407, 0.16940194702148437, 0.17005043029785155, 0.1705240936279297, 0.17026829528808593]",tokens/s,1508.4040046770083,kWh,5.167874112865783e-06,5.696213223745739e-07,3.4221128741403476e-06,9.159608309380703e-06,tokens/kWh,27948793.371199146,MB,2240.802816,2859.401216,0.0,2451.570688,2333.38624,s,10,23.642529052734375,2.3642529052734376,0.01051823980940855,2.363437744140625,2.3755578857421873,2.379169201660156,2.3820582543945314,"[2.36325146484375, 2.34633642578125, 2.373258544921875, 2.354318115234375, 2.357580078125, 2.370532470703125, 2.37475537109375, 2.356092041015625, 2.3636240234375, 2.382780517578125]",tokens/s,26.64689545669131,kWh,6.805154646213757e-05,7.506256485265811e-06,3.445113964666052e-05,0.00011000894259406391,tokens/kWh,572680.7158984499,,s,630,23.63888668060304,0.037522042350163536,0.0005600282889955902,0.03745980834960938,0.037916993331909184,0.038079124450683594,0.03949451812744141,"[0.03775897598266602, 0.03779379272460937, 0.038078369140625, 0.03741500854492188, 0.03747840118408203, 0.037695297241210936, 0.03734102249145508, 0.037335391998291015, 0.037410175323486325, 0.037184127807617186, 0.03738447952270508, 0.037238529205322266, 0.03717724609375, 0.037398208618164064, 0.03736409759521484, 0.03734912109375, 0.03719193649291992, 0.03741247940063477, 0.03741491317749023, 0.03741734313964844, 0.041785343170166016, 0.03758694458007812, 0.037553184509277346, 0.03763644790649414, 0.03762649536132812, 0.037659774780273436, 0.03764108657836914, 0.037555648803710935, 0.03748921585083008, 0.038176769256591796, 0.03778556823730469, 0.037515296936035156, 0.0376545295715332, 0.03802316665649414, 0.03841228866577148, 0.03777536010742188, 0.0376360969543457, 0.03759513473510742, 0.03750707244873047, 0.03772415924072266, 0.03765043258666992, 0.03746406555175781, 0.03733708953857422, 0.03752755355834961, 0.037136192321777346, 0.03715091323852539, 0.03733913421630859, 0.03724662399291992, 0.037095775604248045, 0.037123233795166015, 0.03704713439941406, 0.03699043273925781, 0.03705904006958008, 0.03711801528930664, 0.03701760101318359, 0.03708108901977539, 0.0371671028137207, 0.03705855941772461, 0.03710976028442383, 0.0370683822631836, 0.03711427307128906, 0.03750092697143555, 0.037311904907226565, 0.037195297241210935, 
0.03727420806884765, 0.03734732818603516, 0.036923393249511716, 0.037070846557617186, 0.036806560516357424, 0.03680207824707031, 0.0368741455078125, 0.03698041534423828, 0.037053409576416015, 0.03721011352539062, 0.037269504547119144, 0.03717468643188476, 0.03718000030517578, 0.03706675338745117, 0.03712128067016601, 0.037206783294677734, 0.037592960357666017, 0.03731212615966797, 0.037515777587890625, 0.037564414978027344, 0.03721033477783203, 0.03724620819091797, 0.03702790451049805, 0.03704636764526367, 0.03699955368041992, 0.037070846557617186, 0.03708671951293945, 0.037118465423583984, 0.03706265640258789, 0.03720393753051758, 0.03718143844604492, 0.037170497894287106, 0.037053150177001955, 0.03717900848388672, 0.03728627014160156, 0.0372674560546875, 0.03721555328369141, 0.03769955062866211, 0.03855571365356445, 0.03753209686279297, 0.03760547256469727, 0.03785059356689453, 0.03735004806518555, 0.037504638671875, 0.03748281478881836, 0.037455936431884766, 0.037420639038085936, 0.03731036758422852, 0.0371932144165039, 0.03717552185058594, 0.03716790390014649, 0.036980735778808595, 0.036988929748535154, 0.03703603363037109, 0.03684281539916992, 0.036843936920166014, 0.03693612670898438, 0.03801891326904297, 0.03712979125976563, 0.03765497589111328, 0.03709247970581055, 0.037181983947753905, 0.03742697525024414, 0.03733071899414062, 0.03742969512939453, 0.03702783966064453, 0.03722854232788086, 0.03709302520751953, 0.037953056335449216, 0.03773049545288086, 0.037472862243652344, 0.037324127197265626, 0.038072608947753904, 0.03735798263549805, 0.03742924880981445, 0.03725628662109375, 0.03719465637207031, 0.037351425170898435, 0.03766678237915039, 0.03764543914794922, 0.037643169403076174, 0.0380673599243164, 0.03806636810302735, 0.03745846557617188, 0.03755952072143555, 0.03758172988891602, 0.037617374420166015, 0.03760924911499024, 0.03788851165771484, 0.037740287780761717, 0.03776691055297852, 0.03771814346313476, 0.03770518493652344, 0.037353439331054686, 0.037491649627685544, 0.037711742401123044, 0.037533824920654296, 0.037289535522460934, 0.03724435043334961, 0.03721887969970703, 0.037378494262695315, 0.03758489608764649, 0.038430721282958984, 0.039335006713867186, 0.03879209518432617, 0.03773440170288086, 0.03777283096313477, 0.03781475067138672, 0.03753574371337891, 0.03771731185913086, 0.03781644821166992, 0.03751129531860352, 0.03741881561279297, 0.03736025619506836, 0.03739033508300781, 0.038929920196533206, 0.03746441650390625, 0.03734339141845703, 0.0374574089050293, 0.03740016174316406, 0.037380031585693356, 0.03789718246459961, 0.03990486526489258, 0.037756767272949215, 0.03751785659790039, 0.037240768432617186, 0.037275390625, 0.037042495727539065, 0.03672265625, 0.037036064147949216, 0.03704627227783203, 0.03698483276367188, 0.03707040023803711, 0.036991199493408206, 0.036569313049316404, 0.03697868728637695, 0.036875328063964846, 0.03729094314575195, 0.037318656921386716, 0.03746406555175781, 0.037324798583984374, 0.03741408157348633, 0.03748332977294922, 0.03749478530883789, 0.03752278518676758, 0.03727951812744141, 0.03738508987426758, 0.03735491180419922, 0.03725075149536133, 0.03741788864135742, 0.03729612731933594, 0.03734649658203125, 0.03723468780517578, 0.037352256774902344, 0.037532958984375, 0.037474174499511716, 0.037690208435058596, 0.03752671813964844, 0.03752979278564453, 0.03753228759765625, 0.03762956619262695, 0.03766425704956055, 0.03747964859008789, 0.03762041473388672, 0.03766748809814453, 0.03750534439086914, 0.03714361572265625, 
0.037216991424560544, 0.03716041564941406, 0.03721907043457031, 0.037736446380615234, 0.037705726623535156, 0.03735321426391602, 0.03747865676879883, 0.03781167984008789, 0.03741750335693359, 0.03735251235961914, 0.03740563201904297, 0.03752675247192383, 0.03755254364013672, 0.03767539215087891, 0.03807027053833008, 0.037240543365478516, 0.037500320434570314, 0.037144447326660154, 0.03721932983398438, 0.037039424896240236, 0.038120128631591796, 0.03791011047363281, 0.03751567840576172, 0.03754703903198242, 0.037262271881103516, 0.037161121368408205, 0.0374015998840332, 0.037282688140869144, 0.03699647903442383, 0.03713702392578125, 0.03702947235107422, 0.03731292724609375, 0.037162784576416016, 0.03714275360107422, 0.036896766662597655, 0.03702067184448242, 0.03687936019897461, 0.037077056884765626, 0.037232574462890626, 0.04012441635131836, 0.03716505432128906, 0.03718143844604492, 0.037105056762695314, 0.036894527435302735, 0.03709545516967774, 0.03761356735229492, 0.03732345581054688, 0.03726454544067383, 0.03724176025390625, 0.03714665603637695, 0.03705033493041992, 0.03695135879516601, 0.0372558708190918, 0.03705241775512695, 0.03708265686035156, 0.03716553497314453, 0.0373430404663086, 0.037947582244873046, 0.03759308624267578, 0.037477535247802736, 0.0374444465637207, 0.03737571334838867, 0.03731689453125, 0.03743923187255859, 0.03772848129272461, 0.037654560089111326, 0.037563392639160156, 0.03746223831176758, 0.03731740951538086, 0.037319934844970704, 0.037235073089599606, 0.03752793502807617, 0.037438816070556644, 0.037488670349121095, 0.03751356887817383, 0.03760976028442383, 0.037582847595214845, 0.03764223861694336, 0.038079742431640626, 0.03800051116943359, 0.037965888977050784, 0.037705982208251956, 0.03771244812011719, 0.03801702499389648, 0.03767468643188476, 0.0375951042175293, 0.03792521667480469, 0.03787545776367188, 0.037763198852539065, 0.037460063934326174, 0.037695518493652345, 0.037615230560302734, 0.037541889190673826, 0.0372259521484375, 0.03718656158447266, 0.03723811340332031, 0.037369953155517575, 0.03760121536254883, 0.03746870422363281, 0.037375904083251955, 0.03855708694458008, 0.03821020889282226, 0.037978145599365236, 0.03766067123413086, 0.03824435043334961, 0.037620864868164065, 0.03738438415527344, 0.037278400421142575, 0.03757056045532227, 0.0373125114440918, 0.03728815841674805, 0.03745769500732422, 0.037607295989990235, 0.03721993637084961, 0.037542015075683596, 0.03731907272338867, 0.03750707244873047, 0.037353473663330077, 0.03755382537841797, 0.03911033630371094, 0.03843052673339844, 0.037326976776123046, 0.03725795364379883, 0.037160961151123044, 0.03701964950561523, 0.03705759811401367, 0.0370656623840332, 0.037236736297607424, 0.037572608947753904, 0.03762790298461914, 0.03747401428222656, 0.03750358581542969, 0.03751494216918945, 0.037760704040527344, 0.03767062377929688, 0.03762361526489258, 0.038617889404296876, 0.037879104614257815, 0.03803596878051758, 0.03900998306274414, 0.03786953735351563, 0.037626399993896484, 0.03748863983154297, 0.03746524810791016, 0.03762876892089844, 0.03745177459716797, 0.037408767700195314, 0.03765248107910156, 0.03742095947265625, 0.03797615814208984, 0.03743743896484375, 0.03765584182739258, 0.037318431854248046, 0.03743593597412109, 0.03728425598144531, 0.03759132766723633, 0.03755548858642578, 0.03751776123046875, 0.03746332931518555, 0.037704414367675784, 0.037557662963867186, 0.037573215484619144, 0.03774399948120117, 0.03785587310791016, 0.037659774780273436, 0.03779593658447265, 0.03774294281005859, 
0.03753209686279297, 0.037453792572021485, 0.037604927062988285, 0.03750931167602539, 0.037289886474609374, 0.037080703735351564, 0.037157630920410155, 0.037101600646972654, 0.03736716842651367, 0.03711036682128906, 0.03741686248779297, 0.037201217651367184, 0.037167743682861326, 0.03711616134643555, 0.03733084869384766, 0.03727974319458008, 0.03737190246582031, 0.037294078826904296, 0.04583161544799805, 0.03788857650756836, 0.037367809295654295, 0.03730022430419922, 0.039833377838134766, 0.03738236618041992, 0.03743097686767578, 0.03802758407592773, 0.03748230361938477, 0.03732265472412109, 0.037488319396972655, 0.037963871002197266, 0.03834726333618164, 0.0378185920715332, 0.03787139129638672, 0.037580608367919925, 0.03734956741333008, 0.038158336639404294, 0.03750841522216797, 0.03760726547241211, 0.037380958557128904, 0.03736166381835938, 0.037461505889892575, 0.03765209579467774, 0.03763494491577148, 0.037566463470458986, 0.037722110748291016, 0.03759718322753906, 0.037533695220947266, 0.037502944946289064, 0.03746614456176758, 0.037920768737792966, 0.037765121459960936, 0.03767091369628906, 0.03767232131958008, 0.03799302291870117, 0.037889408111572265, 0.03748115158081055, 0.037483551025390624, 0.037497825622558593, 0.03741667175292969, 0.03767715072631836, 0.03725331115722656, 0.037253120422363284, 0.0372305908203125, 0.03743552017211914, 0.037316478729248044, 0.03728998565673828, 0.03703564834594727, 0.037017982482910154, 0.036939777374267575, 0.03721420669555664, 0.03711385726928711, 0.03721011352539062, 0.037253120422363284, 0.03718454360961914, 0.037026782989501954, 0.03698271942138672, 0.03712416076660156, 0.03710566329956055, 0.037144577026367184, 0.03727155303955078, 0.03725641632080078, 0.03713449478149414, 0.03720153427124023, 0.03718896102905273, 0.037589950561523436, 0.03752387237548828, 0.037484416961669924, 0.03752975845336914, 0.03790195083618164, 0.03746486282348633, 0.03738201522827148, 0.03745955276489258, 0.03717161560058594, 0.03737104034423828, 0.037283870697021486, 0.03721433639526367, 0.037493217468261716, 0.0374950065612793, 0.03723263931274414, 0.038856639862060546, 0.037209888458251954, 0.037208351135253906, 0.03705241775512695, 0.03745177459716797, 0.03710079956054688, 0.037190399169921874, 0.037434688568115236, 0.03727171325683594, 0.037304862976074216, 0.03733299255371094, 0.03742281723022461, 0.03701148986816406, 0.03746022415161133, 0.03747635269165039, 0.03747779083251953, 0.037478111267089845, 0.03740147018432617, 0.03736166381835938, 0.037449726104736326, 0.037519073486328124, 0.037480735778808595, 0.03953664016723633, 0.037482143402099606, 0.03733334350585937, 0.03733078384399414, 0.03736940765380859, 0.03719782257080078, 0.03729059219360351, 0.03724844741821289, 0.03728236770629883, 0.03718352127075195, 0.03737187194824219, 0.037318656921386716, 0.0375186882019043, 0.0374191665649414, 0.037423614501953126, 0.03713148880004883, 0.03875619125366211, 0.03731296157836914, 0.037497344970703124, 0.037945343017578126, 0.0377446403503418, 0.037599231719970705, 0.03726457595825195, 0.037409599304199216, 0.0373996810913086, 0.03803839874267578, 0.037350849151611326, 0.037450302124023435, 0.03759308624267578, 0.03736700820922852, 0.03721091079711914, 0.03732070541381836, 0.03716505432128906, 0.03726131057739258, 0.03702169418334961, 0.03715878295898437, 0.03724476623535156, 0.03731875228881836, 0.037388481140136716, 0.03769900894165039, 0.03761337661743164, 0.0402250862121582, 0.03759711837768555, 0.037513408660888675, 0.037479774475097656, 
0.037425537109375, 0.03757497787475586, 0.037986560821533205, 0.03748614501953125, 0.03744198226928711, 0.03753334426879883, 0.037437793731689456, 0.037574657440185545, 0.03746831893920898, 0.03762076950073242, 0.037661376953125, 0.03761695861816406, 0.038282047271728514, 0.03799763107299805, 0.03772099304199219, 0.037873409271240235, 0.03772982406616211, 0.03762400054931641, 0.037997119903564455, 0.037658145904541016, 0.03756284713745117, 0.037582847595214845, 0.037556224822998044, 0.037895198822021484, 0.03768131256103516, 0.037753662109375, 0.037738494873046875, 0.03783065414428711, 0.037730175018310545, 0.03791680145263672, 0.03768016052246094, 0.03784188842773437, 0.03772956848144531, 0.03761190414428711, 0.037937503814697265, 0.039344127655029294, 0.037795841217041014, 0.03765657424926758, 0.037831806182861326, 0.03783155059814453, 0.037550079345703126, 0.03769865417480469, 0.03780495834350586, 0.03763811111450195, 0.03777123260498047, 0.037801185607910154, 0.03765129470825195, 0.03775446319580078, 0.03780239868164063, 0.037765121459960936, 0.037760799407958984, 0.03795785522460938, 0.03786678314208984, 0.03768787384033203, 0.03939139175415039, 0.03813478469848633, 0.03792998504638672, 0.038580223083496096, 0.037754878997802735, 0.03793100738525391, 0.03791872024536133, 0.03775888061523437, 0.03782457733154297, 0.03768112182617187, 0.03767055892944336, 0.038071903228759765]",tokens/s,26.651001314581737,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1308.635136,1083.113472,0.0,704.643072,675.01056,s,1,8.2532353515625,8.2532353515625,0.0,8.2532353515625,8.2532353515625,8.2532353515625,8.2532353515625,[8.2532353515625],,kWh,3.6389911249989386e-05,4.006824042660556e-06,1.103473104999142e-05,5.143146634264136e-05,,MB,1354.149888,1401.880576,0.0,994.050048,942.608384,s,10,0.3598317146301269,0.0359831714630127,0.00035803077395493477,0.03605726432800293,0.036346069717407226,0.036394411277770995,0.03643308452606201,"[0.036157215118408206, 0.03584652709960937, 0.0359681282043457, 0.0363353271484375, 0.036146400451660156, 0.035140865325927736, 0.035953792572021484, 0.036442752838134765, 0.035644161224365235, 0.0361965446472168]",tokens/s,7114.436821199705,kWh,1.1674981185092925e-06,1.2870139494380178e-07,7.71797872860529e-07,2.0679973863136232e-06,tokens/kWh,123791258.9707578,MB,1382.817792,1412.366336,0.0,1004.535808,942.610944,s,10,20.393948486328124,2.0393948486328126,0.01703011907324134,2.038789245605469,2.056566943359375,2.067591943359375,2.076411943359375,"[2.078616943359375, 2.022477294921875, 2.0407437744140626, 2.0478001708984377, 2.01685546875, 2.02565283203125, 2.0399276123046874, 2.03765087890625, 2.0301065673828127, 2.054116943359375]",tokens/s,30.891516688018747,kWh,5.969731842607485e-05,6.584410114205411e-06,2.342149925953884e-05,8.970322779981909e-05,tokens/kWh,702315.8647154841,,s,630,20.391595325469936,0.032367611627730115,0.0011623937798228494,0.03235038375854492,0.03276208305358887,0.03306687450408936,0.03457327724456788,"[0.032945758819580076, 0.03270697784423828, 0.03256467056274414, 0.032557151794433595, 0.03245043182373047, 0.032653919219970705, 0.03277004623413086, 0.03283148956298828, 0.032628734588623046, 0.03257548904418945, 0.03254185485839844, 0.03249641418457031, 0.03252844619750977, 0.03244611358642578, 0.03246236801147461, 0.03246368026733398, 0.03249356842041016, 0.03268198394775391, 0.032573440551757815, 0.032427967071533205, 0.03231955337524414, 0.03232998275756836, 0.032701759338378905, 0.03254316711425781, 0.03232153701782227, 0.03231875228881836, 0.03239737701416016, 0.032473758697509764, 0.03245209503173828, 0.03293833541870117, 0.033382720947265625, 0.03253228759765625, 0.03245264053344726, 0.032514049530029294, 0.03258691024780273, 0.033245601654052735, 0.03255340957641602, 0.032478816986083986, 0.03237081527709961, 0.0324997444152832, 0.032422046661376956, 0.03253667068481445, 0.03251740646362305, 0.032688865661621096, 0.032537662506103515, 0.03286671829223633, 0.033009185791015624, 0.05799628829956055, 0.032523902893066406, 0.03263116836547852, 0.03252975845336914, 0.032328353881835935, 0.032519966125488284, 0.03238140869140625, 0.03269132614135742, 0.03281164932250977, 0.03255295944213867, 0.03281875228881836, 0.03240915298461914, 0.03272512054443359, 0.03272985458374023, 0.03243008041381836, 0.03250719833374023, 0.03238451385498047, 0.03219494247436523, 0.03195916748046875, 0.031889408111572266, 0.03160211181640625, 0.03178553581237793, 0.03163113594055176, 0.0315446720123291, 0.0314069766998291, 0.03137740707397461, 0.03139174461364746, 0.03187628746032715, 0.031488096237182614, 0.03141647911071777, 0.03145376014709473, 0.03141340827941894, 0.03149519920349121, 0.031450944900512694, 0.03138710403442383, 0.03148633575439453, 0.031493759155273436, 0.031769119262695315, 0.03166543960571289, 0.03191500854492187, 0.031811296463012693, 0.031680511474609374, 0.03176563262939453, 0.03262758255004883, 0.03213721466064453, 0.03201811218261719, 
0.03224947357177734, 0.03204166412353516, 0.032077823638916016, 0.03212595367431641, 0.03212102508544922, 0.03229087829589844, 0.03224038314819336, 0.03244972610473633, 0.032482112884521484, 0.032394496917724606, 0.03239926528930664, 0.03256816101074219, 0.032833534240722655, 0.03276556777954102, 0.03247123336791992, 0.03241388702392578, 0.032524383544921875, 0.03256924819946289, 0.03275689697265625, 0.0327784309387207, 0.03262736129760742, 0.03248332977294922, 0.03246284866333008, 0.03251814270019531, 0.03245167922973633, 0.03292422485351563, 0.03252054214477539, 0.03236380767822265, 0.032341854095458984, 0.032600128173828125, 0.03242886352539062, 0.032308639526367186, 0.0321495361328125, 0.03229542541503906, 0.032237632751464844, 0.03215359878540039, 0.03252838516235351, 0.03196454429626465, 0.03180790328979492, 0.0316942081451416, 0.03197014427185058, 0.03168870353698731, 0.03179427146911621, 0.03270134353637695, 0.031899648666381834, 0.03229695892333984, 0.032898399353027345, 0.032417888641357424, 0.03241548919677734, 0.03241247940063476, 0.0324771842956543, 0.03251814270019531, 0.032487422943115234, 0.03262054443359375, 0.03268403244018555, 0.032646560668945314, 0.032532096862792965, 0.03300067138671875, 0.03275526428222656, 0.03234016036987305, 0.0322902717590332, 0.032207393646240236, 0.03193446350097656, 0.03170528030395508, 0.031592256546020506, 0.03159859275817871, 0.03169190406799317, 0.031449024200439456, 0.031463359832763674, 0.03177948760986328, 0.03204726409912109, 0.03167865562438965, 0.03156991958618164, 0.03181363105773926, 0.0323210563659668, 0.03204963302612305, 0.03239113616943359, 0.031831424713134764, 0.03178681564331055, 0.03217903900146484, 0.03362201690673828, 0.03216707229614258, 0.03198796844482422, 0.031871519088745116, 0.03590300750732422, 0.036490814208984375, 0.03466099166870117, 0.03233388900756836, 0.032553249359130856, 0.032595966339111326, 0.03259392166137695, 0.03248931121826172, 0.032390430450439454, 0.03309571075439453, 0.03256550216674805, 0.03254716873168945, 0.03239161682128906, 0.032583393096923825, 0.033255424499511715, 0.03249596786499023, 0.03322608184814453, 0.03275225448608399, 0.03313663864135742, 0.033576480865478514, 0.03252828979492187, 0.032797248840332034, 0.03262828826904297, 0.032557502746582034, 0.03256729507446289, 0.03251814270019531, 0.03244646453857422, 0.03242803192138672, 0.032521888732910155, 0.03272848129272461, 0.03248812866210937, 0.032504062652587894, 0.03296246337890625, 0.0340805778503418, 0.03313897705078125, 0.032857662200927736, 0.03274924850463867, 0.03258771133422852, 0.03278726577758789, 0.032489471435546875, 0.03242598342895508, 0.03245043182373047, 0.03275584030151367, 0.03256630325317383, 0.03263382339477539, 0.032575233459472656, 0.03218198394775391, 0.03246688079833984, 0.03239731216430664, 0.032664161682128906, 0.0326814079284668, 0.03214393615722656, 0.0321943359375, 0.03207551956176758, 0.03205363082885742, 0.03191203117370606, 0.03184623908996582, 0.0318939208984375, 0.032148223876953125, 0.03230774307250977, 0.0323070068359375, 0.03241027069091797, 0.03229872131347656, 0.032198944091796876, 0.03225347137451172, 0.03225843048095703, 0.03237897491455078, 0.03229500961303711, 0.03203881454467773, 0.032094207763671875, 0.032298561096191405, 0.03232368087768555, 0.03223382568359375, 0.0320463981628418, 0.031979455947875976, 0.03290192031860351, 0.03212265777587891, 0.03229695892333984, 0.032343681335449216, 0.032906944274902344, 0.033233600616455077, 0.03262259292602539, 0.032350208282470705, 0.03301513671875, 
0.032457374572753904, 0.03234406280517578, 0.032292736053466796, 0.03303641510009766, 0.03435852813720703, 0.032541343688964844, 0.03250124740600586, 0.032451488494873046, 0.0322413444519043, 0.03218841552734375, 0.032282752990722655, 0.03205043029785156, 0.03179583930969238, 0.03180083274841308, 0.03169100761413574, 0.03177497673034668, 0.03186073684692383, 0.031680511474609374, 0.03153267288208008, 0.03165632057189941, 0.03160678482055664, 0.03189760017395019, 0.031752191543579104, 0.03153510475158691, 0.03165184020996094, 0.0315567684173584, 0.031863199234008786, 0.03174608039855957, 0.0316208324432373, 0.03213996887207031, 0.03197686386108398, 0.031797599792480466, 0.03161500740051269, 0.031518943786621095, 0.03153097534179688, 0.031419456481933596, 0.03148080062866211, 0.03159568023681641, 0.03147878456115723, 0.03145907211303711, 0.03146761512756348, 0.031641855239868164, 0.03202624130249023, 0.031909887313842776, 0.032018207550048826, 0.03222768020629883, 0.0318764476776123, 0.031912607192993166, 0.031851871490478516, 0.03170707130432129, 0.03166896057128906, 0.031604736328125, 0.031643648147583005, 0.031528959274291994, 0.031535871505737306, 0.03139379119873047, 0.031494144439697266, 0.03159449577331543, 0.03158835220336914, 0.031692800521850584, 0.03161507225036621, 0.031493120193481446, 0.031649791717529296, 0.03149440002441406, 0.03162998390197754, 0.031389696121215824, 0.03179724884033203, 0.031680511474609374, 0.03157401657104492, 0.03162889671325684, 0.03187139129638672, 0.031892959594726565, 0.032037216186523436, 0.03231497573852539, 0.03191804885864258, 0.03211228942871094, 0.03190473556518555, 0.032481281280517575, 0.032086017608642575, 0.032233470916748046, 0.03218227386474609, 0.032476673126220705, 0.032495391845703124, 0.03254550552368164, 0.03249097442626953, 0.0324736328125, 0.032555007934570314, 0.03239052963256836, 0.03258841705322266, 0.0329727668762207, 0.033654014587402345, 0.03287871932983399, 0.0327616958618164, 0.032144191741943356, 0.032882625579833985, 0.032295230865478516, 0.03280051040649414, 0.03225600051879883, 0.03239868927001953, 0.032131614685058596, 0.03207158279418945, 0.03205961608886719, 0.03202006530761719, 0.03225027084350586, 0.03205961608886719, 0.031880319595336916, 0.032127201080322264, 0.03204463958740234, 0.0322476806640625, 0.03229372787475586, 0.03237030410766602, 0.032274944305419925, 0.032389118194580076, 0.032394496917724606, 0.03252096176147461, 0.0324156494140625, 0.03246697616577148, 0.03265475082397461, 0.03263343811035156, 0.032342239379882814, 0.03241097640991211, 0.032379329681396486, 0.03238931274414063, 0.03215955352783203, 0.03224371337890625, 0.032317440032958986, 0.032343647003173825, 0.03244809722900391, 0.03241862487792969, 0.032589534759521484, 0.032493854522705076, 0.03245817565917969, 0.032520225524902344, 0.033309215545654296, 0.03259945678710938, 0.032514656066894534, 0.032686080932617184, 0.03273523330688476, 0.03346953582763672, 0.03259033584594727, 0.03243459320068359, 0.03248028945922852, 0.03245935821533203, 0.032159648895263675, 0.032149982452392575, 0.031936511993408204, 0.031899648666381834, 0.031643648147583005, 0.03198921585083008, 0.03212460708618164, 0.03164828872680664, 0.03155590438842774, 0.031959039688110355, 0.03186278343200684, 0.03192563247680664, 0.031879583358764646, 0.03197929573059082, 0.031995647430419924, 0.032065216064453124, 0.03216486358642578, 0.03225356674194336, 0.03229734420776367, 0.03218399810791016, 0.03221737670898438, 0.032349857330322265, 0.03230992126464844, 
0.03634966278076172, 0.03226419067382812, 0.032246784210205076, 0.03287068939208984, 0.03252883148193359, 0.0324150390625, 0.03212796783447266, 0.03210444641113281, 0.03215510559082031, 0.03226883316040039, 0.03244815826416016, 0.03250435256958008, 0.032324542999267576, 0.0324389762878418, 0.0323768310546875, 0.03217027282714844, 0.032148670196533204, 0.03224355316162109, 0.03225084686279297, 0.03249935913085938, 0.032228736877441405, 0.032091102600097655, 0.03196025657653809, 0.0320827522277832, 0.032307201385498044, 0.032020481109619144, 0.031942655563354495, 0.03176652717590332, 0.033576961517333984, 0.031748096466064454, 0.03194655990600586, 0.0317926082611084, 0.031750879287719726, 0.0318156795501709, 0.031704095840454104, 0.03182691192626953, 0.031768224716186524, 0.0318057918548584, 0.031983200073242186, 0.03218207931518555, 0.03248393630981445, 0.03242758560180664, 0.03232403182983398, 0.032468097686767575, 0.0332276496887207, 0.03243318557739258, 0.03259695816040039, 0.03246284866333008, 0.03241164779663086, 0.03244169616699219, 0.034328895568847655, 0.03273353576660156, 0.032640640258789065, 0.03241516876220703, 0.03224252700805664, 0.03545651245117187, 0.033497150421142576, 0.03217798233032226, 0.03228745651245117, 0.03224915313720703, 0.03237343978881836, 0.032152641296386716, 0.0323546257019043, 0.03243276977539063, 0.03243174362182617, 0.032373119354248046, 0.03234201431274414, 0.03243027114868164, 0.03244182586669922, 0.032516448974609376, 0.03221209716796875, 0.032186592102050784, 0.0320211181640625, 0.03200601577758789, 0.03206307220458984, 0.03222742462158203, 0.03158883285522461, 0.03158527946472168, 0.03160223960876465, 0.031360416412353515, 0.031498720169067386, 0.03150691223144531, 0.03156732749938965, 0.031504255294799804, 0.03152070426940918, 0.0317541446685791, 0.03200211334228516, 0.032064254760742185, 0.03314201736450195, 0.03246889495849609, 0.03203363037109375, 0.03201638412475586, 0.032042816162109376, 0.03200019073486328, 0.031883264541625975, 0.03204857635498047, 0.03228236770629883, 0.03222345733642578, 0.03223612976074219, 0.03202867126464844, 0.0322696647644043, 0.032234142303466794, 0.032036865234375, 0.03195084762573242, 0.032062591552734374, 0.032089054107666014, 0.032126399993896486, 0.032255455017089846, 0.03221820831298828, 0.03207363128662109, 0.03226121520996094, 0.03217011260986328, 0.03235055923461914, 0.0322432975769043, 0.03235929489135742, 0.03235177612304688, 0.03220912170410156, 0.0322644157409668, 0.03208806228637695, 0.03257753753662109, 0.03224700927734375, 0.03215030288696289, 0.03236211013793945, 0.0322861442565918, 0.03226310348510742, 0.03237587356567383, 0.03231817626953125, 0.032333824157714845, 0.032639198303222657, 0.03237478256225586, 0.0335596809387207, 0.033777599334716794, 0.03280160140991211, 0.032552734375, 0.032544097900390624, 0.03248844909667969, 0.03287449645996094, 0.03255091094970703, 0.03236249542236328, 0.03243737411499024, 0.032521759033203125, 0.0322564811706543, 0.03340854263305664, 0.03487136077880859, 0.03273104095458984, 0.03243673706054687, 0.0324956169128418, 0.0324587516784668, 0.03254671859741211, 0.03305196762084961, 0.032540767669677735, 0.03225465774536133, 0.03235747146606445, 0.0323919677734375, 0.032274559020996095, 0.032396736145019533, 0.03245449447631836, 0.03248406219482422, 0.03240755081176758, 0.03242393493652344, 0.032767681121826174, 0.03246931076049805, 0.03259555053710937, 0.032725406646728517, 0.032727039337158204, 0.03271212768554688, 0.03307907104492187, 0.0339463996887207, 
0.03253855895996094, 0.03263641738891602, 0.032718753814697264, 0.03233039855957031, 0.03260006332397461, 0.03265651321411133, 0.03257433700561523, 0.03258367919921875, 0.03247462463378906, 0.032346622467041015, 0.032710655212402344, 0.03256320190429687, 0.03259801483154297, 0.03244646453857422, 0.03248134231567383, 0.03237267303466797, 0.03259392166137695, 0.03235414505004883, 0.03260636901855469, 0.03252579116821289, 0.03252278518676758, 0.032486785888671876, 0.03240153503417969, 0.032661151885986325, 0.03235504150390625, 0.03231167984008789, 0.03244831848144531, 0.032638111114501954, 0.032514400482177734, 0.032549312591552734, 0.032353313446044925, 0.03254719924926758, 0.032473697662353515, 0.03271440124511719, 0.03239475250244141]",tokens/s,30.89508152474481,,, 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1849.87648,2718.892032,0.0,2340.421632,2284.9536,s,1,9.00453125,9.00453125,0.0,9.00453125,9.00453125,9.00453125,9.00453125,[9.00453125],,kWh,6.025863509175906e-05,6.639789652902034e-06,1.9503071158000207e-05,8.64014959026613e-05,,MB,1681.14176,3087.990784,0.0,2680.160256,2578.241536,s,10,0.8896338195800781,0.0889633819580078,0.0006379165192925978,0.08893408203125,0.08968330001831054,0.08981941032409667,0.08992829856872558,"[0.08995552062988281, 0.08916140747070313, 0.08871475219726563, 0.08959474945068359, 0.08895865631103515, 0.08818233489990235, 0.08890950775146485, 0.0896530532836914, 0.08775977325439453, 0.08874406433105468]",tokens/s,2877.58844555658,kWh,2.857336710314902e-06,3.15112401888477e-07,1.9002739042522327e-06,5.0727230164556125e-06,tokens/kWh,50465992.16427769,MB,1608.097792,3087.990784,0.0,2680.160256,2578.244096,s,10,20.728130615234377,2.0728130615234375,0.009648469638570125,2.071475952148438,2.089635302734375,2.0900453124999996,2.0903733203124997,"[2.07177734375, 2.089544189453125, 2.07070458984375, 2.06433447265625, 2.071174560546875, 2.057871826171875, 2.066839599609375, 2.072115234375, 2.090455322265625, 2.0733134765625]",tokens/s,30.39347887633313,kWh,6.107347214593199e-05,6.7359504432220745e-06,2.906802864814823e-05,9.68774512373023e-05,tokens/kWh,650306.1258876523,,s,630,20.725994392395044,0.03289840379745241,0.0006040979539939706,0.03274156761169433,0.03352360649108886,0.033829865646362305,0.03522202232360841,"[0.03391078567504883, 0.03322675323486328, 0.033157119750976564, 0.03295222473144531, 0.03280495834350586, 0.03286966323852539, 0.03305340957641602, 0.03284787368774414, 0.03264675140380859, 0.0331987190246582, 0.03305654525756836, 0.03350236892700195, 0.03312112045288086, 0.03334143829345703, 0.03304636764526367, 0.03334672164916992, 0.033120990753173825, 0.033399070739746094, 0.03311206436157227, 0.03293312072753906, 0.03267046356201172, 0.03283148956298828, 0.033006816864013674, 0.032993503570556644, 0.03315491104125977, 0.03318243026733399, 0.03274095916748047, 0.03259164810180664, 0.03274399948120117, 
0.03290528106689453, 0.032780288696289066, 0.03299238586425781, 0.03324198532104492, 0.033023998260498046, 0.03297689437866211, 0.03268966293334961, 0.03272496032714844, 0.032928321838378904, 0.032597183227539066, 0.03246979141235352, 0.03255091094970703, 0.03275737762451172, 0.032858081817626957, 0.03244073486328125, 0.03245260620117187, 0.03250137710571289, 0.03249395370483398, 0.03257548904418945, 0.0328007698059082, 0.03266963195800781, 0.03275968170166016, 0.03257977676391602, 0.03296377563476562, 0.03281593704223633, 0.032571392059326174, 0.0328642578125, 0.032679935455322266, 0.03274342346191406, 0.03266121673583984, 0.03273961639404297, 0.0326409912109375, 0.03276297760009766, 0.032800800323486326, 0.03390486526489258, 0.0329268798828125, 0.03312646484375, 0.032860992431640625, 0.03285590362548828, 0.03315635299682617, 0.03359632110595703, 0.03264102554321289, 0.03289807891845703, 0.03265212631225586, 0.03247526550292969, 0.03264070510864258, 0.0326577262878418, 0.03250995254516602, 0.03252633666992188, 0.03288374328613281, 0.03273791885375977, 0.03268009567260742, 0.0329048957824707, 0.032898880004882815, 0.033427711486816405, 0.032776447296142576, 0.03263302230834961, 0.03269001770019531, 0.03268009567260742, 0.03259328079223633, 0.03299596786499023, 0.03271596908569336, 0.0327562255859375, 0.032786304473876954, 0.032774593353271486, 0.03277414321899414, 0.03315097427368164, 0.033592735290527344, 0.03351919937133789, 0.03363123321533203, 0.033587200164794925, 0.03380569458007812, 0.03322534561157227, 0.03356444931030273, 0.033503456115722655, 0.03348867034912109, 0.03352783966064453, 0.0335843505859375, 0.03368844985961914, 0.03360976028442383, 0.03361183929443359, 0.033673152923583985, 0.03383097457885742, 0.033912193298339846, 0.03394575881958008, 0.03359587097167969, 0.033478656768798826, 0.033501182556152344, 0.033288192749023435, 0.03321206283569336, 0.03320252990722656, 0.033345535278320314, 0.03335782241821289, 0.03324041748046875, 0.03325942230224609, 0.03320310211181641, 0.03302105712890625, 0.03383350372314453, 0.03401043319702148, 0.03332198333740234, 0.03310313415527344, 0.032860641479492185, 0.032694305419921875, 0.032615966796875, 0.03240137481689453, 0.03234812927246094, 0.032436992645263674, 0.0324826545715332, 0.0325495376586914, 0.03252019119262695, 0.03252169418334961, 0.03305635070800781, 0.0327806396484375, 0.03246960067749023, 0.03252604675292969, 0.0324918098449707, 0.0324505615234375, 0.032452190399169925, 0.03240796661376953, 0.032451744079589846, 0.0326910400390625, 0.03258777618408203, 0.032606208801269534, 0.03246031951904297, 0.03263126373291016, 0.032546241760253905, 0.03250028610229492, 0.03299903869628906, 0.032564990997314455, 0.032594558715820315, 0.032605438232421874, 0.032733951568603516, 0.03262464141845703, 0.035332096099853515, 0.03325478363037109, 0.03284646224975586, 0.03282080078125, 0.03300396728515625, 0.032825439453125, 0.03276995086669922, 0.032933887481689454, 0.03290726470947265, 0.033279647827148436, 0.03294857788085938, 0.03272451019287109, 0.03262694549560547, 0.03278985595703125, 0.032797119140625, 0.03296505737304688, 0.03313663864135742, 0.03319398498535156, 0.034326526641845705, 0.03326464080810547, 0.033030654907226564, 0.03317958450317383, 0.03311251068115234, 0.03296598434448242, 0.03322048187255859, 0.032775039672851565, 0.03254191970825195, 0.03382851028442383, 0.033089664459228514, 0.032976192474365236, 0.03282422256469727, 0.0330313606262207, 0.033027103424072266, 0.03274729537963867, 0.03267718505859375, 
0.03288899230957031, 0.03335427093505859, 0.03332710266113281, 0.033255390167236325, 0.033135967254638674, 0.0326519660949707, 0.0326195182800293, 0.03261452865600586, 0.03269472122192383, 0.03283603286743164, 0.03280607986450195, 0.03278316879272461, 0.03279052734375, 0.03252364730834961, 0.032635520935058594, 0.03253184127807617, 0.032844417572021486, 0.03274342346191406, 0.03253247833251953, 0.03252633666992188, 0.03240959930419922, 0.032393280029296874, 0.032274337768554685, 0.03232771301269531, 0.032382942199707034, 0.03240963363647461, 0.03232972717285156, 0.032399360656738284, 0.032296031951904294, 0.032914302825927735, 0.03238095855712891, 0.03232352066040039, 0.03255222320556641, 0.03250460815429688, 0.03235017776489258, 0.03224291229248047, 0.03221731185913086, 0.03230985641479492, 0.03226236724853516, 0.03233097457885742, 0.03243679809570312, 0.03239916610717773, 0.03332425689697266, 0.035050464630126954, 0.03259187316894531, 0.03410940933227539, 0.03476793670654297, 0.033292831420898436, 0.033083839416503905, 0.0328960952758789, 0.03258591842651367, 0.03252707290649414, 0.03254249572753906, 0.032319713592529296, 0.03229875183105469, 0.03421798324584961, 0.03332067108154297, 0.033328670501708985, 0.03320908737182617, 0.033067230224609376, 0.035292095184326175, 0.03331520080566406, 0.033166881561279296, 0.03729913711547852, 0.03336601638793945, 0.03299123382568359, 0.03288063812255859, 0.033043617248535155, 0.033090400695800784, 0.03321241760253906, 0.03315670394897461, 0.03332134246826172, 0.032989215850830075, 0.03292335891723633, 0.03283123016357422, 0.03273168182373047, 0.032718849182128903, 0.03316121673583984, 0.03372963333129883, 0.03303081512451172, 0.03277657699584961, 0.03246681594848633, 0.03242598342895508, 0.032550048828125, 0.032639198303222657, 0.03249216079711914, 0.03231948852539063, 0.032382976531982424, 0.03240758514404297, 0.03259142303466797, 0.032815521240234374, 0.0325909423828125, 0.03252054214477539, 0.03274095916748047, 0.03244892883300781, 0.03239123153686523, 0.0324405746459961, 0.0324508171081543, 0.03258163070678711, 0.03249356842041016, 0.03249103927612305, 0.03250569534301758, 0.03247679901123047, 0.03245948791503906, 0.032436511993408204, 0.03265945434570312, 0.03270054244995117, 0.032545726776123045, 0.03265427017211914, 0.032511329650878905, 0.03255363082885742, 0.03241958236694336, 0.03242348861694336, 0.032464897155761716, 0.03238947296142578, 0.03245820617675781, 0.032489952087402345, 0.03240796661376953, 0.03380009460449219, 0.03256326293945312, 0.03246371078491211, 0.03247292709350586, 0.03225600051879883, 0.032855777740478515, 0.032374401092529294, 0.032416671752929685, 0.03243590545654297, 0.03235539245605469, 0.032449535369873043, 0.03245260620117187, 0.03253833770751953, 0.03277635192871094, 0.03241936111450195, 0.032604766845703126, 0.03249887847900391, 0.03254278564453125, 0.032436992645263674, 0.0324136962890625, 0.032454654693603514, 0.03242393493652344, 0.03237273788452148, 0.032509441375732424, 0.03233958435058594, 0.03238374328613281, 0.03237286376953125, 0.03240700912475586, 0.032330272674560546, 0.03263071823120117, 0.03241580963134766, 0.03240345764160156, 0.03244236755371094, 0.03247228622436524, 0.03237968063354492, 0.03227961730957031, 0.03226310348510742, 0.03239430236816406, 0.0323306884765625, 0.03252044677734375, 0.03261824035644531, 0.03264921569824219, 0.03265910339355469, 0.03304483032226563, 0.032505855560302735, 0.03281203079223633, 0.03293286514282227, 0.03257078552246094, 0.032444095611572264, 
0.032401344299316404, 0.03283561706542969, 0.03294713592529297, 0.03283967971801758, 0.032892929077148435, 0.03311964797973633, 0.03313724899291992, 0.033148929595947264, 0.033377857208251954, 0.033392574310302736, 0.03339049530029297, 0.03338016128540039, 0.033879905700683596, 0.03338940811157227, 0.034476318359375, 0.03386342239379883, 0.03365340805053711, 0.03350137710571289, 0.033455745697021484, 0.03345222473144531, 0.033320960998535154, 0.03315302276611328, 0.03316934585571289, 0.03328006362915039, 0.03302377700805664, 0.03273513412475586, 0.03273734283447265, 0.03271295928955078, 0.03305574417114258, 0.03297766494750977, 0.0327723503112793, 0.03354592132568359, 0.032678112030029294, 0.0326036491394043, 0.03259852981567383, 0.032559200286865236, 0.032421886444091795, 0.03322415924072265, 0.03259555053710937, 0.03257644653320312, 0.03352313613891601, 0.03259174346923828, 0.03250246429443359, 0.03250783920288086, 0.03239686584472656, 0.032567806243896484, 0.03248271942138672, 0.032567935943603514, 0.03267977523803711, 0.03277747344970703, 0.03235638427734375, 0.03248169708251953, 0.032438720703125, 0.03365881729125977, 0.03260124969482422, 0.03267059326171875, 0.0323480339050293, 0.03249987030029297, 0.03240095901489258, 0.03238662338256836, 0.03260505676269531, 0.032366497039794925, 0.032245857238769535, 0.032464897155761716, 0.032389118194580076, 0.03249331283569336, 0.03266739273071289, 0.03250985717773437, 0.0327125129699707, 0.03291830444335937, 0.03276380920410156, 0.03341648101806641, 0.03260054397583008, 0.03258403015136719, 0.032405502319335935, 0.03249497604370117, 0.03240204620361328, 0.03382067108154297, 0.033089534759521484, 0.032968704223632815, 0.03341110229492188, 0.03280892944335938, 0.03270995330810547, 0.03275846481323242, 0.03241094589233398, 0.032932350158691406, 0.03248761749267578, 0.032824638366699216, 0.032412353515625, 0.034361278533935544, 0.034791358947753905, 0.03299955368041992, 0.03264921569824219, 0.032545982360839845, 0.032583873748779295, 0.032418113708496094, 0.03230752182006836, 0.03322841644287109, 0.033591808319091795, 0.03298060989379883, 0.03248940658569336, 0.03237532806396484, 0.03242793655395508, 0.03245011138916016, 0.03243244934082031, 0.03232153701782227, 0.03246694564819336, 0.03239116668701172, 0.03238019180297851, 0.03324796676635742, 0.03245225524902344, 0.03353225708007813, 0.03264921569824219, 0.03254441452026367, 0.0326429443359375, 0.032342720031738284, 0.032331775665283204, 0.032407329559326174, 0.03241779327392578, 0.03254003143310547, 0.032471519470214844, 0.03261455917358398, 0.03253452682495117, 0.032489471435546875, 0.032694271087646484, 0.033007614135742186, 0.03308163070678711, 0.0338135986328125, 0.033187614440917966, 0.03464278411865234, 0.03335168075561523, 0.03492659378051758, 0.03305654525756836, 0.03292550277709961, 0.032938400268554685, 0.03266355133056641, 0.032780288696289066, 0.03307929611206055, 0.03290454483032226, 0.03281167984008789, 0.03365513610839844, 0.03338240051269531, 0.03315670394897461, 0.033081470489501955, 0.03316559982299805, 0.03325747299194336, 0.03277414321899414, 0.03267583847045898, 0.03255820846557617, 0.03262704086303711, 0.03240399932861328, 0.03248096084594727, 0.03240342330932617, 0.03255331039428711, 0.032894977569580076, 0.03241116714477539, 0.0324510383605957, 0.03259324645996094, 0.03257001495361328, 0.0324692497253418, 0.03248041534423828, 0.03254726409912109, 0.032495777130126954, 0.03240345764160156, 0.03247462463378906, 0.03287196731567383, 0.032791519165039064, 
0.03696995162963867, 0.03336684799194336, 0.032719806671142576, 0.03431884765625, 0.037418880462646485, 0.035735198974609375, 0.03276582336425781, 0.032581600189208984, 0.03284182357788086, 0.0325599365234375, 0.03237897491455078, 0.034969505310058595, 0.03252016067504883, 0.03602630233764648, 0.03345119857788086, 0.033127201080322265, 0.033746528625488284, 0.034873886108398436, 0.03324310302734375, 0.034187297821044925, 0.03331651306152344, 0.033142974853515625, 0.033153182983398435, 0.03280486297607422, 0.03282124710083008, 0.03273523330688476, 0.03293523025512695, 0.03280486297607422, 0.03270316696166992, 0.032833534240722655, 0.0328089599609375, 0.03274710464477539, 0.03287196731567383, 0.03301635360717774, 0.0329315185546875, 0.033161407470703126, 0.03335500717163086, 0.03274399948120117, 0.0326445426940918, 0.03266579055786133, 0.03236924743652344, 0.03240342330932617, 0.03254419326782226, 0.032459327697753906, 0.0323768310546875, 0.03243971252441406, 0.03225660705566406, 0.032738815307617186, 0.032761760711669925, 0.03236924743652344, 0.03237887954711914, 0.03230499267578125, 0.032415008544921874, 0.032502655029296876, 0.03237017440795899, 0.03265283203125, 0.03269311904907227, 0.03260563278198242, 0.032524959564208984, 0.03267583847045898, 0.03252598571777344, 0.033162593841552734, 0.03270892715454102, 0.0327248649597168, 0.0325681266784668, 0.03252326583862305, 0.03310601425170898, 0.0326253776550293, 0.03286220932006836, 0.033185375213623046, 0.03304713439941406, 0.032806110382080075, 0.032742176055908206, 0.0328559684753418, 0.03283158493041992, 0.03360268783569336, 0.03317644882202148, 0.034852863311767575, 0.03409305572509766, 0.033570816040039066, 0.03341686248779297, 0.03346857452392578, 0.033771713256835936, 0.033454078674316406, 0.03339619064331055, 0.033509151458740234, 0.033579071044921874, 0.03346707153320312, 0.03362815856933594, 0.033247230529785156, 0.03332505416870117, 0.03343539047241211, 0.03372057723999024, 0.032813152313232424, 0.03253647994995117, 0.032456703186035156, 0.032401214599609374, 0.03235615921020508, 0.0323131217956543]",tokens/s,30.396611524278214,,, 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,814.710784,537.853952,0.0,159.383552,141.514752,s,1,7.5653583984375,7.5653583984375,0.0,7.5653583984375,7.5653583984375,7.5653583984375,7.5653583984375,[7.5653583984375],,kWh,1.7524344425002408e-05,1.92576686632031e-06,5.607504486027093e-06,2.505761577734981e-05,,MB,1168.605184,630.12864,0.0,222.298112,184.525824,s,21,0.18403315448760987,0.00876348354702904,0.00010122327704860388,0.008751775741577148,0.00884108829498291,0.008887968063354491,0.009068320083618164,"[0.008619168281555175, 0.008813952445983887, 0.008810527801513672, 0.008887968063354491, 0.00884108829498291, 0.008804320335388183, 0.008806624412536622, 0.008778623580932617, 0.008696288108825683, 0.008756352424621581, 0.0086779203414917, 0.008693696022033692, 
0.008673760414123535, 0.0087258882522583, 0.008704000473022461, 0.008711711883544922, 0.008771967887878419, 0.009113408088684083, 0.008683199882507325, 0.008751775741577148, 0.008710911750793457]",tokens/s,29212.12764606468,kWh,2.635768096996703e-07,2.906783153199022e-08,1.4802280234868783e-07,4.4066744358034836e-07,tokens/kWh,580936948.5525033,MB,1202.335744,630.12864,0.0,222.298112,184.528384,s,21,10.169293518066407,0.4842520722888765,0.004762281942675445,0.4825516967773438,0.4896417236328125,0.49239239501953125,0.4959256713867187,"[0.4825516967773438, 0.48272607421875, 0.48962738037109377, 0.49239239501953125, 0.48653704833984374, 0.4896417236328125, 0.4887113037109375, 0.48062728881835937, 0.4797857666015625, 0.47930374145507815, 0.4813526611328125, 0.47868646240234375, 0.48649685668945314, 0.48127764892578123, 0.48208526611328123, 0.48090081787109373, 0.4968089904785156, 0.4863446044921875, 0.4827623291015625, 0.48046170043945313, 0.48021176147460937]",tokens/s,130.097533093091,kWh,1.4080092215300685e-05,1.5527927379002416e-06,5.1344020585087585e-06,2.0767287011709686e-05,tokens/kWh,3033617.2444902067,,s,1323,10.159735195636735,0.007679316096475245,0.0001623395081562752,0.007635615825653076,0.007807667064666748,0.007904799890518188,0.008208186264038086,"[0.007541152000427246, 0.007700319766998291, 0.0076250238418579105, 0.007627200126647949, 0.007585536003112793, 0.007609824180603028, 0.007650239944458008, 0.007603392124176025, 0.007580319881439209, 0.007617760181427002, 0.007579648017883301, 0.007595104217529297, 0.007599296092987061, 0.0076121602058410644, 0.007656191825866699, 0.007610367774963379, 0.007623744010925293, 0.007621119976043702, 0.007637440204620361, 0.007581984043121338, 0.0076341118812561035, 0.007616479873657226, 0.007661632061004639, 0.007622911930084229, 0.007602176189422607, 0.007564703941345215, 0.007719776153564453, 0.0076267518997192385, 0.007608352184295654, 0.007757343769073486, 0.007627200126647949, 0.007624703884124756, 0.007620768070220947, 0.007608160018920899, 0.007636159896850586, 0.007686240196228027, 0.00769916820526123, 0.0076958398818969724, 0.007666431903839112, 0.007680928230285645, 0.007764863967895508, 0.007874335765838623, 0.007758048057556153, 0.007652383804321289, 0.007668704032897949, 0.00765340805053711, 0.007739359855651855, 0.007687200069427491, 0.007699423789978027, 0.007718912124633789, 0.007665599822998047, 0.007698783874511719, 0.007673503875732422, 0.007594048023223877, 0.007575551986694336, 0.007605247974395752, 0.007871488094329833, 0.007667712211608887, 0.007609439849853516, 0.007615615844726562, 0.007642911911010742, 0.007706624031066894, 0.007745535850524903, 0.00751804780960083, 0.007632063865661621, 0.0076169600486755374, 0.007631360054016113, 0.007603295803070068, 0.007676767826080322, 0.007614208221435547, 0.007616672039031982, 0.007597951889038086, 0.007614048004150391, 0.007627552032470703, 0.007665215969085693, 0.007756063938140869, 0.0076574721336364745, 0.0076433920860290525, 0.0076583361625671385, 0.007658400058746338, 0.0076631360054016115, 0.007623136043548584, 0.007682079792022705, 0.007636960029602051, 0.00765337610244751, 0.007630591869354248, 0.007694591999053955, 0.007666719913482666, 0.007670752048492432, 0.0076943039894104, 0.007659552097320557, 0.007608320236206055, 0.007697855949401856, 0.007651904106140137, 0.007632895946502686, 0.007628799915313721, 0.007636991977691651, 0.007626175880432129, 0.007700672149658203, 0.0077132158279418946, 0.007683328151702881, 0.007742080211639404, 0.007776319980621338, 
0.007727424144744873, 0.007689023971557617, 0.007744671821594239, 0.007616223812103272, 0.007611455917358398, 0.007644095897674561, 0.007663616180419922, 0.007673855781555176, 0.007643136024475097, 0.007653664112091065, 0.007687839984893799, 0.007651360034942627, 0.007770016193389893, 0.007626560211181641, 0.007582272052764892, 0.00760972785949707, 0.007633279800415039, 0.007611936092376709, 0.007625343799591064, 0.007640927791595459, 0.00766323184967041, 0.007666048049926758, 0.007673855781555176, 0.007611904144287109, 0.0077927041053771975, 0.007643551826477051, 0.00762883186340332, 0.007662784099578857, 0.007631680011749267, 0.007603775978088379, 0.0077413439750671385, 0.007680543899536133, 0.007703999996185303, 0.007674752235412597, 0.007718592166900635, 0.007790592193603516, 0.007776319980621338, 0.007855519771575928, 0.007801504135131836, 0.007698304176330566, 0.007677951812744141, 0.007681695938110351, 0.007706975936889649, 0.007765408039093018, 0.007733856201171875, 0.0077619199752807615, 0.007759871959686279, 0.007782400131225586, 0.00773529577255249, 0.007708672046661377, 0.007812320232391357, 0.007707039833068848, 0.007704927921295166, 0.007755104064941406, 0.007829343795776367, 0.007740543842315674, 0.007765279769897461, 0.007694784164428711, 0.007734784126281738, 0.0076839041709899905, 0.007686463832855225, 0.0077535037994384765, 0.007719520092010498, 0.007786240100860596, 0.007722623825073242, 0.0077355198860168456, 0.007979423999786376, 0.008074687957763673, 0.007918144226074219, 0.007796735763549805, 0.007774367809295655, 0.007888959884643556, 0.00788431978225708, 0.007905439853668213, 0.007957791805267334, 0.0077955198287963864, 0.007780352115631104, 0.007809184074401856, 0.00776800012588501, 0.007775519847869873, 0.008122271537780761, 0.0077339839935302735, 0.007741439819335938, 0.007710720062255859, 0.007755775928497314, 0.007802879810333252, 0.007841760158538819, 0.0077291841506958005, 0.007776512145996094, 0.007683839797973633, 0.00773737621307373, 0.007706624031066894, 0.00774121618270874, 0.007734975814819336, 0.007704544067382813, 0.007717088222503662, 0.007870783805847168, 0.0076977920532226565, 0.0076778879165649416, 0.007665631771087647, 0.007731935977935791, 0.007725120067596436, 0.007729087829589844, 0.007706240177154541, 0.0077255358695983885, 0.0077003841400146485, 0.007716864109039307, 0.00777350378036499, 0.007801536083221435, 0.0077814397811889644, 0.0078121919631958005, 0.007761695861816406, 0.007700479984283447, 0.007725120067596436, 0.007716991901397705, 0.007703680038452149, 0.007703296184539795, 0.00766761589050293, 0.007681600093841553, 0.007631167888641357, 0.007667744159698486, 0.007669151782989502, 0.007664415836334228, 0.007718912124633789, 0.00775548791885376, 0.007733823776245118, 0.007736991882324218, 0.007691520214080811, 0.0077012481689453125, 0.00880031967163086, 0.009869312286376953, 0.009905247688293458, 0.007738399982452393, 0.007894911766052247, 0.007857344150543212, 0.00778323221206665, 0.007696032047271728, 0.007735648155212402, 0.007725056171417236, 0.007768191814422607, 0.007703648090362549, 0.007668799877166748, 0.007654655933380127, 0.007659999847412109, 0.007660704135894775, 0.007648096084594726, 0.0076574721336364745, 0.007654592037200928, 0.0077405118942260745, 0.007692287921905518, 0.007661568164825439, 0.007628384113311767, 0.007661471843719483, 0.007672319889068604, 0.007695807933807373, 0.008139552116394043, 0.007678751945495606, 0.007654111862182617, 0.007657120227813721, 0.00774348783493042, 0.007709311962127686, 
0.007665664196014404, 0.007648447990417481, 0.007764800071716309, 0.0076201920509338375, 0.007629216194152832, 0.0076340799331665035, 0.007725887775421143, 0.007716896057128907, 0.007644320011138916, 0.007694560050964356, 0.007688543796539307, 0.00764358377456665, 0.007672832012176513, 0.007673823833465576, 0.007684671878814697, 0.007670176029205322, 0.007692192077636719, 0.007651328086853027, 0.007687935829162598, 0.007647200107574463, 0.007639328002929688, 0.007661568164825439, 0.007706560134887695, 0.007729504108428955, 0.007710432052612305, 0.007667327880859375, 0.00775222396850586, 0.007814496040344239, 0.00772873592376709, 0.00781609582901001, 0.007769375801086426, 0.00785264015197754, 0.007722335815429688, 0.007713088035583496, 0.007707104206085205, 0.007793951988220215, 0.0077012162208557125, 0.00765337610244751, 0.0076984319686889645, 0.00783126401901245, 0.007839168071746826, 0.007703519821166992, 0.007725120067596436, 0.007703839778900146, 0.0077309122085571285, 0.007723775863647461, 0.007980447769165039, 0.007748608112335205, 0.0077638721466064455, 0.007704319953918457, 0.007731200218200684, 0.007651167869567871, 0.007687744140625, 0.007674464225769043, 0.007723008155822754, 0.00764089584350586, 0.0077355198860168456, 0.007673920154571533, 0.007714719772338867, 0.007792640209197998, 0.007777344226837158, 0.007750239849090576, 0.0077948799133300785, 0.007841055870056152, 0.007806848049163819, 0.007840767860412597, 0.00788803195953369, 0.007926112174987792, 0.00793228816986084, 0.007962751865386962, 0.00789904022216797, 0.00785584020614624, 0.007788928031921387, 0.00783510398864746, 0.007856671810150146, 0.00781715202331543, 0.007778304100036621, 0.007821375846862794, 0.007755936145782471, 0.007792575836181641, 0.007822368144989014, 0.007814239978790284, 0.007796703815460205, 0.007763775825500488, 0.007745535850524903, 0.007867775917053222, 0.007715456008911133, 0.007718783855438232, 0.007699935913085937, 0.007688864231109619, 0.007730847835540772, 0.007702879905700684, 0.007731008052825928, 0.0077582402229309086, 0.007722303867340088, 0.007735775947570801, 0.00772540807723999, 0.007771711826324463, 0.007793951988220215, 0.007788512229919433, 0.007748000144958496, 0.007700928211212158, 0.007716864109039307, 0.007683775901794434, 0.007691616058349609, 0.007674975872039795, 0.007671743869781494, 0.007763904094696045, 0.007734560012817383, 0.007713503837585449, 0.007710720062255859, 0.007716127872467041, 0.0077339839935302735, 0.007788415908813477, 0.007615839958190918, 0.007761695861816406, 0.007815455913543701, 0.007823296070098877, 0.007750432014465332, 0.007779520034790039, 0.007767007827758789, 0.007808735847473144, 0.00781708812713623, 0.007791071891784668, 0.007738272190093994, 0.007693056106567383, 0.007925600051879882, 0.007817408084869385, 0.007907423973083496, 0.007763264179229737, 0.007767648220062256, 0.007791584014892578, 0.007733248233795166, 0.00769814395904541, 0.007681920051574707, 0.007719327926635742, 0.007759871959686279, 0.007692287921905518, 0.007711999893188477, 0.007717631816864014, 0.007663008213043213, 0.007697247982025147, 0.0077552638053894046, 0.007721216201782227, 0.007696383953094482, 0.007718976020812988, 0.007661759853363037, 0.007693408012390136, 0.007672800064086914, 0.007694015979766846, 0.007688191890716553, 0.007723008155822754, 0.0077021121978759766, 0.007708992004394531, 0.00765715217590332, 0.007765920162200928, 0.007721183776855468, 0.007842080116271972, 0.008282079696655273, 0.008200223922729493, 0.007775296211242675, 0.007721920013427735, 
0.007753215789794922, 0.007682559967041015, 0.0076938881874084476, 0.007705056190490723, 0.007745344161987305, 0.007708831787109375, 0.007683167934417725, 0.00780787181854248, 0.007668863773345947, 0.007672736167907715, 0.007640575885772705, 0.0078504958152771, 0.007702527999877929, 0.007659520149230957, 0.007677343845367432, 0.007520864009857178, 0.007653279781341553, 0.0076425280570983885, 0.007659488201141358, 0.007627744197845459, 0.007685887813568115, 0.00767955207824707, 0.007639391899108887, 0.007645343780517578, 0.007694272041320801, 0.007607903957366944, 0.007620480060577393, 0.0076130561828613285, 0.0076512317657470704, 0.007613823890686035, 0.007576511859893799, 0.007618239879608154, 0.007587039947509765, 0.007577792167663574, 0.007602784156799316, 0.00763478422164917, 0.007631008148193359, 0.007632959842681885, 0.007630208015441895, 0.007692448139190674, 0.007661983966827392, 0.007609568119049072, 0.007643936157226562, 0.007632607936859131, 0.007616223812103272, 0.007592512130737305, 0.00760364818572998, 0.007563839912414551, 0.007628575801849365, 0.007613887786865234, 0.007717663764953613, 0.007589888095855713, 0.007616064071655273, 0.007598527908325195, 0.007586016178131104, 0.007620575904846191, 0.007584928035736084, 0.007686816215515137, 0.00759603214263916, 0.007669727802276611, 0.007622176170349121, 0.007629631996154785, 0.00764521598815918, 0.007607967853546143, 0.007632895946502686, 0.00758784008026123, 0.007602176189422607, 0.007569119930267334, 0.007586368083953857, 0.00766323184967041, 0.007622335910797119, 0.00760259199142456, 0.007540736198425293, 0.007586847782135009, 0.007598368167877198, 0.007631552219390869, 0.007669760227203369, 0.007618559837341309, 0.007479680061340332, 0.0075753917694091795, 0.007585984230041504, 0.0075651841163635256, 0.007575488090515137, 0.007557119846343994, 0.007558944225311279, 0.007629024028778076, 0.007583744049072265, 0.0075773119926452635, 0.0077272958755493166, 0.007615615844726562, 0.007620800018310547, 0.007593056201934814, 0.007599967956542969, 0.007572735786437988, 0.007553631782531738, 0.007583744049072265, 0.007559167861938477, 0.0076902399063110355, 0.007590015888214111, 0.007594880104064942, 0.0075820798873901364, 0.007607135772705078, 0.0075829439163208005, 0.0075781760215759275, 0.007587552070617676, 0.0076097922325134276, 0.007579872131347656, 0.007611008167266846, 0.007632736206054687, 0.007624864101409912, 0.007600128173828125, 0.007614463806152344, 0.007624512195587158, 0.007621056079864502, 0.007673600196838379, 0.007706943988800049, 0.007643040180206299, 0.007649087905883789, 0.007665408134460449, 0.0076555838584899906, 0.007634751796722412, 0.007606112003326416, 0.007586080074310303, 0.007623936176300049, 0.007609216213226319, 0.007619775772094726, 0.00758025598526001, 0.00759830379486084, 0.007622719764709473, 0.007638944149017334, 0.007597184181213379, 0.00758787202835083, 0.007586688041687012, 0.007636991977691651, 0.007599199771881103, 0.007580351829528808, 0.007569695949554443, 0.007577631950378418, 0.007601119995117187, 0.007602975845336914, 0.007843552112579345, 0.007479519844055176, 0.007622623920440674, 0.007594367980957031, 0.0076102399826049804, 0.00760422420501709, 0.007606272220611572, 0.007579648017883301, 0.007589920043945313, 0.007564352035522461, 0.0076315197944641115, 0.007614719867706299, 0.007619904041290283, 0.007588543891906738, 0.007603775978088379, 0.007588319778442383, 0.007579616069793701, 0.007599199771881103, 0.007617184162139893, 0.007598368167877198, 0.007614175796508789, 
0.0076059517860412595, 0.007621183872222901, 0.007604320049285889, 0.0076102719306945805, 0.007578752040863037, 0.00758022403717041, 0.0075937919616699216, 0.007638976097106933, 0.007581759929656982, 0.007657055854797363, 0.007630784034729004, 0.007610464096069336, 0.00760422420501709, 0.007616415977478027, 0.007602431774139404, 0.00759497594833374, 0.007581408023834228, 0.007589920043945313, 0.007571455955505371, 0.00759932804107666, 0.0075784001350402835, 0.007604544162750244, 0.007618239879608154, 0.007587232112884521, 0.00756547212600708, 0.007567808151245117, 0.007575551986694336, 0.007546879768371582, 0.007718912124633789, 0.007585087776184082, 0.007575551986694336, 0.0075820798873901364, 0.007649504184722901, 0.007603328227996826, 0.007584735870361328, 0.007606495857238769, 0.007571231842041015, 0.007585216045379639, 0.007563680171966553, 0.00777459192276001, 0.007635776042938232, 0.007619071960449219, 0.007587327957153321, 0.007489471912384033, 0.007585824012756348, 0.007569407939910889, 0.007573376178741455, 0.007553055763244629, 0.007571968078613281, 0.0075838398933410645, 0.007526400089263916, 0.007594079971313477, 0.007565536022186279, 0.007581376075744629, 0.007599487781524659, 0.007588384151458741, 0.0075879359245300294, 0.007592000007629394, 0.007559103965759277, 0.007616511821746826, 0.0075913920402526855, 0.007621024131774902, 0.0075996479988098145, 0.0076232638359069824, 0.007624703884124756, 0.007644512176513672, 0.007616384029388428, 0.007610432147979736, 0.007695231914520264, 0.007607615947723388, 0.007622623920440674, 0.00760643196105957, 0.007616479873657226, 0.007625152111053466, 0.007661568164825439, 0.007681280136108398, 0.007727039813995361, 0.007592351913452149, 0.007584159851074219, 0.007616511821746826, 0.007593728065490722, 0.007581471920013428, 0.0075792322158813475, 0.007582592010498047, 0.007589824199676513, 0.0076282558441162105, 0.007584352016448975, 0.0076431999206542965, 0.00755295991897583, 0.007605728149414063, 0.007707168102264404, 0.00757919979095459, 0.009147071838378906, 0.007647007942199707, 0.007987167835235596, 0.007602431774139404, 0.007646336078643799, 0.007579487800598144, 0.007581696033477783, 0.007561888217926025, 0.0076183037757873535, 0.00757206392288208, 0.007601920127868652, 0.007631999969482422, 0.007588768005371093, 0.007614687919616699, 0.00753868818283081, 0.007620128154754638, 0.007612448215484619, 0.007586559772491455, 0.007589183807373047, 0.007586271762847901, 0.007557024002075195, 0.007587264060974121, 0.007586368083953857, 0.007546239852905273, 0.007577824115753174, 0.007600543975830078, 0.007560927867889405, 0.007549439907073975, 0.007585375785827637, 0.007550399780273437, 0.007536960124969482, 0.0075792641639709476, 0.007564000129699707, 0.00757155179977417, 0.007595871925354004, 0.007608704090118408, 0.007570464134216309, 0.00759065580368042, 0.00759219217300415, 0.007677696228027344, 0.0076327681541442875, 0.007585919857025146, 0.007571519851684571, 0.007614399909973145, 0.007628032207489014, 0.0075823040008544925, 0.007544991970062256, 0.007578879833221436, 0.00759884786605835, 0.007616511821746826, 0.007614016056060791, 0.007581344127655029, 0.007543231964111328, 0.007610720157623291, 0.007599167823791504, 0.007603136062622071, 0.007577856063842773, 0.007606175899505616, 0.007580863952636719, 0.0075905599594116215, 0.007590144157409668, 0.007599071979522705, 0.007574304103851319, 0.007580832004547119, 0.007648096084594726, 0.007636672019958496, 0.007565824031829834, 0.007606080055236816, 0.007644864082336425, 
0.0076928319931030276, 0.007583199977874756, 0.007613887786865234, 0.007559679985046387, 0.0075779838562011715, 0.007585663795471192, 0.007637407779693604, 0.007548064231872559, 0.007473311901092529, 0.007693215847015381, 0.007611328125, 0.007622303962707519, 0.007637343883514404, 0.0076269440650939945, 0.0076204161643981935, 0.007611711978912353, 0.007622591972351074, 0.00763478422164917, 0.0075970239639282225, 0.007632832050323487, 0.007605631828308105, 0.007642816066741944, 0.008529055595397949, 0.008635552406311035, 0.008239199638366699, 0.00793449592590332, 0.0076902399063110355, 0.007673408031463623, 0.007607840061187744, 0.007637375831604004, 0.007669280052185059, 0.007630144119262695, 0.0076159038543701174, 0.007739488124847412, 0.007649792194366455, 0.007628543853759766, 0.007673791885375977, 0.00762063980102539, 0.007722976207733154, 0.007681280136108398, 0.007604991912841797, 0.007636991977691651, 0.007649343967437744, 0.007653664112091065, 0.007659167766571045, 0.007667168140411377, 0.007639584064483643, 0.0076146240234375, 0.0078887357711792, 0.007849247932434082, 0.008193920135498047, 0.008536479949951171, 0.00766102409362793, 0.007647615909576416, 0.007659743785858155, 0.007664159774780273, 0.0075959677696228025, 0.007629759788513183, 0.007633887767791748, 0.007667456150054932, 0.007648640155792237, 0.007637887954711914, 0.007607456207275391, 0.007622591972351074, 0.007691167831420899, 0.007646592140197754, 0.007615007877349853, 0.007587135791778565, 0.007671999931335449, 0.007634624004364014, 0.007727776050567627, 0.007495327949523926, 0.007616864204406738, 0.007622399806976318, 0.007741695880889893, 0.0076260800361633305, 0.007627423763275147, 0.007658592224121094, 0.007645919799804688, 0.00764089584350586, 0.0076455678939819334, 0.007643136024475097, 0.00758351993560791, 0.007628191947937011, 0.007596415996551514, 0.007672255992889405, 0.007677951812744141, 0.007608320236206055, 0.007620607852935791, 0.007595039844512939, 0.007607264041900635, 0.00763702392578125, 0.007682015895843506, 0.0076308479309082035, 0.007620607852935791, 0.0076382079124450684, 0.007619391918182373, 0.007610367774963379, 0.007645376205444336, 0.007599936008453369, 0.007622496128082275, 0.007679296016693115, 0.007641952037811279, 0.007671807765960693, 0.008210432052612305, 0.007554624080657959, 0.007600575923919678, 0.007619743824005127, 0.0076704959869384765, 0.0076284480094909665, 0.007623136043548584, 0.007619647979736328, 0.007602431774139404, 0.007633567810058594, 0.007607583999633789, 0.007625120162963867, 0.007590240001678467, 0.0075831360816955564, 0.007605855941772461, 0.0076334080696105954, 0.007621439933776855, 0.007630527973175049, 0.00758080005645752, 0.007594944000244141, 0.007636928081512452, 0.007745759963989258, 0.007571231842041015, 0.007610432147979736, 0.00766761589050293, 0.007589983940124512, 0.007612351894378662, 0.00758515214920044, 0.007553023815155029, 0.007576064109802246, 0.007474592208862305, 0.007602784156799316, 0.007615935802459717, 0.007585440158843994, 0.007613344192504883, 0.007588096141815185, 0.00760975980758667, 0.007594304084777832, 0.007591008186340332, 0.007627071857452393, 0.00760259199142456, 0.00759555196762085, 0.007590879917144775, 0.007608384132385254, 0.007579296112060547, 0.007607456207275391, 0.007568448066711426, 0.007694240093231201, 0.007640960216522216, 0.007634943962097168, 0.007616000175476074, 0.007598015785217285, 0.007612991809844971, 0.007640575885772705, 0.0075924482345581055, 0.00758784008026123, 0.007616223812103272, 
0.007622943878173828, 0.007618783950805664, 0.008443679809570313, 0.007688159942626953, 0.007663040161132812, 0.007654111862182617, 0.0076358399391174316, 0.007600895881652832, 0.00760038423538208, 0.0076770238876342775, 0.007625631809234619, 0.007577600002288819, 0.007599584102630615, 0.007602464199066162, 0.00759219217300415, 0.007616511821746826, 0.00758739185333252, 0.00760262393951416, 0.007589888095855713, 0.0076528000831604, 0.007633471965789795, 0.007643136024475097, 0.007665247917175293, 0.007622943878173828, 0.007696832180023193, 0.007615520000457764, 0.0076581439971923826, 0.0077621121406555174, 0.007767648220062256, 0.007884255886077881, 0.007647647857666016, 0.0077224960327148436, 0.0077218241691589355, 0.007678207874298096, 0.007644351959228516, 0.00770304012298584, 0.007469727993011475, 0.007600255966186523, 0.007542943954467774, 0.007620351791381836, 0.007583775997161865, 0.007615871906280517, 0.007891520023345948, 0.007618559837341309, 0.007584799766540527, 0.007583744049072265, 0.007602240085601807, 0.007635871887207032, 0.007575551986694336, 0.007591936111450195, 0.007575039863586426, 0.0075905599594116215, 0.00763478422164917, 0.007597472190856934, 0.007590847969055176, 0.0075641279220581056, 0.00762553596496582, 0.0075980801582336424, 0.007571072101593018, 0.007578239917755127, 0.007578495979309082, 0.007590176105499268, 0.007620319843292236, 0.007566207885742187, 0.007541823863983155, 0.007568319797515869, 0.007589888095855713, 0.007587903976440429, 0.007571392059326172, 0.007623968124389649, 0.007577439785003662, 0.00758409595489502, 0.007592480182647705, 0.007579552173614502, 0.007558720111846924, 0.007620448112487793, 0.007596960067749023, 0.007622496128082275, 0.007638720035552979, 0.007622911930084229, 0.007688191890716553, 0.007616511821746826, 0.00764518404006958, 0.007620672225952149, 0.007633920192718506, 0.007593183994293213, 0.007622367858886718, 0.007663616180419922, 0.007696320056915283, 0.007688255786895752, 0.0077266240119934085, 0.007770592212677002, 0.007718912124633789, 0.007731008052825928, 0.007819200038909912, 0.007788640022277832, 0.007784607887268066, 0.007708672046661377, 0.007641088008880615, 0.007574687957763672, 0.007674240112304688, 0.007645408153533936, 0.007598720073699951, 0.0076284799575805666, 0.007604032039642334, 0.0075903677940368655, 0.00758355188369751, 0.007600128173828125, 0.0076614079475402835, 0.007645664215087891, 0.007653247833251953, 0.007628608226776123, 0.007677728176116943, 0.007658751964569091, 0.00766806411743164, 0.007703167915344238, 0.007821311950683594, 0.00786848020553589, 0.007813055992126465, 0.007833600044250488, 0.00788431978225708, 0.007864064216613769, 0.007885663986206055, 0.007868288040161132, 0.00789238405227661, 0.0078787841796875, 0.007868256092071533, 0.007852128028869629, 0.007918111801147461, 0.007917439937591553, 0.007886559963226318, 0.007872928142547607, 0.007989247798919678, 0.007958528041839599, 0.007968768119812012, 0.007940095901489258, 0.007974783897399902, 0.007982687950134277, 0.008073760032653808, 0.008095616340637208, 0.00816579246520996, 0.008064319610595703, 0.008014240264892579, 0.008046175956726074, 0.0080730562210083, 0.008026335716247559, 0.00804207992553711, 0.008055551528930664, 0.00806009578704834, 0.008044992446899414, 0.007952767848968506, 0.008011743545532227, 0.00802620792388916, 0.008052512168884278, 0.008025664329528809, 0.008069727897644043, 0.008006752014160156, 0.007944672107696534, 0.007956799983978271, 0.00797708797454834, 0.007997439861297608, 0.007970816135406494, 
0.007845503807067871, 0.007887743949890136, 0.00789308786392212, 0.008013824462890624, 0.007878655910491944, 0.007849984169006348, 0.008056832313537597, 0.007763423919677734, 0.007776800155639648, 0.007759967803955078, 0.007698048114776612, 0.0076813120841979984, 0.00772166395187378, 0.0076455678939819334, 0.007677440166473389, 0.007696576118469238, 0.007719168186187744, 0.007651328086853027, 0.007636288166046143, 0.0076399359703063965, 0.0076080961227417, 0.00762883186340332, 0.007636832237243652, 0.007628960132598877, 0.007627967834472656, 0.007630688190460205, 0.007599071979522705, 0.007605535984039307, 0.007619296073913574, 0.007613791942596436, 0.007581855773925781, 0.007602303981781006, 0.007605631828308105, 0.007617536067962646, 0.008310751914978027, 0.0077990078926086425, 0.007837503910064698, 0.007704256057739257, 0.00794655990600586, 0.008959136009216308, 0.007687007904052735, 0.007688191890716553, 0.007788544178009033, 0.007651328086853027, 0.007658720016479492, 0.007631616115570068, 0.007641280174255371, 0.007628096103668213, 0.007593696117401123, 0.007574336051940918, 0.0075684161186218265, 0.007620672225952149, 0.00764851188659668, 0.007605663776397705, 0.007587615966796875, 0.0075548157691955565, 0.007574240207672119, 0.007581503868103028, 0.007624800205230713, 0.0075706238746643065, 0.0076111359596252445, 0.007585440158843994, 0.007571968078613281, 0.007487584114074707, 0.007585919857025146, 0.007578879833221436, 0.007582464218139648, 0.007601984024047851, 0.007567647933959961, 0.007610144138336182, 0.007622432231903076, 0.0076044478416442875, 0.0075980801582336424, 0.0075980801582336424, 0.007585792064666748, 0.007618656158447265, 0.007665919780731201, 0.007593376159667969, 0.007575104236602783, 0.007602464199066162, 0.007634431838989258, 0.007746463775634766, 0.007571167945861816, 0.007639328002929688, 0.007608320236206055, 0.007591904163360596, 0.007602208137512207, 0.007600128173828125, 0.007623871803283691, 0.007643968105316162, 0.007747583866119385, 0.007733248233795166, 0.007886303901672363, 0.00902182388305664, 0.007679840087890625, 0.007671199798583985, 0.007674784183502197, 0.007624703884124756, 0.007659520149230957, 0.007636991977691651, 0.007644703865051269, 0.007622528076171875, 0.007621024131774902, 0.007630271911621094, 0.007613471984863281, 0.007572864055633545, 0.007588319778442383, 0.007731232166290283, 0.007606112003326416, 0.007612415790557861, 0.007624512195587158, 0.007602367877960205, 0.007677760124206543, 0.007605663776397705, 0.007641407966613769, 0.007594336032867432, 0.007609856128692627, 0.007592576026916504, 0.007620319843292236, 0.007780416011810303, 0.007610591888427734, 0.00791321611404419, 0.0076512317657470704, 0.007658048152923584, 0.007630623817443847, 0.007591072082519531, 0.007565855979919434, 0.00763315200805664, 0.007617568016052246, 0.007638336181640625, 0.007646880149841309, 0.007658559799194336, 0.007615583896636963, 0.007624544143676758, 0.007577600002288819, 0.007818655967712402, 0.007632480144500732, 0.0076605439186096195, 0.007639167785644531, 0.007595839977264404, 0.007650911808013916, 0.007651968002319336, 0.007609600067138672, 0.007606016159057617, 0.0076276159286499026, 0.0077760000228881835, 0.0075840001106262205, 0.007624032020568848, 0.007635615825653076, 0.007606207847595215, 0.007597599983215332, 0.0075865921974182125, 0.0075589118003845214, 0.007577600002288819, 0.007604063987731934, 0.007583456039428711, 0.007586239814758301, 0.007606272220611572, 0.007607744216918945, 0.007594048023223877, 0.007608831882476807, 
0.0075980801582336424, 0.0075980801582336424, 0.007577280044555664, 0.007621183872222901, 0.007630591869354248, 0.007579296112060547, 0.007586112022399902, 0.00759987211227417, 0.007592512130737305, 0.007591648101806641, 0.007587776184082031, 0.00758512020111084, 0.00759881591796875, 0.0075797119140625, 0.007593920230865478, 0.0075935997962951664, 0.007632512092590332, 0.007623424053192138, 0.007585792064666748, 0.007616511821746826, 0.007639039993286132, 0.007620704174041748, 0.007798208236694336, 0.0076928000450134275, 0.007634560108184815, 0.007608672142028808, 0.0075980801582336424, 0.007646624088287353, 0.007491583824157715, 0.0075959677696228025, 0.007595808029174805, 0.0076249918937683105, 0.007613887786865234, 0.00756828784942627, 0.007611328125, 0.00763478422164917, 0.0076174077987670894, 0.007571455955505371, 0.0075482878684997555, 0.007604095935821533, 0.007584288120269776, 0.007549280166625976, 0.007628543853759766, 0.007602303981781006, 0.0076080641746520995, 0.007604000091552735, 0.007606751918792724, 0.007617983818054199, 0.0076754879951477055, 0.0076171522140502925, 0.007616864204406738, 0.007618559837341309, 0.0076689600944519045, 0.007642176151275635, 0.007605504035949707, 0.0075944638252258305, 0.00763804817199707, 0.007623040199279786, 0.007673727989196777, 0.007621344089508057, 0.007671807765960693, 0.0076308479309082035, 0.007617919921875, 0.007632800102233887, 0.0076111040115356444, 0.00765337610244751, 0.007594143867492676, 0.007653215885162354, 0.007616511821746826, 0.0076406078338623045, 0.007573984146118164, 0.007622655868530274, 0.007639039993286132, 0.007658912181854248, 0.0076184959411621095, 0.007602272033691407, 0.007626399993896484, 0.007594912052154541, 0.007701727867126465, 0.007608863830566406, 0.007589407920837402, 0.007580639839172364, 0.007624447822570801, 0.007579808235168457, 0.007609407901763916, 0.007594783782958984, 0.007576831817626953, 0.007609375953674316, 0.00764463996887207, 0.0076167678833007815, 0.007700032234191895]",tokens/s,130.21992940998916,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1848.946688,2464.022528,0.0,2080.374784,2078.348288,s,1,8.7396953125,8.7396953125,0.0,8.7396953125,8.7396953125,8.7396953125,8.7396953125,[8.7396953125],,kWh,4.140046700416254e-05,4.559225126882321e-06,1.3127788279998498e-05,5.9087480411043356e-05,,MB,1876.852736,2663.251968,0.0,2248.146944,2179.810304,s,10,1.231104881286621,0.1231104881286621,0.0028143160291650995,0.12406145858764649,0.12450992279052733,0.12481982650756836,0.12506774948120117,"[0.12512973022460938, 0.11483760070800782, 0.12329555511474609, 0.12428851318359375, 0.123231201171875, 0.12444105529785156, 0.12341072082519532, 0.12421881866455078, 0.12390409851074219, 
0.12434758758544921]",tokens/s,2079.4329052814396,kWh,3.852467797423144e-06,4.245736080005575e-07,2.5477103715003316e-06,6.824751776924032e-06,tokens/kWh,37510521.75488515,MB,1881.096192,2665.34912,0.0,2248.146944,2179.812864,s,10,20.138088012695313,2.013808801269531,0.004071028004413532,2.0151616821289062,2.0168884277343753,2.019225769042969,2.021095642089844,"[2.0156944580078124, 2.0161783447265624, 2.0080335693359377, 2.010281494140625, 2.0161429443359373, 2.0215631103515626, 2.0163690185546876, 2.01462890625, 2.0084796142578125, 2.010716552734375]",tokens/s,31.284002711818513,kWh,5.868016310632265e-05,6.472501104622808e-06,3.021620472850054e-05,9.5368868939446e-05,tokens/kWh,660592.9240914197,,s,630,20.13621279335022,0.03196224252912733,0.00042639986659491437,0.03183543968200683,0.03236618995666504,0.032636385917663574,0.033553160820007324,"[0.03339260864257813, 0.03230524826049805, 0.03206371307373047, 0.03196108818054199, 0.031735807418823245, 0.03173398399353027, 0.032177536010742185, 0.031856096267700196, 0.031871936798095704, 0.0330546875, 0.03215158462524414, 0.03256934356689453, 0.031942655563354495, 0.031733760833740236, 0.03180544090270996, 0.03173785591125488, 0.031790496826171875, 0.031793760299682616, 0.03183001518249512, 0.03192998313903809, 0.03168409538269043, 0.03176947212219238, 0.03223756790161133, 0.03234121704101563, 0.032269088745117185, 0.032218177795410155, 0.03241875076293945, 0.032110591888427735, 0.03196332740783692, 0.03183801651000977, 0.031707103729248044, 0.031889440536499024, 0.03192831993103027, 0.031778528213500974, 0.03180940818786621, 0.031844768524169925, 0.03174988746643066, 0.03191423988342285, 0.03176767921447754, 0.03189823913574219, 0.031627519607543945, 0.03197961616516113, 0.03239926528930664, 0.03223468780517578, 0.032053119659423826, 0.0321769905090332, 0.031885183334350586, 0.03174764823913574, 0.03187984085083008, 0.03176243209838867, 0.03180745506286621, 0.03222940826416015, 0.03215884780883789, 0.03203776168823242, 0.03182387161254883, 0.03170099258422852, 0.031839519500732424, 0.03187702369689942, 0.03200092697143555, 0.03207897567749023, 0.031916831970214846, 0.03184639930725098, 0.0318720645904541, 0.03214761734008789, 0.03191971206665039, 0.0319579849243164, 0.03194470405578613, 0.031827167510986326, 0.03183407974243164, 0.0319803524017334, 0.03227344131469727, 0.03186988830566406, 0.031827999114990235, 0.03233171081542969, 0.032122943878173826, 0.031884639739990235, 0.03222127914428711, 0.032029247283935545, 0.031940607070922854, 0.03191616058349609, 0.03189337539672851, 0.032061439514160156, 0.03198271942138672, 0.03179814338684082, 0.03190975952148437, 0.03261452865600586, 0.03180748748779297, 0.03186892890930176, 0.03344563293457031, 0.031680416107177735, 0.031865184783935546, 0.03183206367492676, 0.031692800521850584, 0.031821823120117186, 0.03204857635498047, 0.03176035118103027, 0.03183651161193848, 0.03171327972412109, 0.03187449645996094, 0.03182995223999024, 0.03190873527526855, 0.03169228744506836, 0.03177113533020019, 0.03171033668518066, 0.03189235115051269, 0.03183001518249512, 0.03175984001159668, 0.03175683212280273, 0.0318156795501709, 0.031752191543579104, 0.031745183944702146, 0.03169366455078125, 0.03180953598022461, 0.0365953598022461, 0.031930240631103515, 0.031992095947265625, 0.03186911964416504, 0.031748159408569336, 0.03195283126831055, 0.03180544090270996, 0.032436416625976565, 0.03180099105834961, 0.03189536094665527, 0.031746303558349606, 0.03180928039550781, 0.03190790367126465, 0.03176246452331543, 
0.031735071182250975, 0.032160446166992186, 0.031903839111328124, 0.031750047683715824, 0.03157606315612793, 0.03211264038085938, 0.031735807418823245, 0.03171331214904785, 0.0316845760345459, 0.031628639221191406, 0.03179075241088867, 0.0316525764465332, 0.03191632080078125, 0.031669567108154294, 0.031713727951049805, 0.03157529640197754, 0.0317388801574707, 0.031711008071899416, 0.0317359676361084, 0.03170310401916504, 0.03165798377990723, 0.0316682243347168, 0.03185436820983887, 0.03175168037414551, 0.03187171173095703, 0.03183347129821777, 0.03195350456237793, 0.031880319595336916, 0.031832992553710936, 0.03174399948120117, 0.031808832168579104, 0.03168940734863281, 0.03176652717590332, 0.03419340896606445, 0.03335273742675781, 0.032256992340087894, 0.03199907112121582, 0.03235504150390625, 0.03171552085876465, 0.03194841575622558, 0.031803199768066406, 0.031855167388916014, 0.03175628852844238, 0.03183564758300781, 0.0317969913482666, 0.03186969566345215, 0.0317706241607666, 0.03175833511352539, 0.03179724884033203, 0.03180953598022461, 0.03178700828552246, 0.0317828483581543, 0.03185260772705078, 0.03189760017395019, 0.03189145660400391, 0.03197881507873535, 0.03170169639587402, 0.03201161575317383, 0.031743967056274414, 0.03166268730163574, 0.03168876838684082, 0.031694368362426756, 0.03237071990966797, 0.03182796859741211, 0.03209785461425781, 0.03163337516784668, 0.031498720169067386, 0.0317637767791748, 0.03175699234008789, 0.03167654418945313, 0.031770751953125, 0.0316023998260498, 0.03213520050048828, 0.031768575668334964, 0.0316231689453125, 0.033554431915283206, 0.032851486206054686, 0.03251811218261719, 0.031757823944091795, 0.03171827125549316, 0.03173948860168457, 0.031678464889526366, 0.031814176559448244, 0.031801343917846676, 0.03177388763427735, 0.032059776306152345, 0.031787103652954105, 0.031715679168701175, 0.03160051155090332, 0.031740032196044925, 0.0316866569519043, 0.031688575744628904, 0.03263020706176758, 0.03209423828125, 0.03183273506164551, 0.03209539031982422, 0.031843168258666996, 0.03198297691345215, 0.031715328216552735, 0.03163609504699707, 0.031643648147583005, 0.03157401657104492, 0.0316231689453125, 0.03198502349853516, 0.03168233680725098, 0.031603488922119144, 0.03163347244262695, 0.03165593528747559, 0.0316048641204834, 0.03162918472290039, 0.03171327972412109, 0.03167436790466309, 0.03177676773071289, 0.033550048828125, 0.03261798477172852, 0.03172143936157226, 0.03187795257568359, 0.031747808456420896, 0.03168489646911621, 0.03165184020996094, 0.032251296997070314, 0.03244249725341797, 0.03181644821166992, 0.03216969680786133, 0.031920127868652344, 0.03206739044189453, 0.03174828720092773, 0.03159654426574707, 0.031796735763549806, 0.03162982368469238, 0.03173785591125488, 0.03179216003417969, 0.03178720092773438, 0.03165200042724609, 0.0316822395324707, 0.03175113677978516, 0.033758655548095706, 0.03247977447509766, 0.03337625503540039, 0.03286812973022461, 0.03214358520507812, 0.031778144836425784, 0.03558876800537109, 0.03186073684692383, 0.03176569557189941, 0.031847232818603514, 0.03193052864074707, 0.03165987205505371, 0.03161087989807129, 0.03187711906433106, 0.031660032272338864, 0.032067264556884766, 0.033851711273193356, 0.031944063186645506, 0.032274559020996095, 0.032071647644042967, 0.031903392791748045, 0.03189849662780762, 0.03164105606079102, 0.03183046340942383, 0.03166012763977051, 0.031833568572998044, 0.031602527618408205, 0.031605440139770506, 0.03165529632568359, 0.031636320114135745, 0.03173353576660156, 0.033224609375, 
0.031901792526245115, 0.03182163238525391, 0.031729183197021484, 0.03183625602722168, 0.03164422416687012, 0.031715328216552735, 0.03173567962646484, 0.031701120376586914, 0.031961023330688475, 0.03188742446899414, 0.031682559967041016, 0.03169865608215332, 0.03185897636413574, 0.03168646430969238, 0.03211814498901367, 0.032172000885009766, 0.031649824142456054, 0.03183289527893066, 0.03170508766174317, 0.03174195289611816, 0.0318632640838623, 0.03174198341369629, 0.03173990440368652, 0.03163465690612793, 0.03156867218017578, 0.03159859275817871, 0.031850496292114255, 0.03177267265319824, 0.0316496639251709, 0.03175027275085449, 0.03177471923828125, 0.03162931251525879, 0.03175628852844238, 0.03177676773071289, 0.031938560485839845, 0.031815296173095704, 0.03207120132446289, 0.03218521499633789, 0.032237537384033205, 0.03220275115966797, 0.03202803039550781, 0.032090751647949216, 0.03199382400512695, 0.03185257530212402, 0.0317926082611084, 0.031876895904541014, 0.03185126495361328, 0.03184339141845703, 0.03192099189758301, 0.03197270393371582, 0.032148223876953125, 0.03186892890930176, 0.03203100967407226, 0.03173721694946289, 0.031746400833129886, 0.03191804885864258, 0.03275369644165039, 0.03256063842773437, 0.03252479934692383, 0.03234380722045899, 0.0322685432434082, 0.032126976013183595, 0.03222473526000977, 0.031936704635620115, 0.03190022468566894, 0.031944480895996094, 0.03236608123779297, 0.032651775360107424, 0.03242598342895508, 0.032004096984863284, 0.03194380760192871, 0.032072574615478515, 0.032469150543212894, 0.03207075119018555, 0.03211920166015625, 0.03299299240112305, 0.03308963012695312, 0.032960769653320315, 0.03273961639404297, 0.03236991882324219, 0.03237350463867188, 0.03244441604614258, 0.032440319061279296, 0.032703201293945314, 0.0320813102722168, 0.03196979141235352, 0.03180121612548828, 0.03168409538269043, 0.03176051139831543, 0.03184067153930664, 0.03176057624816894, 0.0316964168548584, 0.031785440444946286, 0.0318317756652832, 0.03168079948425293, 0.03179929542541504, 0.031884735107421874, 0.03236716842651367, 0.032229183197021484, 0.03198374366760254, 0.031850559234619144, 0.03182304000854492, 0.031855648040771484, 0.031817344665527346, 0.03177078437805176, 0.031780864715576174, 0.03215705490112305, 0.03225459289550781, 0.032032127380371096, 0.032017024993896484, 0.031854591369628905, 0.031752191543579104, 0.03379814529418945, 0.03309097671508789, 0.031978080749511716, 0.03247296142578125, 0.03184857559204102, 0.03179248046875, 0.03176246452331543, 0.03176233673095703, 0.03232534408569336, 0.03174236869812012, 0.03164425659179688, 0.0318525447845459, 0.03176959991455078, 0.031793855667114256, 0.031830080032348634, 0.03158563232421875, 0.031609760284423825, 0.031731712341308595, 0.031649248123168945, 0.03195337677001953, 0.031744064331054686, 0.03181977653503418, 0.032143360137939454, 0.03250380706787109, 0.03210383987426758, 0.03273324966430664, 0.03243881607055664, 0.032464897155761716, 0.03221440124511719, 0.0323590087890625, 0.032174110412597656, 0.03203635025024414, 0.03182233619689941, 0.03180748748779297, 0.032212799072265624, 0.031915456771850585, 0.0317263355255127, 0.03167363166809082, 0.03164601516723633, 0.031621536254882815, 0.03181158447265625, 0.032161792755126956, 0.032234657287597654, 0.03209273529052734, 0.03204438400268555, 0.03181475257873535, 0.03246473693847656, 0.03252566528320312, 0.03209676742553711, 0.031741632461547854, 0.03193494415283203, 0.032192161560058594, 0.03219257736206055, 0.03211619186401367, 0.03204127883911133, 
0.0318033275604248, 0.031807424545288086, 0.03176854324340821, 0.0318306884765625, 0.031942655563354495, 0.0320362548828125, 0.03183472061157227, 0.0319749755859375, 0.03196563148498535, 0.03207372665405273, 0.032415744781494144, 0.032702465057373044, 0.032499713897705076, 0.032050975799560545, 0.03193468856811523, 0.03187081527709961, 0.03174825668334961, 0.031780927658081055, 0.031823808670043946, 0.031662080764770506, 0.03170102310180664, 0.03167807960510254, 0.03186720085144043, 0.031766239166259765, 0.03192608070373535, 0.03175270462036133, 0.03180748748779297, 0.03172489547729492, 0.031745952606201173, 0.03224652862548828, 0.032163646697998045, 0.03216569519042969, 0.03211446380615234, 0.031928640365600586, 0.03186511993408203, 0.032026622772216795, 0.031905792236328126, 0.03177168083190918, 0.03187811279296875, 0.03242803192138672, 0.03216588973999023, 0.03202431869506836, 0.033199966430664064, 0.03228643035888672, 0.03191184043884277, 0.03214771270751953, 0.03210406494140625, 0.03186735916137695, 0.031817920684814455, 0.03181292724609375, 0.031726272583007815, 0.03179110336303711, 0.031845888137817385, 0.03178137588500977, 0.032550846099853516, 0.03194067192077637, 0.031656991958618166, 0.031675359725952146, 0.031678464889526366, 0.031692800521850584, 0.031770368576049805, 0.03181705665588379, 0.03167225646972656, 0.031747039794921876, 0.03186073684692383, 0.0317827205657959, 0.03198608016967774, 0.031759519577026364, 0.03169939231872559, 0.03175625610351562, 0.03204687881469727, 0.031703327178955076, 0.03176848030090332, 0.0317295036315918, 0.03170476722717285, 0.03168486404418945, 0.031688255310058595, 0.031681440353393556, 0.03165715217590332, 0.03229574584960938, 0.03186272048950195, 0.032139328002929686, 0.031780864715576174, 0.03183616065979004, 0.03171327972412109, 0.03182358360290528, 0.03180521583557129, 0.031674879074096676, 0.03171705627441406, 0.03168288040161133, 0.03179091262817383, 0.03172537612915039, 0.031766912460327146, 0.031687711715698244, 0.03171939277648926, 0.03164672088623047, 0.03175628852844238, 0.031675935745239255, 0.032024417877197266, 0.0316582088470459, 0.03264144134521484, 0.033078784942626956, 0.03217891311645508, 0.031896640777587894, 0.03170787239074707, 0.03183523178100586, 0.03170806312561035, 0.031698944091796875, 0.031628768920898435, 0.03161347198486328, 0.03326873779296875, 0.03299225616455078, 0.03200204849243164, 0.03204403305053711, 0.03187148857116699, 0.0319936637878418, 0.03218502426147461, 0.032159744262695314, 0.03212214279174805, 0.032100257873535154, 0.03203977584838867, 0.03184156799316406, 0.03185923194885254, 0.03169705581665039, 0.032005184173583986, 0.032104545593261716, 0.031867040634155276, 0.03179999923706055, 0.03189145660400391, 0.03195289611816406, 0.03176383972167969, 0.031695199966430665, 0.03167251205444336, 0.03171958351135254, 0.03170844841003418, 0.03222566223144531, 0.032379169464111325, 0.03215155029296875, 0.032894977569580076, 0.03188300704956055, 0.03225942230224609, 0.031882144927978515, 0.031694623947143556, 0.03161520004272461, 0.031866880416870115, 0.03227462387084961, 0.032038623809814454, 0.03177686309814453, 0.03181158447265625, 0.031703039169311525, 0.031642784118652345, 0.03172438430786133, 0.03177471923828125, 0.03178236770629883, 0.03172406387329101, 0.03169075202941894, 0.031797407150268554, 0.03187491226196289, 0.03178668785095215, 0.03176422309875488, 0.03164345550537109, 0.03190870475769043, 0.03169270324707031, 0.03176652717590332, 0.03167231941223145, 0.03163680076599121, 
0.031646080017089843, 0.031703359603881834]",tokens/s,31.286916088215516,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4760.403968,6022.889472,0.0,5637.144576,5630.431232,s,1,10.4541953125,10.4541953125,0.0,10.4541953125,10.4541953125,10.4541953125,10.4541953125,[10.4541953125],,kWh,9.661609644166067e-05,1.0649981945977804e-05,3.1163358264002516e-05,0.00013842943665164098,,MB,1775.878144,6402.473984,0.0,5987.36896,5890.097152,s,10,5.146839324951172,0.5146839324951171,0.006163274416921704,0.5166555480957031,0.5207352722167968,0.5208857025146485,0.5210060467529297,"[0.5042449035644532, 
0.5094728088378906, 0.5114360961914063, 0.5145950927734375, 0.5059497985839844, 0.5204075927734375, 0.5187160034179688, 0.5207018432617188, 0.5210361328125, 0.520279052734375]",tokens/s,497.3926400984525,kWh,1.500436195270898e-05,1.6547108944451825e-06,9.961077413300767e-06,2.6620150260454933e-05,tokens/kWh,9616775.168256508,MB,1789.169664,6404.571136,0.0,5987.36896,5890.099712,s,10,30.710494873046876,3.071049487304687,0.025504862259848802,3.0640999755859375,3.09434765625,3.115241455078125,3.131956494140625,"[3.07056689453125, 3.0786591796875, 3.054263671875, 3.13613525390625, 3.08970458984375, 3.058604248046875, 3.05690966796875, 3.064677001953125, 3.06352294921875, 3.037451416015625]",tokens/s,20.51415982074977,kWh,0.00010999836128895821,1.2133300459958004e-05,6.854187427789981e-05,0.000190673536026816,tokens/kWh,330407.6764545857,,s,630,30.70810174560547,0.048743018643818206,0.000907305889400842,0.048573728561401366,0.04941768836975098,0.05069592609405517,0.0525386298751831,"[0.05418844985961914, 0.04881020736694336, 0.04874063873291016, 0.04864361572265625, 0.04867939376831055, 0.04866867065429688, 0.048533153533935544, 0.050853599548339845, 0.0486693115234375, 0.04872403335571289, 0.04870892715454102, 0.048622207641601564, 0.048263168334960936, 0.04893491363525391, 0.04871987152099609, 0.048388065338134764, 0.04865232086181641, 0.048379776000976565, 0.04838374328613281, 0.04842329788208008, 0.048418815612792966, 0.04895660781860352, 0.048933151245117185, 0.04922422409057617, 0.04833280181884766, 0.04859084701538086, 0.048139999389648434, 0.04805222320556641, 0.04811600112915039, 0.04808294296264649, 0.048698558807373046, 0.04874028778076172, 0.049127422332763675, 0.04883135986328125, 0.049056896209716795, 0.04857244873046875, 0.04862652969360352, 0.048467967987060545, 0.049290943145751956, 0.04867308807373047, 0.048557727813720704, 0.048314430236816405, 0.04870918273925781, 0.04826947021484375, 0.048298561096191406, 0.04799702453613281, 0.04852521514892578, 0.04857820892333985, 0.048649665832519534, 0.04860784149169922, 0.048785919189453124, 0.04822201538085937, 0.04878131103515625, 0.04849049758911133, 0.048644065856933594, 0.048736286163330075, 0.048476158142089845, 0.04835750579833984, 0.048897918701171876, 0.049958080291748044, 0.048377727508544924, 0.04834604644775391, 0.04879888153076172, 0.04986345672607422, 0.04829129409790039, 0.04859305572509766, 0.049748416900634765, 0.04818854522705078, 0.04845043182373047, 0.04905289459228516, 0.04870131301879883, 0.04859174346923828, 0.048557342529296874, 0.05193600082397461, 0.048517120361328124, 0.04891424179077149, 0.048621761322021485, 0.04860700988769531, 0.04857596969604492, 0.04870348739624023, 0.0483950080871582, 0.048261119842529294, 0.04813164901733399, 0.04853190231323242, 0.049084415435791014, 0.04816070556640625, 0.04926259231567383, 0.04883369445800781, 0.048554912567138675, 0.04902668762207031, 0.04891891098022461, 0.04897587203979492, 0.04943030548095703, 0.048764926910400394, 0.0486577262878418, 0.048669601440429686, 0.048868736267089846, 0.04873075103759766, 0.048898048400878906, 0.04891852951049805, 0.049235969543457034, 0.04873830413818359, 0.048689151763916014, 0.04847555160522461, 0.0485382080078125, 0.04829561614990235, 0.04862188720703125, 0.04881203079223633, 0.04862726211547851, 0.0489128303527832, 0.05131216049194336, 0.04835887908935547, 0.04853395080566406, 0.048371360778808596, 0.04802617645263672, 0.048070430755615234, 0.04845174407958985, 0.048837024688720705, 0.04878745651245117, 0.04850483322143555, 
0.04832979202270508, 0.048737056732177736, 0.052496543884277345, 0.0502927360534668, 0.04885504150390625, 0.04852121734619141, 0.04996227264404297, 0.04886943817138672, 0.048802112579345705, 0.04837308883666992, 0.04829596710205078, 0.04796105575561523, 0.04803583908081055, 0.04825088119506836, 0.04808848190307617, 0.0485549430847168, 0.04873795318603515, 0.04852684783935547, 0.04919347381591797, 0.048533504486083984, 0.049344512939453126, 0.048426303863525394, 0.04800521469116211, 0.04782684707641602, 0.04793740844726563, 0.047821601867675784, 0.04772252655029297, 0.04791910552978516, 0.04778188705444336, 0.04801536178588867, 0.04849260711669922, 0.04857644653320312, 0.04847967910766601, 0.0484890251159668, 0.04815052795410156, 0.04828147125244141, 0.04807011032104492, 0.04815683364868164, 0.048287582397460935, 0.048779647827148435, 0.04848873519897461, 0.04872175979614258, 0.048645729064941405, 0.04830879974365234, 0.04891852951049805, 0.04832665634155273, 0.04810956954956055, 0.04806361770629883, 0.04820991897583008, 0.04853433609008789, 0.04871689605712891, 0.04893590545654297, 0.04862771224975586, 0.048421886444091795, 0.04871680068969726, 0.048451713562011715, 0.04853747177124024, 0.04838399887084961, 0.04885670471191406, 0.04872844696044922, 0.04891852951049805, 0.049097759246826175, 0.048722911834716796, 0.04862524795532226, 0.04837827301025391, 0.04821382522583008, 0.04887980651855469, 0.04870499038696289, 0.04900259017944336, 0.049909568786621096, 0.04890265655517578, 0.048713409423828125, 0.04833257675170898, 0.05202556610107422, 0.05124095916748047, 0.05134950256347656, 0.05128582382202149, 0.05089654541015625, 0.0506677131652832, 0.050383201599121095, 0.04834918212890625, 0.04821337509155273, 0.04871567916870117, 0.04902371215820313, 0.04967206573486328, 0.04843123245239258, 0.04836048126220703, 0.04861027145385742, 0.04837529754638672, 0.04859334564208984, 0.048910400390625, 0.049393505096435544, 0.049266368865966796, 0.04920367813110352, 0.04919091033935547, 0.04921753692626953, 0.048928768157958984, 0.04904735946655273, 0.048775135040283205, 0.04861360168457031, 0.04847798538208008, 0.04862774276733398, 0.04934054565429687, 0.04903055953979492, 0.048761505126953125, 0.04928307342529297, 0.04919500732421875, 0.04875059127807617, 0.048828414916992184, 0.04871987152099609, 0.04938095855712891, 0.0492465934753418, 0.050490528106689456, 0.04906185531616211, 0.048358142852783205, 0.048332958221435546, 0.04808617782592774, 0.04816777420043945, 0.048914432525634766, 0.048807937622070315, 0.05129347229003906, 0.05291446304321289, 0.052553375244140624, 0.05120876693725586, 0.05213411331176758, 0.053431713104248046, 0.0521959342956543, 0.05250252914428711, 0.05186556625366211, 0.05314563369750976, 0.0518737907409668, 0.05226291275024414, 0.05123891067504883, 0.05030627059936523, 0.04995145416259766, 0.04999379348754883, 0.049358848571777345, 0.049006431579589844, 0.048906112670898436, 0.04865052795410156, 0.04876083374023438, 0.04909465789794922, 0.05010432052612305, 0.04891033554077148, 0.04905779266357422, 0.0494917106628418, 0.04872560119628906, 0.04900316619873047, 0.048840095520019534, 0.04905020904541016, 0.04913356781005859, 0.04902230453491211, 0.049095134735107425, 0.051744384765625, 0.04839481735229492, 0.04833065414428711, 0.04850492858886719, 0.04851507186889648, 0.04866252899169922, 0.0488611831665039, 0.048739967346191404, 0.04906636810302734, 0.05018624114990235, 0.04880588912963867, 0.04835737609863281, 0.04822544097900391, 0.04796089553833008, 0.048228382110595706, 
0.04820547103881836, 0.04847446441650391, 0.049283294677734374, 0.048639774322509766, 0.048533313751220705, 0.04853164672851563, 0.05009337615966797, 0.048958145141601565, 0.04850396728515625, 0.049627391815185544, 0.04893552017211914, 0.0486379508972168, 0.049169822692871096, 0.048683616638183595, 0.04858265686035156, 0.052623199462890624, 0.048777374267578125, 0.048498016357421875, 0.04853417587280273, 0.04808246231079102, 0.04822268676757813, 0.04885094451904297, 0.049016254425048825, 0.0488924789428711, 0.048881664276123046, 0.04907417678833008, 0.04885913467407227, 0.04981171035766602, 0.048162815093994144, 0.04800716781616211, 0.047916481018066406, 0.04808761596679687, 0.04802908706665039, 0.048716384887695315, 0.04849679946899414, 0.04850688171386719, 0.048265056610107424, 0.048601089477539064, 0.04851030349731445, 0.0487371826171875, 0.04872985458374023, 0.048976959228515624, 0.048796607971191404, 0.04882582473754883, 0.04886991882324219, 0.04860905456542969, 0.0496759033203125, 0.04854790496826172, 0.04962287902832031, 0.04882912063598633, 0.049454559326171876, 0.04883481597900391, 0.0482031364440918, 0.048142398834228516, 0.04810134506225586, 0.04884531021118164, 0.04876531219482422, 0.04867654418945312, 0.04947180938720703, 0.048191200256347655, 0.04845113754272461, 0.04812464141845703, 0.04825907135009765, 0.0488138542175293, 0.048906463623046875, 0.04924854278564453, 0.04871548843383789, 0.04883158493041992, 0.0484607048034668, 0.048628734588623046, 0.04820275115966797, 0.04806860733032227, 0.048105121612548825, 0.04786320114135742, 0.048098239898681644, 0.04814438247680664, 0.048588607788085936, 0.04838825607299805, 0.048150558471679684, 0.04810083389282226, 0.04799951934814453, 0.04800716781616211, 0.04815369415283203, 0.04883475112915039, 0.0487861442565918, 0.04849871826171875, 0.04878307342529297, 0.04834044647216797, 0.04841551971435547, 0.04840857696533203, 0.050845054626464846, 0.048747360229492186, 0.0490533447265625, 0.04882854461669922, 0.04883804702758789, 0.04881059265136719, 0.04848137664794922, 0.048092063903808595, 0.04821395111083984, 0.04791507339477539, 0.04773068618774414, 0.047767551422119144, 0.048089088439941405, 0.048449535369873044, 0.04845772933959961, 0.04862499237060547, 0.0480365104675293, 0.04803782272338867, 0.04790073776245117, 0.04792729568481445, 0.04809318542480469, 0.048881664276123046, 0.04848758316040039, 0.04866336059570313, 0.04842838287353515, 0.04854780960083008, 0.04929814529418945, 0.052383743286132815, 0.048293888092041014, 0.04813372802734375, 0.048013248443603516, 0.04781923294067383, 0.047933441162109375, 0.04837376022338867, 0.04855398559570313, 0.048527359008789066, 0.0482913932800293, 0.04828409576416016, 0.048026657104492186, 0.048005985260009765, 0.048474239349365233, 0.04876083374023438, 0.04889190292358398, 0.04883865737915039, 0.04860655975341797, 0.04833731079101562, 0.048301982879638675, 0.048810142517089844, 0.04809299087524414, 0.048296321868896486, 0.04782489776611328, 0.04808294296264649, 0.04778303909301758, 0.05141712188720703, 0.04836816024780274, 0.04864236831665039, 0.04854988861083984, 0.0482712631225586, 0.04836054229736328, 0.04813721466064453, 0.04817715072631836, 0.04801126480102539, 0.04974182510375977, 0.04943564987182617, 0.04824576187133789, 0.04849868774414062, 0.048656383514404294, 0.048881664276123046, 0.04871571350097656, 0.04851103973388672, 0.04857548904418945, 0.048339710235595704, 0.04879529571533203, 0.048640609741210934, 0.04882767868041992, 0.048726497650146486, 0.04941628646850586, 
0.05013315200805664, 0.051187103271484374, 0.04903523254394531, 0.04880038452148437, 0.04842086410522461, 0.047866943359375, 0.047978912353515625, 0.047921695709228516, 0.04808483123779297, 0.04778409576416016, 0.04779212951660156, 0.048317535400390625, 0.048417407989501955, 0.05071900939941406, 0.04919910430908203, 0.048517120361328124, 0.048912384033203124, 0.04816268920898437, 0.04800012969970703, 0.04856934356689453, 0.04856217575073242, 0.04875059127807617, 0.05219942474365234, 0.04886022567749024, 0.048608192443847655, 0.04849638366699219, 0.0481794548034668, 0.0482242546081543, 0.04880588912963867, 0.04864806365966797, 0.048619102478027344, 0.048316959381103516, 0.048235710144042966, 0.04794585418701172, 0.0481860466003418, 0.04852896118164062, 0.0486445426940918, 0.04849868774414062, 0.04900048065185547, 0.04849190521240234, 0.04887817764282226, 0.048490432739257815, 0.04826323318481445, 0.04852681732177734, 0.04861110305786133, 0.048153343200683596, 0.047887840270996095, 0.047821025848388675, 0.0479378547668457, 0.0499508171081543, 0.048759521484375, 0.0487891845703125, 0.048447166442871094, 0.04855791854858398, 0.04847078323364258, 0.04836700820922851, 0.04805286407470703, 0.048113407135009764, 0.048447296142578124, 0.04853500747680664, 0.048574497222900394, 0.04844620895385742, 0.04868524932861328, 0.04835123062133789, 0.04851059341430664, 0.0490643196105957, 0.04868710327148437, 0.04876259231567383, 0.04877667236328125, 0.048454463958740236, 0.048467041015625, 0.04824969482421875, 0.04874643325805664, 0.0480557746887207, 0.0484788818359375, 0.04827958297729492, 0.04803977584838867, 0.048002784729003906, 0.04826358413696289, 0.04882422256469727, 0.04862985610961914, 0.048433055877685545, 0.048250560760498044, 0.048112033843994144, 0.05388854217529297, 0.0484881591796875, 0.048097217559814456, 0.048558910369873046, 0.04880303955078125, 0.0486910400390625, 0.048572959899902346, 0.0484288330078125, 0.04842073440551758, 0.0482694091796875, 0.04856079864501953, 0.04877008056640625, 0.04901571273803711, 0.04873993682861328, 0.04925059127807617, 0.04867510223388672, 0.04863100814819336, 0.04861171340942383, 0.048640159606933596, 0.04867113494873047, 0.04838988876342774, 0.04810684967041016, 0.04850067138671875, 0.04829238510131836, 0.048844257354736326, 0.04879228973388672, 0.048639999389648435, 0.04831955337524414, 0.04988735961914063, 0.04859875106811523, 0.048261280059814456, 0.04807823944091797, 0.048184928894042967, 0.04861644744873047, 0.0487421760559082, 0.048743808746337894, 0.04844553756713867, 0.048140064239501956, 0.04782156753540039, 0.049761791229248044, 0.048779998779296875, 0.04848230361938476, 0.04886732864379883, 0.04800441741943359, 0.04791939163208008, 0.048036033630371094, 0.048140640258789065, 0.04771939086914063, 0.047893184661865235, 0.04782828903198242, 0.04779305648803711, 0.0477470703125, 0.04794489669799805, 0.04788016128540039, 0.04779708862304687, 0.04770316696166992, 0.047893535614013674, 0.04833587265014649, 0.04833776092529297, 0.04861942291259766, 0.048605281829833986, 0.048148223876953125, 0.048449790954589844, 0.04838943862915039, 0.04851123046875, 0.0486732177734375, 0.050495742797851566, 0.048979713439941404, 0.048228351593017575, 0.04787433624267578, 0.047831775665283204, 0.047823360443115234, 0.04779596710205078, 0.04792015838623047, 0.047902431488037106, 0.047892478942871096, 0.047773246765136716, 0.04797280120849609, 0.04764416122436523, 0.047806625366210935, 0.04793788909912109, 0.047982593536376954, 0.047761409759521485, 0.048048126220703126, 
0.04822403335571289, 0.047874271392822264, 0.04769142532348633, 0.047849822998046875, 0.04813571166992187, 0.04803366470336914, 0.047981151580810545]",tokens/s,20.51575851933463,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,814.661632,522.059776,0.0,136.31488,125.252608,s,1,7.98327587890625,7.98327587890625,0.0,7.98327587890625,7.98327587890625,7.98327587890625,7.98327587890625,[7.98327587890625],,kWh,1.2641908795823534e-05,1.3872810539497675e-06,3.7408363260105215e-06,1.7770026175783825e-05,,MB,1278.099456,641.59744,0.0,226.492416,195.159552,s,10,0.1838239994049072,0.018382399940490724,0.0002699426648300211,0.01842867183685303,0.018690994453430174,0.018717577075958253,0.018738843173980713,"[0.018744159698486328, 0.018683488845825196, 0.01847817611694336, 0.018685087203979493, 0.018176992416381835, 0.01848886489868164, 0.018218687057495117, 0.018379167556762697, 0.017915103912353517, 0.01805427169799805]",tokens/s,13926.364393591037,kWh,4.2735840269036506e-07,4.71258320041686e-08,2.52544939306019e-07,7.270291740005526e-07,tokens/kWh,352117919.27321666,MB,1310.908416,656.277504,0.0,241.17248,195.162112,s,10,10.126364624023438,1.0126364624023438,0.015366868677085227,1.0134085388183594,1.0306595336914062,1.0320146423339844,1.033098729248047,"[1.0303583984375, 1.0333697509765625, 1.02825927734375, 1.0182559814453125, 1.0135733032226562, 1.0132437744140625, 1.0034168701171875, 1.0102005615234375, 0.9846620483398437, 0.991024658203125]",tokens/s,62.21383718549989,kWh,2.2706152926158406e-05,2.504093454661528e-06,8.9452188291541e-06,3.415546520997404e-05,tokens/kWh,1844507.156108148,,s,630,10.11942500972747,0.016062579380519804,0.0003864538407789959,0.016099263191223144,0.016443561363220217,0.016535586738586426,0.016978969516754153,"[0.01641267204284668, 0.016424448013305663, 0.016336383819580077, 0.016345760345458985, 0.01658006477355957, 0.016364255905151368, 0.01631772804260254, 0.01625177574157715, 0.016350847244262695, 0.016449920654296873, 0.016261119842529297, 0.01750822448730469, 0.01640902328491211, 0.01633273506164551, 0.016378751754760744, 0.016322879791259765, 0.016452159881591797, 0.01644879913330078, 0.01648691177368164, 0.01644537544250488, 0.016382240295410157, 0.01631340789794922, 0.016445600509643554, 0.016234943389892578, 0.016201440811157226, 0.016291872024536132, 0.016310272216796876, 0.01631702423095703, 0.01626857566833496, 0.016324960708618164, 0.016320640563964844, 0.016298240661621093, 0.016433151245117187, 0.016257024765014647, 0.016310272216796876, 0.016328704833984374, 0.016346399307250976, 0.016224384307861328, 0.016373823165893555, 0.016307743072509765, 0.016311296463012694, 0.01622425651550293, 0.016326656341552736, 0.016302080154418946, 0.01640652847290039, 0.016275455474853515, 0.016455680847167968, 0.016292991638183593, 0.016330623626708986, 0.016288768768310546, 0.016314111709594726, 0.016294143676757813, 0.016222175598144532, 
0.016195615768432616, 0.016260896682739258, 0.01625718307495117, 0.016320127487182617, 0.016234943389892578, 0.016205087661743164, 0.01618182373046875, 0.016306047439575196, 0.01618675231933594, 0.016253183364868164, 0.016445728302001954, 0.016393951416015625, 0.016436735153198243, 0.016331039428710937, 0.016234912872314454, 0.016197439193725585, 0.016207775115966796, 0.016157087326049806, 0.016053951263427735, 0.01621753692626953, 0.016187967300415038, 0.016314432144165038, 0.016416383743286134, 0.01640220832824707, 0.01640297508239746, 0.017001983642578124, 0.016425472259521484, 0.0165295352935791, 0.016347007751464845, 0.016363519668579102, 0.01646329689025879, 0.01634351921081543, 0.01637331199645996, 0.016304128646850585, 0.016334943771362305, 0.016451871871948243, 0.01632896041870117, 0.01631996726989746, 0.016427743911743165, 0.01641596794128418, 0.016321023941040038, 0.01631648063659668, 0.016445087432861327, 0.016330400466918946, 0.016290719985961915, 0.01632841682434082, 0.01639574432373047, 0.016354143142700197, 0.016332479476928712, 0.01639628791809082, 0.016267263412475585, 0.016236543655395508, 0.01633910369873047, 0.0163853759765625, 0.01638860893249512, 0.016922624588012695, 0.016515071868896485, 0.016685056686401366, 0.01680179214477539, 0.01680793571472168, 0.016412960052490235, 0.016461536407470702, 0.01639193534851074, 0.01637196731567383, 0.016375455856323242, 0.01639254379272461, 0.016467903137207033, 0.016322336196899413, 0.01644339179992676, 0.016404895782470702, 0.016286720275878908, 0.01629267120361328, 0.01632057571411133, 0.01617401695251465, 0.01658297538757324, 0.01624336051940918, 0.016277023315429687, 0.01627497673034668, 0.016481216430664063, 0.0162795524597168, 0.016146495819091798, 0.016256351470947266, 0.016214624404907226, 0.016237632751464844, 0.016153535842895507, 0.016226303100585936, 0.01619353675842285, 0.016247871398925782, 0.016280479431152343, 0.01652355194091797, 0.01619753646850586, 0.016236255645751953, 0.016199743270874024, 0.016209056854248047, 0.016175935745239258, 0.0162774715423584, 0.01634320068359375, 0.016199264526367187, 0.016338943481445312, 0.016250656127929686, 0.016296224594116213, 0.01651043128967285, 0.016233535766601564, 0.016209856033325195, 0.01622819137573242, 0.016377599716186523, 0.016247039794921876, 0.016235776901245117, 0.016296224594116213, 0.01652102470397949, 0.01635968017578125, 0.016262752532958984, 0.01645039939880371, 0.01622422409057617, 0.016232448577880858, 0.01634636878967285, 0.016213855743408202, 0.01624115180969238, 0.01644585609436035, 0.016365184783935546, 0.016292224884033202, 0.01634911918640137, 0.016535615921020506, 0.016525440216064453, 0.016405664443969726, 0.016394975662231446, 0.016287744522094725, 0.016506879806518555, 0.01633196830749512, 0.016294719696044922, 0.01621196746826172, 0.016268608093261718, 0.016349855422973632, 0.016300064086914062, 0.016240352630615233, 0.016644384384155272, 0.01631974411010742, 0.01648899269104004, 0.016609312057495117, 0.01643721580505371, 0.016327871322631835, 0.01655686378479004, 0.0163492488861084, 0.01631158447265625, 0.016359552383422852, 0.017093311309814452, 0.017350719451904296, 0.0164913272857666, 0.016403423309326173, 0.016795488357543947, 0.01650089645385742, 0.01653555107116699, 0.01643724822998047, 0.016358879089355467, 0.01638694381713867, 0.016309919357299803, 0.016220159530639648, 0.01635932731628418, 0.016296031951904297, 0.016099327087402342, 0.01610166358947754, 0.016233728408813475, 0.016163679122924806, 0.01610918426513672, 
0.016152576446533205, 0.01616924858093262, 0.016193248748779296, 0.016248832702636717, 0.016099327087402342, 0.01597644805908203, 0.0159171199798584, 0.01604777526855469, 0.017113311767578125, 0.01652128028869629, 0.016889888763427733, 0.01616864013671875, 0.01598902416229248, 0.015949567794799804, 0.015972607612609862, 0.01601740837097168, 0.015978495597839357, 0.015930944442749024, 0.01569580841064453, 0.015615776062011718, 0.015618559837341308, 0.015562463760375976, 0.015631967544555665, 0.015608511924743652, 0.015571167945861816, 0.015441887855529786, 0.01551353645324707, 0.015396703720092773, 0.015615519523620605, 0.015641183853149415, 0.015654784202575683, 0.0157903356552124, 0.015869407653808593, 0.015901215553283693, 0.0161112003326416, 0.01585590362548828, 0.01581683158874512, 0.016355327606201172, 0.015902655601501465, 0.01584748840332031, 0.015957440376281738, 0.015915648460388183, 0.016019264221191407, 0.016127967834472658, 0.016398271560668944, 0.016398624420166017, 0.016596927642822265, 0.01630793571472168, 0.016321855545043944, 0.016159711837768556, 0.016278560638427735, 0.01623958396911621, 0.01616281509399414, 0.01614847946166992, 0.016300031661987305, 0.01610348892211914, 0.01606447982788086, 0.015996735572814943, 0.01602908706665039, 0.016101184844970702, 0.016079999923706054, 0.016228160858154296, 0.016023839950561523, 0.01676380729675293, 0.016200864791870117, 0.016273120880126953, 0.01640559959411621, 0.016171104431152345, 0.015907584190368654, 0.01577948760986328, 0.01580412769317627, 0.015876192092895508, 0.015721055984497072, 0.0157903356552124, 0.01588806438446045, 0.015992064476013184, 0.01628646469116211, 0.016079872131347657, 0.015995903968811034, 0.016082048416137695, 0.0160183048248291, 0.01599503993988037, 0.015740768432617187, 0.015683775901794435, 0.015644479751586914, 0.01572982406616211, 0.015634847640991212, 0.01577340793609619, 0.0159648962020874, 0.015988927841186523, 0.016137151718139647, 0.0161844482421875, 0.016187135696411132, 0.016252992630004882, 0.01617318344116211, 0.016381631851196288, 0.01615007972717285, 0.016428831100463868, 0.016301088333129883, 0.016214464187622072, 0.016007360458374024, 0.015840928077697755, 0.01576591968536377, 0.016199968338012696, 0.016128000259399415, 0.016156543731689454, 0.016075199127197265, 0.015978303909301758, 0.015864928245544432, 0.015774496078491212, 0.015703455924987793, 0.015598176002502441, 0.015650527954101563, 0.01606070327758789, 0.016015296936035157, 0.016009279251098633, 0.016080799102783202, 0.01585993576049805, 0.015951744079589845, 0.015996479988098146, 0.015968704223632814, 0.016174720764160155, 0.016278047561645508, 0.016004959106445314, 0.015969280242919923, 0.01606515121459961, 0.01607859230041504, 0.015976223945617676, 0.016327808380126953, 0.016186208724975587, 0.015983360290527344, 0.016120288848876955, 0.01614803123474121, 0.01605027198791504, 0.016121952056884766, 0.016258176803588868, 0.016374143600463867, 0.016240575790405273, 0.016254751205444336, 0.016286399841308592, 0.016404287338256836, 0.016156864166259766, 0.016035839080810545, 0.016017215728759766, 0.016040128707885744, 0.016068832397460937, 0.01634486389160156, 0.016232160568237303, 0.016118047714233398, 0.01601945686340332, 0.016061471939086913, 0.016072959899902345, 0.016131935119628907, 0.01600396728515625, 0.015953920364379884, 0.016211488723754882, 0.016224512100219725, 0.01604755210876465, 0.01603798484802246, 0.016245439529418947, 0.01602355194091797, 0.01593776035308838, 0.01597644805908203, 0.015945568084716796, 
0.016038047790527345, 0.01590272045135498, 0.015869343757629394, 0.01586956787109375, 0.016190431594848634, 0.015953920364379884, 0.015882240295410157, 0.015849472045898438, 0.015738880157470703, 0.015646719932556154, 0.015682592391967773, 0.015863936424255373, 0.016194400787353517, 0.016103391647338868, 0.016152608871459962, 0.01600502395629883, 0.015915103912353516, 0.015917056083679198, 0.01569993591308594, 0.015509568214416503, 0.01566431999206543, 0.015614975929260254, 0.015501088142395019, 0.015453536033630371, 0.01566972827911377, 0.015832991600036622, 0.01580668830871582, 0.015787936210632323, 0.015867103576660158, 0.015729599952697754, 0.015730688095092774, 0.015844544410705565, 0.01575126361846924, 0.01590345573425293, 0.015928704261779784, 0.015814784049987794, 0.015699872016906737, 0.015766112327575684, 0.015754847526550292, 0.015527551651000976, 0.015659744262695313, 0.015848928451538086, 0.015916864395141603, 0.015731807708740234, 0.016064191818237306, 0.016424448013305663, 0.016604736328125, 0.01646905517578125, 0.016382112503051757, 0.016229503631591796, 0.016099199295043946, 0.016167552947998046, 0.016140192031860352, 0.015980735778808593, 0.015953920364379884, 0.016062559127807616, 0.01628179168701172, 0.016167776107788086, 0.01592409610748291, 0.016122976303100587, 0.01592735958099365, 0.015978176116943358, 0.015816479682922362, 0.015738911628723144, 0.01609369659423828, 0.01597433567047119, 0.015578720092773437, 0.01601113510131836, 0.015631967544555665, 0.015558655738830567, 0.015541215896606446, 0.015594688415527344, 0.015529760360717773, 0.015461088180541992, 0.015536640167236328, 0.015963935852050783, 0.016311967849731445, 0.016335199356079102, 0.016447744369506835, 0.016686527252197266, 0.016672800064086914, 0.016736543655395508, 0.019146751403808594, 0.016778879165649414, 0.016560512542724608, 0.016773120880126953, 0.016669727325439452, 0.016526304244995117, 0.01638582420349121, 0.01609881591796875, 0.015960160255432128, 0.015915648460388183, 0.015978495597839357, 0.0159267520904541, 0.015945695877075197, 0.015926112174987794, 0.01591062355041504, 0.015842464447021483, 0.0157489595413208, 0.01588707160949707, 0.015763744354248047, 0.015662495613098144, 0.01568659210205078, 0.015845279693603515, 0.01564031982421875, 0.015626591682434084, 0.015684351921081544, 0.015674495697021486, 0.015643872261047364, 0.015708959579467774, 0.015669024467468262, 0.015577119827270508, 0.0156464319229126, 0.015716896057128907, 0.015748831748962404, 0.015579327583312989, 0.015647583961486817, 0.015502559661865234, 0.015668992042541505, 0.016823392868041992, 0.018645919799804688, 0.015630335807800294, 0.015699584007263182, 0.015310976028442383, 0.01562758445739746, 0.015631296157836913, 0.015570688247680663, 0.015790080070495604, 0.01562828826904297, 0.015567999839782715, 0.01570310401916504, 0.015537983894348144, 0.01564291191101074, 0.015507167816162109, 0.015584799766540527, 0.01548742389678955, 0.015684703826904296, 0.015952832221984865, 0.015524991989135742, 0.01564681625366211, 0.015537216186523437, 0.016304960250854494, 0.01558620834350586, 0.01560166358947754, 0.015501312255859375, 0.015507455825805663, 0.015528063774108886, 0.015449983596801758, 0.01570201587677002, 0.015506879806518555, 0.015632960319519044, 0.015484928131103515, 0.015482879638671876, 0.015491071701049805, 0.015454208374023438, 0.015591296195983886, 0.015441120147705078, 0.01550163173675537, 0.015386367797851562, 0.015503968238830566, 0.015587295532226562, 0.015490912437438964, 0.015448575973510742, 
0.015420639991760254, 0.015471391677856446, 0.015445599555969238, 0.015499168395996094, 0.015644351959228517, 0.015504063606262207, 0.01552015972137451, 0.015524991989135742, 0.015589568138122558, 0.015417695999145508, 0.015499263763427735, 0.015443519592285156, 0.015560992240905762, 0.015610015869140625, 0.0158535680770874, 0.01605449676513672, 0.015950719833374025, 0.015977503776550293, 0.01604297637939453, 0.01603887939453125, 0.015906240463256834, 0.01598310375213623, 0.015946911811828612, 0.015370304107666016, 0.015723615646362304, 0.015850943565368654, 0.015723039627075195, 0.01577283191680908, 0.015813344001770018, 0.015687871932983398, 0.01577292823791504, 0.01603798484802246, 0.01605887985229492, 0.01594099235534668, 0.01582979202270508, 0.015840928077697755, 0.01579843235015869, 0.015837183952331545, 0.015925248146057128, 0.015896575927734375, 0.015820927619934082, 0.015822239875793457, 0.015845855712890624, 0.01578172779083252, 0.015691519737243653, 0.015630880355834962, 0.015549311637878418, 0.015483296394348145, 0.015735551834106444, 0.01673721694946289, 0.016032672882080077, 0.016594944000244142, 0.016181535720825195, 0.01597366428375244, 0.015831135749816896, 0.015966527938842772, 0.01583516788482666, 0.015814208030700685, 0.015739487648010253, 0.015836288452148437, 0.01565359973907471, 0.015734368324279786, 0.016028064727783203, 0.015715807914733888, 0.01572009563446045, 0.015551360130310058, 0.015620415687561034, 0.015406784057617187, 0.015383935928344727, 0.015471232414245606, 0.015478976249694824, 0.015447648048400878, 0.015462431907653808, 0.015444095611572266, 0.015421504020690919, 0.015515071868896484, 0.015446271896362305, 0.015498720169067382, 0.015367039680480957, 0.015488127708435058, 0.015637472152709962, 0.015439743995666505, 0.01539891242980957, 0.01537548828125, 0.015473664283752442, 0.015426976203918457]",tokens/s,62.25650166826686,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 3 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 805, in _apply param_applied = fn(param) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.55 GiB is free. Process 221335 has 13.19 GiB memory in use. Of the allocated memory 13.09 GiB is allocated by PyTorch, and 880.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,926.314496,622.723072,0.0,236.978176,214.704128,s,1,8.2764208984375,8.2764208984375,0.0,8.2764208984375,8.2764208984375,8.2764208984375,8.2764208984375,[8.2764208984375],,kWh,1.9756790316705517e-05,2.171942556059427e-06,6.55278302000295e-06,2.8481515892767894e-05,,MB,1301.594112,756.9408,0.0,341.835776,300.077568,s,10,0.2644581413269043,0.026445814132690433,0.00022780219563116175,0.026411551475524903,0.02674442825317383,0.026853062438964844,0.026939969787597655,"[0.026425695419311522, 0.026478303909301757, 0.026302976608276366, 0.026098783493041993, 0.02633907127380371, 0.02639740753173828, 0.026720287322998047, 0.02696169662475586, 0.02643404769897461, 0.02629987144470215]",tokens/s,9680.170885098638,kWh,7.845071920687655e-07,8.6487847512809e-08,5.175160529758188e-07,1.3885110925573936e-06,tokens/kWh,184370151.14405242,MB,1334.403072,798.88384,0.0,383.778816,300.080128,s,10,15.148976684570314,1.514897668457031,0.009352996026026794,1.5106460571289064,1.5295677734375002,1.531922802734375,1.533806826171875,"[1.52904443359375, 1.5050645751953124, 1.5075283203125, 1.511752685546875, 1.5195665283203126, 1.508105224609375, 1.53427783203125, 1.5095394287109376, 1.5084923095703124, 1.5156053466796875]",tokens/s,41.58696743138261,kWh,4.623332609167982e-05,5.099193655719929e-06,2.0634834202223306e-05,7.196735394962307e-05,tokens/kWh,875396.9201660494,,s,630,15.143198366165144,0.02403682280343676,0.00044826034417108933,0.023963360786437987,0.024504905128479006,0.024776206016540528,0.02566798421859742,"[0.024414207458496092, 0.023951520919799806, 0.02391708755493164, 0.023879680633544922, 0.024051616668701172, 0.024047712326049804, 0.023939199447631836, 0.024405887603759766, 0.024381439208984376, 0.024448959350585938, 0.024516672134399415, 0.02460380744934082, 0.02442313575744629, 0.024014591217041015, 0.023803775787353515, 0.024135488510131836, 0.024124191284179686, 0.02428099250793457, 0.02416182327270508, 0.024279455184936523, 0.02466796875, 0.024481216430664064, 0.02460915184020996, 0.024143775939941405, 0.024082208633422853, 0.02412015914916992, 0.02744918441772461, 0.024213567733764648, 0.024533088684082032, 0.02427606391906738, 0.024392864227294923, 0.024454656600952147, 0.024789152145385743, 0.024420352935791017, 0.024227455139160158, 0.024262815475463866, 0.02408803176879883, 0.024214271545410157, 0.024062047958374022, 0.024009920120239257, 0.02404118347167969, 0.023982463836669923, 0.023843456268310546, 0.024037376403808593, 0.02732636833190918, 0.024480031967163085, 0.024289152145385743, 0.024317279815673828, 0.023832672119140624, 0.0239169921875, 0.023963712692260743, 0.024299648284912108, 0.023922143936157228, 0.023759296417236328, 0.023963008880615234, 0.023775583267211915, 0.023609439849853517, 
0.02363632011413574, 0.023578527450561524, 0.023711263656616213, 0.02415555191040039, 0.024433216094970702, 0.02430143928527832, 0.02388652801513672, 0.023858367919921877, 0.02371455955505371, 0.023643583297729493, 0.02377743911743164, 0.024138208389282226, 0.02379292869567871, 0.023703872680664064, 0.02381222343444824, 0.023780960083007813, 0.023884479522705077, 0.024140895843505858, 0.02426767921447754, 0.024320287704467772, 0.02403299140930176, 0.023866912841796876, 0.02384236717224121, 0.023942367553710937, 0.02381932830810547, 0.02395199966430664, 0.023684959411621093, 0.024058176040649415, 0.023858591079711913, 0.023968191146850587, 0.023796960830688475, 0.023720735549926757, 0.023880928039550782, 0.023734752655029296, 0.023884416580200196, 0.023809728622436525, 0.023912576675415038, 0.02368396759033203, 0.024021503448486328, 0.02394163131713867, 0.02413542366027832, 0.024250495910644532, 0.025016511917114258, 0.0242476806640625, 0.024162368774414064, 0.02477516746520996, 0.024145503997802735, 0.024449440002441408, 0.02407423973083496, 0.0237425594329834, 0.023736480712890626, 0.02390809631347656, 0.023723167419433595, 0.023673696517944334, 0.024191232681274415, 0.023641855239868163, 0.023758176803588868, 0.023681695938110352, 0.02351638412475586, 0.023560159683227538, 0.023544672012329102, 0.023668863296508788, 0.023513216018676758, 0.02348863983154297, 0.023434848785400392, 0.023551488876342775, 0.02363852882385254, 0.023531551361083983, 0.02365222358703613, 0.02390800094604492, 0.024208192825317384, 0.023993759155273436, 0.024154560089111328, 0.023818527221679688, 0.023875455856323242, 0.02382179260253906, 0.02369340705871582, 0.023652063369750977, 0.023601728439331053, 0.02371174430847168, 0.023628032684326172, 0.023625471115112304, 0.023568384170532225, 0.024356735229492187, 0.024121664047241212, 0.02427065658569336, 0.024401920318603516, 0.02403638458251953, 0.024066240310668945, 0.02389072036743164, 0.023500831604003906, 0.023613439559936524, 0.023756256103515627, 0.023828992843627928, 0.024070367813110352, 0.02425187110900879, 0.024057535171508788, 0.02418172836303711, 0.02408121681213379, 0.02394223976135254, 0.023678848266601563, 0.02379148864746094, 0.023567424774169923, 0.023706560134887696, 0.02352332878112793, 0.02367487907409668, 0.02411315155029297, 0.024380767822265625, 0.024799903869628905, 0.02434252738952637, 0.02444697570800781, 0.024244224548339844, 0.02421379280090332, 0.02391324806213379, 0.023970752716064452, 0.023832351684570312, 0.023813472747802735, 0.023870336532592774, 0.023998527526855468, 0.023902143478393555, 0.023795743942260743, 0.024287200927734374, 0.02371721649169922, 0.023613279342651367, 0.023935295104980468, 0.023564800262451172, 0.023598304748535158, 0.0235980167388916, 0.02362313652038574, 0.02358105659484863, 0.024190975189208985, 0.023996416091918944, 0.023295328140258788, 0.023620256423950194, 0.023875583648681642, 0.024082048416137695, 0.024098432540893555, 0.024017663955688478, 0.024020095825195313, 0.023872127532958986, 0.0238287353515625, 0.02368921661376953, 0.02366464042663574, 0.0237238712310791, 0.023570592880249024, 0.023824544906616212, 0.023657888412475587, 0.0245150089263916, 0.023803903579711915, 0.02397177505493164, 0.024215295791625978, 0.02453945541381836, 0.024214719772338866, 0.024154943466186525, 0.02371824073791504, 0.02356537628173828, 0.023779327392578126, 0.02366089630126953, 0.023740671157836915, 0.024293216705322265, 0.023905984878540038, 0.02383011245727539, 0.024406911849975586, 0.023987808227539063, 
0.023992959976196288, 0.023692447662353514, 0.02358540725708008, 0.023595199584960938, 0.02381599998474121, 0.023717151641845704, 0.02367535972595215, 0.024200767517089845, 0.024183168411254882, 0.024590656280517577, 0.02441632080078125, 0.024204767227172852, 0.025497087478637694, 0.024265695571899414, 0.02406118392944336, 0.024779008865356444, 0.024057855606079103, 0.023769664764404296, 0.023992256164550783, 0.02384486389160156, 0.02351923179626465, 0.023890144348144533, 0.024118303298950195, 0.02431257629394531, 0.023891040802001953, 0.024009759902954102, 0.024233760833740233, 0.02411712074279785, 0.024026655197143556, 0.024042144775390625, 0.023992256164550783, 0.02350262451171875, 0.023670495986938475, 0.023525888442993165, 0.023513343811035155, 0.023625408172607422, 0.023916608810424806, 0.023957408905029298, 0.024422496795654298, 0.02689459228515625, 0.025014015197753908, 0.02418617630004883, 0.023837472915649412, 0.023607423782348633, 0.02354911994934082, 0.023618175506591798, 0.02353968048095703, 0.02387353515625, 0.023707040786743162, 0.02360585594177246, 0.02369126319885254, 0.024066047668457033, 0.02413363265991211, 0.02426860809326172, 0.024401567459106446, 0.02437174415588379, 0.02410905647277832, 0.02381417655944824, 0.02368876838684082, 0.023548320770263673, 0.023608671188354493, 0.023657119750976563, 0.023657663345336914, 0.02367161560058594, 0.0240732479095459, 0.024226783752441406, 0.024193023681640623, 0.02453913688659668, 0.02498124885559082, 0.02516102409362793, 0.024177759170532227, 0.024182207107543947, 0.024271263122558593, 0.024733568191528322, 0.024203424453735353, 0.02436911964416504, 0.024485504150390625, 0.024672000885009766, 0.024494335174560546, 0.024402175903320313, 0.024585439682006837, 0.02440060806274414, 0.02427510452270508, 0.024116256713867187, 0.024185855865478514, 0.024010784149169923, 0.024002815246582033, 0.023818304061889648, 0.02406879997253418, 0.024083423614501952, 0.024070207595825194, 0.023861120223999024, 0.023705663681030272, 0.024344575881958007, 0.023830623626708985, 0.023895488739013673, 0.023773759841918946, 0.023672063827514647, 0.02368617630004883, 0.023673856735229492, 0.023699647903442384, 0.02356790351867676, 0.02352227210998535, 0.023495040893554687, 0.02377017593383789, 0.02387004852294922, 0.024174047470092774, 0.02429567909240723, 0.024209407806396483, 0.02393731117248535, 0.023752704620361328, 0.0237260799407959, 0.02403715133666992, 0.02378169631958008, 0.02365216064453125, 0.023799903869628908, 0.023607295989990236, 0.023629823684692384, 0.02379545593261719, 0.024027679443359377, 0.02428463935852051, 0.024246528625488283, 0.024148000717163085, 0.023906272888183595, 0.023599103927612306, 0.02351513671875, 0.023826431274414063, 0.026212352752685547, 0.023748607635498048, 0.023831775665283203, 0.023745311737060546, 0.024141408920288085, 0.02377292823791504, 0.023658912658691408, 0.023672767639160156, 0.023619903564453124, 0.023842815399169923, 0.02367807960510254, 0.02382111930847168, 0.02359440040588379, 0.023742687225341796, 0.023749055862426757, 0.024027135848999022, 0.02416265678405762, 0.024278528213500978, 0.024047456741333007, 0.02388649559020996, 0.02408131217956543, 0.024118080139160156, 0.024285472869873048, 0.024759967803955077, 0.02388582420349121, 0.024733152389526367, 0.023624191284179686, 0.023625152587890625, 0.025025119781494142, 0.023811424255371094, 0.023266016006469728, 0.023549951553344727, 0.02368118476867676, 0.023648096084594727, 0.023848960876464844, 0.023854944229125978, 0.02378767967224121, 
0.023782400131225585, 0.023645183563232423, 0.023809120178222655, 0.02375948715209961, 0.023707775115966796, 0.023730688095092774, 0.02422969627380371, 0.024474784851074217, 0.024410816192626954, 0.024196863174438477, 0.024002815246582033, 0.024365087509155274, 0.02437731170654297, 0.024594047546386718, 0.024504703521728517, 0.02438915252685547, 0.024108671188354493, 0.02401263999938965, 0.023718912124633788, 0.023793664932250977, 0.02461929512023926, 0.024299232482910157, 0.024223136901855468, 0.02437401580810547, 0.02441548728942871, 0.024379871368408204, 0.023901439666748046, 0.023670976638793945, 0.023626432418823243, 0.023635871887207033, 0.02371353530883789, 0.023649951934814454, 0.024848512649536133, 0.025508672714233398, 0.02414566421508789, 0.02402115249633789, 0.024781824111938477, 0.024929119110107423, 0.024856000900268554, 0.024942815780639647, 0.025229536056518554, 0.024774784088134764, 0.024777055740356446, 0.024653087615966796, 0.024309343338012695, 0.024582143783569335, 0.024535839080810546, 0.025758047103881836, 0.025733055114746092, 0.024594047546386718, 0.02605695915222168, 0.025273311614990236, 0.02499468803405762, 0.024727712631225585, 0.024871904373168944, 0.024918912887573242, 0.025060863494873048, 0.02463795280456543, 0.02430326461791992, 0.024506719589233398, 0.024565727233886718, 0.024273248672485353, 0.02411712074279785, 0.02374390411376953, 0.023658912658691408, 0.0237260799407959, 0.02382748794555664, 0.02520982360839844, 0.024614912033081054, 0.0240064640045166, 0.023636159896850587, 0.02433024024963379, 0.023559392929077147, 0.023976736068725586, 0.023609567642211914, 0.023510784149169923, 0.02340662384033203, 0.023992256164550783, 0.024329343795776368, 0.02534012794494629, 0.024014976501464842, 0.023962272644042968, 0.02377689552307129, 0.023662912368774415, 0.023547903060913086, 0.023545312881469726, 0.023585151672363282, 0.02377743911743164, 0.02409587287902832, 0.02368396759033203, 0.023698463439941406, 0.02356118392944336, 0.024170112609863282, 0.024297664642333985, 0.02424985694885254, 0.024086591720581054, 0.023967744827270508, 0.023775487899780273, 0.02377462387084961, 0.023693727493286132, 0.02356252861022949, 0.023564735412597657, 0.023463775634765625, 0.02350476837158203, 0.023765087127685547, 0.02376812744140625, 0.02395599937438965, 0.023783872604370117, 0.02391244888305664, 0.023662591934204103, 0.023817760467529297, 0.023740287780761718, 0.023574432373046874, 0.023685760498046875, 0.023640384674072267, 0.024075679779052735, 0.024122783660888672, 0.024239040374755858, 0.02418694305419922, 0.0240382080078125, 0.023926336288452147, 0.023913856506347655, 0.024123903274536132, 0.02391472053527832, 0.023738271713256837, 0.023732576370239258, 0.023873184204101564, 0.02358095932006836, 0.024102720260620117, 0.023623680114746092, 0.023832544326782227, 0.023682111740112304, 0.023557151794433594, 0.023604671478271486, 0.02353219223022461, 0.023447391510009765, 0.023559776306152344, 0.02349411201477051, 0.023681983947753907, 0.02368284797668457, 0.023666912078857422, 0.023791872024536132, 0.023641504287719727, 0.02358233642578125, 0.023585695266723633, 0.02381599998474121, 0.0240864315032959, 0.024147743225097655, 0.02402742385864258, 0.024047359466552735, 0.024244831085205077, 0.02404319953918457, 0.023908159255981446, 0.023957216262817382, 0.02395097541809082, 0.02446598434448242, 0.023875871658325196, 0.023840768814086914, 0.023764991760253908, 0.023750656127929686, 0.023724191665649413, 0.024204639434814452, 0.024192991256713866, 0.024201759338378905, 
0.024586431503295897, 0.024703840255737304, 0.024333280563354494, 0.024174911499023437, 0.02407801628112793, 0.02479539108276367, 0.023913984298706056, 0.02391619110107422, 0.023978527069091798, 0.023779455184936522, 0.024159711837768556, 0.024563936233520507, 0.024060224533081053, 0.02402035140991211, 0.02399260711669922, 0.024009023666381836, 0.023863103866577147, 0.02383888053894043, 0.023991935729980467, 0.023811552047729494, 0.024011743545532226, 0.024032672882080077, 0.02410755157470703, 0.024350528717041017, 0.02449635124206543, 0.024211040496826174, 0.024096736907958983, 0.024098400115966798, 0.02411795234680176, 0.024190591812133788, 0.023893760681152343, 0.02403196716308594, 0.023697471618652342, 0.023824352264404297, 0.02386092758178711, 0.02386796760559082, 0.02395439910888672, 0.024005184173583983, 0.024227872848510742, 0.023949504852294922, 0.024006080627441407, 0.024003135681152345, 0.02388956832885742, 0.02377350425720215, 0.02384899139404297, 0.024475391387939454, 0.023759103775024413, 0.02395955276489258, 0.023854751586914063, 0.023798112869262696, 0.023730527877807616, 0.023972864151000976, 0.024204256057739258, 0.024290271759033203, 0.02419171142578125, 0.02432156753540039, 0.024234464645385742, 0.024025247573852538, 0.02393712043762207, 0.02397350311279297, 0.024008832931518554, 0.024071487426757812, 0.02406096076965332, 0.023952768325805663, 0.023928415298461913, 0.024172672271728514, 0.02421331214904785, 0.024668928146362304, 0.024371456146240235, 0.024370943069458008, 0.024128736495971678, 0.02414860725402832, 0.024057632446289064, 0.023922752380371094, 0.023971904754638673, 0.023776927947998048, 0.02365523147583008, 0.023899936676025392, 0.0240762882232666, 0.024345727920532228, 0.024140672683715822]",tokens/s,41.60283612262686,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in 
run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,7396.02432,7808.679936,0.0,7430.209536,7414.23104,s,1,11.616208984375,11.616208984375,0.0,11.616208984375,11.616208984375,11.616208984375,11.616208984375,[11.616208984375],,kWh,0.00013592565134166532,1.4986332407818976e-05,4.339864583000064e-05,0.00019431062957948493,,MB,1713.31584,8584.626176,0.0,8176.795648,8052.041728,s,10,8.148107971191406,0.8148107971191407,0.0047949582500165676,0.8145799255371093,0.8220338745117187,0.8220945739746094,0.8221431335449219,"[0.8057255249023437, 0.8153632202148438, 0.8114178466796875, 0.8112008056640625, 0.8127526245117187, 0.8146031494140625, 0.8221552734375, 0.8145567016601563, 0.8220203857421875, 0.8183124389648437]",tokens/s,314.18336735978227,kWh,2.38148693461525e-05,2.6263624427553085e-06,1.577599552676885e-05,4.221722731567666e-05,tokens/kWh,6063875.253715175,MB,1718.489088,8731.426816,0.0,8323.596288,8263.496192,s,10,37.15159716796875,3.715159716796875,0.007612190100120031,3.7147086181640625,3.7254406738281247,3.7262239990234374,3.7268506591796875,"[3.707013671875, 3.70830029296875, 3.706685791015625, 3.72264990234375, 3.706224365234375, 3.719031982421875, 3.714274169921875, 3.7252666015625, 3.72700732421875, 3.71514306640625]",tokens/s,16.957548208537624,kWh,0.00010844988760218326,1.196271960611995e-05,7.21384914714313e-05,0.0001925510986797345,tokens/kWh,327185.8765386031,,s,630,37.148054981231695,0.058965166636875704,0.0006265357516154737,0.05891768074035644,0.05953139877319336,0.05977858848571777,0.0611574122619629,"[0.06155952072143555, 0.05867504119873047, 0.05842534255981445, 0.05816038513183594, 0.058156993865966795, 0.058202945709228515, 0.05829609680175781, 0.058293792724609376, 0.058321182250976565, 0.05824143981933594, 0.05830556869506836, 0.05821539306640625, 0.05851094436645508, 0.05906428909301758, 0.05841459274291992, 0.05844220733642578, 0.0591341438293457, 0.05887823867797851, 0.058810367584228515, 0.05874620819091797, 0.058552993774414065, 0.05821388626098633, 0.05816400146484375, 0.058182975769042966, 0.058550464630126954, 0.05833135986328125, 0.05854003143310547, 0.058649600982666014, 0.05864873504638672, 0.058506080627441406, 0.05862604904174805, 0.05876303863525391, 0.058894336700439455, 0.059088191986083984, 0.05876588821411133, 0.058955745697021486, 
0.05917103958129883, 0.058931007385253906, 0.058799617767333986, 0.058415969848632815, 0.058423297882080075, 0.058531841278076174, 0.05868339157104492, 0.05872972869873047, 0.05864934539794922, 0.0585994873046875, 0.05894771194458008, 0.05948806381225586, 0.0595513916015625, 0.05966883087158203, 0.05993267059326172, 0.05944934463500977, 0.05970345687866211, 0.05951628875732422, 0.059555679321289065, 0.059445728302001954, 0.05924265670776367, 0.05924863815307617, 0.059393726348876956, 0.05915475082397461, 0.05891228866577149, 0.05901974487304688, 0.059241855621337894, 0.06065334320068359, 0.059480224609375, 0.057825344085693356, 0.05785465621948242, 0.058525184631347656, 0.05789952087402344, 0.05812223815917969, 0.0581646728515625, 0.05834195327758789, 0.058175487518310545, 0.05834332656860351, 0.05842339324951172, 0.05833456039428711, 0.05839734268188477, 0.058218494415283206, 0.058322719573974606, 0.059764896392822266, 0.0593317756652832, 0.05877004623413086, 0.05840544128417969, 0.05844755172729492, 0.05837107086181641, 0.05831987380981445, 0.05828188705444336, 0.05842281723022461, 0.05868339157104492, 0.05853241729736328, 0.05862732696533203, 0.0585893440246582, 0.05847251129150391, 0.058659358978271486, 0.05886566543579102, 0.059184768676757815, 0.05895151901245117, 0.05925328063964844, 0.059104961395263673, 0.05880863952636719, 0.05852959823608399, 0.058856800079345704, 0.058710880279541015, 0.05882463836669922, 0.05909100723266601, 0.0587894401550293, 0.058863903045654295, 0.05890860748291016, 0.059111873626708986, 0.05914396667480469, 0.059137855529785156, 0.05913824081420899, 0.060082176208496096, 0.059323486328125, 0.059208606719970705, 0.05908889770507812, 0.05901926422119141, 0.05927923202514648, 0.05905420684814453, 0.05915852737426758, 0.059486209869384764, 0.05936947250366211, 0.05912588882446289, 0.059232128143310546, 0.06117279815673828, 0.05930192184448242, 0.06105887985229492, 0.058622142791748044, 0.05799468612670899, 0.057872352600097654, 0.057696159362792966, 0.05780080032348633, 0.05805551910400391, 0.05788441467285156, 0.05787065505981445, 0.057974464416503904, 0.058251102447509764, 0.05823430252075195, 0.05854003143310547, 0.058554496765136715, 0.058361953735351565, 0.05870217514038086, 0.058974369049072266, 0.059225601196289064, 0.0589634895324707, 0.0585797119140625, 0.05841123199462891, 0.05850521469116211, 0.058482688903808595, 0.058552127838134765, 0.05846428680419922, 0.05842736053466797, 0.058590721130371094, 0.05858972930908203, 0.05860367965698242, 0.05893116760253906, 0.05871414566040039, 0.05902473449707031, 0.05915619277954102, 0.05919334411621094, 0.05907334518432617, 0.059072544097900394, 0.05904780960083008, 0.05874502563476563, 0.05917004776000977, 0.058428062438964846, 0.059036895751953124, 0.05901609420776367, 0.0588963851928711, 0.05871567916870117, 0.05911190414428711, 0.05909503936767578, 0.059049983978271485, 0.05898649597167969, 0.0591987190246582, 0.0594310417175293, 0.05919964981079102, 0.05942729568481445, 0.05942489624023437, 0.05915225601196289, 0.05937177658081055, 0.059328193664550784, 0.05929587173461914, 0.05936633682250977, 0.0592803840637207, 0.05909503936767578, 0.05956198501586914, 0.059551742553710936, 0.059364673614501956, 0.060958526611328126, 0.058916160583496094, 0.05817638397216797, 0.05812428665161133, 0.05804431915283203, 0.05820630264282227, 0.05828403091430664, 0.0583592643737793, 0.05829891204833984, 0.05842124938964844, 0.05846835327148438, 0.05854534530639648, 0.05888288116455078, 0.05847244644165039, 
0.058578495025634766, 0.05874534225463867, 0.05933356857299805, 0.05934796905517578, 0.06441340637207031, 0.058366302490234376, 0.058775489807128906, 0.05864588928222656, 0.058783775329589845, 0.05835222244262695, 0.05841715240478516, 0.058382431030273435, 0.05854304122924805, 0.058673694610595704, 0.05901737594604492, 0.058852863311767575, 0.058970878601074216, 0.059189281463623046, 0.06028006362915039, 0.06014345550537109, 0.05935811233520508, 0.059015167236328124, 0.058916160583496094, 0.05884403228759766, 0.05886137771606445, 0.058902015686035154, 0.058923519134521485, 0.05922304153442383, 0.05905311965942383, 0.059061248779296874, 0.059258975982666016, 0.059345504760742185, 0.05982028961181641, 0.059601119995117184, 0.05956697463989258, 0.05947894287109375, 0.059625473022460934, 0.059381153106689455, 0.05888470458984375, 0.05915852737426758, 0.05959804916381836, 0.05927222442626953, 0.0593098258972168, 0.0591847038269043, 0.05935968017578125, 0.05936268615722656, 0.05915497589111328, 0.05937155151367188, 0.05940966415405274, 0.06278979110717774, 0.058775390625, 0.05820172882080078, 0.05806908798217773, 0.0582064323425293, 0.05831180953979492, 0.058376640319824216, 0.05856512069702149, 0.05990134429931641, 0.0583512954711914, 0.058853790283203124, 0.05920719909667969, 0.05811692810058594, 0.05973622512817383, 0.058156959533691405, 0.058912864685058595, 0.05918243026733398, 0.059128353118896484, 0.05884121704101562, 0.058449920654296876, 0.05839052963256836, 0.05809971237182617, 0.058242240905761716, 0.058135265350341796, 0.05816511917114258, 0.05832931137084961, 0.05834511947631836, 0.05838848114013672, 0.058218494415283206, 0.058300769805908204, 0.05847836685180664, 0.05853411102294922, 0.05921564865112305, 0.058953216552734375, 0.058827327728271483, 0.058544288635253905, 0.05852127838134766, 0.05891449737548828, 0.058582656860351565, 0.0585164794921875, 0.05841100692749023, 0.058482688903808595, 0.058791393280029296, 0.05850284957885742, 0.05887062454223633, 0.059036895751953124, 0.05890127944946289, 0.05867283248901367, 0.05895110321044922, 0.05938470458984375, 0.059237857818603516, 0.0591693115234375, 0.059090240478515625, 0.05936198425292969, 0.05942617416381836, 0.059094718933105465, 0.05923116683959961, 0.05922921752929688, 0.05924249649047852, 0.05928879928588867, 0.059254528045654294, 0.05917900848388672, 0.05922611236572266, 0.06081315231323242, 0.058845279693603515, 0.0583930549621582, 0.05824512100219727, 0.05812211227416992, 0.05831840133666992, 0.05873100662231445, 0.05830047988891601, 0.0584510383605957, 0.05843868637084961, 0.058433406829833986, 0.05864652633666992, 0.05838166427612305, 0.0585447998046875, 0.05848643112182617, 0.05866019058227539, 0.05891897583007812, 0.05880928039550781, 0.05886361694335938, 0.059141887664794925, 0.058908927917480466, 0.05853148651123047, 0.05876361465454102, 0.05870620727539062, 0.05859708786010742, 0.05850316619873047, 0.05856665420532227, 0.061023841857910155, 0.05878806304931641, 0.058601665496826175, 0.058670368194580075, 0.05914303970336914, 0.05934080123901367, 0.059442142486572265, 0.060010398864746094, 0.059560928344726566, 0.05962051010131836, 0.05910025787353516, 0.058909950256347654, 0.05908089447021484, 0.05898796844482422, 0.058581886291503904, 0.059154430389404294, 0.058785888671875, 0.05880531311035156, 0.059206497192382815, 0.059189247131347655, 0.059140094757080076, 0.059858943939208986, 0.060200767517089845, 0.059474113464355466, 0.059514591217041016, 0.05916672134399414, 0.05948963165283203, 0.05960287857055664, 
0.05929632186889648, 0.05924070358276367, 0.05914656066894531, 0.05934822463989258, 0.059479808807373045, 0.059233150482177734, 0.059086654663085936, 0.0593031997680664, 0.0615338249206543, 0.059033153533935546, 0.05836611175537109, 0.05816384124755859, 0.05827151870727539, 0.058477920532226564, 0.058380416870117184, 0.058315521240234376, 0.05854313659667969, 0.058411998748779295, 0.0584252815246582, 0.058460224151611326, 0.05854003143310547, 0.058662750244140624, 0.05869561767578125, 0.05872867202758789, 0.06111974334716797, 0.05922275161743164, 0.05884112167358398, 0.05951897430419922, 0.058265598297119144, 0.05832400131225586, 0.05837641525268555, 0.06045929718017578, 0.058923553466796875, 0.05846214294433594, 0.05923964691162109, 0.05889459228515625, 0.05879657745361328, 0.058711360931396485, 0.058585727691650394, 0.059168830871582034, 0.059138046264648435, 0.05899059295654297, 0.059187137603759765, 0.05924665451049805, 0.05937958526611328, 0.05906854248046875, 0.05909881591796875, 0.05894294357299805, 0.05873715209960938, 0.05887961578369141, 0.05886816024780273, 0.05893545532226562, 0.058982433319091795, 0.05915558242797852, 0.05905097579956055, 0.059133918762207034, 0.05932147216796875, 0.05939007949829102, 0.059253631591796876, 0.05886111831665039, 0.05902380752563476, 0.05880617523193359, 0.058859424591064455, 0.05891638565063476, 0.05877203369140625, 0.05888819122314453, 0.05884108734130859, 0.058922721862792966, 0.0590269775390625, 0.05924236679077149, 0.059103744506835934, 0.06097020721435547, 0.05860214233398438, 0.058246337890625, 0.05826950454711914, 0.05827276611328125, 0.05852150344848633, 0.05854601669311523, 0.05842559814453125, 0.05843555068969727, 0.05841084671020508, 0.05868563079833984, 0.05878374481201172, 0.05855641555786133, 0.058687393188476565, 0.058687583923339844, 0.05895372772216797, 0.05952899169921875, 0.05929391860961914, 0.05909503936767578, 0.05888143920898437, 0.05864486312866211, 0.058719905853271484, 0.058654815673828124, 0.05867497634887695, 0.05879167938232422, 0.0588419189453125, 0.05872243118286133, 0.05879769515991211, 0.0589725456237793, 0.05883881759643555, 0.05916422271728516, 0.05909523010253906, 0.059349441528320314, 0.05940636825561523, 0.05969465637207031, 0.05945180892944336, 0.06079286575317383, 0.05905408096313477, 0.059038719177246096, 0.058993663787841794, 0.059224063873291016, 0.05899059295654297, 0.059082752227783204, 0.059117568969726565, 0.059284736633300784, 0.05917567825317383, 0.05924172973632812, 0.059286270141601566, 0.05964556884765625, 0.05948387145996094, 0.0597429428100586, 0.06101417541503906, 0.059641441345214846, 0.0594312973022461, 0.059393856048583986, 0.05938969421386719, 0.05925628662109375, 0.059466529846191406, 0.05953692626953125, 0.059473983764648436, 0.059307998657226565, 0.0595307846069336, 0.0595874252319336, 0.06163027191162109, 0.05904150390625, 0.05828492736816406, 0.05820182418823242, 0.05837564849853515, 0.05839689636230469, 0.058415233612060545, 0.05839632034301758, 0.058294815063476564, 0.058659103393554686, 0.05864448165893555, 0.05849257659912109, 0.05848099136352539, 0.0585780143737793, 0.05862617492675781, 0.059210529327392576, 0.0595283203125, 0.059581310272216796, 0.05913600158691406, 0.05858508682250976, 0.05878521728515625, 0.05877814483642578, 0.05881449508666992, 0.058714111328125, 0.058729759216308595, 0.058646366119384764, 0.05902012634277344, 0.058834976196289065, 0.05893939208984375, 0.05880831909179687, 0.058793983459472655, 0.059264671325683596, 0.05997321701049805, 
0.059357505798339844, 0.059522560119628906, 0.059585472106933594, 0.059057472229003906, 0.05913792037963867, 0.05922694396972656, 0.06008553695678711, 0.05900931167602539, 0.058937793731689454, 0.05936291122436523, 0.05947843170166016, 0.05976063919067383, 0.05942195129394531, 0.059398910522460935, 0.059434078216552735, 0.05926937484741211, 0.05965420913696289, 0.0598185920715332, 0.05958860778808594, 0.06053443145751953, 0.059662303924560546, 0.05959100723266601, 0.05938998413085937, 0.05929388809204102, 0.05929555130004883, 0.05931622314453125, 0.05920953750610351, 0.059211551666259764, 0.059514816284179685, 0.059789791107177734, 0.06131824111938477, 0.059089759826660156, 0.05840825653076172, 0.05809836959838867, 0.057864192962646485, 0.05793571090698242, 0.05817993545532227, 0.05835260772705078, 0.05852451324462891, 0.058519615173339846, 0.058560192108154295, 0.05868803024291992, 0.05860086441040039, 0.058681312561035155, 0.05842319869995117, 0.05902975845336914, 0.059226303100585936, 0.05947910308837891, 0.05885971069335937, 0.05865500640869141, 0.05839510345458984, 0.058243038177490235, 0.058744609832763674, 0.058582527160644535, 0.05859814453125, 0.05870182418823242, 0.058904064178466796, 0.05861996841430664, 0.058671550750732424, 0.05858246231079101, 0.058624576568603516, 0.0590909423828125, 0.05885721588134766, 0.05910124969482422, 0.059375808715820315, 0.05897830581665039, 0.05899059295654297, 0.05883903884887695, 0.05895372772216797, 0.05882463836669922, 0.05910739135742187, 0.059108768463134766, 0.05910905456542969, 0.05932271957397461, 0.059011646270751957, 0.058931137084960936, 0.05908832168579101, 0.05925104141235352, 0.059308319091796874, 0.059809185028076174, 0.0594950065612793, 0.0597437744140625, 0.0594967041015625, 0.05938943862915039, 0.059377822875976566, 0.05933932876586914, 0.05953740692138672, 0.05930704116821289, 0.059146976470947264, 0.05948031997680664, 0.05926707077026367, 0.05953945541381836, 0.05947391891479492]",tokens/s,16.959165165398158,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4147.081216,4758.306816,0.0,4372.56192,4292.521984,s,1,10.4303134765625,10.4303134765625,0.0,10.4303134765625,10.4303134765625,10.4303134765625,10.4303134765625,[10.4303134765625],,kWh,8.556503264168971e-05,9.430852056552683e-06,2.722641066998932e-05,0.00012222229536823173,,MB,1416.237056,5045.61664,0.0,4628.414464,4562.7008,s,10,3.9022229614257813,0.39022229614257814,0.001694630852617215,0.3903907012939453,0.3919876220703125,0.39240007629394535,0.3927300396728516,"[0.3869678649902344, 0.3893675537109375, 0.38851165771484375, 0.38892413330078124, 0.39027044677734374, 0.3915806579589844, 0.3918959655761719, 0.3913811950683594, 0.3905109558105469, 0.39281253051757814]",tokens/s,656.0363221953457,kWh,1.1916911465667604e-05,1.3133233537120736e-06,7.913039663760002e-06,2.114327448313968e-05,tokens/kWh,12107869.110063465,MB,1442.070528,5045.61664,0.0,4628.414464,4562.70336,s,10,38.4860888671875,3.8486088867187505,0.011653275781762331,3.8463636474609375,3.8654144287109373,3.867638610839844,3.8694179565429687,"[3.841705322265625, 3.842807861328125, 3.83989892578125, 3.836185791015625, 3.84991943359375, 3.832818359375, 3.85083642578125, 3.86986279296875, 3.864920166015625, 3.8571337890625]",tokens/s,16.3695511428579,kWh,8.764843768682992e-05,9.66871983711143e-06,5.46024825708391e-05,0.00015191964009478044,tokens/kWh,414692.92555389955,,s,630,38.48315692138673,0.06108437606569321,0.0008951420687282461,0.06085835266113281,0.06147976913452148,0.06254073619842529,0.06547078186035157,"[0.06219388961791992, 0.061213920593261716, 0.06080182266235352, 0.060795967102050784, 0.060545696258544925, 0.06098972702026367, 0.06104883193969726, 0.06073667144775391, 0.06139580917358398, 0.06636479949951171, 0.060861057281494144, 0.06076361465454101, 0.06085686492919922, 0.061118465423583984, 0.06068569564819336, 0.060522750854492186, 0.060770687103271485, 0.06210464096069336, 0.06105593490600586, 0.06075187301635742, 0.06061056137084961, 0.06066368103027344, 0.06429494476318359, 0.06096086502075195, 0.06078668975830078, 0.06109798431396484, 
0.06051203155517578, 0.060512191772460935, 0.060776737213134764, 0.06072115325927734, 0.060560993194580075, 0.060552833557128906, 0.06048438262939453, 0.06093123245239258, 0.061181983947753905, 0.06060502243041992, 0.06091750335693359, 0.06090800094604492, 0.06089628982543945, 0.06094496154785156, 0.061030815124511716, 0.060827648162841794, 0.06075187301635742, 0.06062694549560547, 0.06058979034423828, 0.06042038345336914, 0.06093315124511719, 0.06098223876953125, 0.06062483215332031, 0.06068844985961914, 0.060945919036865234, 0.06078105545043945, 0.06073734283447266, 0.06045280075073242, 0.060827457427978515, 0.06089712142944336, 0.060899646759033206, 0.060687744140625, 0.060570049285888675, 0.06081158447265625, 0.06054851150512695, 0.060654335021972657, 0.06061590576171875, 0.06177382278442383, 0.060999614715576175, 0.0610544319152832, 0.06049033737182617, 0.06092172622680664, 0.06094655990600586, 0.060708446502685545, 0.060722591400146485, 0.06118297576904297, 0.06104608154296875, 0.06096345520019531, 0.06111238479614258, 0.06068576049804687, 0.06049635314941406, 0.06102569580078125, 0.06121542358398437, 0.061023841857910155, 0.06061673736572266, 0.06099353790283203, 0.06077644729614258, 0.06151152038574219, 0.06197068786621094, 0.060479774475097656, 0.06068835067749023, 0.0608746223449707, 0.06068598556518555, 0.06073715209960937, 0.06069747161865234, 0.06071209716796875, 0.060693313598632816, 0.060741600036621095, 0.060702785491943356, 0.0608554573059082, 0.060893791198730465, 0.060813568115234376, 0.06099296188354492, 0.060655487060546874, 0.06103542327880859, 0.06076313781738281, 0.060709663391113285, 0.061020160675048826, 0.060921855926513675, 0.06123276901245117, 0.06146732711791992, 0.061004798889160154, 0.06109049606323242, 0.061061119079589846, 0.06084812927246094, 0.061102081298828125, 0.06105702209472656, 0.06099308776855469, 0.061241790771484374, 0.0607127685546875, 0.06376432037353516, 0.06117820739746094, 0.06051225662231445, 0.060802974700927735, 0.06058329772949219, 0.060569759368896484, 0.06054969787597656, 0.06305356979370118, 0.06122623825073242, 0.06055014419555664, 0.06203596878051758, 0.06084198379516602, 0.06063622283935547, 0.06029385757446289, 0.0609917106628418, 0.060641281127929686, 0.06107328033447266, 0.06091584014892578, 0.06049587249755859, 0.06086812973022461, 0.060526336669921875, 0.0606231689453125, 0.060755359649658204, 0.0610431022644043, 0.06070742416381836, 0.0611328010559082, 0.06168547058105469, 0.06070915222167969, 0.06078464126586914, 0.06096691131591797, 0.060830879211425784, 0.06080803298950195, 0.0605296630859375, 0.06070316696166992, 0.061071937561035156, 0.06043648147583008, 0.06106521606445312, 0.06137651062011719, 0.060674049377441405, 0.060561153411865236, 0.060641536712646486, 0.060886943817138675, 0.06109193420410156, 0.06134483337402344, 0.060953536987304685, 0.06497004699707032, 0.06119289779663086, 0.06053187179565429, 0.060408096313476566, 0.06114976119995117, 0.06083993530273438, 0.060835201263427736, 0.060789344787597656, 0.06098313522338867, 0.06074079895019531, 0.06084812927246094, 0.060902400970458986, 0.061446144104003904, 0.06126729583740234, 0.06088566589355469, 0.06084934234619141, 0.06081324768066406, 0.06113983917236328, 0.060684383392333986, 0.06101375961303711, 0.06105718231201172, 0.061056865692138675, 0.06089334487915039, 0.06085126495361328, 0.06092044830322266, 0.06089344024658203, 0.06051027297973633, 0.060422080993652344, 0.06159769439697266, 0.06114281463623047, 0.06144179153442383, 0.06090185546875, 
0.06061670303344727, 0.06071862411499023, 0.06156521606445312, 0.060891326904296876, 0.060731391906738284, 0.060569217681884766, 0.06068454360961914, 0.06107926559448242, 0.060735904693603515, 0.060714622497558594, 0.06069900894165039, 0.06079897689819336, 0.060590080261230465, 0.06071091079711914, 0.06051820755004883, 0.06254390335083007, 0.0611223030090332, 0.061338016510009766, 0.06066329574584961, 0.060779006958007815, 0.06071091079711914, 0.06037094497680664, 0.06066175842285156, 0.06078464126586914, 0.06078822326660156, 0.06085068893432617, 0.06068966293334961, 0.06063779067993164, 0.060383392333984376, 0.06054912185668945, 0.06078464126586914, 0.06107686233520508, 0.06063539123535156, 0.060628673553466794, 0.06048531341552734, 0.0605821762084961, 0.06047510528564453, 0.06071795272827148, 0.0607720947265625, 0.06069491195678711, 0.060683425903320314, 0.06071516799926758, 0.06238483047485351, 0.060706817626953125, 0.06062080001831055, 0.06070393753051758, 0.06099641418457031, 0.06094345474243164, 0.061434398651123046, 0.060983680725097654, 0.06093743896484375, 0.0607977294921875, 0.06122086334228516, 0.06101606369018555, 0.060911617279052734, 0.06122409439086914, 0.06088739013671875, 0.060910079956054686, 0.06137855911254883, 0.062004001617431644, 0.06076825714111328, 0.060729248046875, 0.060593441009521486, 0.061072193145751956, 0.060979072570800784, 0.06095065689086914, 0.06068204879760742, 0.06136441421508789, 0.06226473617553711, 0.0610118408203125, 0.061190879821777344, 0.06075334548950195, 0.06106547164916992, 0.06114131164550781, 0.0610918083190918, 0.062536865234375, 0.061063262939453126, 0.06087145614624023, 0.06101196670532227, 0.06073344039916992, 0.06095811080932617, 0.06076403045654297, 0.0610629768371582, 0.06122723388671875, 0.06115603256225586, 0.061217952728271484, 0.06125980758666992, 0.0610948486328125, 0.06102521514892578, 0.06090028762817383, 0.06159769439697266, 0.06183731079101563, 0.06127206420898437, 0.06098329544067383, 0.060796928405761716, 0.06109183883666992, 0.060835006713867185, 0.06064112091064453, 0.060895263671875, 0.061518688201904294, 0.06086614227294922, 0.06129510498046875, 0.06104064178466797, 0.061183998107910156, 0.061259777069091796, 0.06074982452392578, 0.0607081298828125, 0.0611250228881836, 0.06102457427978516, 0.06072524642944336, 0.060818431854248046, 0.06102860641479492, 0.06070963287353515, 0.06084198379516602, 0.060903423309326174, 0.061208576202392576, 0.06077635192871094, 0.06113238525390625, 0.06084659194946289, 0.063833984375, 0.060886302947998044, 0.06065401458740234, 0.062013439178466793, 0.06038323211669922, 0.06073942565917969, 0.06098883056640625, 0.060891902923583985, 0.06072431945800781, 0.060986270904541014, 0.06060851287841797, 0.06044672012329102, 0.06108345413208008, 0.060602336883544924, 0.06079919815063477, 0.0605816650390625, 0.060749664306640624, 0.060539264678955075, 0.06105404663085937, 0.0605943374633789, 0.060668670654296875, 0.06040694427490234, 0.06047011184692383, 0.06070476913452148, 0.06076399993896484, 0.06121590423583984, 0.060757152557373045, 0.06057353591918945, 0.0607960319519043, 0.0611453742980957, 0.06073632049560547, 0.06090518569946289, 0.06085228729248047, 0.06064323043823242, 0.06067334365844727, 0.06066255950927734, 0.06067814254760742, 0.06038937759399414, 0.06074163055419922, 0.06064726257324219, 0.060784961700439455, 0.06063087844848633, 0.06095977783203125, 0.06055830383300781, 0.06073775863647461, 0.06076617431640625, 0.06071686553955078, 0.061628414154052735, 0.061558368682861325, 
0.061149600982666016, 0.060792320251464846, 0.06069475173950195, 0.060516223907470704, 0.060730945587158205, 0.06082236862182617, 0.06081254577636719, 0.06124006271362305, 0.060723201751708984, 0.06262169647216796, 0.060857856750488284, 0.06065558242797851, 0.06083433532714844, 0.06076969528198242, 0.06105155181884766, 0.06107100677490234, 0.060604705810546874, 0.062389984130859374, 0.061130943298339846, 0.06262515258789063, 0.06085833740234375, 0.06088508987426758, 0.060717632293701175, 0.060638656616210936, 0.0609202880859375, 0.06069462585449219, 0.06130444717407227, 0.06075020980834961, 0.060534080505371096, 0.06114287948608398, 0.06083164978027344, 0.06073644638061523, 0.06145024108886719, 0.0614205436706543, 0.06081228637695312, 0.06133542251586914, 0.06108992004394531, 0.061001728057861325, 0.06073705673217773, 0.060714881896972654, 0.060365310668945314, 0.061071456909179686, 0.06110822296142578, 0.060784225463867185, 0.060752288818359375, 0.0611220817565918, 0.06060393524169922, 0.06068016052246094, 0.060513248443603514, 0.06093619155883789, 0.06086556625366211, 0.06389763259887696, 0.06114390563964844, 0.060714881896972654, 0.06082579040527344, 0.060540798187255856, 0.06067420959472656, 0.060872703552246096, 0.06773078155517578, 0.06091856002807617, 0.060649345397949216, 0.06042745590209961, 0.06098723220825195, 0.06072339248657226, 0.060482177734375, 0.06073155212402344, 0.060861984252929685, 0.060547550201416014, 0.06098710250854492, 0.060629280090332034, 0.060739585876464844, 0.06096281433105469, 0.0607088623046875, 0.06096236801147461, 0.06082400131225586, 0.06503218841552734, 0.06080716705322266, 0.06078569412231445, 0.060644222259521485, 0.061214046478271486, 0.06211008071899414, 0.061044960021972655, 0.06074748611450195, 0.06073686218261719, 0.060910526275634765, 0.06339763259887696, 0.06293660736083985, 0.06105324935913086, 0.06106889724731445, 0.06097798538208008, 0.06172393417358398, 0.06055120086669922, 0.06127280044555664, 0.060620128631591795, 0.06156150436401367, 0.06105680084228516, 0.06469776153564454, 0.061748001098632814, 0.06085836791992188, 0.060409854888916016, 0.06175932693481445, 0.06091097640991211, 0.0653048324584961, 0.06125209426879883, 0.06174070358276367, 0.06103638458251953, 0.06063507080078125, 0.06132796859741211, 0.06090719985961914, 0.06105875015258789, 0.06128499221801758, 0.06351667022705078, 0.06114918518066406, 0.06100889587402344, 0.06069523239135742, 0.06082992172241211, 0.06137046432495117, 0.06064316940307617, 0.06080118560791015, 0.06550323486328125, 0.06074367904663086, 0.06094150543212891, 0.0606440315246582, 0.060904991149902346, 0.06115619277954101, 0.06132096099853516, 0.060862464904785155, 0.06118195343017578, 0.06081740951538086, 0.06149324798583984, 0.06088284683227539, 0.06134710311889648, 0.06118278503417969, 0.06116761779785156, 0.060849536895751954, 0.06075660705566406, 0.06539878082275391, 0.06093619155883789, 0.060962047576904294, 0.06096563339233398, 0.060930049896240235, 0.06101606369018555, 0.06089468765258789, 0.06329977416992187, 0.06317910385131836, 0.06308995056152343, 0.06511068725585938, 0.06109968185424805, 0.060725662231445314, 0.06079283142089844, 0.06060201644897461, 0.061185920715332034, 0.06076259231567383, 0.061209632873535154, 0.06095356750488281, 0.06099110412597656, 0.06068876647949219, 0.06074771118164062, 0.06108339309692383, 0.06096723175048828, 0.06048767852783203, 0.060499969482421874, 0.060788734436035156, 0.06550732421875, 0.060892288208007815, 0.060703712463378905, 0.06050806427001953, 
0.06092800140380859, 0.06158095932006836, 0.06232025527954101, 0.060711647033691404, 0.06095894241333008, 0.06099148941040039, 0.0604824333190918, 0.060645664215087894, 0.06069926452636719, 0.06073331069946289, 0.06059196853637695, 0.06059347152709961, 0.06111276626586914, 0.06514332580566407, 0.06077199935913086, 0.06060038375854492, 0.06067023849487305, 0.0607782096862793, 0.061073696136474606, 0.060896896362304685, 0.06085055923461914, 0.06077788925170898, 0.060794784545898435, 0.06043308639526367, 0.061074718475341794, 0.06042396926879883, 0.060588768005371094, 0.060889312744140625, 0.06110819244384766, 0.0608903694152832, 0.06553206634521484, 0.06084806442260742, 0.061619998931884766, 0.06087756729125977, 0.0649115219116211, 0.06126182556152344, 0.06121446228027344, 0.06126617431640625, 0.061104129791259766, 0.06246649551391602, 0.06550019073486328, 0.06097110366821289, 0.06054182434082031, 0.060837535858154296, 0.06065187072753906, 0.06136217498779297, 0.060638622283935545, 0.06053744125366211, 0.06141097640991211, 0.06145878219604492, 0.061052734375, 0.06097903823852539, 0.06077679824829101, 0.060665855407714846, 0.06093967819213867, 0.06110678482055664, 0.061478271484375, 0.06529702758789062, 0.060641376495361325, 0.06061372756958008, 0.060838462829589844, 0.060604320526123044, 0.06069305419921875, 0.060913440704345706, 0.06145775985717773, 0.060875423431396486, 0.060609664916992184, 0.06069132614135742, 0.060604129791259766, 0.06063951873779297, 0.06051839828491211, 0.060647361755371096, 0.06075321578979492, 0.060848224639892576, 0.06517826843261719, 0.06092390441894531, 0.06060441589355469, 0.061084735870361326, 0.060650432586669925, 0.06051132965087891, 0.06413609313964844, 0.061009696960449215, 0.061478145599365236, 0.0611005744934082, 0.06113119888305664, 0.06047129440307617, 0.06081740951538086, 0.06061862564086914, 0.06100806427001953, 0.06059001541137695, 0.060665760040283206, 0.06611567687988282, 0.060970752716064454, 0.060571903228759764, 0.06058393478393555, 0.06072524642944336, 0.06059417724609375, 0.061017887115478515, 0.06083123016357422, 0.060942718505859375, 0.06104217529296875, 0.06084489440917969]",tokens/s,16.3707983024096,,, 4bit-awq-gemm-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1015.31648,867.106816,0.0,488.636416,482.553856,s,1,8.1101240234375,8.1101240234375,0.0,8.1101240234375,8.1101240234375,8.1101240234375,8.1101240234375,[8.1101240234375],,kWh,2.895301289996344e-05,3.1790054850173666e-06,8.45528454196387e-06,4.0587302926944674e-05,,MB,1269.141504,1024.393216,0.0,616.562688,581.925888,s,10,0.3571811180114745,0.03571811180114746,0.00024128084346798723,0.03568595123291016,0.0359390510559082,0.03610627670288086,0.036240057220458984,"[0.03627350234985351, 0.035744129180908205, 0.03564166259765625, 0.035899871826171874, 0.03551369476318359, 0.035539169311523434, 0.035438591003417966, 0.035730239868164065, 0.03549836730957031, 
0.03590188980102539]",tokens/s,7167.232171320319,kWh,1.1306078822072933e-06,1.246858942938572e-07,7.525145445251156e-07,2.007808321026266e-06,tokens/kWh,127502210.9028559,MB,1302.872064,1039.07328,0.0,631.242752,597.192192,s,10,17.16676220703125,1.7166762207031248,0.010543409282530449,1.7162196655273436,1.7303883056640625,1.73304150390625,1.7351640625,"[1.7196602783203125, 1.7171500244140625, 1.706823974609375, 1.715289306640625, 1.6963521728515625, 1.7125345458984376, 1.7297987060546876, 1.7356947021484375, 1.71229150390625, 1.7211669921875]",tokens/s,36.69882488044026,kWh,5.007109421654183e-05,5.522389019015717e-06,1.920022363987546e-05,7.479370687543301e-05,tokens/kWh,842316.8556804501,,s,630,17.161206449508658,0.027240010237315346,0.0005188733701594881,0.02709940814971924,0.02779759616851807,0.02792737331390381,0.029234470233917238,"[0.027675647735595704, 0.027025407791137695, 0.0269354248046875, 0.026990463256835937, 0.026808319091796876, 0.02679110336303711, 0.027115488052368165, 0.02669843292236328, 0.027004255294799804, 0.026748863220214844, 0.02677235221862793, 0.026746879577636717, 0.02672230339050293, 0.02672960090637207, 0.026765727996826173, 0.026963808059692382, 0.026674816131591797, 0.026856447219848634, 0.02701692771911621, 0.027678272247314454, 0.02742755126953125, 0.027883487701416014, 0.02728940773010254, 0.027338783264160157, 0.026982175827026368, 0.026990655899047852, 0.027154399871826173, 0.027189151763916015, 0.027142623901367187, 0.027060224533081056, 0.027894880294799803, 0.027333696365356444, 0.02734467124938965, 0.02705824089050293, 0.027176895141601563, 0.027256128311157226, 0.027377792358398437, 0.02716476821899414, 0.02755638313293457, 0.027098592758178712, 0.027162815093994142, 0.02699299240112305, 0.02683679962158203, 0.027074880599975586, 0.026856416702270507, 0.027110111236572264, 0.027228511810302735, 0.027453279495239256, 0.028520448684692383, 0.02765945625305176, 0.02781279945373535, 0.027895360946655273, 0.027851295471191407, 0.02776006317138672, 0.02769875144958496, 0.027867712020874024, 0.02777110481262207, 0.027824447631835936, 0.02787705612182617, 0.028055168151855468, 0.02783475112915039, 0.02780521583557129, 0.02767305564880371, 0.027300031661987304, 0.027410240173339845, 0.027450559616088867, 0.026913600921630858, 0.026771455764770507, 0.026738687515258788, 0.02669068717956543, 0.026907520294189455, 0.027125759124755858, 0.02925542449951172, 0.027385696411132813, 0.026886528015136718, 0.026765344619750976, 0.026617855072021485, 0.026617855072021485, 0.026732255935668945, 0.02668367958068848, 0.026648832321166993, 0.026676095962524415, 0.02670476722717285, 0.02675302314758301, 0.02669935989379883, 0.02660598373413086, 0.02654207992553711, 0.030242816925048828, 0.02679311943054199, 0.026712352752685548, 0.026828704833984376, 0.027150976181030274, 0.027025440216064452, 0.02676736068725586, 0.026937280654907227, 0.02674643135070801, 0.026735103607177735, 0.026891807556152343, 0.029122848510742188, 0.028404224395751954, 0.027829023361206056, 0.027661216735839843, 0.02795315170288086, 0.02754256057739258, 0.027666559219360353, 0.02746985626220703, 0.027693056106567384, 0.02756211280822754, 0.027625696182250976, 0.02750326347351074, 0.02740947151184082, 0.027459455490112306, 0.027582847595214843, 0.027478527069091797, 0.027495744705200196, 0.02770207977294922, 0.02740559959411621, 0.027372127532958986, 0.027262975692749023, 0.027067935943603516, 0.02691440010070801, 0.027276159286499024, 0.026869760513305665, 0.02711507225036621, 0.027474111557006835, 
0.027023616790771483, 0.02651817512512207, 0.02729759979248047, 0.028847360610961915, 0.02705299186706543, 0.027455039978027344, 0.02675059127807617, 0.02670675277709961, 0.026613759994506835, 0.026705919265747072, 0.026695680618286134, 0.02674844741821289, 0.026632671356201173, 0.02663417625427246, 0.0267194881439209, 0.026714912414550783, 0.026703903198242188, 0.026847232818603517, 0.02680169677734375, 0.02704636764526367, 0.02693120002746582, 0.026697727203369142, 0.026681343078613282, 0.02672982406616211, 0.026655391693115236, 0.02672640037536621, 0.02676940727233887, 0.026771455764770507, 0.026730495452880858, 0.027097215652465822, 0.02721900749206543, 0.027067071914672853, 0.026871583938598634, 0.02689039993286133, 0.02694588851928711, 0.026729791641235352, 0.02679043197631836, 0.02678374481201172, 0.027236320495605468, 0.027318336486816405, 0.027922655105590822, 0.02765817642211914, 0.02728563117980957, 0.026850368499755858, 0.02679772758483887, 0.026897119522094726, 0.02688640022277832, 0.0269434871673584, 0.02719875144958496, 0.0270097599029541, 0.027060224533081056, 0.027112480163574218, 0.027169759750366212, 0.02699673652648926, 0.027122976303100586, 0.027654111862182616, 0.027797567367553712, 0.027767488479614258, 0.02777292823791504, 0.027893024444580077, 0.027717472076416016, 0.027693952560424805, 0.027822080612182616, 0.028004352569580077, 0.02742425537109375, 0.027900415420532225, 0.02795644760131836, 0.028986143112182616, 0.028463104248046874, 0.02780963134765625, 0.02746384048461914, 0.027288959503173827, 0.027361343383789063, 0.02714681625366211, 0.026908672332763672, 0.02733260726928711, 0.0269550724029541, 0.026812864303588868, 0.026800384521484377, 0.026720256805419923, 0.026617984771728515, 0.026877824783325194, 0.026742271423339844, 0.028252447128295898, 0.027893695831298828, 0.026915103912353515, 0.026877952575683595, 0.026728288650512695, 0.026975519180297853, 0.026800512313842773, 0.02674265670776367, 0.026993280410766603, 0.027254783630371093, 0.027046176910400392, 0.02710905647277832, 0.027419872283935547, 0.027667264938354492, 0.027735359191894533, 0.027765439987182616, 0.02755075263977051, 0.02761440086364746, 0.027372928619384767, 0.027461151123046874, 0.027072608947753905, 0.0268953914642334, 0.02672368049621582, 0.026970527648925782, 0.02714419174194336, 0.02699673652648926, 0.02691196823120117, 0.026907424926757812, 0.026807872772216797, 0.028145183563232423, 0.02776678466796875, 0.027022239685058593, 0.026944831848144533, 0.026700031280517577, 0.027038143157958983, 0.02727071952819824, 0.027234752655029296, 0.027164512634277344, 0.026893728256225585, 0.026655487060546875, 0.026953727722167968, 0.026986495971679687, 0.02686502456665039, 0.026943552017211915, 0.026728448867797853, 0.02694704055786133, 0.026892831802368164, 0.026705919265747072, 0.026816511154174806, 0.026591232299804687, 0.02667241668701172, 0.02681248092651367, 0.026739360809326172, 0.026641759872436523, 0.026762079238891602, 0.02673206329345703, 0.02664886474609375, 0.026695680618286134, 0.02654003143310547, 0.026660863876342773, 0.02664019203186035, 0.026763456344604492, 0.02702035140991211, 0.02685638427734375, 0.026736640930175783, 0.026818559646606444, 0.026828351974487304, 0.026823104858398436, 0.0268155517578125, 0.02691516876220703, 0.027329120635986328, 0.02701312065124512, 0.027061824798583985, 0.026874271392822266, 0.02689846420288086, 0.027017215728759765, 0.027691007614135742, 0.027550783157348633, 0.027323328018188476, 0.02734694480895996, 0.027467967987060547, 
0.02806937599182129, 0.02718137550354004, 0.027092992782592775, 0.027209728240966798, 0.026801376342773436, 0.026874975204467775, 0.027419551849365235, 0.02711222457885742, 0.026906272888183595, 0.026806623458862304, 0.026763263702392577, 0.026881311416625978, 0.02697075271606445, 0.027066463470458983, 0.027076608657836915, 0.026849023818969725, 0.026689056396484376, 0.0265960636138916, 0.02690662384033203, 0.026607328414916993, 0.026753183364868163, 0.02665894317626953, 0.026884096145629883, 0.02680143928527832, 0.026740671157836914, 0.02675996780395508, 0.026572799682617186, 0.02689023971557617, 0.02685923194885254, 0.026876192092895507, 0.026771455764770507, 0.026799488067626952, 0.026780288696289064, 0.026832767486572266, 0.026984575271606446, 0.02704150390625, 0.02704207992553711, 0.027140096664428712, 0.027099136352539063, 0.027183103561401366, 0.027020896911621094, 0.027090431213378906, 0.027034528732299806, 0.02702739143371582, 0.026974271774291993, 0.02688204765319824, 0.026955360412597655, 0.027198911666870117, 0.0270284481048584, 0.02695599937438965, 0.027191072463989257, 0.02724870491027832, 0.02709907150268555, 0.02693939208984375, 0.027181055068969725, 0.027170976638793944, 0.02739616012573242, 0.02768160057067871, 0.02782512092590332, 0.02775859260559082, 0.027550848007202148, 0.027509248733520508, 0.02762326431274414, 0.02776915168762207, 0.0277926082611084, 0.0275382080078125, 0.027506879806518555, 0.027514911651611327, 0.027639455795288086, 0.027627647399902342, 0.02737993621826172, 0.02717081642150879, 0.02733670425415039, 0.02750828742980957, 0.027148767471313475, 0.027076576232910158, 0.027256799697875977, 0.02745756721496582, 0.02735923194885254, 0.027586559295654296, 0.027131168365478516, 0.026825439453125, 0.026828800201416016, 0.026736480712890625, 0.02685148811340332, 0.026959871292114256, 0.026947519302368165, 0.027002784729003908, 0.026846752166748047, 0.026652063369750977, 0.027099679946899415, 0.02711155128479004, 0.02717750358581543, 0.026966976165771483, 0.02694003105163574, 0.026951776504516602, 0.02674518394470215, 0.027111072540283204, 0.027107328414916993, 0.026789888381958008, 0.026836448669433594, 0.026787679672241212, 0.026834911346435546, 0.02684364891052246, 0.02695395278930664, 0.027218975067138673, 0.027685855865478514, 0.027789024353027342, 0.027781408309936525, 0.02851430320739746, 0.029511680603027345, 0.030965856552124024, 0.027887840270996094, 0.027801279067993165, 0.027985664367675783, 0.02776835250854492, 0.027732032775878906, 0.02792723274230957, 0.02804230308532715, 0.027927488327026368, 0.027728927612304687, 0.0275567684173584, 0.02750048065185547, 0.02722831916809082, 0.027073600769042968, 0.02691600036621094, 0.027563680648803712, 0.0268657283782959, 0.026861568450927735, 0.027056127548217773, 0.026986431121826172, 0.0273918399810791, 0.027475872039794923, 0.027519296646118165, 0.02794495964050293, 0.02756387138366699, 0.027594911575317384, 0.02753657531738281, 0.027451616287231445, 0.027570783615112306, 0.027578464508056642, 0.02762656021118164, 0.027341663360595705, 0.02739200019836426, 0.02794495964050293, 0.02750771141052246, 0.027405311584472656, 0.027289600372314454, 0.02737299156188965, 0.02699673652648926, 0.026806272506713868, 0.02713043212890625, 0.0267509765625, 0.027082752227783204, 0.029183168411254883, 0.0270601921081543, 0.027363616943359374, 0.0274366397857666, 0.02719228744506836, 0.029930944442749022, 0.02762588882446289, 0.02756537628173828, 0.02751148796081543, 0.027473440170288087, 0.02741926383972168, 
0.027438432693481445, 0.027830944061279297, 0.027522079467773436, 0.027829216003417968, 0.03010918426513672, 0.027225631713867188, 0.027374080657958984, 0.027316640853881836, 0.027043903350830078, 0.02692131233215332, 0.026938175201416014, 0.027058944702148438, 0.027060096740722656, 0.027314207077026368, 0.0273635196685791, 0.027230207443237304, 0.027236255645751953, 0.0275313606262207, 0.027582592010498046, 0.02736729621887207, 0.027313695907592774, 0.027101984024047853, 0.026998464584350585, 0.02705161666870117, 0.027558559417724608, 0.02771718406677246, 0.027769023895263673, 0.02766819190979004, 0.027689407348632813, 0.027718591690063476, 0.027810720443725585, 0.027717695236206055, 0.027776960372924805, 0.027758848190307616, 0.027612415313720703, 0.029039295196533203, 0.027840320587158202, 0.02787705612182617, 0.028334400177001954, 0.027736320495605468, 0.028142784118652345, 0.027413055419921874, 0.02750048065185547, 0.02742073631286621, 0.02735923194885254, 0.027064096450805663, 0.026884319305419922, 0.026790176391601563, 0.0267238712310791, 0.02691004753112793, 0.02657148742675781, 0.026826751708984374, 0.026949567794799803, 0.02719526481628418, 0.027017375946044923, 0.027109632492065428, 0.027217695236206055, 0.027428863525390625, 0.02742032051086426, 0.02733296012878418, 0.027085056304931642, 0.026963104248046876, 0.026813024520874022, 0.027033599853515625, 0.027056127548217773, 0.027170047760009766, 0.026650623321533205, 0.0268374080657959, 0.026980703353881835, 0.02696396827697754, 0.02694144058227539, 0.026957151412963867, 0.027073183059692384, 0.027066368103027344, 0.0276081600189209, 0.02755471992492676, 0.027613183975219727, 0.027027231216430664, 0.02705135917663574, 0.02694009590148926, 0.026818016052246093, 0.026875776290893556, 0.027091392517089842, 0.026954143524169923, 0.02703385543823242, 0.02754979133605957, 0.02772777557373047, 0.02759561538696289, 0.02744403266906738, 0.027399904251098634, 0.02721232032775879, 0.02703548812866211, 0.027013023376464843, 0.02701683235168457, 0.027056608200073242, 0.02689580726623535, 0.02703971290588379, 0.026884864807128907, 0.027742271423339845, 0.0286176643371582, 0.027697792053222658, 0.02693734359741211, 0.0267675838470459, 0.02695577621459961, 0.02735103988647461, 0.027060159683227537, 0.027136064529418944, 0.02730803108215332, 0.02735206413269043, 0.027282560348510742, 0.027322240829467773, 0.02780486488342285, 0.027283807754516602, 0.02771011161804199, 0.029391136169433594, 0.027764799118041993, 0.0275742073059082, 0.027507743835449218, 0.027846656799316406, 0.02774457550048828, 0.02774700736999512, 0.02823779106140137, 0.027706432342529296, 0.027570783615112306, 0.027691423416137697, 0.02768492889404297, 0.027774911880493164, 0.027725759506225585, 0.027731327056884764, 0.027709440231323244, 0.02770800018310547, 0.02782316780090332, 0.027797855377197266, 0.027777727127075196, 0.02766431999206543, 0.027555200576782228, 0.02740902328491211, 0.027363391876220704, 0.027571231842041015, 0.027827104568481444, 0.02751804733276367, 0.027197856903076172, 0.026838592529296875, 0.026887104034423827, 0.026804224014282226, 0.026740480422973632, 0.027191551208496093, 0.02691276741027832, 0.0271646728515625, 0.02663827133178711, 0.026941024780273437, 0.02692553520202637, 0.026850688934326173, 0.027027168273925782, 0.02703657531738281, 0.026921247482299803, 0.026982112884521483, 0.02694265556335449, 0.027019968032836916, 0.02691494369506836, 0.02677299118041992, 0.026746368408203124, 0.026895360946655275, 0.026952863693237305, 
0.02690287971496582, 0.02679612731933594, 0.026759103775024416, 0.02676140785217285, 0.026656063079833984, 0.027648992538452148, 0.027062271118164064, 0.027082752227783204, 0.02720774459838867, 0.026980287551879884, 0.026990591049194337, 0.027250303268432616]",tokens/s,36.710705733514274,,, 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 651, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 423, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 664, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Qwen/Qwen-7B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-7B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track 
self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking 
context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 651, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 423, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 664, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Deci/DeciLM-7B contains custom code which must be executed to correctly load the model. 
You can inspect the repository content at https://hf.co/Deci/DeciLM-7B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpnu7l5whr/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1174, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 894, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 507, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 436, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, 
worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 
'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 115469 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp2w614cxh/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 651, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 423, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 664, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Qwen/Qwen-14B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-14B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 651, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 423, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 664, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Qwen/Qwen-72B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-72B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", 
line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 96.12 MiB is free. Process 295177 has 14.64 GiB memory in use. Of the allocated memory 14.23 GiB is allocated by PyTorch, and 323.24 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4088, in from_pretrained hf_quantizer.postprocess_model(model) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model return self._process_model_after_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 460, in post_init_awq_exllama_modules model = exllamav2_post_init( File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 193, in exllamav2_post_init submodule.post_init(scratch_space=model.scratch_spaces[device]) File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 78, in post_init self.q_handle = exlv2_ext.make_q_matrix( RuntimeError: q_weight and gptq_scales have incompatible shapes " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, 
in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 31170 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp6vdra2iq/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpp18bxefd/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 651, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 423, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 664, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Deci/DeciCoder-1b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Deci/DeciCoder-1b. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, 
worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in 
benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpb2il6fo7/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in 
benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track 
self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report 
= Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File 
""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1039, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 816, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 551, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 384, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File 
""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, 
q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = 
decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1069, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 881, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 479, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", 
line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1069, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 881, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 479, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return 
super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 3 more times] File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 805, in _apply param_applied = fn(param) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.55 GiB is free. Process 226513 has 13.19 GiB memory in use. Of the allocated memory 13.09 GiB is allocated by PyTorch, and 880.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File 
""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpfvmzarta/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: BloomForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,1012.8384,1067.319296,0.0,681.5744,672.587776,s,1,10.267689453125,10.267689453125,0.0,10.267689453125,10.267689453125,10.267689453125,10.267689453125,[10.267689453125],,kWh,2.2938498450004846e-05,2.5226424933914885e-06,6.878616613999536e-06,3.233975755739587e-05,,MB,1423.613952,1436.418048,0.0,1021.313024,979.26144,s,10,0.35313161849975594,0.03531316184997559,0.00033003100983630673,0.03523537635803223,0.03554098358154297,0.03587041206359863,0.036133954849243165,"[0.0361998405456543, 0.03504012680053711, 0.03527462387084961, 0.03519612884521484, 0.035066272735595705, 0.03505052947998047, 0.03508611297607422, 0.035364734649658204, 0.03538547134399414, 0.03546777725219727]",tokens/s,7249.421648720955,kWh,1.1380818729571994e-06,1.2550902648326417e-07,7.510263249883288e-07,2.014617224428792e-06,tokens/kWh,127071285.25250454,MB,1480.343552,1474.166784,0.0,1056.964608,979.264,s,10,16.619723388671872,1.6619723388671872,0.006999651961998271,1.66373779296875,1.6692269531250001,1.6695753295898437,1.6698540307617187,"[1.651552490234375, 1.6610023193359376, 1.6668665771484374, 1.6691495361328126, 1.6552791748046876, 1.6507735595703126, 1.6664732666015625, 1.6688211669921875, 1.6699237060546874, 1.659881591796875]",tokens/s,37.90676807710366,kWh,4.841569379412559e-05,5.339894738674767e-06,1.895437832821175e-05,7.270996686101211e-05,tokens/kWh,866456.1781526722,,s,630,16.617072961807246,0.02637630628858294,0.00042335862806021634,0.026304960250854492,0.02662941131591797,0.02683236312866211,0.02864504718780518,"[0.026714912414550783, 0.026504703521728516, 0.02619059181213379, 0.026153919219970703, 0.026118976593017578, 0.02608332824707031, 0.02615235137939453, 0.026071456909179686, 0.026255552291870116, 0.02609971237182617, 0.026066944122314452, 0.02636595153808594, 0.026154464721679687, 0.026059295654296873, 0.026030336380004883, 0.02616908836364746, 0.026292224884033204, 0.02588470458984375, 0.026257375717163085, 0.02623427200317383, 0.026145376205444337, 0.026312000274658204, 0.026311359405517577, 0.02610313606262207, 0.026093568801879883, 0.026120864868164062, 0.02612620735168457, 0.026127872467041017, 0.026258047103881837, 0.026124256134033203, 0.026015775680541992, 0.02609152030944824, 0.026990591049194337, 0.0261441593170166, 0.026118751525878905, 0.026394208908081054, 0.02599158477783203, 0.0261079044342041, 0.02655574417114258, 0.026142879486083983, 0.026259967803955078, 0.026056703567504884, 0.02631884765625, 0.026220319747924804, 0.02614908790588379, 0.02630451202392578, 0.026566656112670898, 0.02624505615234375, 0.026046144485473634, 
0.02638627243041992, 0.026124832153320312, 0.026050687789916992, 0.02604787254333496, 0.026171520233154298, 0.026141056060791014, 0.026547679901123045, 0.026271936416625976, 0.02617487907409668, 0.026101696014404298, 0.026220960617065428, 0.026182239532470702, 0.02632035255432129, 0.026257280349731446, 0.026647071838378906, 0.02593811225891113, 0.026136383056640625, 0.026224063873291015, 0.026146623611450197, 0.026003616333007813, 0.026091264724731444, 0.026216672897338866, 0.026176128387451172, 0.026162752151489256, 0.026356447219848634, 0.026170944213867186, 0.02621072006225586, 0.026109695434570312, 0.02607513618469238, 0.026032127380371094, 0.026193920135498046, 0.026123647689819337, 0.026147008895874024, 0.028559392929077148, 0.027947423934936523, 0.02632294464111328, 0.026198015213012696, 0.026261503219604493, 0.02625484848022461, 0.026083200454711915, 0.026339136123657226, 0.02626233673095703, 0.0261345272064209, 0.02613574409484863, 0.026161407470703123, 0.02612236785888672, 0.0264105281829834, 0.026153600692749024, 0.02624336051940918, 0.026392736434936524, 0.026144607543945313, 0.02620150375366211, 0.026446432113647462, 0.026660863876342773, 0.02647612762451172, 0.026110368728637694, 0.026267648696899414, 0.02627174377441406, 0.026638303756713867, 0.026296352386474608, 0.02694655990600586, 0.026155040740966796, 0.02631715202331543, 0.026086015701293944, 0.026668224334716797, 0.026311487197875977, 0.02627174377441406, 0.026200063705444337, 0.02607923126220703, 0.026076511383056642, 0.026203039169311524, 0.026208127975463867, 0.026652544021606446, 0.02868003273010254, 0.026580448150634765, 0.026411712646484373, 0.026238367080688475, 0.026414527893066406, 0.025995840072631837, 0.026109952926635743, 0.02613657569885254, 0.02634547233581543, 0.02649087905883789, 0.026275840759277344, 0.026275840759277344, 0.02626915168762207, 0.026483232498168946, 0.026353567123413087, 0.026280031204223633, 0.026615104675292968, 0.026344127655029297, 0.026451135635375978, 0.026164031982421874, 0.026214176177978516, 0.02631923294067383, 0.026247007369995117, 0.026223808288574218, 0.026517791748046873, 0.026638208389282228, 0.02647452735900879, 0.02637068748474121, 0.026293632507324218, 0.026319488525390625, 0.02622175979614258, 0.026344255447387697, 0.026152383804321288, 0.02634553527832031, 0.026755327224731444, 0.02647235107421875, 0.029047231674194336, 0.027791263580322266, 0.029104127883911132, 0.026333343505859374, 0.026210208892822266, 0.026611648559570312, 0.02615910339355469, 0.026029312133789062, 0.026341983795166016, 0.02633247947692871, 0.026137439727783204, 0.026277887344360353, 0.026300416946411134, 0.0265031681060791, 0.026162559509277344, 0.026386112213134767, 0.02644812774658203, 0.026309120178222657, 0.02660780715942383, 0.026172767639160155, 0.026304800033569335, 0.02646284866333008, 0.026320640563964843, 0.02637004852294922, 0.026202112197875976, 0.027068416595458986, 0.026265663146972658, 0.026285375595092773, 0.0263045768737793, 0.02656435203552246, 0.02626848030090332, 0.026872127532958985, 0.02693270492553711, 0.028686111450195312, 0.026901248931884766, 0.026476543426513673, 0.026552223205566407, 0.026485952377319336, 0.026844064712524415, 0.026350912094116212, 0.02625721549987793, 0.026446367263793947, 0.02628793525695801, 0.026431264877319335, 0.026212255477905275, 0.02649295997619629, 0.026444000244140627, 0.026457887649536133, 0.026135040283203126, 0.02630512046813965, 0.026280895233154297, 0.026398880004882812, 0.026464895248413087, 0.02636787223815918, 0.026191999435424804, 
0.026574304580688476, 0.026233375549316405, 0.0262841911315918, 0.026197248458862305, 0.026259424209594727, 0.026420896530151367, 0.026743776321411134, 0.02625712013244629, 0.02623529624938965, 0.026261375427246093, 0.02643312072753906, 0.02641961669921875, 0.026376192092895507, 0.02652364730834961, 0.026475519180297852, 0.026630592346191407, 0.026606143951416014, 0.026513408660888672, 0.026267328262329102, 0.026446144104003907, 0.02654115104675293, 0.026477119445800782, 0.026628000259399414, 0.02658348846435547, 0.026330976486206054, 0.026484031677246094, 0.026506080627441406, 0.02631270408630371, 0.026482528686523437, 0.02634067153930664, 0.026692447662353517, 0.026312448501586913, 0.02648431968688965, 0.02655913543701172, 0.02660063934326172, 0.026428224563598633, 0.02651955223083496, 0.026495071411132814, 0.026643423080444335, 0.026832895278930666, 0.026391807556152343, 0.026899200439453125, 0.02646236801147461, 0.02726896095275879, 0.026511232376098634, 0.02631078338623047, 0.02621023941040039, 0.026425407409667968, 0.026097375869750975, 0.026196256637573242, 0.02613475227355957, 0.026295808792114257, 0.026251359939575194, 0.026367839813232423, 0.026077184677124023, 0.02612873649597168, 0.026189823150634766, 0.026521600723266602, 0.026236928939819337, 0.026191871643066408, 0.026001407623291017, 0.026171392440795898, 0.026435583114624024, 0.026247167587280275, 0.027432960510253908, 0.026398719787597655, 0.02633852767944336, 0.026380319595336914, 0.026194047927856446, 0.026073631286621095, 0.026142784118652344, 0.02625334358215332, 0.026258655548095703, 0.02610051155090332, 0.02619385528564453, 0.026185792922973634, 0.02621788787841797, 0.026348031997680665, 0.026138336181640624, 0.02608780860900879, 0.02631884765625, 0.026384223937988283, 0.026114208221435547, 0.02612944030761719, 0.026192863464355468, 0.026307615280151367, 0.026139616012573242, 0.026273792266845702, 0.02634752082824707, 0.02619500732421875, 0.026139583587646485, 0.026056703567504884, 0.026034368515014648, 0.026044223785400392, 0.026163040161132814, 0.02617683219909668, 0.025978944778442384, 0.025979679107666017, 0.02616089630126953, 0.02606118392944336, 0.026042240142822266, 0.026187711715698243, 0.026522655487060547, 0.02595315170288086, 0.026109279632568358, 0.026043136596679686, 0.025896991729736328, 0.026043615341186523, 0.026059520721435546, 0.02596659278869629, 0.025943712234497072, 0.02595625686645508, 0.026260063171386717, 0.026338752746582032, 0.02623632049560547, 0.02617651176452637, 0.026085407257080077, 0.02606822395324707, 0.026442272186279297, 0.026169504165649414, 0.02604444885253906, 0.02614271926879883, 0.02631804847717285, 0.026004255294799803, 0.026032127380371094, 0.025937280654907226, 0.026088064193725585, 0.02600912094116211, 0.02612272071838379, 0.026072736740112304, 0.026672927856445313, 0.026143295288085937, 0.026236095428466798, 0.026052703857421877, 0.026073823928833006, 0.026072927474975586, 0.026036319732666017, 0.02607097625732422, 0.02607695960998535, 0.026086015701293944, 0.02609328079223633, 0.026060800552368164, 0.02637414360046387, 0.026269695281982423, 0.026245119094848633, 0.026427391052246094, 0.026402816772460938, 0.02637824058532715, 0.026587135314941408, 0.026321151733398437, 0.026281312942504884, 0.02620867156982422, 0.02637414360046387, 0.026226688385009765, 0.027145248413085937, 0.026040992736816405, 0.02620038414001465, 0.02612384033203125, 0.026255071640014647, 0.026125024795532227, 0.026227903366088868, 0.026499967575073242, 0.02626335906982422, 0.02641279983520508, 
0.026397056579589843, 0.026608959197998047, 0.02629078483581543, 0.026298240661621095, 0.026237152099609376, 0.026187776565551758, 0.026422496795654296, 0.026524448394775392, 0.026470176696777342, 0.0263242244720459, 0.026175519943237305, 0.026573759078979492, 0.026219648361206056, 0.026305343627929686, 0.026323007583618163, 0.02652390480041504, 0.026464000701904297, 0.026498783111572267, 0.026371936798095703, 0.02639302444458008, 0.026269695281982423, 0.026628320693969726, 0.026335008621215822, 0.026687488555908204, 0.026427391052246094, 0.026232831954956053, 0.026420543670654297, 0.02670457649230957, 0.02636595153808594, 0.02694758415222168, 0.026595327377319337, 0.02652774429321289, 0.026277088165283204, 0.026661312103271485, 0.02644207954406738, 0.02632441520690918, 0.026496959686279298, 0.026501760482788086, 0.026412736892700194, 0.026412767410278322, 0.026517120361328125, 0.026667808532714842, 0.026478527069091796, 0.026607872009277344, 0.02659459114074707, 0.026651199340820313, 0.02647056007385254, 0.02656051254272461, 0.026558624267578126, 0.026601152420043947, 0.02654345512390137, 0.02632963180541992, 0.02631644821166992, 0.0264520320892334, 0.026370176315307615, 0.02645568084716797, 0.026352447509765627, 0.026402816772460938, 0.02637116813659668, 0.02630108833312988, 0.026351936340332033, 0.026433055877685546, 0.02649456024169922, 0.02644630432128906, 0.026650623321533205, 0.02662544059753418, 0.026542688369750978, 0.02637824058532715, 0.02628553581237793, 0.0262923526763916, 0.02661008071899414, 0.026318336486816408, 0.02635318374633789, 0.026461151123046874, 0.026445823669433592, 0.026492927551269533, 0.026419200897216798, 0.026619007110595703, 0.030114688873291016, 0.026413055419921876, 0.02647007942199707, 0.02637151908874512, 0.026491775512695312, 0.026314399719238282, 0.02652400016784668, 0.026216447830200194, 0.026322591781616212, 0.026384735107421876, 0.02652569580078125, 0.026521600723266602, 0.026393888473510742, 0.026290111541748047, 0.02622710418701172, 0.026176959991455077, 0.026267808914184572, 0.026386528015136718, 0.026270336151123046, 0.026425407409667968, 0.026179584503173828, 0.026220544815063477, 0.026189247131347657, 0.026386783599853515, 0.025999584197998048, 0.02674483108520508, 0.02637004852294922, 0.0261278076171875, 0.026089408874511718, 0.026361728668212892, 0.026262271881103517, 0.026189823150634766, 0.026005184173583985, 0.02619772720336914, 0.026347360610961913, 0.026284799575805665, 0.026384384155273437, 0.026787424087524415, 0.03138924789428711, 0.02683171272277832, 0.026294271469116212, 0.026220544815063477, 0.026281984329223632, 0.02613862419128418, 0.02606015968322754, 0.026120832443237305, 0.02624710464477539, 0.026123455047607422, 0.02612928009033203, 0.026572799682617186, 0.0260598087310791, 0.026210975646972657, 0.026440000534057616, 0.02625660705566406, 0.026501792907714844, 0.026281919479370117, 0.026253503799438478, 0.026591360092163088, 0.026443647384643554, 0.026456064224243164, 0.026284032821655274, 0.026089279174804688, 0.026112192153930663, 0.026214399337768556, 0.026482688903808595, 0.026449920654296875, 0.02641663932800293, 0.026309120178222657, 0.02629631996154785, 0.02625279998779297, 0.026223104476928712, 0.02612601661682129, 0.026261823654174805, 0.02632851219177246, 0.02897977638244629, 0.026537984848022462, 0.026293983459472658, 0.026445568084716795, 0.02635420799255371, 0.026365375518798827, 0.026169248580932617, 0.02620675277709961, 0.026218080520629884, 0.026276224136352538, 0.026488639831542968, 0.026380159378051757, 
0.02632547187805176, 0.02634137535095215, 0.026310592651367187, 0.026308671951293945, 0.026199424743652344, 0.026124479293823243, 0.026263999938964843, 0.026413055419921876, 0.026550271987915038, 0.02683456039428711, 0.02650534439086914, 0.026951936721801757, 0.02677676773071289, 0.027277536392211914, 0.026702432632446288, 0.026492927551269533, 0.02650931167602539, 0.026793983459472655, 0.026845184326171875, 0.026681343078613282, 0.027274431228637694, 0.02706105613708496, 0.0272805118560791, 0.027165376663208007, 0.026487136840820314, 0.0265350399017334, 0.026887231826782227, 0.026708959579467773, 0.02657072067260742, 0.026650623321533205, 0.026629280090332032, 0.026731359481811524, 0.026531808853149413, 0.026427423477172852, 0.026246912002563478, 0.026192127227783205, 0.026351615905761717, 0.026148672103881835, 0.026306751251220704, 0.026306367874145507, 0.026394624710083008, 0.02653011131286621, 0.02631612777709961, 0.026173280715942382, 0.02634822463989258, 0.026162559509277344, 0.02605939292907715, 0.026447328567504882, 0.026097343444824218, 0.026462047576904298, 0.026192895889282225, 0.02594611167907715, 0.026441728591918946, 0.02639580726623535, 0.02644870376586914, 0.026328800201416015, 0.026314239501953125, 0.02627052879333496, 0.026250911712646485, 0.02626940727233887, 0.026514047622680663, 0.0265743350982666, 0.026697599411010742, 0.026303104400634766, 0.026283519744873047, 0.02625529670715332, 0.02628256034851074, 0.02640640068054199, 0.026310655593872072, 0.0262488956451416, 0.026358591079711915, 0.0261529598236084, 0.026224639892578124, 0.026250463485717773, 0.02622902488708496, 0.026210624694824217, 0.026158687591552734, 0.02602047920227051, 0.026150304794311522, 0.02630099105834961, 0.02615091133117676, 0.026163200378417968, 0.026169343948364256, 0.026236928939819337, 0.02641001510620117, 0.02638947105407715, 0.02637004852294922, 0.026445024490356444, 0.026794784545898436]",tokens/s,37.91281421511445,,, 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: BloomForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpqjq6hild/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,1042.612224,1144.91392,0.0,742.391808,732.914176,s,1,10.1074755859375,10.1074755859375,0.0,10.1074755859375,10.1074755859375,10.1074755859375,10.1074755859375,[10.1074755859375],,kWh,2.750154889999597e-05,3.0263804994625563e-06,7.804450687999218e-06,3.833238008745775e-05,,MB,1424.216064,1442.709504,0.0,1025.507328,998.26688,s,10,1.6002793121337888,0.1600279312133789,0.0005895228223081523,0.16004956817626953,0.16052415313720703,0.16086465377807618,0.1611370542907715,"[0.1612051544189453, 0.1602379913330078, 0.16027462768554687, 0.1600589141845703, 0.1597403564453125, 0.15999049377441407, 0.160448486328125, 0.16004022216796876, 0.15886671447753906, 0.15941635131835938]",tokens/s,1599.720736617243,kWh,4.857955478756871e-06,5.35743062550531e-07,3.2048067532786993e-06,8.598505294586102e-06,tokens/kWh,29772616.4291817,MB,1457.84832,1480.45824,0.0,1063.256064,998.793728,s,10,13.25366162109375,1.325366162109375,0.0033258237405454586,1.3258905639648437,1.3289847900390623,1.3294173217773437,1.3297633471679686,"[1.329849853515625, 1.32727490234375, 1.3204161376953125, 1.323845947265625, 1.328888671875, 1.3269556884765625, 1.320719482421875, 1.324825439453125, 1.322035400390625, 1.32885009765625]",tokens/s,47.53403383992609,kWh,3.844562345582635e-05,4.240225227914658e-06,1.6075930893521183e-05,5.876177957726218e-05,tokens/kWh,1072125.460685976,,s,630,13.250970167160029,0.0210332859796191,0.00030195376754496247,0.020975600242614745,0.02125663433074951,0.021512806129455567,0.02224334568023682,"[0.021622848510742188, 0.021127103805541992, 0.021073312759399415, 0.021097312927246092, 0.021083168029785156, 0.021270751953125, 0.021021440505981447, 0.021061632156372072, 0.021110784530639647, 0.020942047119140626, 0.02083919906616211, 0.021047584533691405, 0.02094460868835449, 0.021029951095581055, 0.02100111961364746, 0.020949024200439453, 0.020940799713134766, 0.020742143630981445, 0.020793312072753905, 0.02080771255493164, 0.021034528732299804, 0.020995712280273436, 0.020945440292358397, 0.020904256820678712, 0.02111065673828125, 0.020981887817382812, 0.021309663772583008, 0.021003711700439454, 0.020988256454467773, 0.020899839401245117, 0.020940799713134766, 0.02094419288635254, 0.022268447875976562, 0.02102288055419922, 0.021151744842529296, 0.0212174072265625, 0.021553216934204103, 0.021246912002563477, 0.02132467269897461, 0.021275999069213868, 0.021189279556274414, 0.021166080474853514, 0.021157472610473634, 0.021461408615112306, 0.021157632827758788, 0.02123119926452637, 0.021385887145996093, 0.021370880126953123, 0.021345375061035156, 0.021218175888061522, 
0.020976800918579102, 0.021206911087036134, 0.020980287551879882, 0.020980159759521486, 0.021086208343505858, 0.021043392181396486, 0.02095420837402344, 0.020971328735351562, 0.021018911361694335, 0.021072511672973634, 0.021123071670532227, 0.020971456527709962, 0.020883520126342772, 0.02170307159423828, 0.021096031188964845, 0.021454463958740233, 0.02104422378540039, 0.021037055969238282, 0.020901248931884765, 0.021005151748657226, 0.021071647644042967, 0.02107574462890625, 0.02093814468383789, 0.020908863067626952, 0.020983808517456053, 0.020944032669067383, 0.0210993595123291, 0.021006336212158205, 0.020953088760375976, 0.021131391525268554, 0.021030815124511718, 0.021063648223876952, 0.021048704147338868, 0.021066368103027342, 0.02132761573791504, 0.021092607498168946, 0.021097888946533205, 0.021137920379638672, 0.021207040786743164, 0.020970687866210938, 0.02111110305786133, 0.02117487907409668, 0.021972991943359374, 0.02090188789367676, 0.02087446403503418, 0.02103376007080078, 0.02104729652404785, 0.020917312622070312, 0.02100115203857422, 0.021112831115722656, 0.02111676788330078, 0.02118489646911621, 0.021157663345336915, 0.021161216735839844, 0.02112179183959961, 0.020977088928222656, 0.02127027130126953, 0.02114022445678711, 0.021277984619140624, 0.021176864624023437, 0.02125644874572754, 0.020834144592285157, 0.020899999618530275, 0.020987520217895506, 0.020978015899658205, 0.020959264755249025, 0.021009824752807618, 0.020985504150390626, 0.02097475242614746, 0.02083612823486328, 0.02088684844970703, 0.02097427177429199, 0.02078870391845703, 0.02072425651550293, 0.020875200271606446, 0.020910144805908203, 0.021722944259643554, 0.02095302391052246, 0.02094095993041992, 0.02089708709716797, 0.021179168701171876, 0.020807104110717775, 0.02081439971923828, 0.020815872192382814, 0.020866336822509764, 0.02077769660949707, 0.02085068893432617, 0.020942304611206056, 0.020932512283325197, 0.020847232818603515, 0.02084249687194824, 0.020786624908447265, 0.0209290885925293, 0.020860992431640624, 0.020912288665771484, 0.020881343841552734, 0.020827552795410157, 0.02104159927368164, 0.020946943283081054, 0.020842016220092772, 0.021131744384765627, 0.020941951751708984, 0.021064096450805665, 0.02134844779968262, 0.02099452781677246, 0.020921600341796874, 0.02086390495300293, 0.020946687698364257, 0.020811647415161134, 0.0209880313873291, 0.02083328056335449, 0.02107494354248047, 0.020965375900268556, 0.02089779281616211, 0.021028799057006838, 0.020897855758666994, 0.02090345573425293, 0.021034528732299804, 0.021024831771850584, 0.020992895126342774, 0.020948223114013672, 0.020985727310180665, 0.020988800048828124, 0.020940799713134766, 0.02085068893432617, 0.0209562873840332, 0.021003135681152342, 0.020922367095947265, 0.020951007843017577, 0.020962400436401366, 0.021211135864257814, 0.021093215942382813, 0.02097702407836914, 0.020849376678466796, 0.02099001693725586, 0.020860864639282228, 0.021006336212158205, 0.02097148895263672, 0.02080156707763672, 0.021665887832641603, 0.022387584686279296, 0.02100655937194824, 0.020961055755615233, 0.021176319122314453, 0.02080460739135742, 0.020736799240112305, 0.02085193634033203, 0.020935871124267577, 0.020896831512451173, 0.021002016067504882, 0.021078559875488283, 0.020941247940063478, 0.02085273551940918, 0.02091007995605469, 0.020793344497680662, 0.02103215980529785, 0.020898208618164063, 0.020849023818969727, 0.021522111892700195, 0.020941120147705078, 0.02091007995605469, 0.020875263214111327, 0.020815872192382814, 0.020975391387939454, 
0.02088297653198242, 0.02098636817932129, 0.020998336791992187, 0.021039167404174806, 0.021163967132568358, 0.021086208343505858, 0.020975616455078124, 0.020907840728759765, 0.020960704803466797, 0.020951711654663085, 0.02109823989868164, 0.020991935729980468, 0.020975584030151366, 0.02094441604614258, 0.020796319961547852, 0.020960256576538085, 0.02084147262573242, 0.020998144149780275, 0.02108006477355957, 0.020922367095947265, 0.02110873603820801, 0.021099647521972655, 0.021620960235595704, 0.020861440658569336, 0.02099420738220215, 0.02084659194946289, 0.020963167190551756, 0.021291168212890624, 0.020991392135620117, 0.020886112213134765, 0.020999776840209962, 0.02096988868713379, 0.02081497573852539, 0.02089664077758789, 0.02090188789367676, 0.020875040054321288, 0.020981983184814455, 0.021092512130737304, 0.02162553596496582, 0.02108025550842285, 0.021128543853759764, 0.021145824432373048, 0.021079967498779297, 0.0212524471282959, 0.022181888580322266, 0.02149772834777832, 0.021145727157592772, 0.020809728622436522, 0.020801504135131835, 0.020858015060424805, 0.02091302490234375, 0.020856576919555662, 0.020955392837524414, 0.020903423309326173, 0.02073414421081543, 0.020965343475341798, 0.02094838333129883, 0.020849599838256835, 0.020987520217895506, 0.021059968948364257, 0.02106572723388672, 0.021755903244018555, 0.021966272354125977, 0.021522207260131834, 0.021008512496948243, 0.021152416229248048, 0.021343936920166017, 0.021168447494506835, 0.02099590492248535, 0.021385408401489257, 0.020995935440063476, 0.020942655563354493, 0.020949344635009765, 0.02095052719116211, 0.021123584747314454, 0.021037055969238282, 0.02100223922729492, 0.020992000579833983, 0.02102672004699707, 0.0210883846282959, 0.02101759910583496, 0.021068416595458984, 0.021139232635498047, 0.02102943992614746, 0.021185951232910158, 0.02102947235107422, 0.020963327407836914, 0.020963327407836914, 0.020996095657348633, 0.02099350357055664, 0.020933151245117188, 0.020884992599487305, 0.020931200027465822, 0.021215103149414063, 0.021037055969238282, 0.0208855037689209, 0.021012479782104493, 0.021082111358642578, 0.02104934310913086, 0.02104729652404785, 0.020908031463623047, 0.021950464248657226, 0.023009279251098632, 0.02127052879333496, 0.0210861759185791, 0.021082271575927736, 0.020870975494384766, 0.02113747215270996, 0.020944896697998046, 0.021194175720214845, 0.02092089653015137, 0.020867071151733398, 0.0209770565032959, 0.02097724723815918, 0.02112393569946289, 0.021067615509033202, 0.021088031768798827, 0.021118656158447265, 0.021056352615356447, 0.020974943161010742, 0.0209083194732666, 0.021006048202514647, 0.020943519592285156, 0.020948991775512696, 0.02101862335205078, 0.0208907527923584, 0.02099446487426758, 0.020840927124023436, 0.021112831115722656, 0.0209039363861084, 0.020824064254760744, 0.020805215835571288, 0.020844959259033204, 0.021323776245117186, 0.02087936019897461, 0.020891647338867187, 0.020985855102539062, 0.02083951950073242, 0.02114771270751953, 0.021078752517700194, 0.020987295150756837, 0.02120275115966797, 0.02079545593261719, 0.0213656005859375, 0.021787872314453127, 0.024209375381469726, 0.020663103103637694, 0.02079539108276367, 0.02108608055114746, 0.020709503173828126, 0.02070528030395508, 0.020832256317138673, 0.020778112411499024, 0.020742624282836915, 0.021186975479125975, 0.020785152435302736, 0.020787200927734374, 0.020762624740600585, 0.020684799194335936, 0.02071347236633301, 0.020772863388061523, 0.020695039749145508, 0.02089369583129883, 0.02081177520751953, 
0.021518335342407227, 0.020948991775512696, 0.02099344062805176, 0.020979711532592774, 0.020928672790527344, 0.02087161636352539, 0.02079539108276367, 0.02087936019897461, 0.021154912948608398, 0.02082854461669922, 0.020709280014038087, 0.021025407791137696, 0.020948223114013672, 0.02079158401489258, 0.02139516830444336, 0.02127129554748535, 0.02092451286315918, 0.020903680801391603, 0.02080169677734375, 0.020989952087402345, 0.020733951568603515, 0.020832256317138673, 0.021037055969238282, 0.020882944107055663, 0.020887456893920898, 0.020994176864624025, 0.02083475112915039, 0.02089743995666504, 0.020855167388916015, 0.020973567962646485, 0.020766719818115235, 0.02086911964416504, 0.02083020782470703, 0.020967424392700194, 0.020966943740844728, 0.02087779235839844, 0.020815872192382814, 0.020766719818115235, 0.020850528717041017, 0.020938528060913085, 0.020892032623291014, 0.02096931266784668, 0.02105276870727539, 0.021035839080810546, 0.020992000579833983, 0.02109555244445801, 0.021067968368530275, 0.020939071655273436, 0.020938207626342773, 0.021121952056884767, 0.021117952346801756, 0.02095350456237793, 0.02093935966491699, 0.02105900764465332, 0.020993759155273437, 0.021021535873413086, 0.020980735778808594, 0.021246431350708007, 0.02091804885864258, 0.02090867233276367, 0.02091587257385254, 0.021019008636474608, 0.021012575149536132, 0.021673568725585936, 0.021233823776245116, 0.021332128524780274, 0.021221824645996094, 0.0211331844329834, 0.02126860809326172, 0.021149696350097655, 0.021174272537231444, 0.02103196716308594, 0.020988447189331055, 0.02087161636352539, 0.02141814422607422, 0.02096918487548828, 0.02203766441345215, 0.020984800338745117, 0.021059135437011718, 0.021090496063232423, 0.021293279647827148, 0.020934207916259766, 0.02103068733215332, 0.021033855438232423, 0.02110995292663574, 0.021052032470703124, 0.02092860794067383, 0.020899744033813478, 0.020961280822753905, 0.020942848205566408, 0.020996095657348633, 0.020976863861083984, 0.020859071731567383, 0.020875871658325194, 0.0208789119720459, 0.020763071060180664, 0.02078495979309082, 0.021190208435058595, 0.020824064254760744, 0.022053407669067382, 0.020955072402954102, 0.020922527313232422, 0.020975616455078124, 0.020895488739013673, 0.020844959259033204, 0.02106505584716797, 0.02095897674560547, 0.021062400817871092, 0.02097148895263672, 0.020938783645629882, 0.02096870422363281, 0.02088832092285156, 0.020996095657348633, 0.020992000579833983, 0.02092176055908203, 0.02079804801940918, 0.020946943283081054, 0.02087731170654297, 0.021103679656982424, 0.020754432678222655, 0.020673471450805662, 0.020669824600219728, 0.020804031372070313, 0.020789440155029298, 0.020926111221313475, 0.0208305606842041, 0.021631551742553712, 0.021006336212158205, 0.021161407470703126, 0.02107244873046875, 0.02099126434326172, 0.02098454475402832, 0.020961280822753905, 0.02084454345703125, 0.02086297607421875, 0.02082809638977051, 0.020856895446777345, 0.021506048202514647, 0.020957183837890626, 0.02086297607421875, 0.02081177520751953, 0.020921600341796874, 0.020820735931396484, 0.020924415588378906, 0.02093615913391113, 0.020875808715820312, 0.0208855037689209, 0.020955135345458984, 0.020928512573242186, 0.02091644859313965, 0.021003168106079103, 0.021037248611450194, 0.021258304595947266, 0.021102815628051757, 0.021227935791015624, 0.020793344497680662, 0.020846176147460937, 0.020799903869628905, 0.020936704635620116, 0.02075551986694336, 0.020769088745117188, 0.020815872192382814, 0.020858976364135744, 0.020809919357299804, 
0.023826784133911132, 0.021671104431152343, 0.021535392761230468, 0.020871328353881835, 0.020717567443847656, 0.020715200424194335, 0.02069126319885254, 0.021102048873901366, 0.020922496795654298, 0.02088924789428711, 0.020703392028808595, 0.020859487533569337, 0.02084659194946289, 0.020791296005249024, 0.02068889617919922, 0.020762624740600585, 0.020758527755737305, 0.02113030433654785, 0.020794143676757814, 0.02078326416015625, 0.020695039749145508, 0.020930559158325195, 0.020822015762329102, 0.020991008758544923, 0.020761568069458006, 0.021674943923950196, 0.02114089584350586, 0.0212076473236084, 0.021266399383544923, 0.021014560699462892, 0.020891647338867187, 0.021199968338012694, 0.0214718074798584, 0.02246281623840332, 0.021987327575683592, 0.021155839920043946, 0.021121023178100586, 0.020886911392211913, 0.020923007965087892, 0.02097475242614746, 0.02082697677612305, 0.020912128448486327, 0.02086297607421875, 0.02088755226135254, 0.02091007995605469, 0.02088243293762207, 0.02091110420227051, 0.021244991302490236, 0.020966335296630858, 0.02107187271118164, 0.021102592468261717, 0.021198848724365234, 0.021112831115722656, 0.02096531105041504, 0.020887456893920898, 0.021018272399902345, 0.020889120101928713, 0.020801536560058592, 0.02079747200012207, 0.020876224517822266, 0.021342208862304687, 0.02109644889831543, 0.02104047966003418, 0.02099043273925781, 0.021079328536987303, 0.021063648223876952, 0.02104409599304199, 0.020865055084228517, 0.020841728210449217, 0.020859424591064452, 0.021026239395141602, 0.020867040634155273, 0.020816896438598635, 0.020858720779418947, 0.020864767074584963, 0.02102851104736328, 0.020994655609130858, 0.020970848083496092, 0.02228700828552246, 0.020948991775512696, 0.021045248031616212, 0.021235712051391603, 0.021956607818603514, 0.021057056427001952, 0.020979583740234373, 0.0209785270690918, 0.02107366371154785, 0.02087321662902832]",tokens/s,47.543688654686825,,, 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: BloomForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp2ml8qurm/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,1010.950144,1067.319296,0.0,681.5744,672.587776,s,1,10.1887001953125,10.1887001953125,0.0,10.1887001953125,10.1887001953125,10.1887001953125,10.1887001953125,[10.1887001953125],,kWh,2.337521926249811e-05,2.5711296368454564e-06,6.917505533998725e-06,3.286385443334229e-05,,MB,1418.3424,1436.418048,0.0,1021.313024,979.26144,s,10,0.34678134918212894,0.034678134918212894,0.00013174736112590835,0.03464809608459472,0.0347925422668457,0.03490297546386719,0.034991322021484375,"[0.03501340866088867, 0.0346209602355957, 0.034768001556396484, 0.034540641784667966, 0.03452959823608399, 0.03473196792602539, 0.03463772964477539, 0.03461715316772461, 0.03466342544555664, 0.03465846252441406]",tokens/s,7382.173251351798,kWh,1.074472192585785e-06,1.1849412997031472e-07,7.095839009999977e-07,1.9025502235560972e-06,tokens/kWh,134556237.63850236,MB,1475.260416,1474.166784,0.0,1056.964608,979.264,s,10,13.963991210937499,1.39639912109375,0.00638702064411931,1.3962006225585937,1.4059607421875,1.4069640136718748,1.407766630859375,"[1.40796728515625, 1.3882437744140625, 1.387829345703125, 1.3928572998046875, 1.39798193359375, 1.391772705078125, 1.3950648193359374, 1.3991998291015626, 1.39733642578125, 1.40573779296875]",tokens/s,45.116041000265255,kWh,4.095779170283147e-05,4.517250723638457e-06,1.6827624573199993e-05,6.23026669996699e-05,tokens/kWh,1011192.6669260208,,s,630,13.961331571578983,0.022160843764411083,0.00027131999943118696,0.022116352081298828,0.02241216278076172,0.02257004632949829,0.02335178361892701,"[0.02265999984741211, 0.022281984329223632, 0.022362112045288086, 0.02246451187133789, 0.022576383590698242, 0.022210847854614257, 0.022518239974975585, 0.022167455673217772, 0.022255680084228516, 0.02198121643066406, 0.022228832244873046, 0.02218992042541504, 0.022040191650390624, 0.022118335723876954, 0.022213375091552735, 0.022078720092773438, 0.02210700798034668, 0.022212480545043944, 0.02229987144470215, 0.02228508758544922, 0.02266739273071289, 0.022152448654174806, 0.022295167922973633, 0.022255615234375, 0.022146207809448242, 0.02227020835876465, 0.022337247848510742, 0.022325504302978517, 0.022239839553833008, 0.022130624771118164, 0.02219603157043457, 0.022415647506713866, 0.022312511444091798, 0.022276544570922853, 0.022302112579345702, 0.02257571220397949, 0.02249113655090332, 0.022599103927612305, 0.022458240509033202, 0.02231884765625, 0.02237740707397461, 0.02260531234741211, 0.022200832366943358, 0.02240716743469238, 0.022269439697265626, 0.02241177558898926, 0.022482336044311522, 0.022218816757202147, 0.023094911575317383, 0.022329727172851564, 
0.022260032653808593, 0.022323423385620118, 0.022310848236083983, 0.022409280776977538, 0.022345727920532226, 0.022410783767700195, 0.02216991996765137, 0.0222762565612793, 0.022079488754272462, 0.02207043266296387, 0.022215423583984376, 0.02300931167602539, 0.023416191101074218, 0.022702335357666015, 0.022130687713623046, 0.022157312393188477, 0.022090848922729493, 0.02202511978149414, 0.02206064033508301, 0.022270368576049804, 0.021905216217041015, 0.022100160598754883, 0.022108160018920898, 0.02219820785522461, 0.022162656784057617, 0.022074207305908203, 0.022112255096435548, 0.021994592666625977, 0.022, 0.021915807723999023, 0.021917280197143556, 0.02203727912902832, 0.021897216796875, 0.02182143974304199, 0.021831680297851562, 0.02185420799255371, 0.021921791076660157, 0.02187264060974121, 0.02195631980895996, 0.02179849624633789, 0.021937887191772462, 0.02195132827758789, 0.021909536361694334, 0.02218137550354004, 0.02208793640136719, 0.022010208129882813, 0.022095775604248045, 0.02201024055480957, 0.02200137519836426, 0.02213999938964844, 0.022128992080688477, 0.022126720428466796, 0.02191814422607422, 0.0221265926361084, 0.022080575942993164, 0.022219200134277344, 0.022083648681640623, 0.022163583755493165, 0.022219072341918944, 0.022099967956542968, 0.0223191032409668, 0.022017311096191407, 0.021983680725097657, 0.02195894432067871, 0.0219238395690918, 0.021888608932495116, 0.021858720779418944, 0.0219783992767334, 0.021822111129760742, 0.021990720748901366, 0.021944608688354492, 0.022057439804077147, 0.02183683204650879, 0.02191663932800293, 0.02191155242919922, 0.022142303466796874, 0.022497695922851564, 0.022161312103271484, 0.02197283172607422, 0.021827840805053712, 0.021381120681762695, 0.02183171272277832, 0.021999584197998048, 0.02199692726135254, 0.021941951751708984, 0.02159244728088379, 0.022083295822143554, 0.022209375381469727, 0.022024192810058595, 0.02209382438659668, 0.022028160095214844, 0.022567039489746095, 0.02213203239440918, 0.022164159774780274, 0.022085823059082032, 0.02204038429260254, 0.021855680465698243, 0.021905376434326173, 0.021946975708007813, 0.02205695915222168, 0.021906944274902345, 0.022045087814331055, 0.02192793655395508, 0.021944416046142577, 0.022042623519897463, 0.021970943450927736, 0.022018047332763673, 0.02212224006652832, 0.022238847732543945, 0.022190656661987305, 0.022630527496337892, 0.022065088272094725, 0.02187059211730957, 0.022140127182006836, 0.022115104675292967, 0.022032352447509767, 0.022138912200927733, 0.022003616333007812, 0.022015487670898438, 0.022073440551757813, 0.022020511627197266, 0.02190550422668457, 0.022085472106933592, 0.021872095108032227, 0.02192595291137695, 0.021834367752075194, 0.021825759887695313, 0.02185807991027832, 0.021979135513305666, 0.021865760803222656, 0.021850175857543945, 0.02212112045288086, 0.022147071838378905, 0.021946176528930664, 0.021893312454223633, 0.021894527435302735, 0.021991264343261718, 0.022245536804199217, 0.022430335998535156, 0.02211840057373047, 0.02185420799255371, 0.022046144485473634, 0.02209174346923828, 0.021959264755249022, 0.022029407501220705, 0.022150047302246095, 0.022008960723876952, 0.02219443130493164, 0.022039167404174803, 0.022062143325805663, 0.022025152206420897, 0.021975135803222655, 0.022061119079589842, 0.022271839141845703, 0.0220032958984375, 0.02194268798828125, 0.02192086410522461, 0.021949344635009766, 0.021936256408691405, 0.02184899139404297, 0.021973663330078125, 0.021999935150146484, 0.02180633544921875, 0.02210630416870117, 0.021698528289794922, 
0.021924448013305665, 0.02195769691467285, 0.021912031173706055, 0.021968832015991212, 0.021934207916259767, 0.02192220878601074, 0.02196892738342285, 0.021983072280883788, 0.022207935333251952, 0.022133440017700196, 0.022054912567138672, 0.022368255615234374, 0.022367647171020508, 0.022260320663452147, 0.02208099174499512, 0.02313475227355957, 0.02211756706237793, 0.022311391830444335, 0.022245119094848633, 0.022047584533691406, 0.02195430374145508, 0.022208511352539064, 0.02207334327697754, 0.022374399185180666, 0.022177440643310547, 0.022130016326904298, 0.022199296951293947, 0.022169599533081053, 0.02224127960205078, 0.022390335083007813, 0.022359647750854493, 0.022342496871948243, 0.02226380729675293, 0.02220857620239258, 0.022120672225952147, 0.02219817543029785, 0.022185792922973634, 0.022704448699951172, 0.02229043197631836, 0.02218115234375, 0.022138944625854494, 0.021914272308349608, 0.022212064743041993, 0.022120992660522462, 0.022175743103027345, 0.022117727279663085, 0.022178464889526368, 0.022204416275024414, 0.02209382438659668, 0.022167552947998048, 0.022116352081298828, 0.022161407470703123, 0.022026239395141603, 0.02209587287902832, 0.02202191925048828, 0.02205923271179199, 0.022071296691894532, 0.022204383850097657, 0.021931392669677734, 0.02193270492553711, 0.021995519638061522, 0.02209791946411133, 0.022261152267456053, 0.022297183990478517, 0.02207539176940918, 0.02215760040283203, 0.022007423400878905, 0.02206729507446289, 0.021959936141967774, 0.02206380844116211, 0.022136640548706055, 0.02260403251647949, 0.023971839904785155, 0.022099967956542968, 0.02216134452819824, 0.022130752563476564, 0.021993120193481444, 0.022132415771484375, 0.022014591217041017, 0.022169631958007814, 0.02216134452819824, 0.0222957763671875, 0.022256223678588868, 0.022165760040283203, 0.022103776931762697, 0.022271743774414064, 0.022331487655639647, 0.022748735427856444, 0.022240224838256835, 0.022075103759765624, 0.02212268829345703, 0.022222623825073243, 0.022264095306396486, 0.021939552307128907, 0.022069856643676757, 0.02228428840637207, 0.022144351959228516, 0.0222740478515625, 0.02208425521850586, 0.02215936088562012, 0.024025087356567384, 0.022286336898803712, 0.022519168853759767, 0.021969535827636718, 0.02208768081665039, 0.02214473533630371, 0.02206064033508301, 0.022040735244750975, 0.02204719924926758, 0.022062719345092772, 0.022028736114501953, 0.022033952713012697, 0.02216582489013672, 0.022027711868286132, 0.022004735946655272, 0.022174720764160157, 0.02210032081604004, 0.022058591842651368, 0.021997919082641603, 0.022240991592407226, 0.02199407958984375, 0.02196406364440918, 0.021990079879760743, 0.021942432403564454, 0.021833728790283204, 0.021954559326171876, 0.021763935089111328, 0.021940383911132812, 0.022355295181274413, 0.022053535461425782, 0.02213430404663086, 0.021909696578979492, 0.021944576263427735, 0.022056383132934572, 0.021915552139282226, 0.02210793685913086, 0.022037408828735353, 0.02205081558227539, 0.02208563232421875, 0.02196054458618164, 0.022021728515625, 0.022000192642211914, 0.021927007675170897, 0.022004640579223633, 0.021874303817749022, 0.022722496032714843, 0.023193376541137695, 0.02207606315612793, 0.02209587287902832, 0.022030336380004883, 0.02189926338195801, 0.021913183212280272, 0.021809312820434572, 0.021938367843627928, 0.021835424423217772, 0.021827295303344728, 0.021899711608886718, 0.022077695846557617, 0.02220595169067383, 0.02201036834716797, 0.0220928955078125, 0.021943199157714845, 0.022046720504760742, 0.022303232192993162, 
0.022141151428222657, 0.0220897274017334, 0.02191974449157715, 0.022022144317626953, 0.02198646354675293, 0.02211020851135254, 0.021911584854125976, 0.02221670341491699, 0.02180291175842285, 0.021900192260742187, 0.02222822380065918, 0.02189299201965332, 0.021982080459594728, 0.021849727630615233, 0.021866527557373047, 0.021813600540161134, 0.021864479064941406, 0.02192790412902832, 0.021826719284057616, 0.021954912185668946, 0.02234828758239746, 0.02341472053527832, 0.022232383728027345, 0.022934431076049804, 0.022210399627685548, 0.02225324821472168, 0.02216991996765137, 0.022535776138305662, 0.02241798400878906, 0.022407007217407227, 0.022312959671020507, 0.022157312393188477, 0.022163040161132814, 0.022136831283569337, 0.02208723258972168, 0.02196566390991211, 0.022128128051757814, 0.022305280685424804, 0.022001407623291017, 0.022118656158447266, 0.021946367263793946, 0.022074880599975585, 0.022084096908569335, 0.022188032150268554, 0.02210201644897461, 0.021994592666625977, 0.022259616851806642, 0.022195199966430663, 0.02205881690979004, 0.022075456619262697, 0.022141056060791017, 0.022109535217285155, 0.02211292839050293, 0.02226924705505371, 0.022243520736694337, 0.022098304748535157, 0.022067583084106446, 0.022292224884033204, 0.022103231430053712, 0.02214080047607422, 0.02220128059387207, 0.022116352081298828, 0.022607648849487304, 0.022163679122924804, 0.022099967956542968, 0.022185983657836913, 0.02207744026184082, 0.02219968032836914, 0.02208992004394531, 0.021850559234619142, 0.022027616500854493, 0.021988000869750977, 0.022228864669799803, 0.021986623764038087, 0.02205779266357422, 0.022130687713623046, 0.022091424942016602, 0.022157663345336913, 0.022209983825683594, 0.02215555191040039, 0.022018367767333985, 0.0220296630859375, 0.022102367401123046, 0.021877023696899416, 0.021917695999145507, 0.022153215408325197, 0.022030080795288086, 0.02233513641357422, 0.021946880340576173, 0.02211199951171875, 0.022109535217285155, 0.022608160018920898, 0.022131423950195312, 0.022092832565307616, 0.022042623519897463, 0.022186368942260743, 0.022094432830810546, 0.02207926368713379, 0.02203392028808594, 0.022528608322143553, 0.022130304336547852, 0.02243356704711914, 0.022207008361816407, 0.022112096786499023, 0.021913951873779296, 0.022019424438476563, 0.022031007766723634, 0.022261600494384765, 0.022144384384155273, 0.022229312896728515, 0.02220412826538086, 0.0222392635345459, 0.02256355285644531, 0.02245631980895996, 0.02247270393371582, 0.022232063293457033, 0.0224201602935791, 0.022186271667480467, 0.022195327758789064, 0.02224220848083496, 0.02224947166442871, 0.022449760437011718, 0.02252227210998535, 0.022364160537719727, 0.02388991928100586, 0.022601728439331056, 0.022446592330932616, 0.02222870445251465, 0.022287744522094727, 0.022260448455810548, 0.022474943161010744, 0.022132736206054687, 0.022337535858154296, 0.022336671829223633, 0.022114944458007813, 0.022253791809082032, 0.02204876708984375, 0.022032608032226564, 0.02204444885253906, 0.021907039642333984, 0.02200953674316406, 0.02198806381225586, 0.02198873519897461, 0.021926528930664064, 0.02257219123840332, 0.022102144241333006, 0.022087711334228516, 0.022942399978637694, 0.022095232009887694, 0.02199001693725586, 0.02244812774658203, 0.022173696517944336, 0.022204416275024414, 0.022185983657836913, 0.02209587287902832, 0.022226943969726562, 0.022370304107666016, 0.0223191032409668, 0.022396928787231447, 0.022235136032104492, 0.022321151733398437, 0.022212608337402344, 0.022178943634033204, 0.02209791946411133, 
0.022000511169433593, 0.0222423038482666, 0.0219368953704834, 0.0223110408782959, 0.021967168807983398, 0.02187993621826172, 0.02198124885559082, 0.021836416244506836, 0.02207744026184082, 0.021979135513305666, 0.02290073585510254, 0.022091775894165038, 0.022013151168823242, 0.022030752182006837, 0.021929664611816405, 0.0221964168548584, 0.022042623519897463, 0.022062816619873048, 0.02226255989074707, 0.022037952423095704, 0.02206163215637207, 0.02195180892944336, 0.021954816818237306, 0.022657215118408205, 0.023197696685791015, 0.02243737602233887, 0.022567424774169922, 0.02218943977355957, 0.022010623931884767, 0.022199968338012695, 0.02233776092529297, 0.022120447158813478, 0.022200319290161134, 0.021975040435791016, 0.022263551712036134, 0.022112287521362305, 0.02232547187805176, 0.02209791946411133, 0.02232524871826172, 0.022441984176635742, 0.022316736221313478, 0.02219036865234375, 0.022157344818115234, 0.02211625671386719, 0.022081279754638673, 0.02223904037475586, 0.02225551986694336, 0.022057600021362304, 0.022116352081298828, 0.023633920669555664, 0.023980031967163085, 0.022480159759521483, 0.02226425552368164, 0.022276384353637695, 0.0222740478515625, 0.022319040298461913, 0.022308927536010742, 0.02231667137145996, 0.02224985694885254, 0.02210201644897461, 0.022126527786254884, 0.022265920639038084, 0.021995519638061522, 0.02212803268432617, 0.02217635154724121, 0.022108383178710937, 0.022193248748779298, 0.022368959426879883, 0.022343679428100584, 0.022460256576538086, 0.022319263458251953, 0.022222208023071288, 0.022272640228271485, 0.022277280807495116, 0.022146175384521485, 0.022238048553466797, 0.02229132843017578, 0.022101760864257813, 0.02214668846130371, 0.02208627128601074, 0.02245996856689453, 0.022388351440429687, 0.0222271671295166, 0.022276704788208007, 0.022477888107299806, 0.022354240417480468, 0.02248111915588379]",tokens/s,45.1246356244764,,, 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = 
scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: BloomForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpfwovd2cl/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: BloomForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,1032.044544,1165.88544,0.0,763.363328,738.157568,s,1,10.490732421875,10.490732421875,0.0,10.490732421875,10.490732421875,10.490732421875,10.490732421875,[10.490732421875],,kWh,2.8986129462504092e-05,3.188998627560698e-06,9.09111838399805e-06,4.126624647406284e-05,,MB,1512.947712,1442.709504,0.0,1025.507328,1004.427776,s,10,0.25071494483947754,0.025071494483947753,0.00017393241500969782,0.025026816368103028,0.0253453893661499,0.025374679088592526,0.025398110866546628,"[0.0251429443359375, 0.025116031646728515, 0.02488819122314453, 0.02496614456176758, 0.025338880538940428, 0.024915136337280274, 0.024890239715576173, 0.025403968811035155, 0.025087488174438476, 0.024965919494628907]",tokens/s,10210.799366743226,kWh,7.424954950296073e-07,8.183753351226407e-08,4.894399008578689e-07,1.3137729293997402e-06,tokens/kWh,194858635.21100697,MB,1549.512704,1480.45824,0.0,1063.256064,1004.430336,s,10,13.331399902343747,1.333139990234375,0.004286310141339791,1.3331047973632812,1.3390624389648438,1.3404491882324219,1.3415585876464844,"[1.3387542724609376, 1.330295654296875, 1.331462890625, 1.3328902587890625, 1.3418359375, 1.3337685546875, 1.3257952880859376, 1.329802978515625, 1.3334747314453126, 1.3333193359375]",tokens/s,47.25685258974504,kWh,3.897711356580307e-05,4.2987738850243576e-06,1.562410076774202e-05,5.889998821856944e-05,tokens/kWh,1069609.7215879909,,s,630,13.32868243598938,0.02115663878728473,0.00033133480333597703,0.021107391357421873,0.021373133277893065,0.0214826416015625,0.022166827812194825,"[0.021362688064575194, 0.020964672088623047, 0.020976320266723632, 0.022194175720214843, 0.021352447509765626, 0.021094400405883788, 0.020987903594970703, 0.02105548858642578, 0.021184255599975586, 0.021030303955078124, 0.021299840927124024, 0.02108233642578125, 0.021338144302368165, 0.021224992752075195, 0.02108051109313965, 0.02116211128234863, 0.020958816528320313, 0.021049631118774413, 0.021278303146362306, 0.021033376693725587, 0.021159679412841796, 0.021034303665161132, 0.021034048080444335, 0.021310976028442383, 0.021763711929321288, 0.02111769676208496, 0.021321727752685548, 0.0214835205078125, 0.021153791427612305, 0.02123980712890625, 0.021053440093994142, 0.021440511703491212, 0.02110054397583008, 0.021196800231933592, 0.02111609649658203, 0.021290048599243164, 0.021309183120727538, 0.02129100799560547, 0.021237119674682618, 0.021422719955444335, 0.021227519989013673, 0.02144211196899414, 0.0213787841796875, 0.02115862464904785, 0.02111471939086914, 0.021233823776245116, 0.021394784927368165, 0.021207103729248045, 0.02138368034362793, 0.021197919845581056, 0.021049695968627928, 0.021559520721435545, 0.02126483154296875, 0.021298463821411134, 0.021264575958251954, 0.021248544692993164, 
0.021202272415161132, 0.021362783432006836, 0.021533248901367187, 0.021327871322631836, 0.021170175552368165, 0.021344255447387696, 0.021329919815063478, 0.02150809669494629, 0.021217279434204102, 0.021215391159057618, 0.021197792053222655, 0.020990848541259766, 0.021129215240478515, 0.0216494083404541, 0.0216944637298584, 0.021176319122314453, 0.021295103073120117, 0.02132569694519043, 0.02111827278137207, 0.020976448059082033, 0.02108415985107422, 0.021221248626708985, 0.0209880313873291, 0.0211539192199707, 0.020963199615478517, 0.021155487060546874, 0.020912479400634766, 0.020967424392700194, 0.021077407836914062, 0.021090879440307617, 0.021291040420532228, 0.02104934310913086, 0.02123980712890625, 0.021737152099609375, 0.020969343185424805, 0.02094291114807129, 0.020991519927978517, 0.020891647338867187, 0.02104729652404785, 0.021056352615356447, 0.02083990478515625, 0.02100432014465332, 0.020844255447387695, 0.020912351608276366, 0.020969823837280275, 0.02115782356262207, 0.021192352294921876, 0.02114419174194336, 0.021069311141967775, 0.02099404716491699, 0.021137920379638672, 0.02102681541442871, 0.020985855102539062, 0.021172224044799806, 0.021186559677124024, 0.021211135864257814, 0.02100147247314453, 0.02086783981323242, 0.021092319488525392, 0.02107734489440918, 0.021076671600341795, 0.021139455795288087, 0.021564735412597656, 0.021082815170288087, 0.021172096252441406, 0.021150943756103515, 0.02100931167602539, 0.020919456481933593, 0.020822879791259765, 0.020880640029907225, 0.020944896697998046, 0.02100361633300781, 0.02090665626525879, 0.0210513916015625, 0.02105036735534668, 0.021109760284423826, 0.02123161506652832, 0.021354496002197267, 0.020944896697998046, 0.02099404716491699, 0.020857120513916017, 0.020864736557006835, 0.020996095657348633, 0.021112991333007813, 0.021186399459838866, 0.021067935943603514, 0.021136383056640624, 0.021035327911376953, 0.02107788848876953, 0.02116227149963379, 0.021410175323486328, 0.021155839920043946, 0.021200895309448242, 0.021164031982421876, 0.02117945671081543, 0.02103811264038086, 0.021100448608398437, 0.02117238426208496, 0.021014368057250977, 0.021116479873657227, 0.021187007904052733, 0.021187679290771484, 0.021236480712890624, 0.02110812759399414, 0.02129177665710449, 0.021218656539916992, 0.021559104919433594, 0.021314111709594727, 0.021137311935424806, 0.02162451171875, 0.021346559524536134, 0.02140166473388672, 0.021461376190185545, 0.021147647857666017, 0.021336063385009766, 0.02151219177246094, 0.021028863906860353, 0.02103001594543457, 0.021057727813720704, 0.02083705520629883, 0.02100223922729492, 0.021004159927368163, 0.020947071075439454, 0.02101215934753418, 0.021174816131591796, 0.021112607955932616, 0.02127257537841797, 0.021020288467407226, 0.020961824417114257, 0.02091811180114746, 0.021006336212158205, 0.020938175201416016, 0.0211661434173584, 0.021776735305786134, 0.021536767959594725, 0.021202943801879884, 0.020885696411132814, 0.02107744026184082, 0.021195135116577148, 0.021304832458496094, 0.021078527450561522, 0.020938751220703124, 0.021020671844482423, 0.02094060707092285, 0.02093484878540039, 0.021063199996948244, 0.02089561653137207, 0.02113164710998535, 0.02100655937194824, 0.020938079833984376, 0.021020479202270508, 0.020996192932128906, 0.0211463680267334, 0.02128211212158203, 0.02123436737060547, 0.02166169548034668, 0.021372928619384765, 0.021743616104125976, 0.021331968307495116, 0.02134966468811035, 0.021129951477050782, 0.021170175552368165, 0.02110416030883789, 0.020924896240234376, 
0.020922367095947265, 0.02090188789367676, 0.02105753517150879, 0.02124799919128418, 0.02127840042114258, 0.021078336715698243, 0.02113564872741699, 0.021126911163330077, 0.02127676773071289, 0.02108336067199707, 0.02120966339111328, 0.021131359100341796, 0.02124799919128418, 0.02103910446166992, 0.021090303421020508, 0.02119820785522461, 0.020904159545898436, 0.02140611267089844, 0.02100774383544922, 0.02105107116699219, 0.02110374450683594, 0.02103891181945801, 0.021249887466430663, 0.02109859275817871, 0.02121241569519043, 0.021107519149780273, 0.021188512802124023, 0.021160032272338865, 0.021168127059936523, 0.02104729652404785, 0.021274463653564453, 0.021158048629760742, 0.02131043243408203, 0.021147327423095705, 0.021223455429077148, 0.021335296630859375, 0.02109129524230957, 0.021236831665039063, 0.02145167922973633, 0.021202943801879884, 0.021221088409423827, 0.02118492889404297, 0.020981632232666015, 0.021123071670532227, 0.02120697593688965, 0.02134022331237793, 0.02109222412109375, 0.021188640594482423, 0.021137311935424806, 0.025980255126953126, 0.02393600082397461, 0.021045087814331054, 0.021317632675170898, 0.02107094383239746, 0.021186912536621094, 0.02097823905944824, 0.0210731201171875, 0.02122742462158203, 0.02111782455444336, 0.020963327407836914, 0.021053247451782227, 0.020943103790283205, 0.021020095825195314, 0.021354272842407228, 0.021086944580078124, 0.02101862335205078, 0.02107574462890625, 0.021000255584716798, 0.021500064849853517, 0.0216760311126709, 0.0212807674407959, 0.021184223175048828, 0.021184799194335937, 0.021435712814331053, 0.02118931198120117, 0.021042207717895507, 0.021250015258789064, 0.021152767181396484, 0.021218944549560546, 0.02106582450866699, 0.02112745666503906, 0.021129215240478515, 0.02123116874694824, 0.021160320281982423, 0.021069887161254883, 0.021364736557006835, 0.02135856056213379, 0.021379104614257814, 0.021180416107177736, 0.020934656143188478, 0.020957183837890626, 0.02105062484741211, 0.021144319534301757, 0.021028863906860353, 0.021360639572143555, 0.02209987258911133, 0.02273004722595215, 0.021258752822875978, 0.0211110725402832, 0.02112895965576172, 0.02102911949157715, 0.020933727264404296, 0.021000415802001952, 0.021099199295043947, 0.0208855037689209, 0.02095871925354004, 0.020961727142333984, 0.021068063735961914, 0.021169952392578125, 0.021161279678344726, 0.021099199295043947, 0.021438207626342774, 0.021132991790771483, 0.021090879440307617, 0.021061632156372072, 0.021131263732910157, 0.021177759170532228, 0.021070655822753907, 0.02137183952331543, 0.021187423706054687, 0.021223424911499023, 0.021110784530639647, 0.021374975204467773, 0.02129849624633789, 0.021258943557739256, 0.02107187271118164, 0.021130367279052733, 0.021449184417724608, 0.021129632949829103, 0.021194751739501954, 0.021061632156372072, 0.021059392929077148, 0.021433887481689454, 0.021056095123291017, 0.021148927688598634, 0.020910911560058594, 0.020837823867797853, 0.02142201614379883, 0.0213305606842041, 0.021972991943359374, 0.02114905548095703, 0.021066368103027342, 0.021172224044799806, 0.021211135864257814, 0.0211146240234375, 0.021079456329345703, 0.020957664489746095, 0.02099648094177246, 0.021048383712768556, 0.02099500846862793, 0.020973567962646485, 0.02103091239929199, 0.020953088760375976, 0.020928512573242186, 0.02087936019897461, 0.02099404716491699, 0.02089779281616211, 0.021212160110473634, 0.021204992294311522, 0.02105286407470703, 0.020930112838745116, 0.021083040237426756, 0.020973663330078125, 0.020920320510864256, 
0.021097984313964844, 0.020924928665161133, 0.021108383178710936, 0.020963455200195314, 0.0210098876953125, 0.021107263565063476, 0.02101046371459961, 0.020954336166381836, 0.021056447982788086, 0.020858879089355468, 0.020999296188354492, 0.021011327743530272, 0.021030431747436525, 0.021108543395996094, 0.02108892822265625, 0.021433696746826172, 0.020971328735351562, 0.021013343811035156, 0.021198112487792967, 0.021068511962890626, 0.021098495483398438, 0.021083328247070314, 0.02098054313659668, 0.020954719543457033, 0.021029279708862304, 0.02112019157409668, 0.021033792495727538, 0.0210882568359375, 0.020983808517456053, 0.02122547149658203, 0.02106777572631836, 0.021008384704589843, 0.021118175506591796, 0.02110032081604004, 0.02102783966064453, 0.020968799591064454, 0.021090560913085938, 0.021006431579589844, 0.020913631439208984, 0.021202880859375, 0.020966175079345704, 0.020920448303222657, 0.021129215240478515, 0.020992000579833983, 0.02107187271118164, 0.021127103805541992, 0.021034719467163086, 0.020947296142578124, 0.020929983139038086, 0.02100281524658203, 0.020844703674316407, 0.020968992233276366, 0.021086528778076173, 0.020944896697998046, 0.020930559158325195, 0.021280479431152344, 0.021045536041259767, 0.021435007095336914, 0.021215103149414063, 0.021444959640502928, 0.021294879913330077, 0.02112249565124512, 0.021199424743652343, 0.021129215240478515, 0.02111190414428711, 0.021121152877807615, 0.020790048599243164, 0.020930335998535155, 0.02083008003234863, 0.020926816940307617, 0.02084556770324707, 0.0209453125, 0.02095280075073242, 0.021058431625366213, 0.02087721633911133, 0.020779104232788087, 0.0212410888671875, 0.020983903884887696, 0.020882080078125, 0.02079539108276367, 0.02104729652404785, 0.021069120407104493, 0.021014400482177734, 0.021016927719116212, 0.022095935821533203, 0.0214815673828125, 0.0211376953125, 0.021177759170532228, 0.021145984649658202, 0.021005599975585938, 0.020938848495483397, 0.021080127716064455, 0.021176799774169922, 0.021121376037597655, 0.021202943801879884, 0.021061632156372072, 0.0214354248046875, 0.021117919921875, 0.02122956848144531, 0.021102592468261717, 0.021131263732910157, 0.021207040786743164, 0.021121023178100586, 0.02115488052368164, 0.020988319396972658, 0.021084928512573244, 0.020969247817993163, 0.02089401626586914, 0.02101126480102539, 0.020949888229370116, 0.021080160140991212, 0.021387008666992186, 0.021020832061767577, 0.021144735336303712, 0.021740320205688477, 0.021043264389038085, 0.02107561683654785, 0.020978015899658205, 0.020899839401245117, 0.021073247909545897, 0.02140729522705078, 0.021237951278686523, 0.021127231597900392, 0.021114559173583985, 0.020874176025390625, 0.021086208343505858, 0.021131263732910157, 0.021157888412475585, 0.02126643180847168, 0.021115999221801757, 0.020869247436523436, 0.0210251522064209, 0.02107379150390625, 0.021037376403808594, 0.02106390380859375, 0.02112428855895996, 0.021109567642211915, 0.0209649600982666, 0.02090230369567871, 0.020868511199951173, 0.0208492488861084, 0.021012479782104493, 0.0210882568359375, 0.02099404716491699, 0.021198848724365234, 0.021042591094970704, 0.021060192108154296, 0.021127328872680665, 0.021091552734375, 0.021039007186889648, 0.022095775604248045, 0.0209866886138916, 0.021161983489990235, 0.020905664443969726, 0.020958560943603516, 0.021008575439453125, 0.020918783187866212, 0.021045536041259767, 0.020961280822753905, 0.021114528656005858, 0.021116735458374024, 0.02109494400024414, 0.02098543930053711, 0.021264799118041994, 0.02143027114868164, 
0.021009727478027342, 0.021136064529418946, 0.021213184356689452, 0.021050752639770506, 0.02115350341796875, 0.021079999923706055, 0.02100495910644531, 0.02090220832824707, 0.020920608520507814, 0.02303971290588379, 0.02403046417236328, 0.020951808929443358, 0.020850528717041017, 0.02155683135986328, 0.021283039093017576, 0.021032831192016602, 0.02097404861450195, 0.02090768051147461, 0.02134169578552246, 0.02142883110046387, 0.021194496154785157, 0.021151744842529296, 0.02111065673828125, 0.02119161605834961, 0.020890432357788084, 0.020982719421386718, 0.02103091239929199, 0.021017728805541994, 0.02099446487426758, 0.02091788864135742, 0.020849119186401366, 0.022308704376220703, 0.021025279998779296, 0.02102889633178711, 0.021114368438720704, 0.021031423568725584, 0.02108982467651367, 0.021074399948120118, 0.02112512016296387, 0.021334016799926758, 0.02127462387084961, 0.021288223266601562, 0.02143449592590332, 0.021305952072143555, 0.0210565128326416, 0.021398752212524415, 0.021155616760253907, 0.021315584182739256, 0.021390655517578124, 0.02118239974975586, 0.021035776138305665, 0.021225568771362304, 0.020971424102783204, 0.02104934310913086, 0.020944160461425783, 0.02098863983154297, 0.02097724723815918, 0.02099446487426758, 0.02100223922729492, 0.020956319808959963, 0.021227968215942382, 0.02116649627685547, 0.021175647735595705, 0.021314207077026366, 0.021028863906860353, 0.02101036834716797, 0.021370176315307618, 0.021059328079223633, 0.021021696090698243, 0.021124351501464845, 0.021168895721435547, 0.021278175354003906, 0.02110256004333496, 0.021528736114501953, 0.02121494483947754, 0.021179071426391603, 0.02123904037475586, 0.021012863159179686, 0.021401983261108398, 0.02107187271118164, 0.021168127059936523]",tokens/s,47.26648736854203,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run 
self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: BloomForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: BloomForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp72dcfv7k/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: 
BloomForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4928.659456,7235.043328,0.0,6849.298432,6445.09696,s,1,11.387568359375,11.387568359375,0.0,11.387568359375,11.387568359375,11.387568359375,11.387568359375,[11.387568359375],,kWh,0.00012017866807918835,1.324901310198794e-05,4.040864343801687e-05,0.00017383632461919315,,MB,2814.431232,7587.364864,0.0,7172.25984,6823.3216,s,10,3.746279113769531,0.3746279113769531,0.0022526261216574742,0.3748661804199219,0.3766276489257812,0.3768137023925781,0.37696254516601563,"[0.3688858642578125, 0.37485842895507815, 0.37487393188476564, 0.37255511474609376, 0.3765863037109375, 0.37595068359375, 0.3748364562988281, 0.37607952880859374, 0.37465304565429686, 0.376999755859375]",tokens/s,683.3447061086996,kWh,1.1013471898764063e-05,1.2145244708521853e-06,7.320633428517417e-06,1.9548629798133665e-05,tokens/kWh,13095546.984292509,MB,2818.60096,7591.559168,0.0,7174.356992,6823.32416,s,10,23.458792724609374,2.345879272460937,0.003864573791480087,2.34478369140625,2.3513216064453126,2.3523865112304687,2.3532384350585938,"[2.348079833984375, 2.34691162109375, 2.339985595703125, 2.345116455078125, 2.344450927734375, 2.343015625, 2.343910888671875, 2.342785400390625, 2.353451416015625, 2.3510849609375]",tokens/s,26.85560196536033,kWh,6.818470084374108e-05,7.520745404262604e-06,4.527088086688512e-05,0.00012097632711488882,tokens/kWh,520763.04102182033,,s,630,23.45631365585327,0.0372322438981798,0.0005255021219551778,0.0371220645904541,0.03760292930603027,0.03791191959381104,0.03941275997161865,"[0.03835084915161133, 0.03713452911376953, 0.03699248123168945, 0.03702614212036133, 0.03688819122314453, 0.037150177001953125, 0.03684569549560547, 0.037550655364990235, 0.037056129455566404, 0.03727011108398438, 0.03728179168701172, 0.03720608139038086, 0.03705644989013672, 0.03712204742431641, 0.03689625549316406, 0.037019329071044924, 0.03699555206298828, 0.037015518188476565, 0.036862110137939455, 0.03703376007080078, 0.037091201782226565, 0.0371343994140625, 0.0371352653503418, 0.03776396942138672, 0.038075008392333985, 0.0377724494934082, 0.03760393524169922, 0.03761391830444336, 0.03745312118530274, 0.03737260818481445, 0.03731417465209961, 0.037290401458740234, 0.037312000274658204, 0.0375274543762207, 0.03780460739135742, 0.037424800872802734, 0.03701497650146485, 0.03787868881225586, 0.038449119567871094, 0.03732204818725586, 0.03703414535522461, 
0.03718182373046875, 0.03710995101928711, 0.037295360565185544, 0.036983264923095706, 0.03699257659912109, 0.0369835205078125, 0.03718278503417969, 0.03722310256958008, 0.03723427200317383, 0.03706240081787109, 0.03738281631469727, 0.03738009643554688, 0.037220352172851565, 0.03723468780517578, 0.03720783996582031, 0.03705184173583984, 0.037346080780029295, 0.03699235153198242, 0.03709814453125, 0.03709542465209961, 0.03733440017700195, 0.03709811019897461, 0.03829388809204102, 0.03846118545532227, 0.037343807220458984, 0.037109664916992184, 0.037482494354248046, 0.03824435043334961, 0.037730304718017575, 0.03762995147705078, 0.03726540756225586, 0.03726131057739258, 0.03761356735229492, 0.03723199844360352, 0.0373438720703125, 0.03715609741210937, 0.03728192138671875, 0.037524097442626955, 0.037298240661621095, 0.036936927795410156, 0.037001953125, 0.03688969421386719, 0.03685219192504883, 0.03722284698486328, 0.03703539276123047, 0.03702588653564453, 0.03716969680786133, 0.03710976028442383, 0.036956161499023435, 0.04078790283203125, 0.03722655868530273, 0.03720601654052735, 0.0369758415222168, 0.037010208129882816, 0.037230270385742184, 0.037385761260986326, 0.037318782806396486, 0.03710224151611328, 0.03709337615966797, 0.03735948944091797, 0.03696828842163086, 0.03678211212158203, 0.037559585571289064, 0.03695100784301758, 0.037144577026367184, 0.03696640014648438, 0.036972030639648434, 0.03684403228759765, 0.0368240966796875, 0.03678691101074219, 0.03712639999389648, 0.03690636825561523, 0.03702150344848633, 0.03677062225341797, 0.03684966278076172, 0.03682889556884766, 0.03686358261108398, 0.03674982452392578, 0.036944065093994144, 0.036972095489501956, 0.03747651290893555, 0.03755408096313476, 0.03723712158203125, 0.03727769470214844, 0.037130241394042966, 0.03808665466308594, 0.03719372940063476, 0.037029888153076174, 0.03687014389038086, 0.037181407928466796, 0.037005054473876954, 0.03681014251708985, 0.037071102142333986, 0.036756095886230467, 0.03681497573852539, 0.0371569938659668, 0.037421825408935544, 0.037266433715820314, 0.03739788818359375, 0.03736435317993164, 0.037416702270507814, 0.03720832061767578, 0.037449726104736326, 0.03733071899414062, 0.03701372909545898, 0.03687014389038086, 0.037160961151123044, 0.03722454452514649, 0.03720534515380859, 0.03721683120727539, 0.037058433532714846, 0.03706880187988281, 0.03704230499267578, 0.037107585906982425, 0.037235870361328124, 0.037086177825927734, 0.03707904052734375, 0.037071903228759764, 0.0369486083984375, 0.03706073760986328, 0.036833248138427734, 0.036989185333251955, 0.03696025466918945, 0.03695820617675781, 0.037425151824951174, 0.03754956817626953, 0.037118465423583984, 0.03708108901977539, 0.037119998931884765, 0.037183647155761716, 0.037519199371337894, 0.037449726104736326, 0.037187583923339845, 0.03725107192993164, 0.037619998931884766, 0.03723974227905273, 0.03720476913452148, 0.03677811050415039, 0.03704204940795899, 0.03783235168457031, 0.036825439453125, 0.036931583404541016, 0.03691439819335938, 0.03688118362426758, 0.03678966522216797, 0.03679190444946289, 0.03706265640258789, 0.03692556762695313, 0.03813606262207031, 0.037195903778076175, 0.03700326538085937, 0.036841472625732424, 0.037099327087402344, 0.037230304718017575, 0.03693353652954102, 0.036952129364013674, 0.03685599899291992, 0.03680611038208008, 0.042767200469970706, 0.037133567810058596, 0.03723545455932617, 0.03699097442626953, 0.03699897766113281, 0.036710590362548826, 0.036947711944580075, 0.03682048034667969, 0.03689497756958008, 
0.036848129272460936, 0.03727155303955078, 0.03714825439453125, 0.03711369705200195, 0.03706719970703125, 0.037181537628173826, 0.0371220817565918, 0.03701510238647461, 0.037496543884277346, 0.03724566268920899, 0.037054462432861326, 0.03722649765014648, 0.038922271728515624, 0.03795503997802734, 0.03724937438964844, 0.036957695007324216, 0.03683804702758789, 0.036890625, 0.036790271759033204, 0.03700310516357422, 0.036759071350097657, 0.03691993713378906, 0.036850849151611326, 0.03677632141113281, 0.03703260803222656, 0.03689023971557617, 0.037709537506103515, 0.0372782096862793, 0.03731571197509766, 0.0375079345703125, 0.03726131057739258, 0.037349441528320315, 0.037109375, 0.03729030227661133, 0.03720191955566406, 0.037005313873291014, 0.03711180877685547, 0.037246017456054686, 0.03732371139526367, 0.03701939010620117, 0.03695420837402344, 0.03693952178955078, 0.03699283218383789, 0.037081535339355466, 0.03841513442993164, 0.03801667022705078, 0.03753814315795898, 0.037322303771972654, 0.03738476943969726, 0.037207073211669925, 0.03710038375854492, 0.03715686416625977, 0.036999008178710935, 0.037283649444580076, 0.03749923324584961, 0.03737728118896484, 0.03708390426635742, 0.036908191680908205, 0.03940028762817383, 0.037367809295654295, 0.03704739379882813, 0.03694041442871094, 0.03713872146606445, 0.036977855682373044, 0.0369315185546875, 0.03816128158569336, 0.03722854232788086, 0.03727155303955078, 0.03723171234130859, 0.03703081512451172, 0.037220352172851565, 0.03687628936767578, 0.03698483276367188, 0.037058048248291016, 0.0369705924987793, 0.03695862579345703, 0.037043998718261716, 0.03691747283935547, 0.03698499298095703, 0.03677785491943359, 0.036932991027832034, 0.03734179306030273, 0.03739225769042969, 0.036894622802734374, 0.03684374237060547, 0.03685171127319336, 0.037077247619628904, 0.037080833435058594, 0.037082145690917966, 0.03693862533569336, 0.03713417434692383, 0.03727609634399414, 0.03715024185180664, 0.03687452697753906, 0.036874240875244144, 0.03710121536254883, 0.03692319869995117, 0.03710416030883789, 0.03777740859985351, 0.03747452926635742, 0.03728976058959961, 0.037326847076416016, 0.037142337799072264, 0.03717548751831055, 0.0372525749206543, 0.03707094573974609, 0.03700576019287109, 0.03814118576049805, 0.03725791931152344, 0.0369846076965332, 0.03736608123779297, 0.0390302734375, 0.03745024108886719, 0.0370601921081543, 0.037396896362304685, 0.036972545623779295, 0.03706060791015625, 0.036982784271240236, 0.036779617309570314, 0.03692108917236328, 0.03693840026855469, 0.03685516738891602, 0.03679916763305664, 0.0369312629699707, 0.03699942398071289, 0.03708927917480469, 0.037103614807128905, 0.03730963134765625, 0.037251903533935544, 0.03713564682006836, 0.03703267288208008, 0.03698412704467773, 0.036956863403320314, 0.03706265640258789, 0.03714812850952148, 0.03736342239379883, 0.03743606567382812, 0.03698294448852539, 0.037065761566162106, 0.03699811172485352, 0.03714022445678711, 0.03725747299194336, 0.03843193435668945, 0.037507328033447265, 0.037278270721435545, 0.037209217071533206, 0.03718633651733398, 0.03716924667358398, 0.03697868728637695, 0.0369477767944336, 0.03703827285766602, 0.037015552520751956, 0.037275646209716795, 0.037236736297607424, 0.037130241394042966, 0.037443584442138675, 0.03729532623291015, 0.03718633651733398, 0.03686918258666992, 0.037048896789550784, 0.03770355224609375, 0.03702780914306641, 0.03679695892333985, 0.03710902404785156, 0.03694015884399414, 0.037056865692138674, 0.03680051040649414, 0.036988929748535154, 
0.03753683090209961, 0.037294750213623044, 0.03831548690795898, 0.03740351867675781, 0.03733913421630859, 0.03731660842895508, 0.037204063415527344, 0.037128097534179685, 0.037082752227783206, 0.03710598373413086, 0.03700537490844726, 0.03707817459106445, 0.03705059051513672, 0.03696902465820313, 0.03707910537719727, 0.03687628936767578, 0.03686105728149414, 0.03741785430908203, 0.03769753646850586, 0.037318656921386716, 0.0372674560546875, 0.03785113525390625, 0.037324127197265626, 0.03733776092529297, 0.03697983932495117, 0.036952960968017576, 0.03691475296020508, 0.03712992095947266, 0.03686041641235351, 0.03675104141235352, 0.03702035140991211, 0.037062526702880856, 0.03725721740722656, 0.037169151306152344, 0.03706880187988281, 0.03691110229492187, 0.03692547225952148, 0.036712417602539064, 0.03710732650756836, 0.03704665756225586, 0.036765697479248044, 0.036953407287597655, 0.036702911376953126, 0.03670425415039062, 0.0368455696105957, 0.03705193710327148, 0.03711590576171875, 0.03719830322265625, 0.03702710342407227, 0.03714870452880859, 0.040110145568847656, 0.03708550262451172, 0.03705427169799805, 0.037279582977294924, 0.037124767303466796, 0.03726921463012695, 0.03712438583374023, 0.036939777374267575, 0.03688447952270508, 0.037220352172851565, 0.03720191955566406, 0.03941785430908203, 0.037131870269775394, 0.037364383697509766, 0.037033729553222657, 0.03844278335571289, 0.03766908645629883, 0.037246238708496096, 0.03723526382446289, 0.03782672119140625, 0.03718300628662109, 0.03722902297973633, 0.04056268692016601, 0.03775859069824219, 0.03727967834472656, 0.03711779022216797, 0.03686646270751953, 0.03726908874511719, 0.037329345703125, 0.03718326568603516, 0.03694220733642578, 0.03708028793334961, 0.03704297637939453, 0.037146625518798826, 0.037103614807128905, 0.03703744125366211, 0.03698342514038086, 0.03703603363037109, 0.036969886779785154, 0.03737251281738281, 0.03706380844116211, 0.0369119987487793, 0.03682089614868164, 0.03692294311523438, 0.03698518371582031, 0.037025726318359375, 0.036937889099121095, 0.03681001663208008, 0.03728876876831055, 0.03681484985351562, 0.036824512481689456, 0.037048896789550784, 0.036659198760986327, 0.036705631256103516, 0.036647552490234374, 0.036896800994873045, 0.03717244720458984, 0.03698767852783203, 0.03690086364746094, 0.03693772888183594, 0.03719782257080078, 0.037263103485107425, 0.03727795028686524, 0.03740182495117188, 0.03746691131591797, 0.03763814544677734, 0.03749068832397461, 0.0373493766784668, 0.03736550521850586, 0.037146881103515626, 0.03696223831176758, 0.03700332641601563, 0.03683097457885742, 0.03687174224853516, 0.03688518524169922, 0.03706060791015625, 0.03711385726928711, 0.036946048736572264, 0.03823283386230469, 0.0374582405090332, 0.03716022491455078, 0.03720876693725586, 0.03724230575561523, 0.03731206512451172, 0.03717824172973633, 0.03718963241577149, 0.03706412887573242, 0.036956607818603514, 0.036981086730957034, 0.037109535217285154, 0.03717318344116211, 0.036991039276123044, 0.037023937225341794, 0.037117759704589845, 0.036697311401367186, 0.036926143646240236, 0.037031070709228515, 0.037055423736572266, 0.03703398513793945, 0.037244544982910154, 0.03701606369018555, 0.037212032318115235, 0.037253120422363284, 0.037012542724609375, 0.037129150390625, 0.03723174285888672, 0.03709836959838867, 0.03712409591674805, 0.03732080078125, 0.03729939270019531, 0.03746892929077148, 0.03732783889770508, 0.03732992172241211, 0.0369716796875, 0.037061473846435544, 0.037090526580810544, 0.036915550231933596, 
0.03707689666748047, 0.03684793472290039, 0.0369062385559082, 0.037388351440429686, 0.03733375930786133, 0.03764982223510742, 0.03723334503173828, 0.037257152557373045, 0.0407512321472168, 0.03835686492919922, 0.03762598419189453, 0.03765862274169922, 0.037722110748291016, 0.037574657440185545, 0.041457504272460935, 0.03744147109985352, 0.03761993789672852, 0.03718265533447265, 0.03713222503662109, 0.03715750503540039, 0.03738345718383789, 0.037128799438476565, 0.036962688446044924, 0.037088768005371094, 0.03833542251586914, 0.03792832183837891, 0.03704083251953125, 0.03702758407592773, 0.03705033493041992, 0.03708335876464844, 0.037072769165039064, 0.03724284744262695, 0.03722447967529297, 0.03889779281616211, 0.03748863983154297, 0.0373043212890625, 0.03770281600952148, 0.03748659133911133, 0.03751375961303711, 0.03709983825683594, 0.037185535430908204, 0.037029056549072265, 0.03695840072631836, 0.03725503921508789, 0.03739315032958984, 0.037408287048339844, 0.03760281753540039, 0.03937174224853516, 0.03738214492797851, 0.03764985656738281, 0.03736191940307617, 0.03722681427001953, 0.03721388626098633, 0.03696364974975586, 0.0370731201171875, 0.03691600036621094, 0.03783475112915039, 0.0372490234375, 0.03702329635620117, 0.03710815811157227, 0.03726131057739258, 0.03740262222290039, 0.03723788833618164, 0.03700825500488281, 0.037144577026367184, 0.03706864166259766, 0.03708329772949219, 0.03750467300415039, 0.0369339828491211, 0.03691241455078125, 0.037292766571044925, 0.037029888153076174, 0.03789187240600586, 0.03759321594238281, 0.03762799835205078, 0.036830528259277344, 0.037141376495361325, 0.0368485107421875, 0.03674003219604492, 0.03768662261962891, 0.03716934585571289, 0.036970657348632814, 0.036935359954833984, 0.036849536895751954, 0.03749049758911133, 0.0371578254699707, 0.037308414459228514]",tokens/s,26.858440300690223,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1332.649984,1090.387968,0.0,704.643072,675.01056,s,1,8.2217880859375,8.2217880859375,0.0,8.2217880859375,8.2217880859375,8.2217880859375,8.2217880859375,[8.2217880859375],,kWh,2.9531209458385392e-05,3.2502978609092385e-06,9.594452120009755e-06,4.2375959439304385e-05,,MB,1425.89952,1411.252224,0.0,996.1472,942.610432,s,10,0.33146281814575196,0.0331462818145752,0.0003174195228558357,0.0331023998260498,0.0333537841796875,0.03363873176574707,0.033866689834594725,"[0.03392367935180664, 0.0330300178527832, 0.03304374313354492, 0.03316105651855469, 0.032573665618896484, 0.03303670501708984, 0.033025791168212894, 0.03318083190917969, 0.033290462493896486, 
0.03319686508178711]",tokens/s,7723.339873597249,kWh,1.111301570548012e-06,1.2255646020950926e-07,7.341989397032363e-07,1.9680569704607574e-06,tokens/kWh,130077535.27585424,MB,1446.1952,1421.737984,0.0,1006.63296,942.612992,s,10,18.761693359375,1.8761693359374998,0.0038221001940809568,1.8750606689453124,1.882103771972656,1.8825993103027343,1.8829957409667968,"[1.8830948486328125, 1.87466943359375, 1.8780423583984376, 1.88199365234375, 1.8696715087890625, 1.8753275146484376, 1.873607177734375, 1.87361083984375, 1.8747938232421875, 1.8768822021484375]",tokens/s,33.57905856004178,kWh,5.452420276528608e-05,6.0137210541726355e-06,2.2104235997702324e-05,8.264215981716104e-05,tokens/kWh,762322.7676936603,,s,630,18.759476253509515,0.0297769464341421,0.00060572117531575,0.029646816253662108,0.030108877563476563,0.030615232372283935,0.03267911727905274,"[0.029636608123779298, 0.029689376831054687, 0.029818687438964844, 0.02946656036376953, 0.029698623657226562, 0.02943168067932129, 0.02939523124694824, 0.029978239059448242, 0.034201984405517576, 0.029649919509887695, 0.029467103958129882, 0.029512224197387697, 0.03060960006713867, 0.029322719573974608, 0.029319520950317382, 0.029400415420532226, 0.029391519546508788, 0.02977305603027344, 0.029456480026245117, 0.029603647232055663, 0.030108800888061525, 0.029848480224609376, 0.029741695404052734, 0.029654272079467775, 0.029838272094726562, 0.0299233283996582, 0.029894655227661132, 0.030080095291137695, 0.029938047409057617, 0.02986857604980469, 0.029833215713500977, 0.02993097686767578, 0.029673055648803712, 0.029809600830078126, 0.029550592422485353, 0.029570335388183593, 0.02963443183898926, 0.029608800888061525, 0.029586847305297852, 0.029549152374267577, 0.02959769630432129, 0.029433855056762694, 0.032257438659667965, 0.03192278480529785, 0.029654016494750978, 0.02958847999572754, 0.02949734306335449, 0.029453344345092773, 0.03155167961120606, 0.031474592208862305, 0.029743263244628906, 0.029894367218017578, 0.0296690559387207, 0.02980076789855957, 0.029683712005615235, 0.02965724754333496, 0.0296343994140625, 0.029681055068969727, 0.02975328063964844, 0.029639360427856445, 0.029497312545776366, 0.029555904388427735, 0.029776704788208007, 0.029444671630859374, 0.02938652801513672, 0.029646816253662108, 0.02949488067626953, 0.029292991638183594, 0.029360128402709962, 0.029249536514282228, 0.029646816253662108, 0.029256799697875976, 0.02946143913269043, 0.029267967224121092, 0.029345727920532225, 0.029560895919799806, 0.03447001647949219, 0.029884288787841797, 0.02969759941101074, 0.029608383178710937, 0.029656255722045898, 0.02970707130432129, 0.02984899139404297, 0.029835872650146485, 0.029603551864624024, 0.02952351951599121, 0.029872928619384766, 0.02988230323791504, 0.029840703964233398, 0.02957494354248047, 0.02974799919128418, 0.029612159729003905, 0.029496992111206054, 0.029608287811279298, 0.029594783782958985, 0.029659360885620118, 0.029520511627197266, 0.02953215980529785, 0.029511072158813476, 0.02991564750671387, 0.029403232574462892, 0.029491199493408202, 0.029535999298095705, 0.029382911682128907, 0.02936419105529785, 0.029480287551879883, 0.029278911590576173, 0.029296640396118165, 0.029304864883422852, 0.029378528594970702, 0.029849599838256836, 0.0343240966796875, 0.02964518356323242, 0.029425664901733397, 0.02940028762817383, 0.029694751739501955, 0.02941542434692383, 0.0297511043548584, 0.029745119094848633, 0.029933792114257812, 0.029908031463623048, 0.029856704711914064, 0.029865407943725587, 0.030120384216308593, 
0.030167167663574218, 0.029814048767089842, 0.029840223312377928, 0.029682783126831053, 0.029647647857666017, 0.02958355140686035, 0.029456384658813478, 0.029868032455444334, 0.029574880599975584, 0.02948534393310547, 0.029489152908325194, 0.029442047119140623, 0.029500640869140626, 0.029496095657348634, 0.0297938232421875, 0.0294814395904541, 0.029417472839355467, 0.029439199447631837, 0.02952272033691406, 0.02959564781188965, 0.03231129455566406, 0.031710847854614255, 0.029743648529052733, 0.0296712646484375, 0.029414560317993162, 0.029578304290771483, 0.029402944564819337, 0.02942255973815918, 0.02987884712219238, 0.029927871704101563, 0.02963625526428223, 0.0294935359954834, 0.02946668815612793, 0.030496768951416016, 0.029613664627075195, 0.029425727844238282, 0.029442399978637696, 0.029466623306274413, 0.029498815536499023, 0.029435712814331053, 0.029494016647338868, 0.02979216003417969, 0.029507423400878908, 0.029626367568969726, 0.02944166374206543, 0.02945449638366699, 0.029411327362060546, 0.029388256072998047, 0.029450687408447265, 0.029298336029052734, 0.029659936904907227, 0.029649023056030274, 0.02953215980529785, 0.02941279983520508, 0.029686431884765625, 0.033828609466552736, 0.02972892761230469, 0.03035308837890625, 0.032131393432617186, 0.02994118309020996, 0.030619840621948242, 0.02973734474182129, 0.029607936859130858, 0.030187519073486328, 0.029514944076538086, 0.02946713638305664, 0.029687456130981445, 0.029764991760253906, 0.029727712631225586, 0.029697343826293944, 0.029534912109375, 0.031573535919189454, 0.029567455291748045, 0.029600927352905274, 0.02963555145263672, 0.030302080154418945, 0.029410400390625, 0.029436832427978517, 0.029601791381835937, 0.029591552734375, 0.029451616287231447, 0.029482688903808594, 0.029314016342163084, 0.029274112701416017, 0.02924729537963867, 0.029370336532592772, 0.029443391799926756, 0.029268320083618165, 0.029592031478881835, 0.03399689483642578, 0.029700000762939452, 0.029538496017456055, 0.02975529670715332, 0.029812736511230467, 0.030267391204833984, 0.0311081600189209, 0.030024639129638674, 0.030121984481811522, 0.029771615982055664, 0.029650495529174804, 0.029862495422363283, 0.02997657585144043, 0.029784063339233398, 0.029700096130371095, 0.029829120635986327, 0.029869600296020506, 0.029644607543945312, 0.029682336807250978, 0.031066112518310547, 0.03271475219726563, 0.029847551345825195, 0.029748607635498046, 0.029748992919921877, 0.029669408798217774, 0.029754207611083983, 0.029708320617675782, 0.0295219841003418, 0.02950713539123535, 0.02950793647766113, 0.029569023132324217, 0.029663232803344725, 0.02978144073486328, 0.029672000885009767, 0.029851648330688478, 0.02978816032409668, 0.029633535385131835, 0.02996735954284668, 0.029892000198364257, 0.02968448066711426, 0.029681663513183593, 0.029499103546142578, 0.02948851203918457, 0.02952889633178711, 0.029569440841674805, 0.029489856719970703, 0.029533184051513672, 0.03314588928222656, 0.029864927291870118, 0.02976323127746582, 0.029878656387329103, 0.03006460762023926, 0.029790464401245116, 0.02969481658935547, 0.029483936309814454, 0.029560352325439455, 0.029525823593139648, 0.02940585517883301, 0.029607872009277343, 0.029573183059692382, 0.02940928077697754, 0.029245439529418944, 0.02934169578552246, 0.029370527267456054, 0.029396064758300783, 0.029415775299072265, 0.029364639282226563, 0.02943180847167969, 0.02972876739501953, 0.030068735122680663, 0.02989446449279785, 0.030187711715698243, 0.030158111572265625, 0.02983990478515625, 0.029865184783935548, 
0.02985638427734375, 0.02979408073425293, 0.029646816253662108, 0.029574848175048827, 0.029500320434570314, 0.02936627197265625, 0.029378559112548826, 0.029425664901733397, 0.0294072322845459, 0.02945849609375, 0.02939801597595215, 0.029466976165771486, 0.02954694366455078, 0.03140163230895996, 0.0304434871673584, 0.029734432220458986, 0.029493568420410156, 0.029553600311279297, 0.02940185546875, 0.029361152648925783, 0.029415168762207032, 0.02933087921142578, 0.029352224349975586, 0.029489696502685545, 0.029386432647705078, 0.02940342330932617, 0.029328544616699218, 0.029890560150146486, 0.029837312698364257, 0.029951135635375978, 0.030035839080810547, 0.03020716857910156, 0.029861663818359373, 0.029668447494506835, 0.02958019256591797, 0.02975935935974121, 0.030050304412841795, 0.02982310485839844, 0.029771360397338867, 0.02978160095214844, 0.029815296173095703, 0.029693471908569337, 0.03020204734802246, 0.02984815979003906, 0.029648895263671874, 0.02976153564453125, 0.029918880462646485, 0.029767391204833984, 0.029604480743408202, 0.029616128921508788, 0.02972876739501953, 0.02971238327026367, 0.02974515151977539, 0.029711711883544923, 0.029667999267578123, 0.029597312927246093, 0.02976095962524414, 0.029760448455810547, 0.02958950424194336, 0.02971833610534668, 0.029921472549438475, 0.02999068832397461, 0.0304597110748291, 0.03032310485839844, 0.029936927795410156, 0.029831520080566408, 0.029700479507446288, 0.029607679367065428, 0.02953856086730957, 0.029506752014160156, 0.02955721664428711, 0.029421184539794924, 0.029412063598632812, 0.029458431243896483, 0.029465951919555665, 0.02957119941711426, 0.029661088943481444, 0.029702079772949218, 0.03194950485229492, 0.029667327880859375, 0.029575168609619142, 0.029677568435668947, 0.029466623306274413, 0.02963046455383301, 0.02950752067565918, 0.029446271896362303, 0.029433055877685545, 0.029557472229003907, 0.029466304779052734, 0.02958572769165039, 0.03220889663696289, 0.029978624343872072, 0.0299683837890625, 0.03120947265625, 0.029714239120483397, 0.029945535659790037, 0.029749759674072264, 0.029734207153320313, 0.029864639282226563, 0.02956492805480957, 0.029612031936645508, 0.029626367568969726, 0.02956460762023926, 0.029729087829589843, 0.02962345504760742, 0.029508447647094725, 0.029446144104003907, 0.029820768356323243, 0.029833375930786134, 0.029839359283447265, 0.029494304656982422, 0.029361055374145507, 0.029311040878295898, 0.029378559112548826, 0.029329376220703127, 0.029302175521850587, 0.029456640243530275, 0.029428096771240236, 0.029453472137451173, 0.02946905517578125, 0.0293986873626709, 0.02965315246582031, 0.02953606414794922, 0.029387615203857423, 0.029607328414916992, 0.029569023132324217, 0.029721183776855467, 0.029706239700317383, 0.030208000183105467, 0.030260255813598633, 0.030061824798583985, 0.03070560073852539, 0.030869279861450195, 0.029625471115112306, 0.029631328582763673, 0.029640127182006835, 0.029516223907470704, 0.029665279388427734, 0.029634719848632814, 0.02967571258544922, 0.02949100875854492, 0.02977177619934082, 0.02976972770690918, 0.029582815170288088, 0.02962281608581543, 0.029535808563232423, 0.029505983352661132, 0.02949510383605957, 0.02965727996826172, 0.029550592422485353, 0.029519968032836914, 0.02964476776123047, 0.02965011215209961, 0.029642751693725586, 0.030744672775268555, 0.02958064079284668, 0.029467199325561525, 0.02949260711669922, 0.029597503662109375, 0.030138431549072267, 0.029862239837646486, 0.02999737548828125, 0.029922815322875978, 0.030044031143188477, 
0.030591455459594727, 0.029673728942871094, 0.029747200012207032, 0.029564224243164062, 0.029563104629516602, 0.029527839660644532, 0.029512128829956054, 0.02970240020751953, 0.02946998405456543, 0.02989743995666504, 0.029892288208007812, 0.029577535629272463, 0.029505216598510742, 0.029516096115112304, 0.029435903549194335, 0.02944000053405762, 0.029474815368652343, 0.02971379280090332, 0.029770368576049804, 0.029855743408203125, 0.02995609664916992, 0.02997039985656738, 0.029786079406738282, 0.029978015899658202, 0.030054752349853515, 0.029884735107421876, 0.029747200012207032, 0.029723712921142578, 0.02970515251159668, 0.029372255325317384, 0.029683456420898438, 0.029481599807739258, 0.029472543716430665, 0.03108572769165039, 0.02982588768005371, 0.029851551055908202, 0.029669471740722656, 0.029634559631347656, 0.029671520233154298, 0.02956265640258789, 0.029612159729003905, 0.02954355239868164, 0.0297890567779541, 0.030265344619750976, 0.029878496170043945, 0.029779584884643554, 0.029461727142333985, 0.02944428825378418, 0.0295629768371582, 0.02930672073364258, 0.02931999969482422, 0.02939904022216797, 0.029571359634399413, 0.029456384658813478, 0.029351520538330077, 0.02925609588623047, 0.029379999160766602, 0.02948566436767578, 0.029464576721191408, 0.02944819259643555, 0.029468671798706055, 0.030010976791381837, 0.02965545654296875, 0.030045631408691407, 0.030667327880859376, 0.030260671615600587, 0.030309055328369142, 0.030259071350097658, 0.030136320114135744, 0.030109695434570313, 0.02998428726196289, 0.029843936920166014, 0.029822975158691405, 0.031045631408691408, 0.029928831100463866, 0.029777664184570313, 0.029700992584228515, 0.02977996826171875, 0.030089216232299806, 0.029847551345825195, 0.0297205753326416, 0.02965475273132324, 0.02972438430786133, 0.029654592514038087, 0.02959872055053711, 0.029474815368652343, 0.029419519424438476, 0.029572608947753907, 0.029403648376464843, 0.02977132797241211, 0.029481407165527343, 0.029569023132324217, 0.029618175506591796, 0.029483007431030273, 0.029507711410522462, 0.029746976852416993, 0.029462623596191406, 0.029880319595336914, 0.02962227249145508, 0.029550592422485353, 0.02944607925415039, 0.029515840530395507, 0.029427711486816405, 0.030012575149536133, 0.029687744140625, 0.029567903518676757, 0.029525056838989257, 0.029409215927124022, 0.029543264389038086, 0.029546655654907227, 0.029815839767456054, 0.030710527420043945, 0.030718175888061524, 0.029884416580200194, 0.029671295166015625, 0.029564992904663086, 0.029968448638916016, 0.02943427276611328, 0.029464576721191408, 0.02956287956237793, 0.0295546875, 0.02934899139404297, 0.029418367385864258, 0.029396991729736328, 0.029423423767089844, 0.029483200073242188, 0.02968079948425293, 0.02993174362182617, 0.03009756851196289, 0.02979475212097168, 0.02952809524536133, 0.029451648712158204, 0.029348480224609376, 0.029417152404785155, 0.02940345573425293, 0.029337663650512696, 0.029705823898315428, 0.029872480392456054, 0.02960771179199219, 0.029724895477294924, 0.03259187316894531, 0.03108032035827637, 0.029775167465209963, 0.03032374382019043, 0.02986899185180664, 0.02975152015686035, 0.0294748477935791, 0.02978803253173828, 0.029631200790405272, 0.029730079650878906, 0.029666015625, 0.029565120697021486, 0.029595455169677733, 0.029702144622802733, 0.029491199493408202, 0.029584672927856444, 0.02934351921081543, 0.02953926467895508, 0.029620223999023438, 0.02946393585205078, 0.029725311279296875, 0.02976358413696289, 0.030084096908569335, 0.030206527709960938, 
0.029925695419311525, 0.030166688919067382, 0.03007734489440918, 0.030109567642211912, 0.030037311553955077, 0.030098304748535157, 0.02973004722595215, 0.0299814395904541, 0.02980659294128418, 0.029955263137817382, 0.029901376724243166, 0.03026531219482422, 0.02988470458984375, 0.029802080154418945]",tokens/s,33.5830271318017,,, 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1210.12224,1155.39968,0.0,752.877568,710.554112,s,1,8.6234169921875,8.6234169921875,0.0,8.6234169921875,8.6234169921875,8.6234169921875,8.6234169921875,[8.6234169921875],,kWh,4.352849923325266e-05,4.794215726103372e-06,1.4686956194054357e-05,6.300967115341039e-05,,MB,1491.402752,1436.418048,0.0,1019.215872,949.099008,s,10,0.2715139503479004,0.027151395034790037,0.0003761240318524589,0.027135231971740722,0.027384604835510255,0.02774891061782837,0.02804035524368286,"[0.026776416778564453, 0.027303647994995118, 0.02705401611328125, 0.027216447830200195, 0.027276159286499024, 0.026912031173706056, 0.026831680297851563, 0.02679167938232422, 0.02723865509033203, 0.028113216400146485]",tokens/s,9428.613140208015,kWh,7.897284567004569e-07,8.709265713696791e-08,5.082211272973687e-07,1.3850422411347935e-06,tokens/kWh,184831907.93535218,MB,1513.549824,1444.806656,0.0,1027.60448,949.101568,s,10,13.20731884765625,1.320731884765625,0.0069583848623168435,1.3215374755859375,1.32559208984375,1.3304466186523438,1.3343302416992189,"[1.3245133056640626, 1.3208558349609374, 1.3215789794921875, 1.3186351318359375, 1.323444091796875, 1.310372802734375, 1.3090889892578126, 1.3220325927734375, 1.3353011474609375, 1.3214959716796875]",tokens/s,47.700824616027106,kWh,3.8208459589123395e-05,4.213961466929588e-06,1.5798736362700294e-05,5.822115741875329e-05,tokens/kWh,1082080.8584562324,,s,630,13.205166519165035,0.020960581776452443,0.00038297868476818575,0.020877344131469727,0.02120663738250732,0.02138211669921875,0.02285748323440552,"[0.02097417640686035, 0.021114751815795897, 0.02087936019897461, 0.02077859115600586, 0.021254528045654298, 0.020860736846923827, 0.020940095901489257, 0.02094895935058594, 0.020877887725830078, 0.020923776626586912, 0.020835487365722657, 0.020825952529907227, 0.020783103942871094, 0.020871103286743162, 0.021010496139526366, 0.020919456481933593, 0.020951904296875, 0.021102592468261717, 0.02089904022216797, 0.021064479827880858, 0.02090188789367676, 0.020764192581176757, 0.020890079498291015, 0.020858879089355468, 0.021078016281127928, 0.020938751220703124, 0.020813823699951172, 0.020939872741699218, 0.020972383499145507, 0.020979167938232422, 0.021086816787719728, 0.021334016799926758, 0.021217279434204102, 0.021534719467163087, 0.02101375961303711, 0.020992767333984374, 0.021093631744384767, 0.02126908874511719, 0.020973472595214843, 0.020902143478393555, 0.02083020782470703, 0.02087731170654297, 0.020779008865356444, 0.02086092758178711, 0.02079974365234375, 0.02070297622680664, 0.020715520858764647, 0.022310367584228517, 0.02081772804260254, 0.021187328338623048, 0.021034496307373047, 0.020789728164672852, 0.02087049674987793, 0.02102662467956543, 0.0228176326751709, 0.021868000030517577, 0.021152095794677736, 0.021000383377075195, 
0.020911968231201172, 0.020859039306640625, 0.020846527099609376, 0.020940479278564454, 0.020920000076293944, 0.021100000381469728, 0.021175167083740235, 0.020844127655029295, 0.020955135345458984, 0.020801952362060547, 0.02077872085571289, 0.020866912841796877, 0.020864511489868166, 0.02101753616333008, 0.020879199981689453, 0.020881568908691406, 0.020979711532592774, 0.020797279357910155, 0.020908191680908204, 0.021260383605957032, 0.021096351623535157, 0.02120659255981445, 0.02083475112915039, 0.02084864044189453, 0.020781055450439453, 0.020852479934692383, 0.02077926445007324, 0.02089574432373047, 0.02105958366394043, 0.020997760772705078, 0.020802976608276368, 0.020799488067626954, 0.020825056076049803, 0.020911968231201172, 0.020826271057128906, 0.020781055450439453, 0.020827199935913084, 0.02112745666503906, 0.020907968521118165, 0.021856992721557618, 0.022873760223388672, 0.02316524887084961, 0.02107561683654785, 0.02099468803405762, 0.02077609634399414, 0.020843103408813478, 0.020838144302368165, 0.020619520187377928, 0.020666368484497072, 0.02062934494018555, 0.020596895217895508, 0.02067865562438965, 0.0207620792388916, 0.021189151763916017, 0.021383167266845703, 0.020809663772583007, 0.020723072052001953, 0.02058720016479492, 0.020618623733520507, 0.02060732841491699, 0.020913888931274414, 0.021057695388793946, 0.020748416900634767, 0.020838560104370116, 0.020801311492919923, 0.020981887817382812, 0.02087343978881836, 0.02085683250427246, 0.021101215362548827, 0.021071359634399413, 0.020832767486572267, 0.02072166442871094, 0.02071347236633301, 0.020735519409179688, 0.020782623291015625, 0.02080659294128418, 0.020856672286987305, 0.02060304069519043, 0.02065407943725586, 0.020740095138549804, 0.02073923110961914, 0.020794240951538086, 0.020739423751831056, 0.020580863952636717, 0.021135488510131837, 0.020704288482666016, 0.02075651168823242, 0.02074515151977539, 0.02069708824157715, 0.020835359573364257, 0.020831199645996095, 0.02090598487854004, 0.02086297607421875, 0.021034048080444335, 0.02080636787414551, 0.020828384399414063, 0.020736000061035157, 0.020721952438354493, 0.020714784622192384, 0.020779520034790038, 0.020839712142944337, 0.021057184219360352, 0.020888576507568358, 0.020684799194335936, 0.02068675231933594, 0.021033056259155275, 0.021053440093994142, 0.021165407180786133, 0.02115385627746582, 0.021276544570922852, 0.02115043258666992, 0.021135360717773437, 0.02126192092895508, 0.021467391967773437, 0.02143561553955078, 0.021534688949584962, 0.021097440719604493, 0.021026399612426756, 0.021404064178466797, 0.024549055099487304, 0.020924736022949218, 0.02083635139465332, 0.02081996726989746, 0.02089289665222168, 0.020957984924316407, 0.020846431732177734, 0.020817279815673828, 0.020769567489624025, 0.020946943283081054, 0.02104934310913086, 0.021026432037353517, 0.021112831115722656, 0.021075584411621093, 0.021070207595825195, 0.02180012893676758, 0.02117420768737793, 0.023689855575561525, 0.021804800033569337, 0.021154304504394532, 0.021045248031616212, 0.020960927963256836, 0.020969823837280275, 0.020873279571533204, 0.020940095901489257, 0.02071206474304199, 0.02068396759033203, 0.02086787223815918, 0.02070521545410156, 0.020692672729492188, 0.020903711318969728, 0.020909952163696288, 0.020820735931396484, 0.02075587272644043, 0.02104105567932129, 0.02091628837585449, 0.020761215209960937, 0.02078121566772461, 0.020641632080078125, 0.020725759506225586, 0.020825887680053713, 0.020725183486938477, 0.020919071197509766, 0.02080089569091797, 0.020810367584228516, 
0.020748287200927733, 0.020782655715942382, 0.02075276756286621, 0.020864479064941405, 0.020752832412719725, 0.020834463119506836, 0.020985631942749022, 0.020945119857788085, 0.020679935455322266, 0.02075503921508789, 0.02074844741821289, 0.02087936019897461, 0.02067865562438965, 0.020856224060058593, 0.021104543685913087, 0.020864831924438478, 0.0208536319732666, 0.020841791152954103, 0.02077903938293457, 0.020836992263793944, 0.020744224548339844, 0.02073094367980957, 0.021015487670898437, 0.020711423873901368, 0.020793184280395508, 0.020668575286865234, 0.020760576248168947, 0.02102396774291992, 0.020802335739135744, 0.020962783813476563, 0.020896831512451173, 0.02112403106689453, 0.020862239837646485, 0.020986591339111328, 0.02082815933227539, 0.020801536560058592, 0.02088960075378418, 0.02099404716491699, 0.020809728622436522, 0.020913408279418944, 0.020914560317993165, 0.02092448043823242, 0.02154323196411133, 0.02106550407409668, 0.021358816146850586, 0.020975744247436524, 0.020903072357177734, 0.020941055297851563, 0.02072380828857422, 0.0209800968170166, 0.020779008865356444, 0.021004287719726563, 0.02098896026611328, 0.021005056381225587, 0.020946207046508788, 0.02086979293823242, 0.020807968139648438, 0.02081814384460449, 0.02083331108093262, 0.02089036750793457, 0.020817920684814452, 0.02102876853942871, 0.020799583435058593, 0.020772863388061523, 0.02082815933227539, 0.02085273551940918, 0.02097939109802246, 0.020932640075683594, 0.021096160888671875, 0.020956863403320314, 0.020975711822509766, 0.021013280868530274, 0.020800928115844726, 0.0208155517578125, 0.020668447494506834, 0.02084124755859375, 0.020770912170410157, 0.020723712921142577, 0.020628896713256836, 0.020697120666503907, 0.020645599365234375, 0.020824064254760744, 0.021431135177612304, 0.0241213436126709, 0.024397823333740236, 0.021110815048217775, 0.020901151657104492, 0.020882112503051758, 0.020778432846069336, 0.020738624572753907, 0.02062726402282715, 0.020719808578491213, 0.02067251205444336, 0.02086297607421875, 0.020809728622436522, 0.020663904190063476, 0.02073027229309082, 0.02073321533203125, 0.020613759994506837, 0.02062883186340332, 0.02070400047302246, 0.020692415237426758, 0.020631744384765626, 0.02061939239501953, 0.020747648239135743, 0.020758495330810547, 0.02072220802307129, 0.02086300849914551, 0.020654367446899413, 0.020719263076782228, 0.02061520004272461, 0.020714975357055663, 0.020773088455200196, 0.020744064331054687, 0.020875904083251955, 0.020711616516113283, 0.02078447914123535, 0.020787872314453126, 0.020876800537109375, 0.020740320205688476, 0.02079977607727051, 0.020840288162231446, 0.020794527053833008, 0.020739072799682616, 0.02073129653930664, 0.020687456130981444, 0.02079539108276367, 0.020736000061035157, 0.02073936080932617, 0.020718015670776368, 0.02092880058288574, 0.021007423400878907, 0.020877376556396484, 0.020769407272338867, 0.02073401641845703, 0.020682752609252928, 0.020795583724975586, 0.02097279930114746, 0.02102947235107422, 0.021121183395385743, 0.0208035831451416, 0.02079961585998535, 0.020833183288574218, 0.02082204818725586, 0.020761280059814452, 0.020782751083374025, 0.020856544494628905, 0.020849536895751954, 0.020940799713134766, 0.021001247406005858, 0.021025535583496093, 0.020833887100219727, 0.020815935134887695, 0.020903583526611327, 0.02082499122619629, 0.02103209686279297, 0.02082102394104004, 0.020778207778930663, 0.020677631378173827, 0.02073472023010254, 0.020938751220703124, 0.02085209655761719, 0.02073049545288086, 0.020742143630981445, 
0.020885183334350587, 0.020766944885253907, 0.020725536346435546, 0.020658496856689454, 0.020741983413696288, 0.02082537651062012, 0.020808479309082032, 0.020717664718627928, 0.020684480667114258, 0.020672832489013672, 0.02065203285217285, 0.020692991256713866, 0.020658176422119142, 0.020692991256713866, 0.020604736328125, 0.020867263793945313, 0.02106572723388672, 0.02083430480957031, 0.020916000366210937, 0.021055200576782226, 0.02093212890625, 0.021023712158203124, 0.020779008865356444, 0.020879264831542968, 0.02077408027648926, 0.02085980796813965, 0.02083020782470703, 0.020795007705688477, 0.020862688064575197, 0.020843168258666993, 0.02093199920654297, 0.02071753692626953, 0.020826240539550782, 0.020634111404418946, 0.02074185562133789, 0.020673856735229493, 0.020884191513061524, 0.020762399673461916, 0.020836095809936523, 0.020680831909179687, 0.020660831451416017, 0.020801311492919923, 0.020783327102661134, 0.020752384185791017, 0.020719615936279297, 0.020684799194335936, 0.02068889617919922, 0.020807552337646484, 0.020715648651123048, 0.020750335693359375, 0.02068172836303711, 0.020745119094848632, 0.020607072830200194, 0.020649663925170897, 0.020762975692749024, 0.020748544692993164, 0.020883487701416015, 0.02068671989440918, 0.02068284797668457, 0.020710880279541016, 0.020698976516723634, 0.020740800857543946, 0.020815872192382814, 0.02088479995727539, 0.021006271362304686, 0.020837024688720705, 0.021317760467529298, 0.02094486427307129, 0.02107302474975586, 0.020900735855102538, 0.020796800613403322, 0.020777599334716797, 0.02145894432067871, 0.020799488067626954, 0.020719615936279297, 0.020747615814208985, 0.020780832290649413, 0.02074835205078125, 0.020732736587524413, 0.020942752838134765, 0.020762655258178712, 0.020805696487426757, 0.020903743743896485, 0.020787391662597656, 0.02089779281616211, 0.02109187126159668, 0.020775392532348634, 0.020858879089355468, 0.020819456100463866, 0.020793855667114256, 0.020936704635620116, 0.020774911880493165, 0.02075372886657715, 0.02095110321044922, 0.02077350425720215, 0.02081177520751953, 0.020832256317138673, 0.020766719818115235, 0.020963327407836914, 0.020780576705932616, 0.020777408599853515, 0.02082614326477051, 0.02102681541442871, 0.021091808319091798, 0.021117439270019533, 0.021092384338378907, 0.021137088775634767, 0.021112415313720705, 0.023472864151000975, 0.02215936088562012, 0.02126848030090332, 0.021358591079711914, 0.02116111946105957, 0.02119910430908203, 0.02121766471862793, 0.021358816146850586, 0.021149696350097655, 0.02099955177307129, 0.0211615047454834, 0.021276704788208006, 0.020985343933105468, 0.021132160186767578, 0.021086271286010743, 0.020954336166381836, 0.020947744369506836, 0.02107302474975586, 0.02111372756958008, 0.020993663787841798, 0.021569055557250978, 0.021604768753051756, 0.02123414421081543, 0.021352287292480468, 0.021163520812988282, 0.021174623489379884, 0.02113315200805664, 0.020988000869750976, 0.02109257507324219, 0.021301279067993163, 0.021370399475097657, 0.021215391159057618, 0.02108457565307617, 0.021198720932006837, 0.021202560424804687, 0.02114201545715332, 0.020924192428588867, 0.021129344940185545, 0.02132796859741211, 0.02127257537841797, 0.021159936904907226, 0.021219327926635743, 0.021714111328125, 0.021226207733154298, 0.021145696640014647, 0.021032352447509766, 0.021111263275146484, 0.02123788833618164, 0.021207040786743164, 0.0218621768951416, 0.021299423217773436, 0.021358591079711914, 0.021622783660888673, 0.021628992080688476, 0.021297088623046877, 0.02113302421569824, 
0.021212799072265625, 0.021101215362548827, 0.021184768676757813, 0.02098124885559082, 0.020907327651977538, 0.021015487670898437, 0.021071647644042967, 0.020860191345214843, 0.0211393928527832, 0.021191680908203125, 0.021147647857666017, 0.020948991775512696, 0.020987295150756837, 0.020820575714111327, 0.020874271392822264, 0.021691360473632813, 0.021476768493652345, 0.02138083267211914, 0.021308128356933593, 0.021098751068115235, 0.021200191497802733, 0.021202400207519533, 0.021111743927001953, 0.020998176574707032, 0.02103251266479492, 0.02103932762145996, 0.020996320724487306, 0.020936704635620116, 0.020789247512817383, 0.020864608764648438, 0.020867136001586913, 0.020945215225219728, 0.020895776748657228, 0.020899551391601563, 0.021005983352661132, 0.02093529510498047, 0.020923744201660155, 0.020922239303588868, 0.020935136795043944, 0.020916608810424803, 0.02104528045654297, 0.020928415298461914, 0.020918272018432618, 0.02104934310913086, 0.021017919540405272, 0.0210316162109375, 0.02105526351928711, 0.02087343978881836, 0.02082815933227539, 0.02090188789367676, 0.020989952087402345, 0.020874559402465822, 0.020894367218017577, 0.020848480224609375, 0.020764223098754885, 0.020834943771362305, 0.020746240615844725, 0.020952064514160155, 0.02072015953063965, 0.020891712188720702, 0.021087968826293945, 0.021512895584106444, 0.021018207550048826, 0.020982175827026366, 0.020893247604370117, 0.02086751937866211, 0.02091561508178711, 0.021086816787719728, 0.02105753517150879, 0.021082111358642578, 0.02099135971069336, 0.020988544464111327, 0.02094268798828125, 0.020877471923828127, 0.020987743377685546, 0.020774368286132813, 0.021084447860717774, 0.021090112686157226, 0.020918880462646484, 0.020760480880737304]",tokens/s,47.70859943989823,,, 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3152.740352,4376.625152,0.0,3990.880256,3908.719616,s,1,10.2110595703125,10.2110595703125,0.0,10.2110595703125,10.2110595703125,10.2110595703125,10.2110595703125,[10.2110595703125],,kWh,9.043820330416565e-05,9.968596250208437e-06,3.0304190909991036e-05,0.00013071099046436512,,MB,3171.139584,4733.140992,0.0,4318.035968,4274.557952,s,10,2.202844192504883,0.22028441925048825,0.0022714960509002823,0.21978108978271485,0.22317620849609374,0.2232936248779297,0.22338755798339843,"[0.2177252197265625, 0.2229659881591797, 0.21902688598632813, 0.22152531433105468, 0.21901795959472656, 0.21670278930664064, 0.22053529357910157, 0.22341104125976563, 0.22315011596679687, 0.21878358459472655]",tokens/s,1162.1339397086415,kWh,6.745642624716972e-06,7.439233036910739e-07,4.4587030619080474e-06,1.1948268990316093e-05,tokens/kWh,21425697.74814113,MB,3175.317504,4735.238144,0.0,4320.13312,4274.560512,s,10,31.346081054687502,3.1346081054687502,0.006965655819228732,3.1348283691406253,3.1435971679687498,3.1444771728515626,3.1451811767578124,"[3.126269775390625, 3.145357177734375, 3.134798828125, 3.127242919921875, 3.143401611328125, 3.12987451171875, 3.14272265625, 3.125734619140625, 3.13485791015625, 3.135821044921875]",tokens/s,20.098206180890024,kWh,9.133203610986031e-05,1.0074056613719308e-05,4.8045755608291436e-05,0.00014945184833187105,tokens/kWh,421540.45402036735,,s,630,31.343413505554196,0.049751450008816187,0.0005529966225793758,0.04964680099487305,0.05027058753967285,0.050800572395324706,0.05215154853820801,"[0.05089465713500976, 0.05002214431762695, 0.04979097747802735, 0.049619873046875, 0.04948787307739258, 0.051863552093505856, 0.049686527252197264, 0.04999168014526367, 0.04994355010986328, 0.04971763229370117, 0.04998371124267578, 0.04948214340209961, 0.049290782928466795, 0.049209823608398436, 0.04912947082519531, 0.049303169250488284, 0.04926902389526367, 0.04930748748779297, 0.04905187225341797, 0.04953286361694336, 0.04946851348876953, 0.049594688415527347, 0.04924691009521484, 0.04928307342529297, 0.04892041778564453, 0.049066143035888674, 0.049696769714355465, 0.0495814094543457, 0.04910351943969726, 0.049876991271972655, 0.04956774520874024, 0.04937011337280273, 0.04913622283935547, 0.04929372787475586, 0.04925203323364258, 0.04903760147094727, 0.04899593734741211, 0.04892448043823242, 0.04889254379272461, 0.04907417678833008, 0.04944015884399414, 0.049479713439941404, 0.049422752380371096, 0.04937948989868164, 0.04961299133300781, 0.04976620864868164, 0.04988518524169922, 0.050237438201904294, 0.04986265563964844, 0.05027635192871094, 0.04994867324829102, 0.049721343994140625, 0.0498485107421875, 0.04960140609741211, 0.049920703887939455, 0.049637630462646486, 0.049614112854003904, 0.05117411041259766, 0.04969891357421875, 0.049579936981201174, 0.04996300888061524, 0.049928192138671876, 0.05003571319580078, 
0.05087689590454102, 0.05010111999511719, 0.04997840118408203, 0.050177150726318356, 0.05023382568359375, 0.04980758285522461, 0.04998675155639649, 0.05188793563842774, 0.051009952545166014, 0.04992611312866211, 0.049786880493164064, 0.05011110305786133, 0.049950977325439454, 0.04996684646606445, 0.049893375396728515, 0.049911808013916016, 0.050010112762451174, 0.04989728164672851, 0.04999388885498047, 0.0496968002319336, 0.049713409423828125, 0.04969555282592773, 0.049799198150634764, 0.0499967041015625, 0.04975001525878906, 0.04957593536376953, 0.0497022705078125, 0.049827934265136715, 0.049817760467529296, 0.049855968475341794, 0.049460094451904295, 0.04953705596923828, 0.049372512817382815, 0.04988380813598633, 0.04995238494873047, 0.05002867126464844, 0.0512938232421875, 0.0499370231628418, 0.04990563201904297, 0.05047504043579101, 0.04998867034912109, 0.04976326370239258, 0.04950361633300781, 0.049910240173339844, 0.04976860809326172, 0.05014527893066406, 0.04941619110107422, 0.049553310394287106, 0.04926473617553711, 0.05317577743530273, 0.04966867065429687, 0.04938256072998047, 0.0493504638671875, 0.04966502380371094, 0.04918617630004883, 0.04943731307983398, 0.049459201812744144, 0.04938137435913086, 0.04953497695922852, 0.04981350326538086, 0.049589534759521485, 0.04979328155517578, 0.04956729507446289, 0.05033193588256836, 0.04985590362548828, 0.04967497634887695, 0.04949980926513672, 0.04957830429077149, 0.04944025421142578, 0.049406463623046876, 0.04938518524169922, 0.04920348739624023, 0.049756160736083986, 0.0493383674621582, 0.04961075210571289, 0.051058433532714845, 0.05022272109985351, 0.04977932739257813, 0.04976230239868164, 0.049928192138671876, 0.04947148895263672, 0.04950588989257813, 0.05007596969604492, 0.05206630325317383, 0.04998153686523438, 0.05030908966064453, 0.04937321472167969, 0.04951859283447266, 0.04926668930053711, 0.04960255813598633, 0.04951587295532227, 0.05055136108398438, 0.05218636703491211, 0.04973814392089844, 0.04967059326171875, 0.04944486236572266, 0.04935628890991211, 0.04932191848754883, 0.049472000122070314, 0.04949161529541016, 0.04945270538330078, 0.04959331130981445, 0.04974758529663086, 0.04993859100341797, 0.04972048187255859, 0.04952560043334961, 0.049432575225830076, 0.04935990524291992, 0.04941104125976563, 0.04946739196777344, 0.049385055541992184, 0.04953440093994141, 0.04975872039794922, 0.04971772766113281, 0.049786880493164064, 0.05033903884887695, 0.04987369537353516, 0.04981171035766602, 0.04994022369384766, 0.04965990447998047, 0.04942572784423828, 0.04951110458374024, 0.049686527252197264, 0.04965545654296875, 0.04968278503417969, 0.04938547134399414, 0.050372257232666015, 0.04975568008422852, 0.050000480651855465, 0.04987894439697266, 0.052770782470703125, 0.04986710357666015, 0.049555454254150394, 0.04972057723999023, 0.049533409118652345, 0.04947177505493164, 0.04971247863769531, 0.04943119812011719, 0.049242111206054685, 0.049637374877929685, 0.049685726165771486, 0.04937603378295898, 0.04917862319946289, 0.0498678092956543, 0.04948428726196289, 0.04960099029541016, 0.04914176177978516, 0.049342464447021485, 0.049364990234375, 0.0491879997253418, 0.04992659378051758, 0.04964188766479492, 0.05013011169433594, 0.04937811279296875, 0.0494202880859375, 0.04968038558959961, 0.04929667282104492, 0.04957843017578125, 0.049567455291748046, 0.049464992523193356, 0.04954777526855469, 0.049645984649658206, 0.04908236694335937, 0.05052604675292969, 0.04924367904663086, 0.04936284637451172, 0.049193695068359376, 
0.04919910430908203, 0.049278335571289064, 0.049449825286865236, 0.049767646789550785, 0.05033180618286133, 0.04983395385742188, 0.04976601409912109, 0.04957881546020508, 0.04966809463500976, 0.04907190322875977, 0.04940755081176758, 0.04917295837402344, 0.04916857528686523, 0.04933222579956055, 0.05172563171386719, 0.04979369735717774, 0.04958006286621094, 0.04937113571166992, 0.049582080841064455, 0.04934630584716797, 0.04926899337768555, 0.04942956924438477, 0.05051827239990234, 0.04999612808227539, 0.04969411087036133, 0.04939004898071289, 0.04924406433105469, 0.05048275375366211, 0.052855232238769534, 0.050064960479736326, 0.05013753509521485, 0.05049686431884766, 0.049911903381347655, 0.04990147018432617, 0.04990838241577149, 0.050255870819091795, 0.04974387359619141, 0.04967424011230469, 0.05003817749023438, 0.04983049774169922, 0.049860607147216796, 0.04989270401000977, 0.05007539367675781, 0.049705886840820314, 0.0498790397644043, 0.0494505615234375, 0.04970761489868164, 0.04980064010620117, 0.04940227127075195, 0.04928307342529297, 0.0495022087097168, 0.049285118103027346, 0.049391616821289064, 0.049358848571777345, 0.049484001159667966, 0.04983331298828125, 0.049539104461669925, 0.049699230194091795, 0.049934337615966794, 0.0502210578918457, 0.0506879997253418, 0.050948097229003904, 0.05007769775390625, 0.050348033905029295, 0.049983486175537106, 0.04970470428466797, 0.04962489700317383, 0.04960710525512695, 0.04970204925537109, 0.049627998352050784, 0.05202105712890625, 0.05000211334228516, 0.05000601577758789, 0.04978073501586914, 0.049616897583007816, 0.04954057693481445, 0.04953507232666016, 0.04960095977783203, 0.049532638549804685, 0.049428768157958984, 0.049423423767089844, 0.04974185562133789, 0.04942124938964844, 0.05018803024291992, 0.049530303955078125, 0.050561023712158204, 0.049620990753173826, 0.049209342956542966, 0.049888992309570314, 0.04966428756713867, 0.04965539169311523, 0.0493138542175293, 0.049514846801757814, 0.04947148895263672, 0.0497108154296875, 0.049981727600097656, 0.05037055969238281, 0.049751838684082034, 0.049452415466308595, 0.0495129280090332, 0.04919334411621094, 0.049145118713378906, 0.049142494201660156, 0.04922518539428711, 0.04946384048461914, 0.04915609741210938, 0.049658912658691406, 0.0494431037902832, 0.049195713043212894, 0.049219585418701174, 0.04989948654174805, 0.04937321472167969, 0.05159731292724609, 0.05050163269042969, 0.0497151985168457, 0.049601535797119144, 0.04992432022094727, 0.050025279998779294, 0.049870815277099606, 0.05000601577758789, 0.050032062530517576, 0.04975833511352539, 0.04990345764160156, 0.049656513214111325, 0.04967209625244141, 0.049251487731933594, 0.04938838577270508, 0.0491412467956543, 0.049207809448242185, 0.04897353744506836, 0.04962460708618164, 0.05002521514892578, 0.05000518417358398, 0.050133216857910154, 0.05001071929931641, 0.05014303970336914, 0.050507968902587894, 0.0496798095703125, 0.049500736236572265, 0.04955324935913086, 0.05003641510009765, 0.049494560241699216, 0.04967580795288086, 0.04935523223876953, 0.04966320037841797, 0.049414878845214845, 0.04947564697265625, 0.04930284881591797, 0.05070073699951172, 0.04990544128417969, 0.050229248046875, 0.05196294403076172, 0.05003974533081055, 0.050069473266601563, 0.04983606338500977, 0.0509194221496582, 0.04979507064819336, 0.049837825775146484, 0.04967606353759765, 0.049780799865722654, 0.049551776885986325, 0.04978483200073242, 0.049737728118896485, 0.05031731033325195, 0.04943036651611328, 0.049637535095214846, 0.04971110534667969, 
0.05009427261352539, 0.049940288543701174, 0.049823551177978515, 0.04970924758911133, 0.05056905746459961, 0.04997539138793945, 0.049807422637939455, 0.04973315048217773, 0.04986841583251953, 0.04940864181518555, 0.049317279815673826, 0.04930604934692383, 0.04980326461791992, 0.04927648162841797, 0.049400638580322266, 0.04961881637573242, 0.049577663421630856, 0.049590240478515624, 0.049512161254882815, 0.04956646347045898, 0.049719200134277344, 0.04965702438354492, 0.04983894348144531, 0.04972934341430664, 0.05016806411743164, 0.05076294326782226, 0.05045126342773437, 0.04991385650634766, 0.04971865463256836, 0.0494936637878418, 0.0493680305480957, 0.049235969543457034, 0.04949401473999023, 0.049604606628417966, 0.049358848571777345, 0.05115903854370117, 0.049671360015869144, 0.052327232360839845, 0.04987289428710937, 0.0494571533203125, 0.050149375915527344, 0.04938751983642578, 0.04973555374145508, 0.0493521614074707, 0.050635807037353514, 0.04974895858764648, 0.049248096466064456, 0.04951875305175781, 0.050874366760253906, 0.05083135986328125, 0.049594303131103516, 0.04931590270996094, 0.04919884872436524, 0.049145694732666015, 0.04911964797973633, 0.04930064010620117, 0.048984928131103514, 0.04914777755737305, 0.04921689605712891, 0.04974873733520508, 0.04978483200073242, 0.04976435089111328, 0.04968447875976562, 0.04980025482177734, 0.04947347259521485, 0.04940697479248047, 0.04935580825805664, 0.04925657653808594, 0.049477695465087894, 0.04927961730957031, 0.04913353729248047, 0.04928460693359375, 0.04906054306030273, 0.049124351501464845, 0.04902336120605469, 0.04935327911376953, 0.0493240966796875, 0.04935475158691406, 0.04941619110107422, 0.04973158264160156, 0.05004083251953125, 0.04984832000732422, 0.04970700836181641, 0.04964761734008789, 0.04948688125610352, 0.04998857498168945, 0.04931584167480469, 0.04933340835571289, 0.04981244659423828, 0.04938896179199219, 0.050843425750732425, 0.050899646759033204, 0.049855712890625, 0.04974671936035156, 0.04978432083129883, 0.04959283065795898, 0.049606689453125, 0.049288192749023435, 0.04938985443115235, 0.04947219085693359, 0.04995481491088867, 0.04996505737304688, 0.049990974426269534, 0.05018694305419922, 0.04945305633544922, 0.04964147186279297, 0.049513919830322266, 0.05027401733398437, 0.05008009719848633, 0.049636928558349606, 0.04944358444213867, 0.05049494552612305, 0.0496396484375, 0.0495206413269043, 0.04939571380615235, 0.04968038558959961, 0.051492862701416016, 0.04997529602050781, 0.04963840103149414, 0.04948275375366211, 0.04986880111694336, 0.04933599853515625, 0.04919942474365235, 0.04938460922241211, 0.049436737060546875, 0.04927772903442383, 0.0493568000793457, 0.04969267272949219, 0.049342208862304685, 0.04936115264892578, 0.049290817260742185, 0.04942668914794922, 0.04929964828491211, 0.052531200408935545, 0.04967628860473633, 0.04945644760131836, 0.049369792938232425, 0.04962713623046875, 0.049827838897705076, 0.04951030349731445, 0.05002249526977539, 0.049598464965820314, 0.05073241424560547, 0.04958620834350586, 0.05018051147460938, 0.04957408142089844, 0.049528289794921875, 0.04974227142333985, 0.04932774353027344, 0.04907465744018555, 0.04933222579956055, 0.04926054382324219, 0.04937686538696289, 0.0493155517578125, 0.04938003158569336, 0.049496318817138674, 0.05000576019287109, 0.05123891067504883, 0.049895423889160156, 0.04992409515380859, 0.049581470489501955, 0.04963183975219727, 0.04967817687988281, 0.04962319946289063, 0.0498930892944336, 0.049847713470458986, 0.0501523208618164, 0.05015273666381836, 
0.05031545639038086, 0.05006595230102539, 0.05091312026977539, 0.05013315200805664, 0.05036441421508789, 0.05242265701293945, 0.05015251159667969, 0.049912158966064456, 0.050227294921875, 0.04975814437866211, 0.04972198486328125, 0.049661888122558596, 0.04964886474609375, 0.04953168106079102, 0.04951039886474609, 0.04951417541503906, 0.04952201461791992, 0.04921644973754883, 0.049452289581298825, 0.049389728546142576, 0.049715167999267576, 0.049576255798339845, 0.04945340728759766, 0.049539070129394534, 0.05010432052612305, 0.050270206451416014, 0.05005619049072266, 0.050202945709228515, 0.05037535858154297, 0.04986265563964844, 0.04968447875976562, 0.049669921875, 0.04958230209350586, 0.04959401702880859, 0.04959471893310547, 0.04930335998535156, 0.04939731216430664, 0.049549407958984375, 0.04939836883544922, 0.04949190521240234, 0.04932198333740234, 0.049512222290039064, 0.04934678268432617, 0.049769824981689456, 0.049861312866210934, 0.05107452774047851, 0.049924606323242186, 0.04955292892456055, 0.05053283309936524, 0.049622398376464844, 0.049578495025634765, 0.049653888702392575, 0.0494439697265625, 0.049406848907470706, 0.049258785247802736, 0.05099625778198242, 0.04913359832763672, 0.04947625732421875, 0.0495552978515625, 0.04958019256591797, 0.04942233657836914, 0.04945462417602539, 0.04922639846801758, 0.04929209518432617, 0.05004390335083008]",tokens/s,20.09991668228354,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3154.436096,4647.15776,0.0,4244.635648,4125.520384,s,1,11.659623046875,11.659623046875,0.0,11.659623046875,11.659623046875,11.659623046875,11.659623046875,[11.659623046875],,kWh,0.00013382405216249633,1.4754344468547778e-05,5.193643043804874e-05,0.00020051482706909284,,MB,3198.767104,4821.221376,0.0,4404.0192,4310.79936,s,10,1.157678077697754,0.1157678077697754,0.00019931256886106306,0.1158173942565918,0.1159079071044922,0.11604156951904297,0.11614849945068359,"[0.11583865356445312, 0.1158263702392578, 0.11541244506835938, 0.11582931518554687, 0.11580841827392578, 0.11617523193359375, 0.1155478744506836, 0.11559305572509766, 0.11576850891113281, 0.11587820434570313]",tokens/s,2211.322861957453,kWh,3.4637170935285905e-06,3.8198784834877066e-07,2.2829658786579562e-06,6.128670820535317e-06,tokens/kWh,41770884.33958986,MB,3202.973696,4823.318528,0.0,4406.116352,4310.80192,s,10,22.066776123046875,2.2066776123046874,0.008571461987149986,2.2095023193359373,2.2147143310546875,2.215151477050781,2.2155011938476563,"[2.196512451171875, 2.18813330078125, 2.21010595703125, 2.2146171875, 2.21225537109375, 2.2003046875, 2.215588623046875, 2.214185546875, 2.208898681640625, 
2.20617431640625]",tokens/s,28.549707328657696,kWh,6.44636048748078e-05,7.110163045999197e-06,3.3964007563347716e-05,0.0001055377754841547,tokens/kWh,596942.655944636,,s,630,22.06426353073119,0.03502264052497016,0.000581050024881464,0.034918254852294925,0.03546875991821289,0.03566784496307373,0.03688737731933596,"[0.03543667221069336, 0.03461939239501953, 0.03520841598510742, 0.034452064514160156, 0.03450080108642578, 0.0349587516784668, 0.0344617919921875, 0.03451136016845703, 0.03504880142211914, 0.035195552825927734, 0.03469420623779297, 0.03454457473754883, 0.03486896133422852, 0.03480928039550781, 0.03463462448120117, 0.03493824005126953, 0.03558176040649414, 0.035438625335693356, 0.035270401000976566, 0.03506687927246094, 0.03511471939086914, 0.03473417663574219, 0.03471583938598633, 0.03476601409912109, 0.03479430389404297, 0.03467388916015625, 0.03488438415527344, 0.034799327850341795, 0.03477657699584961, 0.03470006561279297, 0.0347586555480957, 0.034801856994628906, 0.03489535903930664, 0.03467910385131836, 0.03459052658081055, 0.03469945526123047, 0.03458816146850586, 0.03573401641845703, 0.03485414505004883, 0.03440224075317383, 0.03557187271118164, 0.03611920166015625, 0.035095680236816404, 0.0348941764831543, 0.03530192184448242, 0.03463753509521485, 0.034859294891357424, 0.03469420623779297, 0.03495212936401367, 0.03481190490722656, 0.034791168212890626, 0.03478252792358399, 0.034823104858398436, 0.03460230255126953, 0.03473478317260742, 0.03456777572631836, 0.034694625854492185, 0.03453843307495117, 0.03582361602783203, 0.0345285758972168, 0.0345074577331543, 0.03429171371459961, 0.03445471954345703, 0.03530137634277344, 0.034847774505615235, 0.03462060928344726, 0.034316062927246094, 0.03446988677978516, 0.034674526214599606, 0.035157310485839845, 0.03493360137939453, 0.03504854583740234, 0.03471811294555664, 0.0346627197265625, 0.03452947235107422, 0.03461452865600586, 0.03447615814208985, 0.03445116806030273, 0.03423884963989258, 0.03430374526977539, 0.034159358978271485, 0.03463087844848633, 0.03431676864624023, 0.03458287811279297, 0.03468902587890625, 0.034510078430175783, 0.03561929702758789, 0.03476639938354492, 0.03449724960327148, 0.03462348937988281, 0.03449446487426758, 0.034884960174560546, 0.03467718505859375, 0.03452540969848633, 0.03445145416259766, 0.03456195068359375, 0.03457443237304687, 0.034879264831542967, 0.03566140747070313, 0.0349804801940918, 0.03505676651000977, 0.035103614807128904, 0.035102718353271486, 0.03510480117797852, 0.03488764953613281, 0.03490816116333008, 0.03470463943481445, 0.03480640029907227, 0.03466387176513672, 0.034656513214111326, 0.03477958297729492, 0.0346130256652832, 0.03457251358032227, 0.03472793579101562, 0.034508800506591795, 0.0348438720703125, 0.03449935913085937, 0.03462313461303711, 0.034533729553222654, 0.03501260757446289, 0.03500646209716797, 0.03475046539306641, 0.03461119842529297, 0.03501875305175781, 0.03459231948852539, 0.034750560760498046, 0.03590758514404297, 0.03487334442138672, 0.03531161499023437, 0.03493247985839844, 0.03496575927734375, 0.03488972854614258, 0.034871295928955076, 0.034788734436035154, 0.03488419342041016, 0.03523555374145508, 0.03473030471801758, 0.03498089599609375, 0.034874271392822266, 0.035388641357421875, 0.034768798828125, 0.03490828704833984, 0.03538739013671875, 0.03550291061401367, 0.034947071075439456, 0.0350904312133789, 0.03469311904907227, 0.0346130256652832, 0.03480963134765625, 0.03518099212646485, 0.03594649505615234, 0.03523104095458984, 0.03513948822021484, 
0.035482398986816405, 0.03608575820922852, 0.03530137634277344, 0.03530310440063476, 0.03528646469116211, 0.0351321907043457, 0.03530665588378906, 0.03498025512695312, 0.03523408126831055, 0.035289344787597654, 0.03514374542236328, 0.03519276809692383, 0.035127296447753906, 0.03538521575927735, 0.035049598693847654, 0.03502284622192383, 0.03633152008056641, 0.03490816116333008, 0.03479244613647461, 0.03458560180664062, 0.0348590087890625, 0.03479721450805664, 0.03465046310424805, 0.03466854476928711, 0.03462963104248047, 0.034883007049560544, 0.03487942504882813, 0.034748416900634765, 0.03527948760986328, 0.03533545684814453, 0.035257057189941404, 0.03509657669067383, 0.034797569274902344, 0.034731937408447267, 0.03470451354980469, 0.03474121475219726, 0.0354837760925293, 0.03616019058227539, 0.03617587280273438, 0.03501875305175781, 0.035297279357910154, 0.03495673751831055, 0.03480352020263672, 0.03472051239013672, 0.034617534637451174, 0.03465584182739258, 0.03503081512451172, 0.03476051330566406, 0.034581119537353516, 0.03454771041870117, 0.03458777618408203, 0.034655105590820315, 0.03501260757446289, 0.03499135971069336, 0.03489664077758789, 0.03478732681274414, 0.03491603088378906, 0.03512351989746094, 0.03478851318359375, 0.03486329650878906, 0.034989887237548825, 0.0350810546875, 0.0348834228515625, 0.03510038375854492, 0.03491884613037109, 0.0363355827331543, 0.03481504058837891, 0.03467567825317383, 0.034961406707763674, 0.035250175476074216, 0.03481926345825195, 0.034844959259033206, 0.03459740829467774, 0.03471155166625976, 0.03472588729858399, 0.034813953399658204, 0.03482953643798828, 0.034777889251708986, 0.03469491195678711, 0.03472000122070312, 0.03494412612915039, 0.03513183975219727, 0.035557823181152345, 0.03554227066040039, 0.035369823455810544, 0.03533939361572266, 0.03545702362060547, 0.03545782470703125, 0.03546726226806641, 0.03550611114501953, 0.03525843048095703, 0.03558195114135742, 0.03543654251098633, 0.0357902717590332, 0.0353818244934082, 0.03532185745239258, 0.03604841613769531, 0.0382509765625, 0.035528255462646485, 0.03566918563842773, 0.03519161605834961, 0.034807167053222655, 0.03484889602661133, 0.034777599334716795, 0.03461734390258789, 0.034697216033935545, 0.03487948989868164, 0.03544268798828125, 0.03535871887207031, 0.03525603103637695, 0.03508371353149414, 0.03509539031982422, 0.03520668792724609, 0.03480547332763672, 0.0350233268737793, 0.034918689727783205, 0.03502899169921875, 0.03509420776367188, 0.03495353698730469, 0.03478707122802734, 0.03475686264038086, 0.034947071075439456, 0.03482419204711914, 0.03493497467041016, 0.03494892883300781, 0.03494911956787109, 0.03584204864501953, 0.034938880920410156, 0.035098560333251955, 0.03507001495361328, 0.034918174743652344, 0.03453155136108398, 0.03469311904907227, 0.035048927307128906, 0.03469366455078125, 0.03492393493652344, 0.035445343017578124, 0.03531753540039063, 0.03469686508178711, 0.034955360412597655, 0.0349183349609375, 0.03487612915039062, 0.035286846160888674, 0.034971649169921876, 0.03482624053955078, 0.03475388717651367, 0.0347979850769043, 0.0351910400390625, 0.03498188781738281, 0.03462144088745117, 0.034969856262207034, 0.03505126571655273, 0.035057247161865236, 0.035006881713867184, 0.038739070892333985, 0.03517862319946289, 0.03492326354980469, 0.03527475357055664, 0.034815711975097655, 0.035018974304199216, 0.03816624069213867, 0.03549753570556641, 0.035631103515625, 0.03500812911987305, 0.03494540786743164, 0.034862846374511716, 0.034842750549316404, 0.03523596954345703, 
0.03520307159423828, 0.03486860656738281, 0.03478489685058594, 0.034941951751708986, 0.03497366333007813, 0.03561065673828125, 0.034653759002685545, 0.034757057189941404, 0.03492454528808594, 0.03481536102294922, 0.03464255905151367, 0.03461939239501953, 0.034608448028564456, 0.03492524719238281, 0.034944320678710936, 0.03487814331054687, 0.034983009338378904, 0.034872222900390625, 0.03467468643188477, 0.03494899368286133, 0.035135616302490236, 0.03530543899536133, 0.036493343353271486, 0.03520899200439453, 0.035078369140625, 0.03512908935546875, 0.034871551513671876, 0.03471516799926758, 0.03464444732666016, 0.03487680053710938, 0.03471366500854492, 0.034729888916015625, 0.03466649627685547, 0.034584510803222654, 0.03465615844726563, 0.03461203384399414, 0.03463577651977539, 0.03480166244506836, 0.034713600158691404, 0.03481804656982422, 0.03474124908447265, 0.03469823837280273, 0.03473775863647461, 0.035074462890625, 0.03471769714355469, 0.03474208068847656, 0.0347437744140625, 0.034855648040771486, 0.03476684951782227, 0.0349224967956543, 0.035280895233154294, 0.0351907844543457, 0.03504537582397461, 0.03510067367553711, 0.034905120849609374, 0.03505865478515625, 0.03491430282592774, 0.035808609008789065, 0.035330078125, 0.03484137725830078, 0.034751617431640625, 0.034554271697998046, 0.034451934814453126, 0.03461328125, 0.03462144088745117, 0.03470131301879883, 0.0347044792175293, 0.03471452713012695, 0.034915454864501955, 0.034730239868164064, 0.03460671997070312, 0.034612224578857424, 0.03470060729980469, 0.03460537719726563, 0.03468531036376953, 0.034661727905273436, 0.038443679809570315, 0.03559123229980469, 0.035087295532226566, 0.034909503936767575, 0.034675392150878906, 0.03485599899291992, 0.03495993423461914, 0.034957504272460936, 0.03508652877807617, 0.03579904174804688, 0.035422206878662106, 0.03528908920288086, 0.035368801116943356, 0.03503343963623047, 0.03523750305175781, 0.035296863555908206, 0.03538800048828125, 0.0352168960571289, 0.035357185363769535, 0.03552870559692383, 0.03519420623779297, 0.03510953521728516, 0.03513753509521484, 0.03504537582397461, 0.035089534759521486, 0.03527318572998047, 0.03518300628662109, 0.035348255157470705, 0.03548767852783203, 0.035666206359863284, 0.03565347290039062, 0.03533430480957031, 0.03551641464233399, 0.035760128021240234, 0.03557545471191406, 0.035238014221191404, 0.03505939102172852, 0.035172897338867186, 0.03525222396850586, 0.03518873596191406, 0.035198974609375, 0.03512134552001953, 0.03498559951782226, 0.03562105560302734, 0.035797119140625, 0.03549728012084961, 0.03532051086425781, 0.03517030334472656, 0.035192127227783206, 0.035084991455078124, 0.03490803146362305, 0.03483622360229492, 0.03487577438354492, 0.03496931076049805, 0.03582559967041016, 0.03479142379760742, 0.03451267242431641, 0.0345912971496582, 0.03465216064453125, 0.03496550369262695, 0.034947040557861325, 0.03482998275756836, 0.03534822463989258, 0.03548223876953125, 0.03554304122924805, 0.04401561737060547, 0.037599231719970705, 0.03561471939086914, 0.035178497314453126, 0.03509657669067383, 0.03493478393554687, 0.03504348754882813, 0.03483849716186523, 0.034674560546875, 0.034770591735839844, 0.03450300979614258, 0.03475609588623047, 0.035066017150878905, 0.03478262329101563, 0.03481081771850586, 0.03504848098754883, 0.03482108688354492, 0.03461529541015625, 0.034674560546875, 0.034652095794677734, 0.034785472869873046, 0.0344881591796875, 0.03479158401489258, 0.03483011245727539, 0.0348138542175293, 0.03471084976196289, 0.0347064323425293, 
0.03466815948486328, 0.0348304328918457, 0.035180831909179686, 0.03488297653198242, 0.0347465934753418, 0.034778816223144535, 0.03495391845703125, 0.034902015686035154, 0.03502428817749023, 0.034878047943115234, 0.03482620620727539, 0.03581135940551758, 0.034888896942138675, 0.03487622451782227, 0.034928638458251955, 0.036227455139160154, 0.03576422500610352, 0.03561471939086914, 0.0354890251159668, 0.035334911346435544, 0.03527219009399414, 0.03528140640258789, 0.03514777755737305, 0.035227649688720705, 0.03544630432128906, 0.03519740676879883, 0.03509423828125, 0.03516854476928711, 0.03533798217773437, 0.03646694564819336, 0.035418113708496096, 0.03509862518310547, 0.035119102478027346, 0.03513945770263672, 0.03484428787231445, 0.03482783889770508, 0.03518560028076172, 0.03493097686767578, 0.03468777465820312, 0.034929119110107425, 0.03492502212524414, 0.03492233657836914, 0.03510287857055664, 0.035000446319580075, 0.03542547225952149, 0.035039295196533204, 0.03478796768188477, 0.03489379119873047, 0.034684288024902345, 0.034923168182373045, 0.03503833770751953, 0.034779998779296876, 0.03484060668945312, 0.03488716888427734, 0.03482470321655273, 0.034887680053710936, 0.03468697738647461, 0.03490115356445313, 0.03512390518188477, 0.03487145614624024, 0.03472518539428711, 0.03468563079833984, 0.0347606086730957, 0.034938976287841796, 0.03500646209716797, 0.03461939239501953, 0.034772735595703125, 0.0347589111328125, 0.03483647918701172, 0.034766334533691406, 0.03470182418823242, 0.03500032043457031, 0.03535257720947266, 0.035211265563964846, 0.034904064178466795, 0.03491788864135742, 0.03494144058227539, 0.034942081451416016, 0.035883007049560545, 0.03546502304077148, 0.03505302429199219, 0.03471023941040039, 0.034781185150146485, 0.03482624053955078, 0.034977790832519534, 0.03485712051391601, 0.03480057525634766, 0.03518147277832031, 0.03484672164916992, 0.03504252624511719, 0.035054367065429685, 0.03462489700317383, 0.03553548812866211, 0.037048320770263675, 0.034786846160888674, 0.03466902542114258, 0.03508736038208008, 0.03516841506958008, 0.03551270294189453, 0.03521993637084961, 0.03498188781738281, 0.034971359252929685, 0.03459305572509765, 0.03511868667602539, 0.03524854278564453, 0.03513689422607422, 0.03507059097290039, 0.03517187118530273, 0.034871776580810546, 0.034912254333496096, 0.03507174301147461, 0.03503731155395508, 0.03486937713623047, 0.03511062240600586, 0.03501030349731445, 0.03489641571044922, 0.03502272033691406, 0.035028480529785154, 0.034931137084960935, 0.034922687530517575, 0.03483238220214844, 0.034796737670898435, 0.03486598587036133, 0.03480780792236328, 0.03480476760864258, 0.03506480026245117, 0.035215263366699216, 0.03520521545410156, 0.034869247436523435, 0.0348851203918457, 0.03482787322998047, 0.034878047943115234, 0.03483270263671875, 0.03495280075073242, 0.03491836929321289, 0.034963134765625, 0.03502771377563477, 0.03479347229003906, 0.034790782928466796, 0.034761344909667966, 0.03472588729858399]",tokens/s,28.552958458030258,,, 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL 
CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 205, in run_text_generation_memory_tracking _ = backend.generate(self.inputs, self.config.generate_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 454, in generate return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl 
return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 669, in forward hidden_states = self.mlp(hidden_states) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 223, in forward return self.down_proj(self.act_fn(self.gate_proj(hidden_state)) * self.up_proj(hidden_state)) RuntimeError: CUDA error: an illegal memory access was encountered CUDA kernel errors might be asynchronously reported at some other API call, so the stacktrace below might be incorrect. For debugging consider passing CUDA_LAUNCH_BLOCKING=1 Compile with `TORCH_USE_CUDA_DSA` to enable device-side assertions. " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1210.740736,1155.39968,0.0,752.877568,710.554112,s,1,8.626697265625,8.626697265625,0.0,8.626697265625,8.626697265625,8.626697265625,8.626697265625,[8.626697265625],,kWh,4.44533166291573e-05,4.896336711864717e-06,1.509973430197098e-05,6.444938764299299e-05,,MB,1532.370944,1436.418048,0.0,1019.215872,949.099008,s,10,0.30884288215637207,0.030884288215637205,0.00023080657791462068,0.030854736328125,0.031202070236206053,0.03123500347137451,0.03126135005950927,"[0.031267936706542966, 0.03057459259033203, 0.030926464080810546, 0.03108937644958496, 0.030783008575439454, 0.030970399856567382, 0.030622528076171874, 0.030718656539916993, 0.031194751739501952, 0.030695167541503907]",tokens/s,8289.004370525954,kWh,9.047780937307215e-07,9.975897402839035e-08,5.810100967306553e-07,1.585547164489767e-06,tokens/kWh,161458457.83299762,MB,1567.248384,1444.806656,0.0,1027.60448,949.101568,s,10,15.265451782226565,1.5265451782226562,0.00326762247936747,1.5260553588867187,1.53120849609375,1.5318847045898438,1.5324256713867188,"[1.5258612060546874, 1.52718994140625, 1.5325609130859374, 1.5310582275390625, 1.5226080322265625, 1.52624951171875, 1.521906005859375, 1.524331298828125, 1.524953369140625, 1.5287332763671875]",tokens/s,41.269659685637585,kWh,4.477051119585006e-05,4.937821085826458e-06,1.8167254901866396e-05,6.78755871835429e-05,tokens/kWh,928168.7660342623,,s,630,15.2632564239502,0.024227391149127295,0.000534041988179136,0.024107423782348633,0.024447324752807618,0.024762432384490968,0.027310944194793716,"[0.024268415451049803, 0.024197023391723634, 0.024246271133422852, 0.024159967422485353, 0.024252096176147462, 0.02411795234680176, 0.02394281578063965, 0.023988576889038087, 0.024161888122558595, 0.02409459114074707, 0.02422604751586914, 0.028479776382446288, 0.02435478401184082, 0.023932960510253905, 0.023966815948486327, 0.024021856307983397, 0.02395452880859375, 0.02396406364440918, 0.024017375946044923, 0.024011104583740235, 0.023995296478271484, 0.023948127746582032, 0.02394316864013672, 
0.023973888397216796, 0.024033279418945314, 0.0240263671875, 0.024029951095581054, 0.023983200073242186, 0.024158143997192384, 0.02422268867492676, 0.024199167251586915, 0.024325952529907227, 0.02413113594055176, 0.02409881591796875, 0.02415247917175293, 0.0241823673248291, 0.02418547248840332, 0.02403865623474121, 0.024036096572875976, 0.02407219123840332, 0.023961599349975587, 0.024178304672241212, 0.024246400833129882, 0.024221952438354493, 0.023961599349975587, 0.02396313667297363, 0.024545055389404297, 0.024036064147949218, 0.023912288665771483, 0.024103071212768553, 0.024337600708007813, 0.024053855895996092, 0.024111839294433595, 0.0247459831237793, 0.027072383880615235, 0.024108800888061523, 0.02426304054260254, 0.02388960075378418, 0.024065568923950197, 0.02405414390563965, 0.023933343887329102, 0.023838720321655273, 0.02395955276489258, 0.023977983474731446, 0.0238255672454834, 0.023951744079589842, 0.02393519973754883, 0.024029056549072267, 0.024244224548339844, 0.024267103195190428, 0.02421763229370117, 0.024424448013305664, 0.024254463195800782, 0.024176448822021485, 0.024131391525268556, 0.024932031631469728, 0.025126943588256834, 0.02424892807006836, 0.02420128059387207, 0.024162303924560546, 0.024131584167480468, 0.02394291114807129, 0.02412553596496582, 0.023877792358398438, 0.024102912902832032, 0.02404761505126953, 0.024014400482177733, 0.02390880012512207, 0.02408790397644043, 0.02414863967895508, 0.024159423828125, 0.024122175216674806, 0.02412291145324707, 0.02541001510620117, 0.02434668731689453, 0.028626720428466798, 0.02444713592529297, 0.024036479949951173, 0.024056447982788085, 0.024289535522460937, 0.0241592960357666, 0.023941503524780274, 0.023966272354125975, 0.024069728851318358, 0.024156864166259766, 0.02401251220703125, 0.024178144454956054, 0.02393961524963379, 0.024085887908935545, 0.024023168563842772, 0.023992319107055664, 0.023986207962036134, 0.024218175888061525, 0.023947168350219726, 0.023977983474731446, 0.02388787269592285, 0.023981376647949217, 0.024046272277832032, 0.023924352645874024, 0.023937055587768555, 0.024217887878417967, 0.02434182357788086, 0.024632064819335938, 0.024621055603027343, 0.02433843231201172, 0.02427494430541992, 0.02423097610473633, 0.02401126480102539, 0.024150175094604494, 0.0243786563873291, 0.023921152114868165, 0.024024864196777344, 0.024516128540039064, 0.023921247482299804, 0.02389433670043945, 0.02405990409851074, 0.028446176528930663, 0.02467193603515625, 0.024599391937255858, 0.025640127182006835, 0.024222528457641602, 0.024028160095214843, 0.023989248275756835, 0.024006656646728516, 0.023846399307250975, 0.023833087921142578, 0.024162303924560546, 0.02393497657775879, 0.023940767288208008, 0.023902463912963866, 0.02400265693664551, 0.023863296508789062, 0.024190208435058595, 0.023991039276123047, 0.024020576477050783, 0.023945087432861327, 0.023951904296875, 0.023918144226074217, 0.023908287048339843, 0.024029535293579103, 0.025180320739746093, 0.024089759826660156, 0.02767958450317383, 0.024210975646972655, 0.02389859199523926, 0.023850080490112304, 0.023993471145629882, 0.02403673553466797, 0.02398361587524414, 0.0238799991607666, 0.024096927642822265, 0.024101312637329102, 0.024031232833862305, 0.02402873611450195, 0.0240676155090332, 0.02399068832397461, 0.02404812812805176, 0.02412544059753418, 0.024260288238525392, 0.02838969612121582, 0.024215551376342775, 0.024274303436279298, 0.024225856781005858, 0.02411302375793457, 0.02414252853393555, 0.02409881591796875, 0.024778751373291014, 0.02415001678466797, 
0.024225536346435546, 0.024479488372802734, 0.024465408325195313, 0.024209600448608398, 0.024334144592285157, 0.02427903938293457, 0.024414207458496092, 0.024233983993530273, 0.024362176895141602, 0.024083263397216798, 0.024061952590942383, 0.02407423973083496, 0.02431590461730957, 0.02517196846008301, 0.024268800735473633, 0.024446943283081054, 0.02418076705932617, 0.024163904190063475, 0.024109504699707032, 0.023954496383666993, 0.02422265625, 0.02398774337768555, 0.023992799758911134, 0.023876800537109374, 0.02387436866760254, 0.023932928085327147, 0.02398745536804199, 0.02398080062866211, 0.024014528274536134, 0.024433151245117187, 0.02404947280883789, 0.024329439163208007, 0.02868288040161133, 0.024246528625488283, 0.025395103454589844, 0.02524393653869629, 0.024314783096313478, 0.024150943756103514, 0.024002464294433593, 0.02400214385986328, 0.02412099266052246, 0.023988288879394533, 0.023977983474731446, 0.02405177688598633, 0.024066688537597657, 0.02424358367919922, 0.024095359802246093, 0.024002559661865236, 0.024030912399291993, 0.024258880615234374, 0.024049407958984376, 0.024215808868408205, 0.02412544059753418, 0.024131584167480468, 0.02426470375061035, 0.024449024200439453, 0.024379392623901368, 0.02444601631164551, 0.024316864013671877, 0.0245166072845459, 0.02428927993774414, 0.024180032730102538, 0.024066783905029296, 0.024251712799072265, 0.0241167049407959, 0.024125568389892577, 0.024182880401611328, 0.024088960647583008, 0.02413475227355957, 0.02409721565246582, 0.024086368560791015, 0.024195455551147462, 0.024156415939331054, 0.024233184814453124, 0.02433216094970703, 0.02424870491027832, 0.024044063568115233, 0.024045503616333008, 0.024086591720581054, 0.024094720840454102, 0.024197120666503907, 0.02409369659423828, 0.024101728439331054, 0.023972192764282228, 0.023934528350830077, 0.024248575210571287, 0.024628768920898436, 0.02418841552734375, 0.024064191818237303, 0.024104896545410155, 0.024031103134155272, 0.0240600643157959, 0.023958335876464842, 0.02402332878112793, 0.024032672882080077, 0.024273216247558595, 0.024024255752563478, 0.02416313552856445, 0.024176639556884767, 0.02413327980041504, 0.024123743057250978, 0.02401487922668457, 0.024083488464355467, 0.024215583801269532, 0.024132543563842774, 0.024280895233154298, 0.024087871551513672, 0.02407846450805664, 0.02402992057800293, 0.024140895843505858, 0.024217920303344728, 0.024295967102050783, 0.02417161560058594, 0.02415836715698242, 0.024232959747314452, 0.024094144821166993, 0.02424870491027832, 0.02516377639770508, 0.025149343490600586, 0.02435696029663086, 0.024180736541748047, 0.02411420822143555, 0.024055967330932616, 0.024029056549072267, 0.02403014373779297, 0.024058048248291015, 0.023975328445434572, 0.024119167327880858, 0.025741439819335937, 0.02596067237854004, 0.024287200927734374, 0.024164159774780272, 0.024102495193481444, 0.024093088150024415, 0.024031232833862305, 0.024102912902832032, 0.023991296768188477, 0.024013824462890625, 0.024016031265258787, 0.024000991821289064, 0.02405824089050293, 0.02404761505126953, 0.024094720840454102, 0.0240762882232666, 0.024121408462524415, 0.023910560607910157, 0.02405958366394043, 0.024047296524047853, 0.024099231719970703, 0.024012319564819334, 0.024185247421264648, 0.024102975845336914, 0.02412294387817383, 0.024061983108520507, 0.024101280212402345, 0.023909727096557618, 0.02407823944091797, 0.023993024826049803, 0.02416441535949707, 0.024076255798339843, 0.02425859260559082, 0.02407526397705078, 0.023997440338134765, 0.024143871307373048, 
0.024212608337402342, 0.02411404800415039, 0.02409881591796875, 0.024073535919189454, 0.024107263565063475, 0.024015296936035158, 0.02407219123840332, 0.02409062385559082, 0.024156160354614258, 0.024070144653320313, 0.024195072174072265, 0.024118656158447264, 0.024742528915405272, 0.02476032066345215, 0.02476416015625, 0.02447337532043457, 0.024400640487670898, 0.024563423156738283, 0.024393152236938477, 0.02436265563964844, 0.024539648056030275, 0.024433055877685548, 0.02415123176574707, 0.024052000045776366, 0.024360607147216797, 0.024277376174926757, 0.024426496505737305, 0.024309087753295898, 0.024251039505004884, 0.02433433532714844, 0.024205312728881836, 0.024209407806396483, 0.024045696258544923, 0.024031103134155272, 0.024057855606079103, 0.024108959197998048, 0.024140960693359376, 0.024130495071411132, 0.024188928604125977, 0.02407769584655762, 0.024102848052978517, 0.024084512710571288, 0.02409129524230957, 0.024241952896118163, 0.02398847961425781, 0.024070112228393555, 0.02407769584655762, 0.024033920288085937, 0.02393907165527344, 0.024262847900390624, 0.024202592849731447, 0.02428268814086914, 0.024289791107177734, 0.024350976943969725, 0.024094879150390627, 0.02410809516906738, 0.024389759063720703, 0.024138656616210938, 0.024111007690429686, 0.02409676742553711, 0.02408448028564453, 0.024422399520874022, 0.023986175537109376, 0.024010751724243166, 0.024040607452392577, 0.02408550453186035, 0.02417033576965332, 0.024211456298828125, 0.02429747200012207, 0.024131584167480468, 0.024174367904663086, 0.024027360916137695, 0.024035327911376952, 0.024337535858154298, 0.024298368453979494, 0.024156160354614258, 0.024063360214233397, 0.024101119995117187, 0.024295936584472655, 0.024127359390258788, 0.023988191604614257, 0.02405379295349121, 0.024188928604125977, 0.02411894416809082, 0.02407868766784668, 0.02434048080444336, 0.02428009605407715, 0.024099807739257812, 0.0240164794921875, 0.024110048294067384, 0.024066015243530272, 0.023975967407226562, 0.024508607864379882, 0.024084287643432616, 0.023998464584350586, 0.02495235252380371, 0.02398896026611328, 0.023985151290893555, 0.02391116714477539, 0.023879680633544922, 0.02392064094543457, 0.023995456695556642, 0.023941183090209962, 0.023911296844482424, 0.024012800216674804, 0.02392064094543457, 0.023907583236694337, 0.023935808181762695, 0.024281024932861328, 0.02412748718261719, 0.02400214385986328, 0.023896480560302736, 0.023998464584350586, 0.023945215225219727, 0.023920608520507813, 0.02391651153564453, 0.023900224685668946, 0.023863296508789062, 0.02387126350402832, 0.023943391799926758, 0.02420128059387207, 0.024, 0.024017471313476563, 0.02392684745788574, 0.024046783447265626, 0.024233919143676758, 0.02404435157775879, 0.024292991638183593, 0.024154367446899413, 0.024194751739501953, 0.024222015380859375, 0.02470911979675293, 0.024573919296264648, 0.024600608825683594, 0.024598112106323244, 0.0251560001373291, 0.0247554874420166, 0.024658655166625975, 0.02452070426940918, 0.024560928344726562, 0.024355552673339845, 0.024098527908325194, 0.02415376091003418, 0.024369792938232424, 0.024199392318725584, 0.023987232208251955, 0.026022655487060547, 0.024180736541748047, 0.024170112609863282, 0.024241888046264648, 0.024109407424926756, 0.024068416595458983, 0.024156095504760743, 0.024142143249511718, 0.024084192276000976, 0.024038047790527345, 0.02408038330078125, 0.024018495559692384, 0.023936927795410155, 0.0239715518951416, 0.023989055633544924, 0.024203264236450195, 0.024223455429077147, 0.02423222351074219, 
0.02405900764465332, 0.024238975524902343, 0.024131200790405274, 0.02404595184326172, 0.023963008880615234, 0.02394780731201172, 0.02396988868713379, 0.02392678451538086, 0.02394726371765137, 0.02395913505554199, 0.02420947265625, 0.024250719070434572, 0.024018943786621092, 0.02409814453125, 0.024064640045166015, 0.02416035270690918, 0.024110464096069335, 0.02403727912902832, 0.0242871036529541, 0.0240361270904541, 0.02407423973083496, 0.024029184341430664, 0.024035072326660155, 0.023953664779663087, 0.024055168151855467, 0.02443123245239258, 0.02527836799621582, 0.026904319763183592, 0.02542153549194336, 0.024381919860839842, 0.024295583724975586, 0.02409267234802246, 0.024068063735961914, 0.024011999130249023, 0.023968128204345702, 0.02400624084472656, 0.024107583999633787, 0.02397209548950195, 0.02411903953552246, 0.024029151916503906, 0.02431007957458496, 0.024381439208984376, 0.024553184509277345, 0.024422143936157225, 0.0242891845703125, 0.02419980812072754, 0.024172224044799805, 0.024033599853515625, 0.024162303924560546, 0.02426188850402832, 0.02417465591430664, 0.025659391403198242, 0.02471731185913086, 0.024033279418945314, 0.02406393623352051, 0.02398361587524414, 0.024123199462890627, 0.02407644844055176, 0.02407468795776367, 0.02417679977416992, 0.02409676742553711, 0.024082080841064453, 0.02409872055053711, 0.02411065673828125, 0.02406048011779785, 0.024142143249511718, 0.024129087448120118, 0.024048063278198244, 0.024106592178344727, 0.024037696838378905, 0.024194559097290038, 0.024169055938720704, 0.024149696350097657, 0.024031551361083984, 0.023969791412353517, 0.023945119857788084, 0.023993471145629882, 0.024044511795043945, 0.024034912109375, 0.023978399276733398, 0.02411420822143555, 0.024282079696655273, 0.024602624893188478, 0.024188287734985353, 0.02411747169494629, 0.024140192031860352, 0.02410905647277832, 0.023969120025634765, 0.02405580711364746, 0.024299360275268553, 0.024173376083374023, 0.024700927734375, 0.024821760177612305, 0.02503011131286621, 0.02436764717102051, 0.02425881576538086, 0.024284927368164063, 0.024309120178222655, 0.024033920288085937, 0.024059551239013672, 0.024092639923095703, 0.024185216903686524, 0.024090112686157225, 0.024054271697998047, 0.02413363265991211, 0.02411929512023926, 0.024053504943847656, 0.0241474552154541, 0.024060672760009765, 0.02524675178527832, 0.024099807739257812, 0.02410905647277832, 0.02740838432312012, 0.024451072692871095]",tokens/s,41.27559562004353,,, 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = 
launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return 
FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Qwen/Qwen-7B contains custom code which must be executed to correctly load the model. 
You can inspect the repository content at https://hf.co/Qwen/Qwen-7B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1865.863168,2726.166528,0.0,2340.421632,2285.568,s,1,8.95863671875,8.95863671875,0.0,8.95863671875,8.95863671875,8.95863671875,8.95863671875,[8.95863671875],,kWh,5.4441351795821894e-05,5.991595866205535e-06,1.7412236151981286e-05,7.784518381400872e-05,,MB,1833.984,3095.26528,0.0,2680.160256,2578.857984,s,10,0.8459479370117187,0.08459479370117187,0.0003994235559614204,0.08466056060791016,0.08489867477416992,0.08519668235778809,0.08543508842468261,"[0.08470025634765625, 0.08411074829101563, 0.0847149429321289, 0.08549468994140624, 0.08410809326171875, 0.08415248107910156, 0.08474041748046875, 0.08483245086669922, 0.08462086486816406, 0.08447299194335937]",tokens/s,3026.190960454505,kWh,2.6323176378756615e-06,2.9024337125104117e-07,1.743790183819774e-06,4.666351192946477e-06,tokens/kWh,54860851.53362702,MB,1840.04608,3097.362432,0.0,2680.160256,2578.860544,s,10,16.557476318359374,1.6557476318359374,0.005427582160871653,1.655234619140625,1.6626841308593752,1.6643317504882813,1.6656498461914062,"[1.6519024658203125, 1.6623179931640626, 1.6555, 1.652006103515625, 1.6589691162109375, 1.65496923828125, 1.6584176025390625, 1.649270751953125, 1.6659793701171874, 1.6481436767578126]",tokens/s,38.04927682737731,kWh,4.8186328150872377e-05,5.314698049954419e-06,2.4817231064980114e-05,7.83182572658069e-05,tokens/kWh,804410.1362748948,,s,630,16.555322980880746,0.026278290445842442,0.00042770129779658384,0.026185791969299314,0.02656294746398926,0.026774630928039552,0.0276405739402771,"[0.0265849609375, 0.026529792785644532, 0.02629136085510254, 0.026248031616210938, 0.02614681625366211, 0.026086784362792968, 0.025939647674560546, 0.025985984802246093, 0.026009599685668947, 0.026222015380859377, 0.0261781120300293, 0.026162912368774414, 0.026145055770874025, 0.026419200897216798, 0.026283327102661132, 0.02631907272338867, 0.026554815292358397, 0.026473888397216795, 0.026319135665893556, 0.026272159576416015, 0.02623072052001953, 0.026243072509765625, 0.026108959197998046, 0.02615772819519043, 0.02599900817871094, 0.026059328079223634, 0.026032159805297852, 0.026077247619628905, 0.02607923126220703, 0.02637414360046387, 0.026062208175659178, 0.026068960189819336, 0.026083423614501954, 0.026233407974243166, 0.026143808364868164, 0.026176448822021484, 0.026095104217529298, 0.026231296539306642, 0.02616435241699219, 0.026217023849487306, 0.026038528442382813, 0.026095104217529298, 0.026245695114135742, 0.0261345272064209, 0.026054655075073242, 0.026086719512939453, 0.026163360595703126, 0.02619241523742676, 0.02609561538696289, 0.026270719528198243, 0.02652988815307617, 0.026280704498291015, 0.026501279830932617, 0.02675916862487793, 0.026900480270385742, 0.026445823669433592, 
0.026436639785766602, 0.026211391448974608, 0.02601078414916992, 0.02598374366760254, 0.026052608489990234, 0.02613043212890625, 0.02607257652282715, 0.026856800079345704, 0.026511167526245116, 0.026106143951416017, 0.02609414482116699, 0.026167007446289064, 0.026294048309326173, 0.02741913604736328, 0.026904319763183592, 0.026500896453857423, 0.02640438461303711, 0.026520511627197266, 0.026425344467163086, 0.02622831916809082, 0.02618124771118164, 0.02603865623474121, 0.026024288177490234, 0.02595395278930664, 0.025985439300537108, 0.02595248031616211, 0.02597603225708008, 0.026014272689819335, 0.026138240814208985, 0.026573183059692383, 0.026133663177490236, 0.026293088912963867, 0.026334911346435546, 0.03038252830505371, 0.026779104232788085, 0.026530208587646483, 0.02677555274963379, 0.02631475257873535, 0.026154592514038087, 0.026239391326904296, 0.02627993583679199, 0.02615705680847168, 0.02662156867980957, 0.026190080642700196, 0.02626335906982422, 0.026054336547851564, 0.026309247970581054, 0.02612838363647461, 0.026236703872680664, 0.02611359977722168, 0.026094015121459962, 0.02615727996826172, 0.027084320068359376, 0.025993343353271484, 0.026261856079101562, 0.02685683250427246, 0.026882463455200196, 0.026238752365112306, 0.026210752487182618, 0.026529792785644532, 0.0264944953918457, 0.026134880065917968, 0.02606857681274414, 0.0259466552734375, 0.02675712013244629, 0.026144191741943358, 0.026030656814575195, 0.02613039970397949, 0.026136255264282225, 0.02634992027282715, 0.027080223083496092, 0.026474143981933592, 0.026423391342163087, 0.026170080184936523, 0.026081279754638673, 0.02615500831604004, 0.026206207275390626, 0.026085376739501953, 0.02608742332458496, 0.026066944122314452, 0.026171392440795898, 0.02650931167602539, 0.026006847381591796, 0.026069023132324218, 0.02596726417541504, 0.02599068832397461, 0.026132671356201172, 0.026429088592529296, 0.026278528213500976, 0.026228736877441407, 0.026202112197875976, 0.026234176635742186, 0.025995967864990234, 0.026076992034912108, 0.026056896209716796, 0.026187711715698243, 0.026057823181152344, 0.026106847763061523, 0.026793567657470704, 0.027492767333984376, 0.02648678398132324, 0.027088191986083983, 0.026539712905883788, 0.026618879318237306, 0.026570751190185548, 0.026435583114624024, 0.026492927551269533, 0.02650111961364746, 0.026201663970947267, 0.026117887496948242, 0.026178239822387695, 0.026343423843383788, 0.026330783843994142, 0.026036575317382814, 0.026025856018066406, 0.02605023956298828, 0.026031967163085937, 0.0260532169342041, 0.026117919921875, 0.026222816467285158, 0.026201568603515624, 0.0261549129486084, 0.026331775665283202, 0.026198015213012696, 0.02609971237182617, 0.026210304260253905, 0.02613862419128418, 0.02623404884338379, 0.026442432403564455, 0.02630463981628418, 0.026203872680664063, 0.026155296325683593, 0.0263372802734375, 0.026689535140991212, 0.026335552215576173, 0.026089151382446288, 0.026310655593872072, 0.026058752059936522, 0.026247167587280275, 0.026492927551269533, 0.026482688903808595, 0.026251264572143555, 0.02620796775817871, 0.02655836868286133, 0.02623958396911621, 0.026083103179931642, 0.02621254348754883, 0.026336511611938476, 0.026288703918457033, 0.026064895629882814, 0.026086912155151368, 0.026051071166992186, 0.02592563247680664, 0.026030080795288086, 0.026220352172851562, 0.026319040298461913, 0.02631679916381836, 0.026214399337768556, 0.026073087692260744, 0.02601478385925293, 0.026022207260131835, 0.025970783233642578, 0.02630505561828613, 0.025996416091918946, 
0.026143583297729492, 0.026166784286499024, 0.026384927749633788, 0.026416223526000978, 0.0261856632232666, 0.025991392135620118, 0.02612505531311035, 0.026053823471069337, 0.02590153694152832, 0.0258768310546875, 0.026080928802490234, 0.02631715202331543, 0.02633638381958008, 0.02709503936767578, 0.026379135131835936, 0.026627424240112305, 0.026268575668334963, 0.026089120864868164, 0.02618582344055176, 0.026275840759277344, 0.026177536010742186, 0.02622812843322754, 0.026412927627563476, 0.026356447219848634, 0.026168479919433593, 0.026061439514160158, 0.02619824028015137, 0.026080928802490234, 0.026249568939208986, 0.02614067268371582, 0.026206207275390626, 0.02612428855895996, 0.02677350425720215, 0.026399551391601564, 0.026459552764892577, 0.026397279739379883, 0.026078880310058592, 0.02633558464050293, 0.02648700714111328, 0.026178560256958007, 0.02601795196533203, 0.026163839340209962, 0.02611609649658203, 0.025997087478637694, 0.02605504035949707, 0.026199392318725586, 0.026085823059082032, 0.0260643196105957, 0.026221183776855467, 0.026359807968139647, 0.026222591400146485, 0.026167232513427733, 0.026161216735839845, 0.026628223419189453, 0.02631052780151367, 0.026281312942504884, 0.026190431594848632, 0.026226751327514647, 0.026220544815063477, 0.02642495918273926, 0.026330944061279296, 0.02638435173034668, 0.026176095962524414, 0.026302112579345702, 0.02660211181640625, 0.026576704025268554, 0.026415008544921875, 0.02631817626953125, 0.026407583236694336, 0.026499135971069336, 0.02647443199157715, 0.026615327835083007, 0.026593759536743165, 0.026879135131835936, 0.02655708885192871, 0.026558624267578126, 0.026974239349365235, 0.026652671813964843, 0.026416160583496093, 0.026257759094238282, 0.026104448318481445, 0.02614476776123047, 0.026150079727172853, 0.026359968185424805, 0.026486656188964845, 0.026337215423583984, 0.026252128601074218, 0.02652364730834961, 0.026396671295166017, 0.026203903198242187, 0.026120447158813478, 0.026143775939941407, 0.026090463638305663, 0.025985023498535157, 0.026249216079711913, 0.027088703155517577, 0.026249216079711913, 0.027665472030639647, 0.02623174476623535, 0.026222591400146485, 0.02612019157409668, 0.02610585594177246, 0.02607513618469238, 0.02609561538696289, 0.026109952926635743, 0.025960447311401368, 0.025974559783935546, 0.02594755172729492, 0.02608211135864258, 0.02591059112548828, 0.0259931526184082, 0.026364255905151367, 0.026242847442626952, 0.026096256256103515, 0.026056703567504884, 0.02614233589172363, 0.0260992317199707, 0.02589926338195801, 0.02609008026123047, 0.02619536018371582, 0.026404863357543946, 0.026585695266723632, 0.026730495452880858, 0.026430944442749023, 0.02634601593017578, 0.026174848556518554, 0.02602867126464844, 0.025972736358642577, 0.026080671310424804, 0.026210079193115233, 0.02607391929626465, 0.025992671966552736, 0.026022111892700196, 0.02609388732910156, 0.026238719940185548, 0.02662015914916992, 0.026363872528076173, 0.02757961654663086, 0.027152479171752928, 0.02640086364746094, 0.026272384643554688, 0.02640640068054199, 0.026304256439208983, 0.026090240478515624, 0.02625062370300293, 0.02624371147155762, 0.0263045768737793, 0.02619500732421875, 0.02630131149291992, 0.026449920654296875, 0.026181631088256836, 0.02618544006347656, 0.02630672073364258, 0.026237056732177733, 0.02614681625366211, 0.026054208755493163, 0.026059200286865234, 0.026238975524902345, 0.02670684814453125, 0.026258975982666015, 0.026097600936889648, 0.026091167449951172, 0.02614566421508789, 0.026140064239501954, 
0.026063743591308593, 0.025943775177001954, 0.025824352264404295, 0.02601603126525879, 0.026040960311889648, 0.025974559783935546, 0.026406591415405273, 0.026291872024536134, 0.026089887619018554, 0.026072927474975586, 0.026149023056030275, 0.026193599700927734, 0.02605516815185547, 0.02639286422729492, 0.026396383285522462, 0.026304800033569335, 0.025966527938842774, 0.02607459259033203, 0.02649679946899414, 0.02609440040588379, 0.02609766387939453, 0.026005504608154296, 0.02605881690979004, 0.02609286308288574, 0.02600204849243164, 0.026099008560180666, 0.026135232925415038, 0.02631679916381836, 0.026677024841308593, 0.026503616333007813, 0.026727264404296874, 0.026760128021240233, 0.026615007400512695, 0.026534048080444336, 0.02642393684387207, 0.02630393600463867, 0.02632761573791504, 0.026263200759887695, 0.026333471298217774, 0.026341440200805664, 0.027232095718383788, 0.026192031860351562, 0.026284032821655274, 0.026292224884033204, 0.026488832473754883, 0.026327039718627928, 0.02652569580078125, 0.026410207748413086, 0.02638108825683594, 0.028601919174194336, 0.027506879806518555, 0.026554496765136718, 0.026120319366455078, 0.02615839958190918, 0.025991840362548826, 0.02612022399902344, 0.026111135482788084, 0.026632192611694337, 0.026245119094848633, 0.02613043212890625, 0.025997312545776367, 0.0259583683013916, 0.026290143966674805, 0.02621446418762207, 0.026474496841430665, 0.026431488037109374, 0.026558464050292968, 0.029732864379882814, 0.026480640411376953, 0.026210304260253905, 0.02616908836364746, 0.026023679733276368, 0.02603267288208008, 0.02599113655090332, 0.026204160690307617, 0.026206207275390626, 0.02608742332458496, 0.02619219207763672, 0.026150592803955076, 0.026208255767822267, 0.026035648345947265, 0.026108287811279298, 0.02635385513305664, 0.026230783462524415, 0.025839487075805665, 0.0259421443939209, 0.02604044723510742, 0.02597056007385254, 0.02588876724243164, 0.02612633514404297, 0.025945823669433595, 0.02603036880493164, 0.026031391143798828, 0.02621718406677246, 0.026294271469116212, 0.025953983306884764, 0.02590342330932617, 0.02598819160461426, 0.0260863037109375, 0.025901023864746093, 0.026130464553833006, 0.025849855422973633, 0.025989120483398437, 0.026464256286621093, 0.02632499122619629, 0.026198015213012696, 0.026206207275390626, 0.02611404800415039, 0.026044416427612304, 0.02586419105529785, 0.026078592300415038, 0.025891103744506837, 0.02595814323425293, 0.0259586238861084, 0.02606118392944336, 0.026179584503173828, 0.02591049575805664, 0.02610665512084961, 0.026113824844360353, 0.02611177635192871, 0.03017942428588867, 0.026556095123291015, 0.02649350357055664, 0.026136255264282225, 0.026202560424804688, 0.02616851234436035, 0.026120479583740235, 0.026077728271484375, 0.025976831436157227, 0.02620345687866211, 0.026028032302856444, 0.026507167816162108, 0.029815103530883787, 0.029360128402709962, 0.026191839218139647, 0.02647488021850586, 0.026229984283447267, 0.026554752349853515, 0.026179103851318358, 0.02637081527709961, 0.026185760498046873, 0.02649884796142578, 0.02612268829345703, 0.02612006378173828, 0.026064895629882814, 0.026142688751220704, 0.026115360260009764, 0.026016639709472655, 0.026054655075073242, 0.026085344314575196, 0.026060831069946288, 0.02620198440551758, 0.026290304183959962, 0.026562080383300782, 0.026312864303588868, 0.02625958442687988, 0.026377471923828125, 0.026414016723632812, 0.026431488037109374, 0.026167295455932618, 0.026077247619628905, 0.02600048065185547, 0.02597545623779297, 0.02616543960571289, 
0.026258783340454103, 0.026169023513793944, 0.02620310401916504, 0.02613248062133789, 0.026180767059326173, 0.026239360809326172, 0.026399168014526367, 0.026124160766601564, 0.026153120040893554, 0.027375232696533202, 0.026457727432250975, 0.026679967880249022, 0.026548320770263673, 0.026823999404907227, 0.02634822463989258, 0.026310016632080078, 0.02624166488647461, 0.026334400177001952, 0.026261856079101562, 0.02735103988647461, 0.026634239196777345, 0.026592256546020508, 0.026590208053588867, 0.02650726318359375, 0.026574848175048828, 0.02649497604370117, 0.026286079406738282, 0.026133855819702147, 0.026107967376708983, 0.02619366455078125, 0.02624188804626465, 0.02606844711303711, 0.026023712158203125, 0.02601190376281738, 0.02601215934753418, 0.02607302474975586, 0.02614396858215332, 0.0260350399017334, 0.026044416427612304, 0.026138496398925782, 0.02601587104797363, 0.025923583984375, 0.025876256942749025, 0.02596886444091797, 0.02608742332458496, 0.026736640930175783, 0.026265600204467773, 0.026978303909301758, 0.026165088653564452, 0.0260579833984375, 0.025944992065429686, 0.02592767906188965, 0.02607513618469238, 0.026019840240478515, 0.02607084846496582, 0.025921375274658202, 0.02621183967590332, 0.02603708839416504, 0.02590924835205078, 0.02610345649719238, 0.026179935455322264, 0.026089471817016603, 0.02608742332458496, 0.02590105628967285, 0.025898208618164064, 0.026075231552124024, 0.02602668762207031, 0.026158143997192383, 0.025962623596191406, 0.02629055976867676, 0.026167743682861327, 0.02612166404724121, 0.0261309757232666, 0.026091487884521484, 0.02598031997680664, 0.02594063949584961, 0.02608742332458496, 0.026054655075073242, 0.026036224365234374, 0.025957855224609375, 0.026106399536132814, 0.026017791748046876]",tokens/s,38.05422586605943,,, 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in 
run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Qwen/Qwen-7B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-7B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Qwen/Qwen-7B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-7B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1331.367936,1134.42816,0.0,731.906048,703.86944,s,1,8.34591015625,8.34591015625,0.0,8.34591015625,8.34591015625,8.34591015625,8.34591015625,[8.34591015625],,kWh,3.410296163748777e-05,3.753918991818293e-06,1.0808619758051474e-05,4.8665500387357535e-05,,MB,1477.71392,1415.446528,0.0,998.244352,942.610432,s,10,1.6521077880859376,0.16521077880859375,0.0007510284553228716,0.16499352264404296,0.16594110412597654,0.1665691925048828,0.16707166320800781,"[0.16719728088378907, 0.16580152893066405, 0.16450851440429687, 0.16465219116210938, 0.16478323364257813, 0.16524861145019532, 0.16517181396484376, 0.16482841491699218, 0.16515863037109374, 0.164757568359375]",tokens/s,1549.5357012788543,kWh,5.05672035488537e-06,5.576610946945561e-07,3.3677325792408535e-06,8.982114028820779e-06,tokens/kWh,28501085.51044626,MB,1510.199296,1423.835136,0.0,1006.63296,942.612992,s,10,15.313814819335937,1.5313814819335936,0.005852185732180266,1.5312710571289063,1.5377463256835937,1.538392266845703,1.5389090197753905,"[1.5390382080078124, 1.5278355712890626, 1.5297022705078125, 1.53283984375, 1.537602783203125, 1.518294921875, 1.5279208984375, 1.534456298828125, 1.53688916015625, 1.52923486328125]",tokens/s,41.139324683783734,kWh,4.432688304886471e-05,4.888981180723432e-06,1.8691646179363453e-05,6.79075104089516e-05,tokens/kWh,927732.435199028,,s,630,15.311605516433715,0.024304135740370977,0.0005291514347000965,0.024207215309143066,0.024602852058410646,0.024841420650482178,0.02645584592819216,"[0.025204736709594725, 0.024545248031616212, 0.024323488235473634, 0.024287647247314453, 0.024445152282714842, 0.0245166072845459, 0.024442623138427735, 0.024414751052856447, 0.024344287872314452, 0.02427289581298828, 0.024370208740234375, 0.024079328536987306, 0.024145120620727538, 0.024062175750732422, 0.024205759048461915, 0.024211584091186525, 0.02421881675720215, 0.025193183898925782, 0.024469375610351562, 0.024311264038085936, 0.024119712829589843, 0.024159967422485353, 0.02412710380554199, 0.024060640335083008, 0.02422310447692871, 0.024062240600585937, 0.02418662452697754, 0.02411199951171875, 0.02877644729614258, 0.024669439315795898, 0.024496896743774414, 0.024819711685180663, 0.024774656295776368, 0.024602624893188478, 0.02512076759338379, 0.024634464263916016, 0.024548255920410156, 0.02467020797729492, 0.024629247665405272, 0.024465408325195313, 0.02422083282470703, 0.02442483139038086, 0.024147743225097655, 0.02419980812072754, 0.024367168426513673, 0.02492572784423828, 0.02427356719970703, 0.024248128890991212, 0.024260608673095704, 0.024250240325927735, 0.024440576553344726, 0.024158592224121093, 0.024237695693969726, 0.024229280471801756, 0.024300512313842774, 0.02421455955505371, 0.0242259521484375, 0.02407097625732422, 0.02435251235961914, 0.024010463714599608, 
0.023966239929199218, 0.02396953582763672, 0.024000448226928713, 0.024634143829345704, 0.024178367614746094, 0.02410323143005371, 0.02420025634765625, 0.024503007888793945, 0.0245413761138916, 0.024363040924072266, 0.024192768096923827, 0.024256767272949217, 0.024018943786621092, 0.024002559661865236, 0.024012800216674804, 0.024252416610717774, 0.024016895294189454, 0.02399465560913086, 0.024012319564819334, 0.024147808074951174, 0.02388412857055664, 0.024216896057128907, 0.024883295059204103, 0.02428892707824707, 0.024538047790527345, 0.025578527450561522, 0.026631135940551758, 0.024217599868774413, 0.02408038330078125, 0.024007999420166015, 0.024046272277832032, 0.02405276870727539, 0.024604896545410156, 0.02406595230102539, 0.024015296936035158, 0.02417705535888672, 0.024101119995117187, 0.026926464080810546, 0.024357248306274414, 0.024173568725585938, 0.024019968032836913, 0.02395136070251465, 0.023937023162841797, 0.023995872497558593, 0.024024608612060547, 0.023968767166137696, 0.024030784606933593, 0.02395795249938965, 0.02402243232727051, 0.0240666561126709, 0.024069728851318358, 0.024066400527954102, 0.024106143951416015, 0.0242390079498291, 0.024184415817260742, 0.02402275276184082, 0.023998783111572265, 0.02401638412475586, 0.02402396774291992, 0.024012672424316407, 0.024014944076538085, 0.023984128952026368, 0.024018144607543944, 0.024019744873046874, 0.02429862403869629, 0.024247167587280273, 0.025049055099487304, 0.02462928009033203, 0.024268800735473633, 0.024170495986938476, 0.02413363265991211, 0.023977983474731446, 0.023957311630249025, 0.02404751968383789, 0.023865631103515625, 0.023891744613647462, 0.02391267204284668, 0.024159807205200196, 0.02397235107421875, 0.024059839248657226, 0.024020320892333986, 0.024226272583007812, 0.024823999404907225, 0.02477670478820801, 0.024433727264404296, 0.024321983337402344, 0.024288415908813477, 0.024182432174682616, 0.02422524833679199, 0.024068832397460938, 0.023952415466308594, 0.023917343139648436, 0.023969024658203126, 0.0239932804107666, 0.02393231964111328, 0.02401340866088867, 0.023997503280639647, 0.02408038330078125, 0.024171039581298827, 0.02408678436279297, 0.024045408248901366, 0.024060224533081053, 0.024192928314208984, 0.024034656524658204, 0.02782080078125, 0.024377344131469726, 0.02423574447631836, 0.02416864013671875, 0.025446495056152343, 0.02408038330078125, 0.02410700798034668, 0.02415555191040039, 0.024080991744995117, 0.024142112731933594, 0.024157279968261718, 0.024345216751098634, 0.024209407806396483, 0.02415001678466797, 0.024207359313964845, 0.024213504791259766, 0.02429952049255371, 0.024203264236450195, 0.024207359313964845, 0.02450841522216797, 0.024696287155151368, 0.024445440292358397, 0.024547008514404296, 0.024351072311401368, 0.024412160873413087, 0.02520524787902832, 0.02446623992919922, 0.024488927841186524, 0.024508447647094728, 0.024475231170654296, 0.024279296875, 0.024194208145141602, 0.024333023071289064, 0.024637792587280275, 0.02429088020324707, 0.0241975040435791, 0.024144031524658202, 0.02412892723083496, 0.024192768096923827, 0.024275552749633788, 0.02424380874633789, 0.024086944580078123, 0.02399795150756836, 0.023998304367065428, 0.024197856903076173, 0.024088607788085938, 0.024186559677124023, 0.02428940773010254, 0.024184032440185545, 0.024295743942260743, 0.02431011199951172, 0.024211776733398437, 0.024301599502563477, 0.02437321662902832, 0.024813568115234375, 0.024414207458496092, 0.02463759994506836, 0.024223583221435547, 0.024192575454711915, 0.024207712173461914, 
0.024223264694213868, 0.024219263076782228, 0.02417475128173828, 0.024281887054443358, 0.0241408634185791, 0.024153024673461913, 0.024081407546997072, 0.02424934387207031, 0.024131584167480468, 0.024364416122436523, 0.024472192764282228, 0.02433228874206543, 0.024141536712646485, 0.02412723159790039, 0.024168384552001952, 0.02724019241333008, 0.02525868797302246, 0.02433244705200195, 0.024307903289794923, 0.024184640884399415, 0.02442163276672363, 0.02396441650390625, 0.02407219123840332, 0.0241081600189209, 0.024062240600585937, 0.024056415557861328, 0.024149215698242188, 0.024119808197021485, 0.025322175979614257, 0.02456985664367676, 0.024360095977783203, 0.024340351104736327, 0.02426550483703613, 0.024207199096679687, 0.024395103454589843, 0.024378047943115235, 0.024297760009765624, 0.024583328247070314, 0.024662208557128907, 0.02462073516845703, 0.024577024459838868, 0.024559520721435548, 0.024430688858032228, 0.024389631271362306, 0.02475436782836914, 0.02462851142883301, 0.02420403289794922, 0.024194847106933592, 0.024102912902832032, 0.02420319938659668, 0.024125024795532225, 0.024381439208984376, 0.02429795265197754, 0.024340351104736327, 0.024426624298095702, 0.02417241668701172, 0.024292640686035157, 0.02419183921813965, 0.024368896484375, 0.02427270317077637, 0.024168895721435546, 0.02406755256652832, 0.024386079788208007, 0.024258560180664062, 0.024419744491577147, 0.024459648132324218, 0.02454550361633301, 0.024258560180664062, 0.02436204719543457, 0.024286144256591796, 0.02496512031555176, 0.02590105628967285, 0.0245732479095459, 0.024416063308715822, 0.024321023941040038, 0.024223455429077147, 0.02457151985168457, 0.024287776947021486, 0.024271968841552735, 0.024212223052978515, 0.024146080017089844, 0.024134847640991212, 0.024363967895507814, 0.024373119354248046, 0.02416796875, 0.024498783111572265, 0.024409215927124025, 0.024451839447021485, 0.02433228874206543, 0.02432736015319824, 0.024314016342163087, 0.025284608840942382, 0.024467519760131836, 0.024288415908813477, 0.02424297523498535, 0.024133184432983398, 0.02397433662414551, 0.023957567214965822, 0.024258495330810547, 0.02418627166748047, 0.024145824432373047, 0.024116064071655275, 0.024094560623168945, 0.024432640075683593, 0.024205312728881836, 0.02416431999206543, 0.024129568099975587, 0.02419036865234375, 0.024112863540649412, 0.02412019157409668, 0.02399967956542969, 0.02441049575805664, 0.02407263946533203, 0.024087999343872072, 0.023890464782714844, 0.023914047241210937, 0.02401942443847656, 0.023963647842407225, 0.024014400482177733, 0.024035615921020506, 0.024047231674194335, 0.02398409652709961, 0.023994848251342772, 0.02394940757751465, 0.024048639297485352, 0.0239432315826416, 0.023849184036254883, 0.0239615364074707, 0.023929887771606446, 0.02403843116760254, 0.024056415557861328, 0.02401523208618164, 0.024018688201904295, 0.02399452781677246, 0.02398806381225586, 0.023900159835815428, 0.02393497657775879, 0.024000383377075194, 0.02391983985900879, 0.023911392211914063, 0.024004415512084962, 0.023916671752929688, 0.023859167098999025, 0.023962688446044923, 0.024324480056762694, 0.023939680099487305, 0.023990272521972656, 0.02414726448059082, 0.024649824142456055, 0.024299488067626954, 0.024071807861328124, 0.023992767333984377, 0.024040000915527344, 0.024487295150756837, 0.025061376571655275, 0.024428512573242186, 0.02421283149719238, 0.024109567642211914, 0.024066240310668945, 0.024033279418945314, 0.023971712112426758, 0.024024351119995117, 0.02400486373901367, 0.023949855804443358, 
0.023988096237182618, 0.024488128662109376, 0.024236032485961914, 0.024118463516235353, 0.024134464263916015, 0.02408448028564453, 0.023985984802246094, 0.0239815673828125, 0.024000448226928713, 0.024036096572875976, 0.024217567443847655, 0.024226879119873045, 0.024050655364990233, 0.0242293758392334, 0.024107295989990233, 0.024260704040527343, 0.024440031051635742, 0.024478015899658204, 0.024748640060424806, 0.024680511474609375, 0.024571840286254882, 0.02447939109802246, 0.02449135971069336, 0.024540000915527344, 0.02461712074279785, 0.024227840423583984, 0.02431184005737305, 0.02411654472351074, 0.025018144607543945, 0.02405824089050293, 0.02404969596862793, 0.024278879165649414, 0.024173183441162108, 0.024165407180786132, 0.024491039276123047, 0.024213119506835936, 0.024187200546264647, 0.024223360061645507, 0.02422777557373047, 0.024233856201171876, 0.024285600662231444, 0.02419932746887207, 0.024483840942382814, 0.02415951919555664, 0.024317888259887694, 0.024269311904907227, 0.024148256301879882, 0.024239839553833006, 0.02418012809753418, 0.024095775604248047, 0.024022880554199218, 0.02409062385559082, 0.024184831619262694, 0.024852479934692383, 0.024518016815185548, 0.024246400833129882, 0.024158527374267578, 0.024178335189819336, 0.024686784744262696, 0.02440575981140137, 0.02427676773071289, 0.024167392730712892, 0.024016031265258787, 0.023976640701293947, 0.023994176864624024, 0.023943359375, 0.023993696212768555, 0.024238431930541992, 0.024409536361694337, 0.027290271759033202, 0.02414614486694336, 0.024010751724243166, 0.024048992156982422, 0.023977632522583007, 0.023983104705810547, 0.02391244888305664, 0.024045087814331054, 0.02399827194213867, 0.024147808074951174, 0.024207231521606445, 0.024189823150634764, 0.025118783950805666, 0.03214281463623047, 0.024291584014892578, 0.024132896423339843, 0.02424892807006836, 0.024162208557128906, 0.024184896469116212, 0.024111455917358398, 0.024287328720092774, 0.024254463195800782, 0.02416655921936035, 0.024026975631713868, 0.023949151992797853, 0.024075616836547853, 0.023954463958740235, 0.023973663330078124, 0.023941120147705077, 0.023952991485595702, 0.023986591339111327, 0.024575199127197266, 0.02424198341369629, 0.024177631378173827, 0.02414556884765625, 0.024145952224731447, 0.0241312313079834, 0.024166336059570314, 0.024213920593261717, 0.024070079803466798, 0.024252799987792967, 0.024377344131469726, 0.024268192291259767, 0.024156991958618163, 0.02408790397644043, 0.02413817596435547, 0.02411929512023926, 0.02533366394042969, 0.02454582405090332, 0.024520063400268556, 0.02438390350341797, 0.02467430305480957, 0.024329439163208007, 0.024226240158081055, 0.024240095138549803, 0.02449180793762207, 0.024570463180541992, 0.02491539192199707, 0.025100608825683594, 0.024772607803344726, 0.024469247817993166, 0.02451718330383301, 0.024383424758911133, 0.02443059158325195, 0.02433843231201172, 0.024363008499145508, 0.024827903747558593, 0.02437843132019043, 0.024185792922973633, 0.024367103576660155, 0.024417951583862306, 0.024346559524536134, 0.024652191162109375, 0.02450841522216797, 0.024250368118286132, 0.02429952049255371, 0.024268800735473633, 0.02540883255004883, 0.02602668762207031, 0.024487903594970703, 0.024342079162597657, 0.02447612762451172, 0.024261791229248045, 0.024135679244995118, 0.024234752655029296, 0.024143743515014648, 0.024111328125, 0.024227359771728515, 0.024069984436035155, 0.024339071273803713, 0.024164352416992187, 0.024125152587890625, 0.02440425682067871, 0.02428028869628906, 0.02412828826904297, 
0.024151935577392578, 0.024297183990478515, 0.02408083152770996, 0.024157888412475587, 0.02422198486328125, 0.024396896362304688, 0.02421561622619629, 0.024263391494750975, 0.023978111267089843, 0.023991455078125, 0.024052576065063478, 0.023988224029541014, 0.024025087356567384, 0.02420310401916504, 0.02416655921936035, 0.025311071395874022, 0.024350879669189453, 0.024251583099365235, 0.024156991958618163, 0.024174591064453126, 0.023905824661254883, 0.023906143188476562, 0.02468921661376953, 0.023907520294189452, 0.023903104782104494, 0.023953407287597657, 0.02394438362121582, 0.02385590362548828, 0.023929887771606446, 0.02402102470397949, 0.02414691162109375, 0.02413507270812988, 0.02403731155395508, 0.02418499183654785, 0.024087039947509766, 0.024147968292236328, 0.02404047966003418, 0.024300512313842774, 0.02399148750305176, 0.0244150390625, 0.024584192276000977, 0.02434016036987305, 0.02411961555480957, 0.02411315155029297, 0.024198175430297852, 0.024171104431152345, 0.024135551452636718, 0.024147680282592773, 0.024336864471435547, 0.024201824188232423, 0.023973087310791015, 0.02425276756286621, 0.024272127151489256, 0.02413372802734375, 0.024275423049926758, 0.024422399520874022, 0.024488224029541015, 0.02459401512145996, 0.02465635108947754, 0.024503360748291014, 0.02441046333312988, 0.0244619197845459, 0.024430559158325194, 0.024731807708740235, 0.02433625602722168, 0.024387296676635743, 0.02422403144836426, 0.02425347137451172, 0.024232927322387694, 0.02415763282775879, 0.024125408172607422, 0.02420796775817871, 0.02572492790222168, 0.024547040939331053, 0.024441120147705078, 0.024174591064453126, 0.024219648361206055, 0.024274591445922852]",tokens/s,41.145260653680666,,, 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3167.461376,4609.409024,0.0,4206.886912,4087.771648,s,1,10.9444765625,10.9444765625,0.0,10.9444765625,10.9444765625,10.9444765625,10.9444765625,[10.9444765625],,kWh,0.0001114861773374893,1.2286898563748037e-05,4.299475661800889e-05,0.0001667678325192462,,MB,3214.155776,4783.47264,0.0,4366.270464,4273.050624,s,10,15.768203857421875,1.5768203857421876,0.0011414262729379102,1.576916015625,1.5782976074218749,1.5783107910156249,1.578321337890625,"[1.5758453369140626, 1.576094970703125, 1.575095947265625, 1.5752987060546875, 1.577389404296875, 1.578294677734375, 1.578323974609375, 1.5770294189453125, 1.5768026123046874, 1.57802880859375]",tokens/s,162.35203597999168,kWh,4.633989734207716e-05,5.110901781347052e-06,3.0869663584598416e-05,8.232046270802262e-05,tokens/kWh,3109797.8750191266,MB,3137.429504,4785.569792,0.0,4368.367616,4273.053184,s,10,25.683661621093748,2.568366162109375,0.004679090130973488,2.568155517578125,2.5733216064453126,2.5745589965820312,2.575548908691406,"[2.563158447265625, 2.559123046875, 2.5714609375, 2.566359619140625, 2.573046630859375, 2.57184423828125, 2.57579638671875, 2.568488037109375, 2.567822998046875, 2.566561279296875]",tokens/s,24.529212745996738,kWh,7.480770692501059e-05,8.25173907195384e-06,4.224625601920163e-05,0.00012530570201616604,tokens/kWh,502770.4165599119,,s,630,25.68108683013914,0.040763629889109775,0.00047292185199668724,0.040662319183349616,0.04120796318054199,0.04167188587188721,0.04255219890594483,"[0.042176513671875, 0.04080640029907227, 0.04116646575927734, 0.04079859161376953, 0.04071343994140625, 0.04043254470825195, 0.040556415557861325, 0.04044918441772461, 0.04079087829589844, 0.040309825897216794, 0.04075820922851563, 0.04202278518676758, 0.04056281661987305, 0.040123424530029296, 0.04043174362182617, 0.040339710235595704, 0.04108758544921875, 0.04062412643432617, 0.04053401565551758, 0.0406036491394043, 0.040376319885253906, 0.040321025848388675, 0.04022272109985352, 0.040582302093505856, 0.04110422515869141, 0.04065075302124024, 0.040610977172851566, 0.040792865753173826, 0.04071225738525391, 0.0404664306640625, 0.04083052825927734, 0.04039916610717773, 0.04050281524658203, 0.04067593765258789, 0.044136287689208985, 0.04154780960083008, 0.04096745681762695, 0.040710174560546875, 0.0405711669921875, 0.04051760101318359, 0.04059932708740235, 0.04096284866333008, 0.040570209503173825, 0.040463008880615235, 0.04037836837768555, 0.04022224044799805, 0.04117961502075195, 0.040602657318115236, 0.0406333122253418, 0.04070316696166992, 0.04050153732299805, 0.04050281524658203, 0.04079312133789063, 0.040509407043457034, 0.04022617721557617, 0.04008595275878906, 0.04025932693481445, 0.04015087890625, 0.04029705429077148, 0.04022246551513672, 0.040243457794189454, 0.04026572799682617, 0.040550495147705076, 
0.04175049591064453, 0.04063235092163086, 0.040759361267089844, 0.04035372924804687, 0.040226688385009764, 0.04046470260620117, 0.040360958099365234, 0.04037046432495117, 0.04032566452026367, 0.040308734893798825, 0.04025715255737305, 0.04028793716430664, 0.04066579055786133, 0.040476673126220705, 0.04066825485229492, 0.04058118438720703, 0.04042233657836914, 0.04028575897216797, 0.04053641510009766, 0.04063625717163086, 0.041102752685546876, 0.04095852661132812, 0.04073686218261719, 0.04063059234619141, 0.040742687225341793, 0.04052483367919922, 0.040532958984375, 0.04062617492675781, 0.04058931350708008, 0.04038655853271484, 0.040458240509033204, 0.04037331390380859, 0.04038547134399414, 0.04048076629638672, 0.040564159393310546, 0.04079673767089844, 0.04183830261230469, 0.040931297302246095, 0.04063257598876953, 0.040535552978515625, 0.04054275131225586, 0.04325379180908203, 0.040930686950683595, 0.040566879272460936, 0.0406635856628418, 0.04055654525756836, 0.04075929641723633, 0.040390655517578124, 0.04076544189453125, 0.04040499114990234, 0.04043571090698242, 0.04059910583496094, 0.040486625671386715, 0.04048889541625977, 0.0407529296875, 0.04051804733276367, 0.04023532867431641, 0.04029228973388672, 0.040436065673828125, 0.04025078582763672, 0.04036259078979492, 0.0403430061340332, 0.04064108657836914, 0.04153152084350586, 0.04059872055053711, 0.04130271911621094, 0.04061183929443359, 0.04073833465576172, 0.04081711959838867, 0.04084531021118164, 0.040599552154541016, 0.04036767959594727, 0.04097695922851562, 0.0403590087890625, 0.040586017608642576, 0.04060979080200195, 0.040531967163085936, 0.04052377700805664, 0.04037548828125, 0.04072531127929688, 0.04175667190551758, 0.04265193557739258, 0.041127647399902344, 0.04068377685546875, 0.040723487854003905, 0.04060847854614258, 0.04054425430297852, 0.04057238388061524, 0.04053228759765625, 0.040523998260498045, 0.04070985412597656, 0.04066537475585937, 0.04043571090698242, 0.04048070526123047, 0.04025680160522461, 0.040261856079101564, 0.040609409332275394, 0.04083603286743164, 0.04060979080200195, 0.04087766265869141, 0.040980319976806644, 0.040428096771240235, 0.04044595336914063, 0.04085878372192383, 0.04105712127685547, 0.04155567932128906, 0.041115936279296876, 0.04097228622436523, 0.041009151458740234, 0.04088630294799805, 0.040947681427001954, 0.04086524963378906, 0.040689441680908205, 0.04066287994384766, 0.04056739044189453, 0.04086140823364258, 0.04108758544921875, 0.042156032562255856, 0.040734336853027346, 0.04069414520263672, 0.04068076705932617, 0.04107929611206055, 0.0406734733581543, 0.04102963256835938, 0.040965953826904294, 0.0406366081237793, 0.04180246353149414, 0.040874046325683595, 0.04100089645385742, 0.04063647842407227, 0.04052313613891602, 0.040569599151611326, 0.040906494140625, 0.04050956726074219, 0.04074700927734375, 0.04085299301147461, 0.040761856079101565, 0.040703998565673825, 0.040828929901123044, 0.04142659378051758, 0.0415860481262207, 0.041164833068847655, 0.0411262092590332, 0.041081470489501955, 0.04097539138793945, 0.04105900955200195, 0.04069609451293945, 0.040789726257324216, 0.04098691177368164, 0.040864990234375, 0.040728351593017575, 0.04159385681152344, 0.04258201599121094, 0.040908798217773434, 0.040951103210449216, 0.040669086456298825, 0.04073865509033203, 0.04059257507324219, 0.041309951782226566, 0.04067327880859375, 0.04072243118286133, 0.040648128509521486, 0.04054687881469726, 0.040424545288085936, 0.040447105407714845, 0.04038838577270508, 0.0404048957824707, 
0.04072195053100586, 0.040417854309082034, 0.04029359817504883, 0.0403460807800293, 0.04049542236328125, 0.04037836837768555, 0.04040070343017578, 0.04017990493774414, 0.04020358276367188, 0.040248001098632816, 0.040304641723632816, 0.040559680938720706, 0.0407108154296875, 0.04038265609741211, 0.04050969696044922, 0.04030815887451172, 0.040128734588623045, 0.04035193634033203, 0.04045619201660156, 0.04044800186157226, 0.04058668899536133, 0.040850208282470706, 0.041654590606689454, 0.04074143981933594, 0.04100080108642578, 0.04071968078613281, 0.04032377624511719, 0.04058249664306641, 0.040614017486572264, 0.04078182220458984, 0.04065139389038086, 0.0406195182800293, 0.04095558547973633, 0.04082886505126953, 0.040848159790039064, 0.040908126831054686, 0.04205382537841797, 0.04177667236328125, 0.04085760116577149, 0.04083603286743164, 0.040736766815185545, 0.04082688140869141, 0.041018913269042966, 0.040624351501464845, 0.04070016098022461, 0.040796161651611325, 0.04080230331420898, 0.04067942428588867, 0.04043366241455078, 0.04110540771484375, 0.040573055267333985, 0.04074422454833984, 0.040507999420166016, 0.0408350715637207, 0.04067737579345703, 0.041093120574951174, 0.040898048400878906, 0.04111939239501953, 0.041050975799560546, 0.04122623825073242, 0.04084940719604492, 0.041062400817871096, 0.0408166389465332, 0.041072799682617185, 0.04112086486816406, 0.04102988815307617, 0.040830848693847656, 0.04122483062744141, 0.04031033706665039, 0.04074316787719726, 0.04049913787841797, 0.04064486312866211, 0.04043571090698242, 0.041, 0.040790977478027346, 0.040796161651611325, 0.040458240509033204, 0.04045004653930664, 0.04032921600341797, 0.040344993591308595, 0.04052643203735352, 0.041560222625732425, 0.041256641387939455, 0.040630561828613285, 0.04076121520996094, 0.04165907287597656, 0.040868961334228515, 0.040837471008300784, 0.040693695068359376, 0.0408004150390625, 0.040780254364013675, 0.04088627243041992, 0.04164812850952149, 0.04038614273071289, 0.040386974334716795, 0.040667137145996096, 0.04040630340576172, 0.04054227066040039, 0.04054035186767578, 0.04058569717407227, 0.04067327880859375, 0.041966751098632814, 0.04430227279663086, 0.040591712951660155, 0.0407946891784668, 0.0404106559753418, 0.040425952911376954, 0.04017868804931641, 0.040632766723632814, 0.04064627075195312, 0.04052409744262695, 0.04056361770629883, 0.04211248016357422, 0.040683006286621096, 0.04075785446166992, 0.04101545715332031, 0.04111916732788086, 0.040935199737548826, 0.040510238647460936, 0.04074607849121094, 0.04085238265991211, 0.040635520935058594, 0.04038540649414062, 0.04055984115600586, 0.040702014923095706, 0.04079609680175781, 0.040508190155029294, 0.04047052764892578, 0.04030217742919922, 0.04043180847167969, 0.04053424072265625, 0.0405665283203125, 0.040472831726074215, 0.040834110260009764, 0.040643104553222655, 0.04097206497192383, 0.0407248649597168, 0.04104207992553711, 0.04076758575439453, 0.040892414093017575, 0.04131379318237305, 0.04152166366577149, 0.040796161651611325, 0.04096566390991211, 0.040645248413085935, 0.040693599700927736, 0.04067737579345703, 0.040605087280273434, 0.04168236923217773, 0.04122457504272461, 0.041428543090820315, 0.041329280853271484, 0.04091494369506836, 0.04089651107788086, 0.04099686431884766, 0.040859233856201174, 0.04049462509155274, 0.04053071975708008, 0.04059145736694336, 0.040710113525390626, 0.041189407348632814, 0.04084076690673828, 0.04065840148925781, 0.040702239990234375, 0.04066988754272461, 0.04054956817626953, 0.040549182891845705, 
0.040921089172363284, 0.041350303649902345, 0.04108579254150391, 0.04085715103149414, 0.04326364898681641, 0.04142755126953125, 0.04109257507324219, 0.04115718460083008, 0.040656158447265625, 0.04055859375, 0.04038540649414062, 0.040797214508056644, 0.04041321563720703, 0.04043462371826172, 0.04053811264038086, 0.04047270584106445, 0.04066086578369141, 0.04062822341918945, 0.042479198455810545, 0.04050716781616211, 0.04025836944580078, 0.04037798309326172, 0.04042758560180664, 0.04039449691772461, 0.04034703826904297, 0.04077462387084961, 0.04050102233886719, 0.04074892807006836, 0.04045827102661133, 0.04058531188964844, 0.0403724479675293, 0.04068102264404297, 0.04095177459716797, 0.04204300689697266, 0.040944480895996095, 0.04108083343505859, 0.040984256744384766, 0.0419699821472168, 0.04228710556030273, 0.04063164901733399, 0.04042166519165039, 0.04069004821777344, 0.04063177490234375, 0.04048320007324219, 0.0421580810546875, 0.04099187088012695, 0.04121660614013672, 0.04075753784179687, 0.04077280044555664, 0.040565567016601564, 0.040469631195068356, 0.04043382263183594, 0.0405777587890625, 0.04064390563964844, 0.0408009262084961, 0.04073884963989258, 0.04072623825073242, 0.040737056732177736, 0.04082419204711914, 0.040573089599609376, 0.04044809722900391, 0.04037875366210938, 0.040374271392822264, 0.04042083358764648, 0.040433185577392575, 0.040692127227783204, 0.04074143981933594, 0.04064230346679688, 0.04103939056396484, 0.04089724731445313, 0.04093132781982422, 0.040744991302490235, 0.04075711822509766, 0.04063603210449219, 0.04074131011962891, 0.04066310501098633, 0.041095104217529294, 0.040751167297363285, 0.04065625762939453, 0.041699966430664065, 0.04066326522827148, 0.04056816101074219, 0.040460254669189455, 0.040335838317871096, 0.04041270446777344, 0.04064697647094727, 0.04033113479614258, 0.040309024810791017, 0.040255615234375, 0.040261505126953125, 0.04038246536254883, 0.04082620620727539, 0.04216899108886719, 0.04095180892944336, 0.040822784423828126, 0.04087142562866211, 0.04121023941040039, 0.0404213752746582, 0.0406077766418457, 0.04046150588989258, 0.040538368225097654, 0.04065078353881836, 0.041740577697753904, 0.04221731185913086, 0.0409771842956543, 0.04057059097290039, 0.04083097457885742, 0.04162044906616211, 0.04080230331420898, 0.040441600799560544, 0.04044416046142578, 0.04036608123779297, 0.04028211212158203, 0.04036588668823242, 0.041320640563964846, 0.04220678329467773, 0.0406550407409668, 0.04041251373291015, 0.04023286437988281, 0.04036259078979492, 0.04085171127319336, 0.040736927032470706, 0.040449790954589844, 0.04051948928833008, 0.04039228820800781, 0.04063865661621094, 0.040925857543945315, 0.041121761322021486, 0.04098160171508789, 0.040763614654541015, 0.04057571029663086, 0.04046438217163086, 0.04069478225708008, 0.04084428787231445, 0.04079814529418945, 0.0403702392578125, 0.04171718215942383, 0.04044380950927735, 0.040311614990234376, 0.0405574722290039, 0.04055878448486328, 0.040683486938476565, 0.04070684814453125, 0.04060160064697266, 0.040820735931396485, 0.041488384246826174, 0.040443904876708986, 0.04026572799682617, 0.04094060897827148, 0.04057167816162109, 0.04040310287475586, 0.04107843017578125, 0.04183049774169922, 0.040455425262451175, 0.040750080108642575, 0.040425472259521485, 0.04063641738891602, 0.04034969711303711, 0.04067859268188476, 0.04086457443237305, 0.04079363250732422, 0.04090723037719726, 0.04310015869140625, 0.04116915130615234, 0.04073651123046875, 0.04050534439086914, 0.04056988906860352, 0.040482944488525394, 
0.040463199615478514, 0.04061788940429688, 0.042187297821044925, 0.0413515510559082, 0.04120771026611328, 0.041290592193603516, 0.0410522232055664, 0.04107657623291015, 0.04097139358520508, 0.040920223236083984, 0.04067628860473633, 0.04074848175048828, 0.040551807403564455, 0.040513694763183596, 0.0404502067565918, 0.04044780731201172, 0.040575809478759765, 0.040836158752441405, 0.04078278350830078, 0.04071964645385742, 0.04161347198486328, 0.040929855346679686, 0.04076655960083008, 0.04065779113769531, 0.04050163269042969, 0.040810142517089844, 0.04061088180541992, 0.04110969543457031, 0.04099955368041992, 0.04070383834838867, 0.04057526397705078, 0.040474624633789064, 0.040593025207519534, 0.04052825546264648, 0.04056444931030274, 0.04039913558959961, 0.04085907363891601, 0.04055712127685547, 0.04055244827270508, 0.040286209106445314, 0.04026976013183594, 0.040277183532714846, 0.0404161262512207, 0.04074291229248047, 0.04128153610229492, 0.040597503662109374, 0.04065280151367188, 0.04064767837524414, 0.040667552947998044, 0.04063907241821289, 0.040771583557128906, 0.04056396865844727, 0.04068822479248047, 0.041124000549316406, 0.04113843154907226, 0.0408853759765625, 0.04074355316162109, 0.04054643249511719, 0.04054127883911133, 0.0404918098449707, 0.040387840270996095, 0.040661758422851565, 0.04031283187866211, 0.04052787017822266, 0.040279201507568356]",tokens/s,24.53167204982291,,, 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3153.186816,4376.625152,0.0,3990.880256,3908.719616,s,1,10.2372119140625,10.2372119140625,0.0,10.2372119140625,10.2372119140625,10.2372119140625,10.2372119140625,[10.2372119140625],,kWh,9.091607653334299e-05,1.0019087014122748e-05,2.964141260197728e-05,0.00013057657614944302,,MB,3065.99936,4743.626752,0.0,4328.521728,4275.213312,s,10,2.243329360961914,0.22433293609619137,0.0014378268360940765,0.22415650939941406,0.22603587341308592,0.22639389038085939,0.22668030395507813,"[0.22443055725097658, 0.22492767333984376, 0.22357107543945312, 0.22388246154785157, 0.22556822204589844, 0.22675190734863282, 0.22161036682128907, 0.22595631408691405, 0.22343983459472655, 0.22319094848632812]",tokens/s,1141.1610103040339,kWh,6.852605858622445e-06,7.553867416442688e-07,4.5524648822787114e-06,1.2160457482545424e-05,tokens/kWh,21051839.56832635,MB,3029.95456,4747.821056,0.0,4330.61888,4275.215872,s,10,35.172543701171875,3.5172543701171874,0.005971763050511207,3.5173519287109376,3.5233646484374996,3.5259141357421875,3.5279537255859372,"[3.52194287109375, 3.5156611328125, 3.518225830078125, 3.51095361328125, 3.528463623046875, 3.51647802734375, 3.522798095703125, 3.51863330078125, 3.512713623046875, 3.506673583984375]",tokens/s,17.91169854965621,kWh,0.0001022644499455452,1.12803212669906e-05,5.172891347611837e-05,0.00016527368468865418,tokens/kWh,381185.9106226175,,s,630,35.16990844345094,0.05582525149754115,0.0008110078055314288,0.055678640365600586,0.05642263603210449,0.05687311611175537,0.059193163871765146,"[0.05974390411376953, 0.05723910522460938, 0.056100959777832034, 0.05578601455688476, 0.0553535041809082, 0.05552230453491211, 0.05552025604248047, 0.05645312118530273, 0.05604070281982422, 0.05594188690185547, 0.0564936637878418, 0.056326560974121094, 0.05630156707763672, 0.05646950531005859, 0.056118526458740235, 0.05621968078613281, 0.0561732177734375, 0.057003231048583985, 0.05581296157836914, 0.05604902267456055, 0.05651929473876953, 0.05562355041503906, 0.05551935958862304, 0.05545574569702148, 0.055365280151367186, 0.05582428741455078, 0.055887489318847655, 0.05569161605834961, 0.055548385620117186, 0.05658316802978516, 0.05589299011230469, 0.05590835189819336, 0.0553779182434082, 0.05571692657470703, 0.05532563018798828, 0.05569660949707031, 0.05526156616210937, 0.05521372985839844, 0.059052799224853514, 0.055568321228027344, 0.05551875305175781, 0.055271968841552735, 0.0555233268737793, 0.056554622650146484, 0.05590719985961914, 0.055408512115478516, 0.05532070541381836, 0.05540848159790039, 0.05560755157470703, 0.05584822463989258, 0.055593246459960936, 0.055892318725585935, 0.05602243041992187, 0.056047870635986326, 0.05533116912841797, 0.0555417594909668, 0.05554585647583008, 0.05546323013305664, 0.05547065734863281, 0.05528179168701172, 0.05515801620483399, 
0.055180030822753905, 0.0550748176574707, 0.05631318283081055, 0.05557030487060547, 0.05567977523803711, 0.056766464233398435, 0.05568921661376953, 0.05550080108642578, 0.055235710144042965, 0.05592950439453125, 0.055523551940917966, 0.0554466552734375, 0.0555549430847168, 0.05561494445800781, 0.05579763031005859, 0.05766796875, 0.055892257690429686, 0.055921920776367186, 0.05576736068725586, 0.05572848129272461, 0.05575484848022461, 0.05602703857421875, 0.056460735321044925, 0.05665859222412109, 0.05561484909057617, 0.05554035186767578, 0.05525708770751953, 0.06193910217285156, 0.05569760131835937, 0.0557957763671875, 0.05555849456787109, 0.055534912109375, 0.055331295013427734, 0.0557344970703125, 0.05594521713256836, 0.05614745712280273, 0.056680862426757815, 0.05540047836303711, 0.05540665435791016, 0.055435104370117186, 0.05527977752685547, 0.055578624725341794, 0.05567238235473633, 0.05519353485107422, 0.05515724945068359, 0.05535696029663086, 0.05538768005371094, 0.055622592926025394, 0.055711742401123046, 0.05643468856811523, 0.05527142333984375, 0.0555601921081543, 0.055349246978759765, 0.05595750427246094, 0.056627201080322265, 0.05572608184814453, 0.055605247497558595, 0.055604671478271486, 0.05530476760864258, 0.05579507064819336, 0.05520857620239258, 0.05527724838256836, 0.054923583984375, 0.05503372955322266, 0.05524492645263672, 0.0561115837097168, 0.05537567901611328, 0.05585919952392578, 0.05637526321411133, 0.05528979110717774, 0.055392353057861325, 0.055103488922119144, 0.05526323318481445, 0.0554598388671875, 0.055715328216552736, 0.05518147277832031, 0.055144798278808596, 0.055211391448974606, 0.055460479736328124, 0.055638015747070314, 0.05497651290893555, 0.0551383056640625, 0.05492326354980469, 0.05619001770019531, 0.05526214218139648, 0.05548358535766602, 0.05642505645751953, 0.055965694427490234, 0.05592700958251953, 0.05570355224609375, 0.05528780746459961, 0.055433216094970705, 0.05527926254272461, 0.05554006576538086, 0.058332191467285154, 0.05676950454711914, 0.055889984130859376, 0.055353279113769534, 0.0552039680480957, 0.05490470504760742, 0.05500912094116211, 0.055066783905029296, 0.0550563850402832, 0.055715423583984375, 0.055650718688964845, 0.0561231689453125, 0.05574604797363281, 0.055530208587646485, 0.055554046630859374, 0.055633632659912106, 0.05603971099853516, 0.06501974487304688, 0.05647148895263672, 0.05574883270263672, 0.055785472869873044, 0.05763283157348633, 0.055642047882080076, 0.05548614501953125, 0.0557591667175293, 0.05539334487915039, 0.05602515029907226, 0.056124000549316405, 0.05602537536621094, 0.05599187088012696, 0.056027584075927735, 0.055504894256591795, 0.05586329650878906, 0.055771137237548826, 0.05636508941650391, 0.05591036987304687, 0.0553779182434082, 0.055547904968261716, 0.05536147308349609, 0.05528911972045898, 0.05567750549316406, 0.056864192962646484, 0.0559152946472168, 0.05617420959472656, 0.05556876754760742, 0.05536972808837891, 0.05517830276489258, 0.055779808044433596, 0.05597555160522461, 0.05551190567016601, 0.058054656982421876, 0.055752704620361325, 0.05568921661376953, 0.056033279418945314, 0.05539788818359375, 0.05575433731079102, 0.05515491104125977, 0.05571267318725586, 0.055004447937011716, 0.05546444702148438, 0.05513824081420898, 0.055473217010498045, 0.05508403015136719, 0.055861248016357425, 0.05711667251586914, 0.0558919677734375, 0.055316478729248046, 0.05535129547119141, 0.05528166580200195, 0.05543436813354492, 0.055312286376953124, 0.05583126449584961, 0.056188961029052735, 0.0554109115600586, 
0.05503964614868164, 0.055654048919677734, 0.05536163330078125, 0.05670703887939453, 0.05585110473632812, 0.05593705749511719, 0.05553401565551758, 0.055879135131835934, 0.05563043212890625, 0.056188926696777344, 0.05553676986694336, 0.05564284896850586, 0.05550505447387695, 0.05568102264404297, 0.055166976928710934, 0.055578624725341794, 0.055019519805908204, 0.05524208068847656, 0.05773081588745117, 0.057035552978515626, 0.05522854232788086, 0.05580799865722656, 0.055160831451416016, 0.05718243026733399, 0.05609408187866211, 0.05538028717041016, 0.05676153564453125, 0.06407660675048828, 0.05575196838378906, 0.05573091125488281, 0.05577891159057617, 0.055572128295898436, 0.05563606262207031, 0.05572880172729492, 0.0558837776184082, 0.05575455856323242, 0.05598432159423828, 0.0554486083984375, 0.05574860763549805, 0.05620835113525391, 0.05592473602294922, 0.05608652877807617, 0.05590016174316406, 0.05593619155883789, 0.05615903854370117, 0.055758785247802735, 0.056041473388671874, 0.056291393280029293, 0.055992321014404295, 0.055680416107177735, 0.05565091323852539, 0.05579776000976563, 0.05599785614013672, 0.05606256103515625, 0.05728870391845703, 0.056153759002685544, 0.056082302093505856, 0.05553609466552734, 0.05595734405517578, 0.0566130256652832, 0.055651905059814454, 0.0554398078918457, 0.055744510650634765, 0.05535308837890625, 0.05522457504272461, 0.055818241119384764, 0.05560115051269531, 0.05547999954223633, 0.056021312713623046, 0.05542502212524414, 0.05599558258056641, 0.05612393569946289, 0.05585295867919922, 0.0556097297668457, 0.055920513153076175, 0.055922401428222655, 0.0562239990234375, 0.05637340927124023, 0.0557191047668457, 0.055515968322753906, 0.05547622299194336, 0.055342273712158205, 0.05566479873657226, 0.05589241409301758, 0.05561980819702148, 0.05555382537841797, 0.05715779113769531, 0.05543094253540039, 0.05531760025024414, 0.055157726287841796, 0.05548166275024414, 0.05580646514892578, 0.05575084686279297, 0.05642790222167969, 0.055465599060058594, 0.05534003067016602, 0.05543219375610352, 0.05647872161865235, 0.0553779182434082, 0.05560438537597656, 0.05504460906982422, 0.05542947387695313, 0.056544929504394534, 0.05569161605834961, 0.05522431945800781, 0.055556095123291016, 0.05560729598999024, 0.05579705429077148, 0.05548716735839844, 0.05688041687011719, 0.05719424057006836, 0.056213504791259764, 0.055462879180908205, 0.05535481643676758, 0.05534899139404297, 0.055642433166503906, 0.056508350372314456, 0.05520646286010742, 0.05496937561035156, 0.05563081741333008, 0.05530774307250977, 0.056422367095947265, 0.05536774444580078, 0.05557635116577148, 0.05543190383911133, 0.055736320495605465, 0.05536467361450195, 0.05591888046264649, 0.05577590560913086, 0.05627699279785156, 0.055904094696044924, 0.056244384765625, 0.055861248016357425, 0.05585715103149414, 0.056182785034179686, 0.0555417594909668, 0.05538816070556641, 0.055803905487060546, 0.05688115310668945, 0.056142913818359376, 0.05560006332397461, 0.055630878448486326, 0.05546601486206055, 0.05563078308105469, 0.055572479248046876, 0.055951358795166016, 0.05586102294921875, 0.058656993865966796, 0.055779327392578126, 0.05623849487304688, 0.05551254272460938, 0.055638847351074217, 0.05591244888305664, 0.05581619262695312, 0.05574185562133789, 0.05542972946166992, 0.055543296813964846, 0.05559552001953125, 0.05612953567504883, 0.05561740875244141, 0.05558806228637695, 0.05556684875488281, 0.05573878479003906, 0.05554739379882812, 0.055529983520507815, 0.055771137237548826, 0.055711742401123046, 
0.0554598388671875, 0.055777278900146485, 0.055844417572021486, 0.05688723373413086, 0.05572832107543945, 0.055778942108154296, 0.055538368225097653, 0.05549363327026367, 0.055298622131347654, 0.05559270477294922, 0.05537247848510742, 0.055379776000976565, 0.05586761474609375, 0.055785633087158205, 0.05925049591064453, 0.05589606475830078, 0.05566019058227539, 0.05583292770385742, 0.05946726226806641, 0.05616806411743164, 0.05556927871704102, 0.05564825439453125, 0.0554700813293457, 0.05551017761230469, 0.05559190368652344, 0.05551705551147461, 0.05575680160522461, 0.056188926696777344, 0.056225151062011716, 0.05612607955932617, 0.05536054229736328, 0.05931644821166992, 0.05560726547241211, 0.05578627014160156, 0.05579500961303711, 0.056051841735839845, 0.05584953689575195, 0.055662593841552734, 0.056963329315185544, 0.056280830383300784, 0.05550236892700195, 0.05569148635864258, 0.05559936141967774, 0.0554126091003418, 0.05534262466430664, 0.056176513671875, 0.05607779312133789, 0.05562534332275391, 0.05570032119750976, 0.05596364974975586, 0.05548175811767578, 0.05551574325561524, 0.056248062133789065, 0.05600076675415039, 0.056643585205078124, 0.05585724639892578, 0.05575804901123047, 0.05573497772216797, 0.05574041748046875, 0.05620940780639649, 0.05589606475830078, 0.055478046417236325, 0.05534764862060547, 0.05552851104736328, 0.05570633697509766, 0.055820289611816405, 0.056215553283691405, 0.05607833480834961, 0.056002304077148436, 0.05539865493774414, 0.055787521362304686, 0.05629504013061523, 0.055604801177978516, 0.05555078506469727, 0.0555948486328125, 0.05589622497558594, 0.0557886734008789, 0.05559132766723633, 0.05565897750854492, 0.055638015747070314, 0.05556335830688477, 0.05562051010131836, 0.055861248016357425, 0.05583030319213867, 0.05582460784912109, 0.05590835189819336, 0.056018943786621096, 0.05566844940185547, 0.05586675262451172, 0.05622982406616211, 0.0563803825378418, 0.05620326232910156, 0.05583257675170898, 0.05586329650878906, 0.05597798538208008, 0.05608758544921875, 0.0559747200012207, 0.05660217666625977, 0.05574105453491211, 0.05543731307983398, 0.05563593673706055, 0.05517216110229492, 0.056654048919677735, 0.05574051284790039, 0.05569804763793945, 0.055347198486328124, 0.0566927375793457, 0.055332862854003906, 0.056516990661621094, 0.05698729705810547, 0.05658863830566406, 0.055940990447998044, 0.05579174423217773, 0.05576816177368164, 0.05565727996826172, 0.0553902702331543, 0.05617222213745117, 0.05626035308837891, 0.05548502349853516, 0.055887840270996095, 0.05569260787963867, 0.05541142272949219, 0.055392257690429686, 0.055742305755615236, 0.05524694442749024, 0.05536979293823242, 0.05532057571411133, 0.055811393737792966, 0.0555035514831543, 0.055613441467285155, 0.05550899124145508, 0.05541888046264649, 0.05545068740844727, 0.055357471466064456, 0.055349151611328126, 0.05602201461791992, 0.05569692611694336, 0.056198814392089846, 0.057024959564208985, 0.05637548828125, 0.055916225433349606, 0.05575337600708008, 0.05575164794921875, 0.05584371185302734, 0.057055233001708984, 0.056043521881103515, 0.05563372802734375, 0.05845743942260742, 0.0557658576965332, 0.05543423843383789, 0.055036289215087894, 0.0551893424987793, 0.0551954231262207, 0.05584000015258789, 0.055748321533203124, 0.05551721572875976, 0.055482368469238284, 0.055330814361572264, 0.055382049560546875, 0.05530131149291992, 0.05543196868896484, 0.05546799850463867, 0.05578550338745117, 0.055909568786621094, 0.0556060791015625, 0.05559820938110351, 0.055162849426269533, 
0.05616880035400391, 0.055212608337402345, 0.05528371047973633, 0.05520582580566406, 0.05654560089111328, 0.0557916145324707, 0.05528985595703125, 0.05560678482055664, 0.055535648345947264, 0.05587712097167969, 0.055587200164794924, 0.05551808166503906, 0.055702529907226565, 0.05552816009521484, 0.055376094818115236, 0.055233631134033206, 0.05540476989746094, 0.05518588638305664, 0.057097888946533205, 0.056097057342529295, 0.05594464111328125, 0.05584960174560547, 0.05552323150634766, 0.056041118621826175, 0.05527548980712891, 0.055574462890625, 0.055207584381103514, 0.05672192001342773, 0.05585753631591797, 0.05540227127075195, 0.05495011138916016, 0.0552119026184082, 0.05861001586914062, 0.055384990692138675, 0.055384254455566405, 0.0557674560546875, 0.0553721923828125, 0.05600227355957031, 0.05615990447998047, 0.05577676773071289, 0.05543833541870117, 0.05535948944091797, 0.0550830078125, 0.05641747283935547, 0.05527203369140625, 0.05583039855957031, 0.05569980621337891, 0.055414783477783204, 0.055434623718261716, 0.05592947387695312, 0.055443073272705076, 0.05538035202026367, 0.05543280029296875, 0.055433406829833984, 0.05545743942260742, 0.056024993896484375, 0.055601825714111326, 0.05607833480834961, 0.055397953033447266, 0.05560332870483398, 0.05510995101928711, 0.05555564880371094, 0.05517356872558594, 0.05562572860717773, 0.05529395294189453, 0.05530624008178711, 0.05522758483886719]",tokens/s,17.91304066125068,,, 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1873.03936,2868.772864,0.0,2466.250752,2401.696256,s,1,9.37133203125,9.37133203125,0.0,9.37133203125,9.37133203125,9.37133203125,9.37133203125,[9.37133203125],,kWh,6.34631668125318e-05,6.980839058607445e-06,2.4033352560015597e-05,9.447735843115484e-05,,MB,1884.995584,3097.362432,0.0,2680.160256,2582.175744,s,10,6.0324136352539055,0.6032413635253906,0.0025424675358530755,0.6022294311523437,0.6050836730957031,0.6077975433349609,0.6099686395263673,"[0.6105114135742188, 0.6015757446289063, 0.602468994140625, 0.602021728515625, 0.6029407958984375, 0.6021866455078125, 0.6018070068359375, 0.6021484985351563, 0.6044805908203125, 0.602272216796875]",tokens/s,424.3740822146472,kWh,1.7987646747548626e-05,1.98331557225515e-06,1.1969388660473649e-05,3.194035098027742e-05,tokens/kWh,8014940.103760139,MB,1884.995584,3097.362432,0.0,2680.160256,2582.178304,s,10,13.362197631835937,1.336219763183594,0.0040090560809475095,1.3367739868164064,1.3406837768554687,1.3415860290527344,1.342307830810547,"[1.34248828125, 1.3312923583984375, 1.3389248046875, 1.3381136474609374, 1.33025830078125, 1.3404832763671874, 1.33899365234375, 1.3315537109375, 1.335434326171875, 
1.3346552734375]",tokens/s,47.14793309889395,kWh,3.8801049135363235e-05,4.28017697185035e-06,2.1174304521118395e-05,6.4255530628332e-05,tokens/kWh,980460.3492329046,,s,630,13.360147790908796,0.021206583795093353,0.00034009585062952813,0.021110112190246583,0.02147331523895264,0.021769799709320067,0.022389043521881105,"[0.022312896728515625, 0.021581216812133788, 0.02132815933227539, 0.021409727096557616, 0.021386720657348632, 0.021258560180664063, 0.02120515251159668, 0.021057344436645507, 0.021176992416381837, 0.02110054397583008, 0.021841920852661133, 0.021174079895019533, 0.021098688125610353, 0.021046815872192384, 0.021148128509521483, 0.02107187271118164, 0.021040224075317384, 0.021070751190185547, 0.021216320037841796, 0.021064512252807616, 0.021051136016845703, 0.021062015533447265, 0.02107360076904297, 0.02131385612487793, 0.021387264251708983, 0.021651456832885742, 0.022678688049316408, 0.021480352401733398, 0.021303232192993165, 0.021223424911499023, 0.021135040283203125, 0.021234207153320313, 0.021382720947265624, 0.021131328582763672, 0.021319488525390624, 0.021336416244506835, 0.021290655136108397, 0.021315391540527345, 0.02113180732727051, 0.021223583221435548, 0.021128192901611328, 0.021285343170166015, 0.021208799362182618, 0.021387456893920898, 0.02176425552368164, 0.021739904403686523, 0.02239481544494629, 0.021343551635742187, 0.021164735794067382, 0.02109814453125, 0.021340511322021485, 0.021275936126708986, 0.02119468879699707, 0.021367328643798828, 0.021180671691894533, 0.02123980712890625, 0.02107596778869629, 0.020979711532592774, 0.02124185562133789, 0.021054752349853517, 0.021166656494140627, 0.021266592025756835, 0.02106268882751465, 0.022281951904296875, 0.02141619110107422, 0.02120124816894531, 0.02109619140625, 0.021052000045776367, 0.020932640075683594, 0.021066879272460936, 0.021029727935791016, 0.020959232330322267, 0.020934656143188478, 0.02089571189880371, 0.020880735397338868, 0.02101318359375, 0.02105548858642578, 0.02103638458251953, 0.021166751861572266, 0.020979711532592774, 0.02135171127319336, 0.021030656814575194, 0.020982080459594727, 0.021086624145507812, 0.021006271362304686, 0.021102752685546875, 0.02097372817993164, 0.021059423446655273, 0.021053600311279296, 0.021194751739501954, 0.02117977523803711, 0.021936767578125, 0.021719039916992186, 0.021452768325805664, 0.02116543960571289, 0.021316255569458008, 0.021175392150878908, 0.021109376907348633, 0.021072160720825194, 0.02109235191345215, 0.020975616455078124, 0.02097871971130371, 0.02092451286315918, 0.020992895126342774, 0.02091526412963867, 0.020904895782470703, 0.020916223526000977, 0.020953088760375976, 0.020877248764038087, 0.02095724868774414, 0.02088960075378418, 0.02103500747680664, 0.02102681541442871, 0.021078016281127928, 0.021383167266845703, 0.022007808685302735, 0.021345727920532225, 0.021352703094482423, 0.021160255432128905, 0.021017791748046875, 0.02101753616333008, 0.021182207107543944, 0.021035072326660156, 0.021048639297485353, 0.021035776138305665, 0.021019935607910156, 0.022316959381103514, 0.021506048202514647, 0.021203104019165038, 0.021316320419311523, 0.021180416107177736, 0.021149663925170897, 0.021234975814819337, 0.02131350326538086, 0.0212324161529541, 0.021079200744628906, 0.021051839828491212, 0.021070240020751953, 0.021151744842529296, 0.021167552947998047, 0.02126313591003418, 0.02109417533874512, 0.02108415985107422, 0.021048736572265626, 0.02108201599121094, 0.02116640090942383, 0.02139788818359375, 0.021073471069335936, 0.02118604850769043, 
0.021040063858032226, 0.021098495483398438, 0.021116287231445312, 0.02115190315246582, 0.021072351455688476, 0.020993824005126952, 0.021362815856933594, 0.02119798469543457, 0.0213636474609375, 0.02129715156555176, 0.020967424392700194, 0.021045248031616212, 0.020943136215209962, 0.021091264724731447, 0.02107676887512207, 0.021095903396606445, 0.021124799728393553, 0.021129663467407227, 0.021027231216430665, 0.02102681541442871, 0.0210513916015625, 0.021069343566894532, 0.02101910400390625, 0.02105241584777832, 0.020935680389404295, 0.020995296478271485, 0.02105945587158203, 0.021072799682617188, 0.022443296432495118, 0.022253791809082032, 0.02124236869812012, 0.021129215240478515, 0.021581663131713866, 0.02136899185180664, 0.024326271057128906, 0.021276544570922852, 0.02108415985107422, 0.021106687545776368, 0.021000192642211913, 0.0210513916015625, 0.022286336898803712, 0.02125379180908203, 0.02103740882873535, 0.021087871551513673, 0.021055999755859374, 0.02095897674560547, 0.020975744247436524, 0.020905727386474608, 0.020908287048339844, 0.020906208038330078, 0.020864608764648438, 0.02094918441772461, 0.020926464080810548, 0.021198432922363283, 0.02112553596496582, 0.02104310417175293, 0.021042335510253907, 0.02098387145996094, 0.021080543518066406, 0.021029279708862304, 0.020958879470825195, 0.020926719665527345, 0.021008031845092773, 0.021268672943115234, 0.02085321617126465, 0.02104707145690918, 0.021051040649414064, 0.0212258243560791, 0.02129100799560547, 0.02123593521118164, 0.023796768188476564, 0.02235260772705078, 0.021553152084350585, 0.021483552932739257, 0.021548095703125, 0.02143552017211914, 0.02161359977722168, 0.021615392684936525, 0.021443775177001953, 0.02197318458557129, 0.02141279983520508, 0.02115782356262207, 0.021319391250610352, 0.021352352142333983, 0.02117955207824707, 0.021093536376953125, 0.02103388786315918, 0.021109664916992187, 0.02167788887023926, 0.02152262306213379, 0.02105548858642578, 0.021061279296875, 0.021152095794677736, 0.021125343322753905, 0.021151424407958985, 0.021041248321533205, 0.021118976593017577, 0.02094220733642578, 0.020855424880981445, 0.02089779281616211, 0.021170175552368165, 0.021180128097534178, 0.021000480651855467, 0.02223753547668457, 0.02131350326538086, 0.021204383850097656, 0.021148319244384765, 0.020997535705566405, 0.021015264511108397, 0.020985151290893556, 0.020994752883911134, 0.021078016281127928, 0.0209583683013916, 0.0209574089050293, 0.020892223358154296, 0.02102684783935547, 0.02093212890625, 0.02085923194885254, 0.021004447937011717, 0.021037055969238282, 0.021403648376464843, 0.021344255447387696, 0.02119603157043457, 0.02104806327819824, 0.020956319808959963, 0.0210765438079834, 0.021075904846191405, 0.021050783157348634, 0.021029855728149412, 0.02114352035522461, 0.021081504821777345, 0.021116512298583984, 0.021089183807373048, 0.02111289596557617, 0.021141536712646486, 0.02103910446166992, 0.021050559997558595, 0.021138208389282227, 0.02106937599182129, 0.02120751953125, 0.02116022491455078, 0.021142688751220703, 0.02108473587036133, 0.022366207122802736, 0.021018495559692384, 0.02098374366760254, 0.021158079147338867, 0.021171775817871094, 0.02107366371154785, 0.021058080673217773, 0.020989471435546875, 0.021097087860107423, 0.02102272033691406, 0.020977664947509765, 0.021050432205200195, 0.020980255126953125, 0.020996511459350584, 0.02106982421875, 0.021149152755737304, 0.020980255126953125, 0.021028863906860353, 0.021131263732910157, 0.0210882568359375, 0.021170175552368165, 0.02107187271118164, 
0.02102022361755371, 0.022360031127929687, 0.0214736328125, 0.021277984619140624, 0.02121801567077637, 0.021176223754882813, 0.021033056259155275, 0.02106368064880371, 0.02109382438659668, 0.02106425666809082, 0.021219327926635743, 0.021211135864257814, 0.02201318359375, 0.02128358459472656, 0.023295455932617188, 0.02128540802001953, 0.021077056884765626, 0.02110150337219238, 0.021202943801879884, 0.02123366355895996, 0.021040672302246093, 0.02099420738220215, 0.021090303421020508, 0.021102912902832033, 0.02109174346923828, 0.02126825523376465, 0.021152576446533202, 0.021059200286865233, 0.021085695266723634, 0.02109734344482422, 0.02116592025756836, 0.021137567520141603, 0.021014368057250977, 0.021008544921875, 0.021016576766967773, 0.020996095657348633, 0.021104192733764647, 0.020980159759521486, 0.021038944244384766, 0.021008127212524413, 0.021041568756103517, 0.021188608169555666, 0.02172492790222168, 0.021143808364868164, 0.02110851287841797, 0.02104547119140625, 0.021047168731689454, 0.021008512496948243, 0.021099872589111328, 0.02109507179260254, 0.021192703247070312, 0.021247392654418946, 0.021121631622314452, 0.021129215240478515, 0.02112291145324707, 0.02118467140197754, 0.021182464599609374, 0.021072895050048827, 0.021273408889770508, 0.021835968017578124, 0.021677280426025392, 0.02159244728088379, 0.023721920013427735, 0.02127305603027344, 0.02224550437927246, 0.021430368423461913, 0.021121183395385743, 0.02125555229187012, 0.021203296661376953, 0.021287200927734375, 0.021077951431274413, 0.02094905662536621, 0.021028863906860353, 0.021003583908081054, 0.02104390335083008, 0.02126848030090332, 0.021183807373046874, 0.0210578556060791, 0.02101696014404297, 0.021057504653930664, 0.02113539123535156, 0.02100223922729492, 0.02105958366394043, 0.021143135070800782, 0.0210599365234375, 0.021125152587890626, 0.021104671478271483, 0.021104448318481444, 0.02131577682495117, 0.021343263626098632, 0.021171167373657228, 0.021232831954956056, 0.021324607849121095, 0.0213637752532959, 0.021332223892211913, 0.021338687896728516, 0.02139289665222168, 0.021169855117797853, 0.021046207427978515, 0.021102592468261717, 0.021188447952270508, 0.02106284713745117, 0.02119900894165039, 0.021539648056030272, 0.02133932876586914, 0.02137785530090332, 0.021800960540771484, 0.02158742332458496, 0.0220861759185791, 0.021403648376464843, 0.021424127578735352, 0.021382240295410155, 0.021320608139038084, 0.021235712051391603, 0.02129702377319336, 0.021220703125, 0.021402399063110353, 0.021245664596557617, 0.021358879089355468, 0.0212171516418457, 0.021103904724121093, 0.02118947219848633, 0.02125619125366211, 0.02102681541442871, 0.021321535110473633, 0.021038496017456054, 0.021074655532836915, 0.022358015060424806, 0.021359840393066407, 0.02117737579345703, 0.02115558433532715, 0.021120288848876952, 0.021072608947753906, 0.020962495803833008, 0.021037887573242188, 0.021186304092407227, 0.021024351119995118, 0.021100576400756837, 0.021029504776000976, 0.021026432037353517, 0.021055391311645508, 0.02100681686401367, 0.020938751220703124, 0.021209087371826172, 0.02107596778869629, 0.021045248031616212, 0.02108415985107422, 0.020991935729980468, 0.021002111434936525, 0.021049375534057616, 0.02097372817993164, 0.02108582305908203, 0.02098214340209961, 0.0210513916015625, 0.02107961654663086, 0.02115580749511719, 0.020982336044311524, 0.021063583374023438, 0.021020671844482423, 0.020978879928588868, 0.020877344131469727, 0.02183241653442383, 0.02105936050415039, 0.021326112747192382, 0.021381120681762695, 
0.021114879608154297, 0.021182464599609374, 0.021130912780761717, 0.021019264221191405, 0.021212352752685546, 0.02113385581970215, 0.021044416427612303, 0.02158598327636719, 0.02122368049621582, 0.02237491226196289, 0.02104275131225586, 0.020902336120605467, 0.02097148895263672, 0.02107375907897949, 0.021018304824829102, 0.021102592468261717, 0.021067968368530275, 0.02110905647277832, 0.021107839584350585, 0.020982112884521485, 0.021024608612060548, 0.02098246383666992, 0.020975616455078124, 0.02106982421875, 0.0209769287109375, 0.02230928039550781, 0.021577728271484374, 0.021594112396240234, 0.021506048202514647, 0.021205055236816407, 0.021333824157714842, 0.02122755241394043, 0.021350496292114256, 0.021215295791625975, 0.021222816467285157, 0.021557024002075195, 0.021287200927734375, 0.021322496414184572, 0.021270240783691406, 0.02123161506652832, 0.021124319076538087, 0.02117865562438965, 0.021192832946777342, 0.02117056083679199, 0.021163583755493164, 0.021141952514648437, 0.021149696350097655, 0.021087520599365233, 0.021027551651000977, 0.021078016281127928, 0.02102672004699707, 0.021133056640625, 0.02106812858581543, 0.02105548858642578, 0.021078016281127928, 0.021081760406494142, 0.021217184066772463, 0.021108320236206055, 0.02118332862854004, 0.021188608169555666, 0.02103910446166992, 0.02101759910583496, 0.020993024826049804, 0.021055456161499023, 0.021143423080444337, 0.021214527130126955, 0.021258304595947266, 0.02121603202819824, 0.02124799919128418, 0.021216991424560547, 0.02113564872741699, 0.02108006477355957, 0.020942848205566408, 0.021585920333862304, 0.021135360717773437, 0.02147327995300293, 0.021131263732910157, 0.021110559463500978, 0.02101475143432617, 0.02110380744934082, 0.02118124771118164, 0.02096329689025879, 0.020938783645629882, 0.02112054443359375, 0.021125600814819335, 0.02103856086730957, 0.021033792495727538, 0.021044160842895506, 0.02224742317199707, 0.021519615173339845, 0.02151910400390625, 0.021315584182739256, 0.02128486442565918, 0.02127257537841797, 0.021321727752685548, 0.021376096725463867, 0.021320608139038084, 0.021141504287719725, 0.020954111099243163, 0.020935680389404295, 0.020966720581054688, 0.02101318359375, 0.020989952087402345, 0.021090560913085938, 0.021034208297729493, 0.02113180732727051, 0.021218368530273438, 0.021171039581298828, 0.021079776763916015, 0.021074304580688475, 0.02124595260620117, 0.02104092788696289, 0.021026304244995117, 0.021027551651000977, 0.021091487884521483, 0.021103008270263672, 0.021018304824829102, 0.02097433662414551, 0.020963327407836914, 0.021012479782104493, 0.021008384704589843, 0.020981599807739258, 0.021008544921875, 0.021121023178100586, 0.020975423812866212, 0.020934431076049805, 0.021028640747070313, 0.021889503479003907, 0.020979583740234373, 0.021022783279418946, 0.021130624771118163, 0.021092607498168946, 0.021055904388427735, 0.02108624076843262, 0.021024927139282227, 0.021034368515014647, 0.021224063873291017, 0.02133171272277832, 0.021299455642700197, 0.021774335861206053, 0.021511552810668945, 0.021736064910888673, 0.021472415924072265, 0.021308000564575196, 0.02116223907470703, 0.021155263900756834, 0.021099071502685546, 0.021071680068969728, 0.02113759994506836, 0.02115702438354492, 0.02115056037902832]",tokens/s,47.15516698315991,,, 
4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Qwen/Qwen-14B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-14B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 
1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3150.462976,4609.409024,0.0,4206.886912,4087.771648,s,1,10.89753515625,10.89753515625,0.0,10.89753515625,10.89753515625,10.89753515625,10.89753515625,[10.89753515625],,kWh,0.00011180697590416456,1.232585722129387e-05,4.4740869125980964e-05,0.0001688737022514394,,MB,3062.53824,4781.375488,0.0,4366.270464,4273.705984,s,10,15.807746826171874,1.5807746826171873,0.002567712822704467,1.5817668457031249,1.583661047363281,1.5840226623535156,1.5843119543457032,"[1.578341796875, 1.5770452880859376, 1.5769969482421875, 1.5817965087890624, 1.5820274658203124, 1.57900146484375, 1.582835205078125, 1.5817371826171875, 1.58438427734375, 1.5835806884765624]",tokens/s,161.9459134910721,kWh,4.640980510250605e-05,5.1186092058712545e-06,3.0822330213392536e-05,8.235074452176983e-05,tokens/kWh,3108654.347773688,MB,3062.53824,4785.569792,0.0,4368.367616,4274.363904,s,10,21.6495146484375,2.16495146484375,0.0037370531858053886,2.1656229248046874,2.1685774169921874,2.169800305175781,2.1707786157226563,"[2.16600390625, 2.167261474609375, 2.15921240234375, 2.158968505859375, 2.165241943359375, 2.167683349609375, 
2.162429443359375, 2.1683056640625, 2.163384765625, 2.171023193359375]",tokens/s,29.099959524749377,kWh,6.336359664541002e-05,6.989397905122009e-06,3.844733631340746e-05,0.0001088003308639395,tokens/kWh,579042.3567625432,,s,630,21.646964221954345,0.034360260669768806,0.0005983946089104575,0.0342273120880127,0.03476741485595703,0.03505083293914795,0.03704894676208498,"[0.035530750274658206, 0.034418689727783204, 0.0343711051940918, 0.03432291030883789, 0.03429561614990234, 0.03418745422363281, 0.03413782501220703, 0.03419075012207031, 0.03424550247192383, 0.03399884796142578, 0.03405209732055664, 0.03423775863647461, 0.03429036712646484, 0.03425212860107422, 0.03412569427490234, 0.03430464172363281, 0.03416425704956055, 0.0340610237121582, 0.03446160125732422, 0.03430809783935547, 0.03428147125244141, 0.03424870300292969, 0.03496550369262695, 0.03456819152832031, 0.03424991989135742, 0.03454870223999024, 0.03421721649169922, 0.03414384078979492, 0.03412464141845703, 0.033731807708740236, 0.03433363342285156, 0.03390873718261719, 0.034146305084228515, 0.03409052658081055, 0.03401776123046875, 0.03416064071655273, 0.0343201904296875, 0.034002975463867186, 0.03427686309814453, 0.03406671905517578, 0.03718912124633789, 0.034363327026367185, 0.03406070327758789, 0.034140384674072266, 0.03417734527587891, 0.03423382568359375, 0.0348059196472168, 0.04168899154663086, 0.03442659378051758, 0.03418780899047852, 0.03415580749511719, 0.034243167877197264, 0.03398896026611328, 0.03382681655883789, 0.033926273345947264, 0.03422457504272461, 0.03402387237548828, 0.03393484878540039, 0.033909217834472656, 0.03401315307617187, 0.034001182556152344, 0.03378147125244141, 0.03408892822265625, 0.03555062484741211, 0.03464876937866211, 0.034546688079833986, 0.03423878479003906, 0.03414281463623047, 0.03404364776611328, 0.0345274887084961, 0.03424051284790039, 0.03415779113769531, 0.034646080017089846, 0.03421187210083008, 0.03413267135620117, 0.034162689208984375, 0.03406595230102539, 0.03451046371459961, 0.03430486297607422, 0.03799859237670898, 0.03435702514648437, 0.03430652618408203, 0.0344463996887207, 0.034260734558105466, 0.034078784942626957, 0.03436838531494141, 0.03470064163208008, 0.03471222305297852, 0.034514110565185545, 0.034831169128417966, 0.03476671981811524, 0.034928768157958985, 0.03496457672119141, 0.03445647811889648, 0.03503510284423828, 0.034359329223632815, 0.034162689208984375, 0.03412377548217774, 0.033888256072998044, 0.03413379287719726, 0.03402089691162109, 0.03394425582885742, 0.03379814529418945, 0.033947265625, 0.034076351165771485, 0.03393782424926758, 0.03414601516723633, 0.03437625503540039, 0.03412377548217774, 0.03412963104248047, 0.0342468147277832, 0.034232769012451175, 0.033973312377929686, 0.03401382446289063, 0.03399020767211914, 0.0341995849609375, 0.03399532699584961, 0.034078750610351566, 0.034047393798828124, 0.03462390518188477, 0.03456409454345703, 0.03469875335693359, 0.0344007682800293, 0.035036800384521484, 0.034374015808105465, 0.03449446487426758, 0.03534502410888672, 0.03450803375244141, 0.03446774291992188, 0.03415311813354492, 0.03434924697875977, 0.03418291091918945, 0.03457388687133789, 0.03462176132202149, 0.03440249633789062, 0.03407686233520508, 0.033875583648681644, 0.03393164825439453, 0.03404185485839844, 0.034178943634033204, 0.03387814331054687, 0.03398262405395508, 0.033953025817871095, 0.033931873321533204, 0.03397542572021484, 0.03406230545043945, 0.033868705749511716, 0.03390812683105469, 0.034161247253417966, 0.03395283126831055, 
0.03383801651000977, 0.03381657409667969, 0.034133342742919924, 0.033911457061767576, 0.03395174407958984, 0.03395939254760742, 0.03403830337524414, 0.034274398803710936, 0.034059169769287106, 0.034179073333740234, 0.03420159912109375, 0.03434064102172851, 0.034773086547851564, 0.034887809753417966, 0.03525222396850586, 0.03465552139282226, 0.034491104125976564, 0.034557376861572266, 0.034471710205078124, 0.03417782211303711, 0.034221790313720704, 0.034074462890625, 0.0340379524230957, 0.03425715255737305, 0.03427532958984375, 0.034375679016113284, 0.03410943984985351, 0.034408447265625, 0.03640300750732422, 0.03468265533447266, 0.03449273681640625, 0.03447817611694336, 0.03434320068359375, 0.03401049423217774, 0.033925472259521486, 0.033925086975097656, 0.0341995849609375, 0.03407174301147461, 0.034318302154541014, 0.03609670257568359, 0.0344505615234375, 0.034326881408691404, 0.03407628631591797, 0.03399555206298828, 0.03408227157592773, 0.03391350555419922, 0.03408867263793945, 0.03402345657348633, 0.03416428756713867, 0.03409292984008789, 0.03731536102294922, 0.03441872024536133, 0.03420979309082031, 0.03429897689819336, 0.03465212631225586, 0.03449264144897461, 0.0345241584777832, 0.03452867126464844, 0.03456147384643555, 0.03422496032714844, 0.03431996917724609, 0.03408457565307617, 0.03419801712036133, 0.0339944953918457, 0.03398419189453125, 0.033962814331054685, 0.0340684814453125, 0.03385548782348633, 0.03404390335083008, 0.03405516815185547, 0.034051071166992186, 0.03400032043457031, 0.03391113662719727, 0.03384492874145508, 0.03409158325195313, 0.03405615997314453, 0.03416275024414062, 0.03499001693725586, 0.034207679748535155, 0.03418527984619141, 0.03425689697265625, 0.0340398063659668, 0.034106689453125, 0.0342718391418457, 0.034626720428466796, 0.034344928741455075, 0.03410185623168945, 0.034191295623779296, 0.03439807891845703, 0.03441430282592774, 0.03428438568115234, 0.03426508712768555, 0.03431945419311523, 0.03410831832885742, 0.03422003173828125, 0.03413401412963867, 0.03400899124145508, 0.033842464447021485, 0.03421388626098633, 0.03388630294799805, 0.034101982116699216, 0.033990657806396485, 0.03553756713867187, 0.03475983810424805, 0.03407494354248047, 0.034103553771972654, 0.03433500671386719, 0.03419340896606445, 0.03413187026977539, 0.03436316680908203, 0.03478489685058594, 0.03428934478759765, 0.03448486328125, 0.034117473602294925, 0.03405372619628906, 0.034103233337402346, 0.03426816177368164, 0.034269184112548826, 0.03419340896606445, 0.034430561065673826, 0.03414188766479492, 0.034114273071289065, 0.0341893424987793, 0.034009246826171874, 0.03445945739746094, 0.03464803314208984, 0.03502492904663086, 0.03453673553466797, 0.03438051223754883, 0.03468288040161133, 0.034490367889404294, 0.03417497634887695, 0.03410739135742188, 0.03448355102539063, 0.03434944152832031, 0.0341978874206543, 0.03439766311645508, 0.03420409774780273, 0.03456982421875, 0.03415599822998047, 0.03404822540283203, 0.03543888092041016, 0.03419929504394531, 0.03412355041503906, 0.03669289779663086, 0.034127872467041014, 0.03399270248413086, 0.03479785537719727, 0.033967838287353516, 0.03399884796142578, 0.0341872329711914, 0.034154239654541015, 0.03392540740966797, 0.034248031616210935, 0.03453107070922851, 0.03461417770385742, 0.03442710494995117, 0.03414204788208008, 0.03426310348510742, 0.034008960723876956, 0.03419750213623047, 0.03420979309082031, 0.034236415863037106, 0.03443478393554687, 0.03419779205322265, 0.03558556747436523, 0.03494534301757812, 0.03529046249389649, 
0.03500502395629883, 0.0347789421081543, 0.03444569778442383, 0.034307167053222655, 0.034427902221679685, 0.03431151962280273, 0.03429539108276367, 0.03406537628173828, 0.03417865753173828, 0.034664447784423826, 0.034224063873291015, 0.034130657196044925, 0.034031360626220704, 0.03834649658203125, 0.03453158569335937, 0.034336769104003906, 0.03425654220581055, 0.03403190231323242, 0.03417094421386719, 0.0341401596069336, 0.03413590240478516, 0.03489193725585937, 0.03423379135131836, 0.03405231857299805, 0.034179424285888674, 0.03407974243164062, 0.034336959838867184, 0.03398739242553711, 0.03378585433959961, 0.03418931198120117, 0.03409305572509766, 0.03410851287841797, 0.03406735992431641, 0.034047008514404294, 0.033897441864013673, 0.034103103637695316, 0.03382825469970703, 0.03389110565185547, 0.03416166305541992, 0.03445564651489258, 0.034220958709716795, 0.034000511169433596, 0.03437158584594727, 0.03432252883911133, 0.03421846389770508, 0.03453878402709961, 0.03433526229858398, 0.03420147323608398, 0.03433075332641602, 0.034455745697021485, 0.034637374877929686, 0.03423667144775391, 0.034229663848876955, 0.034271678924560546, 0.034318496704101566, 0.03418342590332031, 0.034879230499267576, 0.03500191879272461, 0.03487993621826172, 0.0347586555480957, 0.03550796890258789, 0.034408702850341796, 0.03452108764648437, 0.03441632080078125, 0.03434502410888672, 0.034499073028564455, 0.03439795303344727, 0.034516990661621096, 0.034473983764648435, 0.034565406799316405, 0.0341798095703125, 0.03445555114746094, 0.03436281585693359, 0.03454009628295898, 0.03425651168823242, 0.0341978874206543, 0.03408281707763672, 0.0339947509765625, 0.03389817428588867, 0.0342039680480957, 0.034252799987792966, 0.033972225189208984, 0.034106433868408205, 0.03463180923461914, 0.03463987350463867, 0.034343742370605466, 0.034184417724609374, 0.034149150848388675, 0.03417625427246094, 0.034151168823242185, 0.03398031997680664, 0.03389763259887695, 0.03386873626708985, 0.03395174407958984, 0.033998241424560545, 0.03403580856323242, 0.03431875228881836, 0.034606273651123044, 0.0348304328918457, 0.03468780899047851, 0.034781185150146485, 0.034957374572753906, 0.03476678466796875, 0.03455516815185547, 0.03436028671264649, 0.03424444961547852, 0.034037662506103517, 0.03412140655517578, 0.03390687942504883, 0.03419148635864258, 0.03397017669677734, 0.033963455200195315, 0.03392160034179687, 0.034355358123779295, 0.03435452651977539, 0.036471454620361325, 0.03430086517333984, 0.03406940841674805, 0.0339332160949707, 0.03404748916625976, 0.034073184967041016, 0.034014366149902345, 0.034184032440185544, 0.03525404739379883, 0.034299102783203125, 0.03424639892578125, 0.035547134399414065, 0.034414398193359376, 0.03450470352172851, 0.03419564819335937, 0.034008960723876956, 0.03382899093627929, 0.0341297607421875, 0.03396214294433594, 0.03405209732055664, 0.03408211135864258, 0.03400531387329102, 0.034046337127685546, 0.034326526641845705, 0.034915550231933594, 0.0347553596496582, 0.03448128128051758, 0.034571136474609375, 0.03465145492553711, 0.03446044921875, 0.03449436950683594, 0.03447097778320313, 0.034032577514648436, 0.03438127899169922, 0.034277153015136716, 0.034750656127929686, 0.03655759811401367, 0.03772991943359375, 0.03463577651977539, 0.035038593292236325, 0.03453852844238281, 0.03451264190673828, 0.03441785430908203, 0.03445161437988281, 0.0342309455871582, 0.03403776168823242, 0.03395174407958984, 0.0340684814453125, 0.033893630981445315, 0.03411014556884766, 0.03402675247192383, 0.03390476989746094, 
0.03408556747436523, 0.034283519744873044, 0.03445151901245117, 0.03409686279296875, 0.03412339019775391, 0.03464022445678711, 0.03437526321411133, 0.03427967834472656, 0.03414672088623047, 0.03402342224121094, 0.03385139083862305, 0.034836254119873046, 0.03427257537841797, 0.03406940841674805, 0.033972129821777344, 0.033899936676025394, 0.03518457412719726, 0.03412614440917969, 0.034081249237060546, 0.03543875122070313, 0.03438166427612305, 0.034285728454589846, 0.03425423812866211, 0.03447868728637695, 0.034360641479492186, 0.03422380828857422, 0.03430092620849609, 0.034138111114501955, 0.034354911804199216, 0.034054656982421876, 0.0339136962890625, 0.033886974334716796, 0.03429999923706055, 0.03388425445556641, 0.03424256134033203, 0.03450239944458008, 0.03412198257446289, 0.0341319694519043, 0.03406963348388672, 0.034079326629638675, 0.03397411346435547, 0.034099647521972656, 0.03504537582397461, 0.0350552978515625, 0.03458067321777344, 0.03445500946044922, 0.03402550506591797, 0.03424934387207031, 0.03426496124267578, 0.03406655883789062, 0.03399590301513672, 0.034089439392089846, 0.034034080505371093, 0.03415027236938477, 0.03408028793334961, 0.03417107009887695, 0.03391078567504883, 0.034595230102539065, 0.03607891082763672, 0.03433132934570313, 0.033957889556884766, 0.03402547073364258, 0.033955841064453124, 0.0339835205078125, 0.03410224151611328, 0.03463919830322266, 0.034355873107910155, 0.036705760955810546, 0.03467728042602539, 0.03453251266479492, 0.034415454864501954, 0.03423027038574219, 0.03438943862915039, 0.03441516876220703, 0.03427328109741211, 0.03444921493530274, 0.03418483352661133, 0.034428638458251955, 0.034251617431640624, 0.03416064071655273, 0.0341416015625, 0.034212448120117187, 0.035511520385742186, 0.03481887817382812, 0.03476403045654297, 0.03479814529418945, 0.03466822433471679, 0.03457865524291992, 0.034309791564941405, 0.03416867065429687, 0.03449951934814453, 0.03469292831420898, 0.03426416015625, 0.034356128692626955, 0.03418486404418945, 0.034197856903076175, 0.03431139373779297, 0.03406095886230469, 0.03402560043334961, 0.034848033905029295, 0.034075294494628906, 0.03407468795776367, 0.03861913681030273, 0.034560001373291016, 0.03427123260498047, 0.03420979309082031, 0.034127872467041014, 0.03399270248413086, 0.034338176727294924, 0.034562686920166015, 0.03396710586547851, 0.03417129516601562, 0.03447049713134766, 0.03453104019165039, 0.03526211166381836, 0.03442310333251953, 0.03418092727661133, 0.03452569580078125, 0.03413731384277344, 0.03428179168701172, 0.03439254379272461, 0.03395967864990234, 0.03390284729003906, 0.034154495239257815, 0.0339681282043457, 0.03403571319580078, 0.03432400131225586, 0.034154975891113284, 0.03529065704345703, 0.0354384651184082, 0.03423088073730469, 0.034334590911865234, 0.03416044616699219, 0.03431414413452148, 0.03410726547241211, 0.03430585479736328, 0.034087646484375, 0.034364768981933594, 0.03439888000488281, 0.03443260955810547, 0.034208030700683595, 0.03459699249267578, 0.03430998229980469, 0.0347825927734375, 0.03462793731689453]",tokens/s,29.10338805665203,,, 
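Note: the failing rows in this block reduce to three recurring errors on this Tesla T4 host: a missing transformers_stream_generator package (pulled in by Qwen's remote modeling code), a trust_remote_code prompt that cannot be answered inside the non-interactive child process (the EOFError followed by the ValueError), and FlashAttention-2 rejecting the pre-Ampere GPU. The following is a minimal sketch of how such a load would typically be made to succeed, assuming only the public transformers API; load_for_t4 is a hypothetical helper for illustration, not part of the optimum-benchmark harness.

    # Prerequisite for the Qwen/Qwen-* remote modeling code (the ImportError rows):
    #   pip install transformers_stream_generator
    import torch
    from transformers import AutoModelForCausalLM

    def load_for_t4(model_id: str):
        # FlashAttention-2 requires compute capability >= 8.0 (Ampere or newer);
        # a Tesla T4 reports (7, 5), so fall back to PyTorch SDPA there.
        major, _ = torch.cuda.get_device_capability()
        attn = "flash_attention_2" if major >= 8 else "sdpa"
        return AutoModelForCausalLM.from_pretrained(
            model_id,
            torch_dtype=torch.float16,
            attn_implementation=attn,
            # Answers non-interactively the prompt that raised the
            # EOFError/ValueError in the rows above:
            trust_remote_code=True,
        )

Rows that carry measurements instead of a traceback (e.g. the Qwen/Qwen1.5-4B sdpa run above) are the configurations where none of these three conditions applied.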
4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4937.25696,7235.043328,0.0,6849.298432,6445.09696,s,1,11.2100234375,11.2100234375,0.0,11.2100234375,11.2100234375,11.2100234375,11.2100234375,[11.2100234375],,kWh,0.00012108658461667496,1.3349423271267949e-05,4.042031011397551e-05,0.00017485631800191843,,MB,2659.356672,7566.393344,0.0,7151.28832,6823.3216,s,10,3.703178985595703,0.3703178985595703,0.0026546763290434205,0.3711983947753906,0.37277139892578126,0.37287703247070314,0.37296153930664067,"[0.3652895812988281, 0.36940750122070315, 0.3660270080566406, 0.3726346130371094, 0.3727479248046875, 0.3693802185058594, 0.3723126831054687, 0.3719620666503906, 0.372982666015625, 0.37043472290039064]",tokens/s,691.2979388675677,kWh,1.0799964082290594e-05,1.1906943176905653e-06,7.191384721357592e-06,1.9182043121338752e-05,tokens/kWh,13345815.061546648,MB,2663.600128,7568.490496,0.0,7153.385472,6823.32416,s,10,20.426462890625,2.0426462890624997,0.0030563307409211277,2.043179504394531,2.046071044921875,2.046303405761719,2.0464892944335937,"[2.0439560546875, 2.0432205810546873, 2.040147705078125, 2.0465357666015627, 2.0416890869140625, 2.0397025146484373, 2.043138427734375, 2.0460194091796877, 2.04569287109375, 2.0363604736328127]",tokens/s,30.842344236169595,kWh,5.968329299562054e-05,6.583372460711386e-06,3.9552958229647455e-05,0.00010581962368597937,tokens/kWh,595352.7125266768,,s,630,20.423789991378793,0.03241871427202982,0.0008299688275865184,0.03222220802307129,0.03284797554016113,0.033239012718200685,0.037410181999206545,"[0.03800937652587891, 0.033050624847412106, 0.03267379379272461, 0.032096446990966795, 0.03188019180297851, 0.03196601676940918, 0.03187667274475098, 0.03195331192016602, 0.031829792022705077, 0.032029151916503906, 0.031884288787841795, 0.03202537536621094, 0.03184207916259765, 0.033875713348388674, 0.032180767059326175, 0.03212486267089844, 0.03220275115966797, 0.03247923278808594, 0.03265945434570312, 0.03284128189086914, 0.03237113571166992, 0.03252230453491211, 0.032264385223388675, 0.03217583847045898, 0.03206310272216797, 0.032293407440185544, 0.03215327835083008, 0.032182750701904295, 0.03202751922607422, 0.032643585205078124, 0.032837249755859374, 0.0328856315612793, 0.03240739059448242, 0.032489471435546875, 0.032933887481689454, 0.03276777648925781, 0.03238441467285156, 0.032206783294677736, 0.03213926315307617, 0.032305534362792966, 0.032166400909423826, 0.032217086791992186, 0.032176128387451174, 0.03219571304321289, 0.03213606262207031, 0.03229695892333984, 0.03260569763183594, 0.03239929580688477, 0.0325781135559082, 0.03252239990234375, 0.03259743881225586, 0.03262300872802734, 0.032301055908203126, 0.03231961441040039, 0.032200351715087894, 0.03211859130859375, 0.03220841598510742, 0.03211148834228516, 0.03207167816162109, 0.03240959930419922, 0.03266559982299805, 0.03270041656494141, 
0.032522239685058595, 0.036703262329101566, 0.03301884841918945, 0.031974912643432614, 0.03181193542480469, 0.03198796844482422, 0.0320285758972168, 0.032309249877929686, 0.032094207763671875, 0.03197337532043457, 0.031922176361083986, 0.03217123031616211, 0.03189606475830078, 0.03223785781860351, 0.032194561004638675, 0.031985088348388674, 0.03172784042358399, 0.03200649642944336, 0.03189760017395019, 0.031916032791137694, 0.03218227386474609, 0.03719372940063476, 0.032331775665283204, 0.032209983825683595, 0.032217918395996095, 0.03208217620849609, 0.03195833587646484, 0.03203670501708984, 0.03181852722167969, 0.032304256439208985, 0.03342627334594726, 0.0331036491394043, 0.032420032501220705, 0.03945471954345703, 0.03250995254516602, 0.03219792175292969, 0.03209084701538086, 0.032142398834228515, 0.03198444747924805, 0.031977312088012695, 0.03189993667602539, 0.03210982513427734, 0.03215753555297852, 0.03200476837158203, 0.03188876724243164, 0.03196713638305664, 0.031840383529663084, 0.031791967391967775, 0.031801343917846676, 0.032085407257080076, 0.03197091293334961, 0.0319109115600586, 0.03199510383605957, 0.03200652694702148, 0.031947168350219726, 0.03197747230529785, 0.031945856094360355, 0.03180031967163086, 0.032431167602539064, 0.0329183349609375, 0.032008190155029294, 0.033570816040039066, 0.03336569595336914, 0.032071296691894534, 0.03789587020874023, 0.032895294189453125, 0.032143360137939454, 0.03195065689086914, 0.031994047164916994, 0.03212633514404297, 0.032479873657226564, 0.0322476806640625, 0.03208428955078125, 0.03226809692382813, 0.03234201431274414, 0.03216588973999023, 0.032091552734375, 0.03210915374755859, 0.032398784637451175, 0.032604736328125, 0.03322985458374023, 0.03247203063964844, 0.03263078308105469, 0.03263897705078125, 0.032531967163085936, 0.032368480682373045, 0.03227276611328125, 0.032470558166503904, 0.03238374328613281, 0.03234406280517578, 0.03240332794189453, 0.03242406463623047, 0.032450176239013674, 0.03331929779052734, 0.032754718780517576, 0.03225904083251953, 0.031868543624877926, 0.032053695678710935, 0.032489505767822266, 0.03178486442565918, 0.03195196723937988, 0.03224054336547852, 0.03187737655639648, 0.03188710403442383, 0.03205849456787109, 0.031950719833374025, 0.03223855972290039, 0.03197135925292969, 0.03168803215026855, 0.03191622352600098, 0.03186735916137695, 0.03183616065979004, 0.031770751953125, 0.03187824058532715, 0.03194550323486328, 0.03219046401977539, 0.03251814270019531, 0.03195196723937988, 0.031890335083007815, 0.03245260620117187, 0.03242393493652344, 0.03310540771484375, 0.03244214248657227, 0.032602142333984375, 0.03292435073852539, 0.03263385772705078, 0.03270281600952148, 0.0374450569152832, 0.03258240127563477, 0.032271678924560544, 0.032097217559814456, 0.031984800338745116, 0.0319835205078125, 0.03196819114685059, 0.03191398429870605, 0.03185593605041504, 0.032100479125976564, 0.03243276977539063, 0.03225711822509766, 0.0330043830871582, 0.03258524703979492, 0.03246332931518554, 0.032573184967041015, 0.03263071823120117, 0.03241401672363281, 0.03256492614746094, 0.032220542907714846, 0.03259078216552734, 0.032043006896972655, 0.031884960174560543, 0.03213913726806641, 0.032269824981689454, 0.03299795150756836, 0.03212252807617187, 0.032165920257568356, 0.03215228652954102, 0.033212543487548825, 0.03271158218383789, 0.032276481628417966, 0.03207267379760742, 0.033740161895751956, 0.03206412887573242, 0.03195084762573242, 0.03210854339599609, 0.03219046401977539, 0.03217324829101562, 0.03218505477905274, 
0.03209987258911133, 0.032053249359130856, 0.032233470916748046, 0.032508480072021485, 0.03214684677124023, 0.03230096054077149, 0.03235318374633789, 0.03223529434204102, 0.03222633743286133, 0.03220140838623047, 0.032085407257080076, 0.03249225616455078, 0.032098686218261716, 0.03212675094604492, 0.032597793579101565, 0.034732257843017575, 0.0339865608215332, 0.032497665405273435, 0.03220684814453125, 0.033091583251953126, 0.032712703704833986, 0.03222323226928711, 0.032641086578369144, 0.03765260696411133, 0.03248934555053711, 0.0324299201965332, 0.03220608139038086, 0.03220336151123047, 0.03211705780029297, 0.03220630264282227, 0.032153343200683596, 0.03189020729064941, 0.03198921585083008, 0.032295455932617186, 0.03226121520996094, 0.03294620895385742, 0.03190259170532227, 0.03189961624145508, 0.03211382293701172, 0.03192102432250977, 0.0319421443939209, 0.03218025588989258, 0.031889888763427736, 0.03220479965209961, 0.03211027145385742, 0.03211446380615234, 0.03198739242553711, 0.0320742073059082, 0.032059520721435544, 0.032032543182373044, 0.0320203857421875, 0.03229958343505859, 0.03258163070678711, 0.03242393493652344, 0.032464897155761716, 0.03251200103759765, 0.0323870735168457, 0.03255091094970703, 0.03251919937133789, 0.03235660934448242, 0.03259772872924805, 0.03226931381225586, 0.03230310440063477, 0.03234201431274414, 0.03248537445068359, 0.03230515289306641, 0.03271475219726563, 0.03207372665405273, 0.033923072814941405, 0.032769344329833985, 0.032160446166992186, 0.03206905746459961, 0.032182689666748046, 0.031934623718261716, 0.032542430877685546, 0.033488735198974606, 0.032756160736083985, 0.03205519866943359, 0.03212502288818359, 0.03194879913330078, 0.031948959350585934, 0.03186400032043457, 0.03231548690795898, 0.03363616180419922, 0.03268479919433594, 0.03255046463012695, 0.03684966278076172, 0.033122303009033204, 0.03217776107788086, 0.03208233642578125, 0.03210854339599609, 0.032299007415771484, 0.03203481674194336, 0.03197478485107422, 0.031899711608886716, 0.03267027282714844, 0.032215038299560544, 0.032150558471679684, 0.03230940628051758, 0.032260929107666016, 0.03228828811645508, 0.032149982452392575, 0.03250380706787109, 0.03213951873779297, 0.03223865509033203, 0.03205974578857422, 0.03219225692749023, 0.032199264526367184, 0.032145408630371096, 0.03229695892333984, 0.032658687591552736, 0.032285438537597654, 0.03252156829833985, 0.03235676956176758, 0.03288246536254883, 0.03291388702392578, 0.03248332977294922, 0.03227852630615234, 0.0326426887512207, 0.032332160949707034, 0.03215155029296875, 0.03235430526733398, 0.03215155029296875, 0.03203644943237305, 0.031969343185424805, 0.03211859130859375, 0.032123424530029296, 0.03203644943237305, 0.03340329742431641, 0.033271808624267575, 0.03241519927978516, 0.03207427215576172, 0.032194526672363284, 0.03214688110351562, 0.032401119232177734, 0.032322433471679686, 0.03227024078369141, 0.032077438354492185, 0.032127456665039064, 0.03207984161376953, 0.03214134216308594, 0.03241984176635742, 0.03199078369140625, 0.03203977584838867, 0.0324323844909668, 0.03218422317504883, 0.03214950561523437, 0.03194422340393067, 0.03270703887939453, 0.03750326538085937, 0.03292550277709961, 0.03217136001586914, 0.032000862121582034, 0.03207715225219727, 0.03190979194641113, 0.031828287124633786, 0.032392799377441404, 0.0320049934387207, 0.031932384490966796, 0.03221724700927735, 0.03209523010253906, 0.03274803161621094, 0.03212323379516602, 0.03209987258911133, 0.032184799194335936, 0.03214854431152344, 0.03227129745483399, 
0.03230255889892578, 0.0320865592956543, 0.03209151840209961, 0.032083744049072264, 0.03208687973022461, 0.03195843124389648, 0.03215011215209961, 0.031917215347290036, 0.032104991912841795, 0.032054878234863284, 0.032735359191894534, 0.03323350524902344, 0.03268787384033203, 0.03265971374511719, 0.03267379379272461, 0.03286588668823242, 0.032567455291748044, 0.0327597770690918, 0.03271913528442383, 0.033017856597900394, 0.03274054336547851, 0.032567710876464845, 0.03223523330688476, 0.03233683013916016, 0.03251769638061523, 0.032264385223388675, 0.032051105499267575, 0.03230511856079102, 0.03244476699829101, 0.03231721496582031, 0.032382976531982424, 0.032331775665283204, 0.03214131164550781, 0.03218227386474609, 0.032281982421875, 0.03231097412109375, 0.03227948760986328, 0.03233996963500976, 0.03211276626586914, 0.032120704650878906, 0.032473087310791016, 0.032613792419433595, 0.03289907073974609, 0.032588382720947266, 0.03262259292602539, 0.0363765754699707, 0.03278390502929687, 0.032110206604003905, 0.032312095642089846, 0.03284588623046875, 0.03281510543823242, 0.03252012634277344, 0.03254278564453125, 0.03254006576538086, 0.03244297790527344, 0.03237068939208984, 0.032233119964599606, 0.03234201431274414, 0.03222768020629883, 0.0321638412475586, 0.0318156795501709, 0.03204499053955078, 0.03199596786499023, 0.032462879180908205, 0.03199334335327148, 0.03224428939819336, 0.03233577728271484, 0.032365726470947265, 0.032901985168457035, 0.032331775665283204, 0.03210588836669922, 0.03209081649780274, 0.03198556709289551, 0.03276105499267578, 0.0332435188293457, 0.03303436660766602, 0.0322399673461914, 0.032675777435302734, 0.03252944183349609, 0.032309696197509764, 0.03230758285522461, 0.03352899169921875, 0.035442752838134764, 0.03224444961547852, 0.031932640075683597, 0.03213865661621094, 0.032033409118652344, 0.03202032089233398, 0.03320550537109375, 0.032085887908935545, 0.032356830596923826, 0.032039455413818356, 0.03199180793762207, 0.03217203140258789, 0.032176128387451174, 0.03198361587524414, 0.03207167816162109, 0.03222118377685547, 0.03227852630615234, 0.03214960098266602, 0.03199526405334473, 0.03191043281555176, 0.032521408081054685, 0.03256159973144531, 0.032419456481933596, 0.03230313491821289, 0.03259260940551758, 0.03298851013183594, 0.03798031997680664, 0.033334880828857424, 0.03197964859008789, 0.0317959041595459, 0.03196505546569824, 0.03208345413208008, 0.031971967697143555, 0.03235177612304688, 0.032350688934326174, 0.03261849594116211, 0.03272499084472656, 0.035558433532714845, 0.03295484924316406, 0.0326824951171875, 0.03216793441772461, 0.032175392150878904, 0.03202326583862305, 0.03208220672607422, 0.03196393585205078, 0.031941568374633786, 0.032212928771972654, 0.031899711608886716, 0.032066913604736326, 0.03203958511352539, 0.03196688079833984, 0.03190355110168457, 0.03270655822753906, 0.03264473724365234, 0.03284598541259766, 0.03243884658813476, 0.03234016036987305, 0.032333824157714845, 0.03240771102905273, 0.032285823822021484, 0.032121150970458985, 0.03202908706665039, 0.032456703186035156, 0.03223961639404297, 0.032264446258544924, 0.03244620895385742, 0.03280486297607422, 0.03280607986450195, 0.03252668762207031, 0.03248380661010742, 0.032159744262695314, 0.03212214279174805, 0.032877281188964845, 0.03208371353149414, 0.03202585601806641, 0.03205836868286133, 0.032011680603027344, 0.03234585571289063, 0.032191326141357425, 0.03240876770019531, 0.03210732650756836, 0.03232271957397461, 0.03342627334594726, 0.03225968170166016, 0.032729534149169924, 
0.03255859375, 0.0322495346069336, 0.03216870498657227, 0.03232566452026367, 0.037324798583984374, 0.032827392578125, 0.032092159271240234, 0.03199590492248535, 0.03213846588134766, 0.031990720748901365, 0.03184419250488281, 0.03192831993103027, 0.03204217529296875, 0.03189360046386719, 0.03228335952758789, 0.031893503189086916, 0.0321638412475586, 0.03202774429321289, 0.03205027389526367, 0.03219232177734375, 0.0323133430480957, 0.03193974494934082, 0.03212374496459961, 0.031977407455444334, 0.031981632232666014, 0.032008190155029294, 0.03190592002868652, 0.03219033432006836, 0.032086017608642575, 0.031890464782714845, 0.03247785568237305, 0.03248271942138672, 0.03277507019042969, 0.032460033416748045, 0.03234073638916016, 0.03312835311889648, 0.03205744171142578, 0.0327823371887207, 0.03276748657226562, 0.032258174896240235, 0.03210416030883789, 0.03192019271850586, 0.03195555114746094, 0.03202252960205078, 0.03253968048095703, 0.03229548645019531, 0.03178652763366699, 0.03248579025268555, 0.03190595245361328, 0.03212524795532227, 0.03205120086669922, 0.032036865234375, 0.03224886322021484, 0.032125919342041016, 0.03220825576782226, 0.031993888854980466, 0.03203299331665039, 0.03333670425415039, 0.032819328308105467, 0.03243097686767578, 0.032233470916748046, 0.0323482551574707, 0.03220675277709961, 0.03197952079772949, 0.032884735107421875, 0.032688129425048826, 0.03269222259521484]",tokens/s,30.84638063091783,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = 
self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Qwen/Qwen-72B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-72B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1331.945472,1092.48512,0.0,706.740224,675.13344,s,1,8.1287431640625,8.1287431640625,0.0,8.1287431640625,8.1287431640625,8.1287431640625,8.1287431640625,[8.1287431640625],,kWh,2.9535046391674767e-05,3.250388077441792e-06,9.600563236011883e-06,4.238599770512844e-05,,MB,1420.4928,1411.252224,0.0,996.1472,942.733312,s,10,0.3207475833892822,0.03207475833892822,0.00023249906206809033,0.03203071975708008,0.03232933044433594,0.032491704940795896,0.032621604537963866,"[0.03265407943725586, 0.03186550331115723, 0.03192255973815918, 0.032062496185302734, 0.03229324722290039, 0.03214118576049805, 0.031892959594726565, 0.03202444839477539, 0.03203699111938477, 0.03185411262512207]",tokens/s,7981.353976073457,kWh,1.0212356150932973e-06,1.1262463496125873e-07,6.767857007132689e-07,1.810645950767825e-06,tokens/kWh,141386006.40917143,MB,1440.632832,1421.737984,0.0,1006.63296,942.735872,s,10,13.875849975585938,1.3875849975585939,0.007198524546992218,1.3858881225585937,1.394866455078125,1.399564208984375,1.403322412109375,"[1.404261962890625, 1.393822509765625, 1.3882218017578125, 1.382159423828125, 1.383324462890625, 1.385825927734375, 1.3852431640625, 1.390898193359375, 1.3859503173828125, 1.3761422119140625]",tokens/s,45.4026240632799,kWh,4.019836533074275e-05,4.433480637229138e-06,1.7267867543887104e-05,6.1899713511859e-05,tokens/kWh,1017775.3082480779,,s,630,13.873712373733518,0.022021765672592893,0.0005707776975805041,0.02191190433502197,0.02225023403167725,0.022451014804840085,0.024288163604736342,"[0.022362112045288086, 0.021936223983764647, 0.021893056869506836, 0.022456287384033203, 0.023775199890136718, 0.023126047134399415, 0.02213478469848633, 0.021976383209228515, 0.021895872116088868, 0.02214236831665039, 0.0217872314453125, 0.021833728790283204, 0.021727136611938477, 0.02192188835144043, 0.021811199188232423, 0.021780479431152345, 0.021925312042236328, 0.021762592315673828, 0.02173750305175781, 0.02204047966003418, 0.021933504104614258, 0.02190403175354004, 0.021848192214965822, 0.021716543197631836, 0.021833248138427734, 0.02187548828125, 0.02202009582519531, 0.026342912673950194, 0.022229631423950194, 0.021906816482543945, 0.021946880340576173, 0.021759679794311523, 0.022057279586791993, 0.022120447158813478, 0.02223308753967285, 0.022190080642700196, 0.021987327575683592, 0.02191564750671387, 0.02190889549255371, 0.021893728256225587, 0.021940223693847655, 0.02208563232421875, 0.02188470458984375, 0.022093568801879883, 0.02203081512451172, 0.022255584716796874, 0.022056991577148438, 0.02198944091796875, 
0.021927871704101563, 0.02212838363647461, 0.022106367111206053, 0.02209929656982422, 0.022001440048217774, 0.021987295150756835, 0.021943199157714845, 0.024708959579467775, 0.03149430465698242, 0.022032384872436524, 0.021950239181518554, 0.02209791946411133, 0.021888288497924804, 0.02199660873413086, 0.021716224670410157, 0.022190847396850587, 0.021993663787841795, 0.02208153533935547, 0.02231679916381836, 0.02187923240661621, 0.021928031921386718, 0.022054624557495118, 0.022611167907714842, 0.024849184036254884, 0.02440140724182129, 0.022649343490600587, 0.02244367980957031, 0.022302783966064454, 0.022220895767211913, 0.02217747116088867, 0.022135295867919923, 0.022044223785400392, 0.021905792236328124, 0.021889087677001953, 0.021774431228637696, 0.021735391616821288, 0.02183305549621582, 0.0217872314453125, 0.02190745544433594, 0.021888448715209962, 0.0221847038269043, 0.021917503356933595, 0.021975040435791016, 0.021796863555908205, 0.021742591857910155, 0.021777408599853516, 0.02171196746826172, 0.02228316879272461, 0.023759103775024413, 0.022390111923217774, 0.02202662467956543, 0.021880895614624023, 0.02177164840698242, 0.021763776779174803, 0.021730016708374024, 0.021864639282226563, 0.021861536026000977, 0.02180796813964844, 0.02180073547363281, 0.021816896438598632, 0.021878591537475588, 0.021777248382568358, 0.021788671493530275, 0.021816448211669923, 0.02178256034851074, 0.021867359161376953, 0.021888864517211913, 0.021980575561523438, 0.021795232772827147, 0.02225391960144043, 0.022487039566040038, 0.02228646469116211, 0.022275264739990235, 0.022161376953125, 0.022450592041015623, 0.022015424728393556, 0.022022335052490235, 0.022210464477539063, 0.022272064208984376, 0.022026655197143554, 0.021970943450927736, 0.022140928268432617, 0.021921152114868163, 0.0221210880279541, 0.022321151733398437, 0.022189376831054687, 0.02179875183105469, 0.021797727584838868, 0.021810783386230468, 0.021840288162231446, 0.021960639953613283, 0.021794879913330078, 0.021958751678466795, 0.02183363151550293, 0.021936128616333008, 0.021946367263793946, 0.021966848373413086, 0.022000959396362305, 0.022172351837158204, 0.022007360458374023, 0.02197763252258301, 0.021990848541259767, 0.021918176651000976, 0.021894655227661132, 0.02199603271484375, 0.0218787841796875, 0.02190889549255371, 0.021892959594726563, 0.021872447967529296, 0.02190572738647461, 0.022076000213623048, 0.022034175872802736, 0.022063392639160156, 0.021893119812011717, 0.022336959838867188, 0.021983135223388673, 0.02221036720275879, 0.022078048706054686, 0.022102272033691406, 0.02187264060974121, 0.021944320678710938, 0.021980831146240234, 0.021954912185668946, 0.021755615234375, 0.021784032821655273, 0.021680959701538084, 0.021807231903076173, 0.021851903915405275, 0.021735551834106446, 0.02191564750671387, 0.02188697624206543, 0.022026239395141603, 0.021817216873168944, 0.021962879180908203, 0.021907199859619142, 0.02278771209716797, 0.022085599899291992, 0.02184668731689453, 0.02543801689147949, 0.022235456466674804, 0.021931903839111328, 0.022058879852294922, 0.022128639221191407, 0.02202828788757324, 0.022054559707641603, 0.021794559478759766, 0.02170649528503418, 0.02187718391418457, 0.021612384796142577, 0.0216756477355957, 0.02169968032836914, 0.02401091194152832, 0.022555679321289063, 0.021965471267700196, 0.021962112426757812, 0.021830015182495117, 0.021913856506347657, 0.021825536727905274, 0.021806655883789064, 0.021714496612548827, 0.021696800231933593, 0.021725791931152344, 0.022169599533081053, 0.021940223693847655, 
0.021757951736450197, 0.021946367263793946, 0.021737472534179687, 0.021903263092041016, 0.021770336151123046, 0.02169241523742676, 0.021728607177734376, 0.021829471588134766, 0.021877567291259767, 0.022056320190429687, 0.022043264389038086, 0.022187871932983397, 0.02191152000427246, 0.021776832580566407, 0.02188649559020996, 0.021919967651367188, 0.021813119888305664, 0.021756032943725585, 0.021932031631469725, 0.021901311874389647, 0.021648704528808595, 0.021678815841674803, 0.021759967803955078, 0.021778432846069336, 0.021744960784912108, 0.02203923225402832, 0.02196646308898926, 0.02198566436767578, 0.02195859146118164, 0.021882080078125, 0.021906368255615233, 0.021917280197143556, 0.02186476707458496, 0.022023935317993164, 0.021933536529541015, 0.022026592254638672, 0.022103935241699218, 0.022120576858520508, 0.022161855697631835, 0.022261760711669923, 0.022760799407958984, 0.022451360702514647, 0.022619199752807618, 0.022248416900634765, 0.02211801528930664, 0.021975231170654298, 0.02186390495300293, 0.022057504653930665, 0.021936128616333008, 0.02177743911743164, 0.02175388717651367, 0.021776447296142577, 0.021808000564575197, 0.0218603515625, 0.02191155242919922, 0.022035871505737305, 0.02207756805419922, 0.021970592498779296, 0.021783359527587892, 0.021796863555908205, 0.021815296173095702, 0.02174390411376953, 0.021639936447143553, 0.02172822380065918, 0.021892736434936524, 0.021809535980224608, 0.021851520538330078, 0.021842496871948242, 0.02174777603149414, 0.021834943771362306, 0.021963584899902345, 0.021755903244018555, 0.021773920059204102, 0.021731840133666993, 0.02175935935974121, 0.021844512939453126, 0.021796863555908205, 0.021712896347045898, 0.021818687438964843, 0.021765920639038087, 0.021996448516845703, 0.021851295471191405, 0.02193017578125, 0.021998239517211915, 0.02200371170043945, 0.022013471603393554, 0.02207315254211426, 0.02208188819885254, 0.022378816604614257, 0.022013471603393554, 0.021993951797485353, 0.022085664749145507, 0.022110240936279297, 0.02189244842529297, 0.022420095443725585, 0.02198246383666992, 0.021807519912719727, 0.021858688354492187, 0.021901248931884765, 0.021960704803466798, 0.02203647994995117, 0.02221670341491699, 0.02209328079223633, 0.022144832611083985, 0.022027040481567384, 0.02209724807739258, 0.021902368545532225, 0.02178223991394043, 0.02184601593017578, 0.02186579132080078, 0.021889087677001953, 0.02176063919067383, 0.02185215950012207, 0.021803007125854493, 0.02209401512145996, 0.021741376876831055, 0.021790655136108398, 0.02172047996520996, 0.021817087173461914, 0.02178278350830078, 0.021865119934082033, 0.021920927047729494, 0.022249824523925783, 0.02199091148376465, 0.02204569625854492, 0.021894975662231444, 0.02193631935119629, 0.02183782386779785, 0.022245471954345702, 0.022302623748779296, 0.021852256774902344, 0.02294144058227539, 0.02232441520690918, 0.021912544250488282, 0.02186649513244629, 0.021802207946777345, 0.022069696426391602, 0.022208160400390625, 0.02207161521911621, 0.022163551330566408, 0.022030624389648437, 0.021804224014282225, 0.021806047439575194, 0.02185318374633789, 0.021769056320190428, 0.021972991943359374, 0.02189926338195801, 0.02208358383178711, 0.022047807693481445, 0.02179987144470215, 0.021985088348388672, 0.021755231857299804, 0.021850976943969726, 0.021819648742675782, 0.021802751541137696, 0.02184806442260742, 0.022185983657836913, 0.02248089599609375, 0.022398080825805664, 0.02229132843017578, 0.022210111618041994, 0.022010303497314452, 0.022114303588867186, 0.02220844841003418, 
0.02213484764099121, 0.022018047332763673, 0.022106592178344726, 0.022032064437866213, 0.022006048202514648, 0.02244528007507324, 0.02207606315612793, 0.02205084800720215, 0.022167680740356445, 0.022788095474243163, 0.022280191421508787, 0.022130271911621095, 0.02226335906982422, 0.02194108772277832, 0.022136735916137695, 0.02188502311706543, 0.022056671142578126, 0.022188064575195312, 0.022184192657470705, 0.021917695999145507, 0.021773759841918944, 0.02181996726989746, 0.02334316825866699, 0.02181452751159668, 0.02176201629638672, 0.022022048950195314, 0.02172707176208496, 0.02191971206665039, 0.021691455841064453, 0.021665855407714842, 0.021749248504638673, 0.021802591323852538, 0.021844863891601562, 0.021818527221679686, 0.021768735885620116, 0.02177827262878418, 0.021837568283081053, 0.02211289596557617, 0.021792768478393554, 0.02184806442260742, 0.022157312393188477, 0.021863807678222658, 0.021762687683105467, 0.021724895477294923, 0.021989664077758788, 0.02175507164001465, 0.0217891845703125, 0.021848384857177734, 0.021853536605834962, 0.021858751296997072, 0.02187900733947754, 0.02195622444152832, 0.021901472091674805, 0.022147296905517578, 0.021925920486450194, 0.021999584197998048, 0.021931552886962892, 0.022002431869506837, 0.022247135162353517, 0.02187820816040039, 0.022061344146728515, 0.021857696533203123, 0.022012191772460936, 0.02202684783935547, 0.02203411293029785, 0.022265920639038084, 0.02227222442626953, 0.022084287643432617, 0.022013055801391603, 0.02271526336669922, 0.023121631622314454, 0.023638303756713868, 0.023052288055419923, 0.022175071716308593, 0.022131359100341797, 0.021983232498168945, 0.021908863067626953, 0.021911424636840822, 0.021891839981079103, 0.021979135513305666, 0.021840896606445313, 0.021779455184936524, 0.021792768478393554, 0.021970720291137696, 0.021741216659545898, 0.021772863388061524, 0.02215068817138672, 0.022135456085205077, 0.022003263473510743, 0.02209132766723633, 0.021916160583496092, 0.021829824447631836, 0.02189132881164551, 0.02191059112548828, 0.021829376220703123, 0.021911712646484376, 0.021957408905029296, 0.02197849655151367, 0.021817983627319334, 0.021753984451293944, 0.021735231399536134, 0.021858367919921875, 0.021983104705810545, 0.02179007911682129, 0.022004480361938476, 0.02180099105834961, 0.021792736053466797, 0.021829183578491212, 0.02185420799255371, 0.021803264617919923, 0.022030527114868165, 0.021979135513305666, 0.021884384155273436, 0.021826080322265625, 0.021835775375366212, 0.021839872360229492, 0.021901216506958008, 0.02173129653930664, 0.021804927825927734, 0.02191209602355957, 0.02198281669616699, 0.021829471588134766, 0.02195689582824707, 0.021762048721313477, 0.021768192291259765, 0.02581657600402832, 0.023304704666137696, 0.02183782386779785, 0.021976480484008788, 0.022179487228393555, 0.022080192565917967, 0.02210723114013672, 0.02178060722351074, 0.021770463943481446, 0.021928672790527345, 0.021786527633666994, 0.021887264251708984, 0.021871807098388672, 0.021843744277954102, 0.021943071365356445, 0.022153120040893554, 0.022394176483154296, 0.02226655960083008, 0.022347776412963868, 0.022179103851318358, 0.022174432754516603, 0.022190080642700196, 0.02196505546569824, 0.021856000900268555, 0.021843967437744142, 0.02192793655395508, 0.021800960540771484, 0.02186569595336914, 0.021907615661621093, 0.02177292823791504, 0.021934080123901366, 0.021966848373413086, 0.022034496307373048, 0.021896575927734373, 0.021826112747192383, 0.022099967956542968, 0.02197305679321289, 0.02201593589782715, 
0.022599679946899414, 0.023521280288696288, 0.023339008331298827, 0.022396928787231447, 0.022050559997558592, 0.021843231201171875, 0.021872928619384766, 0.021779136657714845, 0.02189516830444336, 0.021772287368774415, 0.02182143974304199, 0.02184716796875, 0.021752416610717775, 0.0219237117767334, 0.021860767364501953, 0.02183296012878418, 0.022033151626586915, 0.021991424560546875, 0.021890815734863282, 0.02184012794494629, 0.021994720458984374, 0.021785247802734376, 0.02180886459350586, 0.02179849624633789, 0.021681055068969727, 0.02173494338989258, 0.021807424545288084, 0.02172319984436035, 0.02211952018737793, 0.022053279876708985, 0.021879295349121093, 0.02185139274597168, 0.022049535751342775, 0.02193731117248535, 0.021877599716186524, 0.021950080871582033, 0.02177881622314453, 0.02164137649536133, 0.02178000068664551, 0.02213715171813965, 0.021739519119262696, 0.022071296691894532, 0.021929983139038087, 0.02190086364746094, 0.021768640518188477, 0.021849695205688476, 0.021792640686035158, 0.021817888259887695, 0.02179817581176758, 0.021639167785644533, 0.021696895599365235, 0.022042400360107423, 0.022063104629516602, 0.021889568328857422, 0.021905439376831055, 0.021742944717407228, 0.021805728912353516, 0.021802080154418944, 0.021713855743408204, 0.021839839935302734, 0.021800960540771484, 0.021794815063476563, 0.02184137535095215, 0.02172140884399414, 0.021784095764160155, 0.021838111877441405, 0.021790719985961913, 0.021782943725585938, 0.02226585578918457, 0.02206924819946289, 0.021760000228881835, 0.021819232940673828, 0.021964960098266602, 0.02178691291809082, 0.02185977554321289, 0.02169487953186035, 0.02174553680419922, 0.02176803207397461, 0.021688480377197266, 0.02185366439819336, 0.02192617607116699, 0.021733055114746092, 0.021617216110229494, 0.02171913528442383, 0.021614271163940428, 0.02187081527709961, 0.02196892738342285, 0.021925567626953125, 0.02174390411376953, 0.021635072708129883, 0.02169036865234375]",tokens/s,45.4096195040594,,, 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: 
Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Qwen/Qwen-72B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-72B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. " 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3157.573632,4647.15776,0.0,4244.635648,4125.520384,s,1,11.78355859375,11.78355859375,0.0,11.78355859375,11.78355859375,11.78355859375,11.78355859375,[11.78355859375],,kWh,0.0001354689418250473,1.4935865922706532e-05,5.164170797994361e-05,0.00020204651572769747,,MB,3073.3312,4821.221376,0.0,4404.0192,4310.79936,s,10,1.2193675994873048,0.12193675994873047,0.00030546697858979026,0.12188681793212891,0.12226797103881835,0.12241657829284668,0.12253546409606933,"[0.122565185546875, 0.12152102661132813, 0.12184233856201172, 0.12223494720458984, 0.12200390625, 0.121795166015625, 0.12160995483398437, 0.12167104339599609, 0.1219312973022461, 0.12219273376464844]",tokens/s,2099.4489283431653,kWh,3.6435033613679665e-06,4.0177724781832457e-07,2.4161850605193187e-06,6.4614656697056095e-06,tokens/kWh,39619493.94860185,MB,2985.582592,4823.318528,0.0,4406.116352,4310.80192,s,10,26.013256835937497,2.6013256835937497,0.004201725343135385,2.600520751953125,2.6053,2.60841220703125,2.61090197265625,"[2.6046083984375, 2.6115244140625, 2.597696044921875, 2.59693994140625, 2.59749853515625, 2.5991796875, 2.602465576171875, 2.602718994140625, 2.598763427734375, 
2.60186181640625]",tokens/s,24.21842078342342,kWh,7.596234215654931e-05,8.378617727839146e-06,3.815690295347608e-05,0.00012249786283786453,tokens/kWh,514294.68678474345,,s,630,26.010558868408197,0.04128660137842572,0.0006735118827594471,0.041149360656738285,0.04163237457275391,0.041952564430236816,0.04457093757629395,"[0.041594879150390625, 0.04117686462402344, 0.041350399017333984, 0.04103881454467773, 0.041027584075927735, 0.04088195037841797, 0.040889854431152346, 0.042582752227783204, 0.040817726135253904, 0.040868255615234376, 0.04095849609375, 0.04127926254272461, 0.04150255966186524, 0.041156673431396486, 0.040720703125, 0.040871360778808596, 0.040900894165039066, 0.04097881698608399, 0.04122000122070312, 0.04206796646118164, 0.04150995254516601, 0.040986942291259765, 0.040850048065185544, 0.04110758590698242, 0.04131603240966797, 0.04079430389404297, 0.041541343688964845, 0.04144566345214844, 0.04195532989501953, 0.04618454360961914, 0.0416167984008789, 0.04119535827636719, 0.041288352966308596, 0.041107486724853516, 0.04125078582763672, 0.04117299270629883, 0.04113993453979492, 0.04102515029907226, 0.04086969757080078, 0.040901470184326175, 0.040894462585449216, 0.04245094299316406, 0.041688766479492184, 0.041336383819580075, 0.04133350372314453, 0.04086374282836914, 0.04098604965209961, 0.04110720062255859, 0.04137587356567383, 0.04143203353881836, 0.041903839111328126, 0.04186947250366211, 0.04174160003662109, 0.0415852165222168, 0.04146963119506836, 0.041103679656982424, 0.04121177673339844, 0.040992095947265626, 0.04126800155639648, 0.04116060638427734, 0.041216094970703124, 0.04108406448364258, 0.041132896423339844, 0.04209654235839844, 0.04246537780761719, 0.04194918441772461, 0.041553791046142576, 0.041338302612304687, 0.041308864593505856, 0.04131020736694336, 0.04138351821899414, 0.041124256134033206, 0.04223372650146484, 0.04137587356567383, 0.04141670227050781, 0.041065792083740234, 0.04439315032958984, 0.04165836715698242, 0.04114147186279297, 0.041063201904296874, 0.04141670227050781, 0.0413675537109375, 0.041422496795654296, 0.04102588653564453, 0.04092230224609375, 0.040821567535400394, 0.040691104888916016, 0.04135177612304688, 0.04141875076293945, 0.04156415939331055, 0.04141817474365234, 0.041011104583740236, 0.04114499282836914, 0.041265151977539063, 0.04104924774169922, 0.04119356918334961, 0.04143795013427734, 0.04156335830688476, 0.04102825546264648, 0.04094297790527344, 0.040841983795166015, 0.04115977478027344, 0.04088694381713867, 0.04093948745727539, 0.041099552154541016, 0.041237537384033206, 0.04132553482055664, 0.040879776000976566, 0.04082131195068359, 0.04187670516967774, 0.04079264068603516, 0.041555137634277345, 0.04134121704101563, 0.042684158325195315, 0.040962398529052736, 0.04086982345581055, 0.04088051223754883, 0.04103523254394531, 0.040558910369873045, 0.0405951042175293, 0.04074953460693359, 0.04192483139038086, 0.044005569458007814, 0.04715817642211914, 0.04096233749389649, 0.041183841705322265, 0.04144284820556641, 0.04094630432128906, 0.0411115837097168, 0.04164323043823242, 0.045943519592285154, 0.04132422256469727, 0.041365856170654296, 0.04103145599365234, 0.040804576873779294, 0.04083209609985351, 0.04082575988769531, 0.04073612976074219, 0.04105894470214844, 0.040815776824951175, 0.04085436630249024, 0.040824321746826174, 0.04097075271606445, 0.041138240814208984, 0.04103683090209961, 0.04081961441040039, 0.04083302307128906, 0.040963550567626954, 0.041021984100341795, 0.040809600830078126, 0.04099702453613281, 0.04082966232299805, 
0.04113318252563476, 0.04116569519042969, 0.04105606460571289, 0.041064640045166016, 0.04117504119873047, 0.04111769485473633, 0.04180377578735352, 0.041285633087158206, 0.041307262420654293, 0.041046112060546876, 0.04110620880126953, 0.04093132781982422, 0.040902561187744144, 0.04077987289428711, 0.04065609741210938, 0.0411761589050293, 0.04126806259155273, 0.04130083084106445, 0.04142886352539062, 0.04143939208984375, 0.041414623260498044, 0.044608734130859376, 0.041564960479736325, 0.04115865707397461, 0.04117708969116211, 0.041020927429199217, 0.041080894470214846, 0.04110790252685547, 0.04116889572143555, 0.041199615478515625, 0.041279167175292966, 0.041223968505859375, 0.041295585632324217, 0.04105503845214844, 0.04112588882446289, 0.04105011367797851, 0.04080134582519531, 0.04138499069213867, 0.04119855880737305, 0.04104959869384766, 0.040860160827636716, 0.040871936798095705, 0.04181983947753906, 0.040965728759765625, 0.04107747268676758, 0.040914302825927736, 0.04119228744506836, 0.04084918212890625, 0.041082878112792966, 0.04079001617431641, 0.041130271911621094, 0.04156182479858399, 0.041662464141845705, 0.04158464050292969, 0.04094057464599609, 0.041030529022216794, 0.04127344131469726, 0.04093471908569336, 0.04103238296508789, 0.040879295349121096, 0.041051071166992185, 0.04109699249267578, 0.0410882568359375, 0.041304927825927734, 0.04116204833984375, 0.0410887680053711, 0.041118431091308597, 0.041380062103271484, 0.04123852920532227, 0.04125078582763672, 0.04132579040527344, 0.04125369644165039, 0.04118710327148437, 0.04111977767944336, 0.04125254440307617, 0.04146022415161133, 0.041836544036865236, 0.041588737487792966, 0.041301280975341796, 0.04130275344848633, 0.04150886535644531, 0.04120576095581055, 0.041240447998046874, 0.041017471313476564, 0.040959999084472655, 0.04108083343505859, 0.04131564712524414, 0.04108761596679687, 0.04335007858276367, 0.04130198287963867, 0.04133481597900391, 0.040925182342529294, 0.04100851058959961, 0.04100569534301758, 0.04115980911254883, 0.04089680099487305, 0.04091289520263672, 0.04101385498046875, 0.041109504699707033, 0.041750526428222655, 0.04140345764160156, 0.04096505737304688, 0.04087807846069336, 0.04110691070556641, 0.04111203384399414, 0.04075478363037109, 0.04405254364013672, 0.0441071662902832, 0.041229312896728515, 0.041132030487060545, 0.04115865707397461, 0.04111360168457031, 0.04091289520263672, 0.040769535064697264, 0.04066883087158203, 0.040796768188476565, 0.041051902770996095, 0.04101529693603516, 0.04109868621826172, 0.04080083084106445, 0.04102345657348633, 0.04097232055664062, 0.04088627243041992, 0.04076889419555664, 0.040958591461181644, 0.04105788803100586, 0.04118732833862305, 0.040800670623779296, 0.04069094467163086, 0.04075187301635742, 0.04107430267333984, 0.040993152618408205, 0.040888031005859374, 0.040857662200927736, 0.04092540740966797, 0.04100864028930664, 0.04109321594238281, 0.04094198226928711, 0.04091904067993164, 0.040947265625, 0.04114416122436523, 0.04097209548950195, 0.040891040802001954, 0.04100291061401367, 0.04128585433959961, 0.04226153564453125, 0.04158358383178711, 0.04155392074584961, 0.04134297561645508, 0.04114636611938476, 0.04136665725708008, 0.04096499252319336, 0.04109904098510742, 0.04091247940063476, 0.04112607955932617, 0.040995262145996095, 0.04090867233276367, 0.04163116836547852, 0.04501369476318359, 0.04115865707397461, 0.04129795074462891, 0.04136297607421875, 0.041340576171875, 0.04197974395751953, 0.041439678192138674, 0.04145532989501953, 0.04119318389892578, 
0.04106079864501953, 0.04116463851928711, 0.04117708969116211, 0.041043968200683595, 0.04120361709594727, 0.041327743530273436, 0.04104291152954102, 0.040940895080566406, 0.04096611022949219, 0.04109305572509766, 0.040956096649169924, 0.040944190979003904, 0.040863582611083984, 0.04078998565673828, 0.040874080657958986, 0.041172767639160154, 0.04122985458374023, 0.041057056427001956, 0.041181182861328124, 0.04110348892211914, 0.04103504180908203, 0.04111014556884766, 0.041244384765625, 0.041328895568847654, 0.04152441787719727, 0.041267391204833984, 0.0409483528137207, 0.04095907211303711, 0.0408803825378418, 0.04101327896118164, 0.0409351692199707, 0.040924030303955077, 0.040725761413574216, 0.04105104064941406, 0.040898399353027345, 0.04095187377929688, 0.04089993667602539, 0.04107939147949219, 0.04110131072998047, 0.04139622497558594, 0.041245952606201175, 0.041546497344970706, 0.04156217575073242, 0.0412545280456543, 0.04516230392456055, 0.04221724700927734, 0.041355998992919925, 0.04159897613525391, 0.04158464050292969, 0.041306110382080076, 0.041095169067382815, 0.041121246337890625, 0.04092777633666992, 0.04101529693603516, 0.04141875076293945, 0.041533374786376955, 0.04128160095214844, 0.04178435134887695, 0.041353408813476565, 0.04155065536499023, 0.041145374298095706, 0.04089955139160156, 0.04100505447387695, 0.041189151763916014, 0.04110940933227539, 0.040814910888671875, 0.041078880310058595, 0.04094966506958008, 0.04108230209350586, 0.0413120002746582, 0.04134092712402344, 0.040977214813232424, 0.04134502410888672, 0.040880126953125, 0.0410860481262207, 0.04102441787719727, 0.04103168106079102, 0.04126105499267578, 0.04087129592895508, 0.040766014099121096, 0.04073212814331055, 0.040944225311279295, 0.040910846710205076, 0.04081388854980469, 0.04125766372680664, 0.044461280822753906, 0.04140848159790039, 0.04108544158935547, 0.04172604751586914, 0.04133705520629883, 0.041418464660644534, 0.04116099166870117, 0.04170751953125, 0.041809921264648435, 0.0417578239440918, 0.041509151458740234, 0.041824031829833984, 0.04125983810424805, 0.04153548812866211, 0.041224193572998044, 0.041329761505126954, 0.04104188919067383, 0.04106908798217773, 0.04147856140136719, 0.04115033721923828, 0.040914047241210935, 0.04113919830322266, 0.041008480072021486, 0.04115273666381836, 0.04131270217895508, 0.0413675537109375, 0.04138911819458008, 0.04148729705810547, 0.041586238861083986, 0.04140000152587891, 0.04130278396606445, 0.04139129638671875, 0.0415486068725586, 0.04236624145507813, 0.04154761505126953, 0.041288318634033205, 0.041252670288085935, 0.0416060791015625, 0.04116659164428711, 0.04123612976074219, 0.040963680267333984, 0.04103859329223633, 0.04123580932617187, 0.04128841781616211, 0.04095331192016602, 0.04122214508056641, 0.04110160064697266, 0.04110150527954102, 0.04078937530517578, 0.04107516860961914, 0.041471904754638675, 0.041258846282958984, 0.04110921478271484, 0.04159148788452149, 0.04139820861816406, 0.042465473175048826, 0.04125478363037109, 0.040970241546630856, 0.04103577423095703, 0.04105011367797851, 0.04103168106079102, 0.040871936798095705, 0.04112908935546875, 0.04098137664794922, 0.040880126953125, 0.04431872177124024, 0.04171878433227539, 0.041724929809570314, 0.04103782272338867, 0.04123852920532227, 0.040970241546630856, 0.04115792083740234, 0.04102195358276367, 0.040820289611816406, 0.041153182983398436, 0.041404415130615234, 0.04127948760986328, 0.04132147216796875, 0.04156687927246094, 0.04118767929077148, 0.041076736450195314, 0.040937473297119144, 
0.04119551849365234, 0.040888320922851565, 0.04112812805175781, 0.04102944183349609, 0.04120576095581055, 0.04141577529907227, 0.042596286773681644, 0.04238435363769531, 0.041836544036865236, 0.04137318420410156, 0.04156796646118164, 0.04135331344604492, 0.041101055145263674, 0.041364414215087894, 0.0413592643737793, 0.04122547149658203, 0.041167713165283205, 0.041057376861572265, 0.04231167984008789, 0.04137503814697266, 0.041151134490966794, 0.041173023223876955, 0.0413573112487793, 0.0412303352355957, 0.04112998580932617, 0.04105766296386719, 0.04153142547607422, 0.04140092849731445, 0.04112998580932617, 0.04115456008911133, 0.04096614456176758, 0.04112319946289063, 0.040937088012695313, 0.04109619140625, 0.041078784942626956, 0.04115251159667969, 0.0412303352355957, 0.04132211303710937, 0.04140070343017578, 0.0413040657043457, 0.04152524948120117, 0.04164812850952149, 0.04231305694580078, 0.04159151840209961, 0.041272415161132815, 0.04122915267944336, 0.04090675354003906, 0.04088633728027344, 0.040998847961425784, 0.040976638793945315, 0.04112153625488281, 0.041191425323486325, 0.04097433471679687, 0.04101939010620117, 0.04447840118408203, 0.041400382995605466, 0.04134707260131836, 0.04132044982910156, 0.04134633636474609, 0.041217758178710935, 0.04131532669067383, 0.040937473297119144, 0.04136495971679687, 0.04119612884521484, 0.040966079711914065, 0.04180377578735352, 0.041323841094970705, 0.04138463973999024, 0.04087603378295898, 0.04080230331420898, 0.04090675354003906, 0.040992767333984374, 0.041006175994873044, 0.04084419250488281, 0.04070195388793945, 0.04089382553100586, 0.040817279815673825, 0.040904094696044925, 0.04105276870727539, 0.041110687255859375, 0.04088713455200195, 0.04168137741088867, 0.0410781135559082, 0.04087891387939453, 0.04099686431884766, 0.04080217742919922, 0.04122828674316406, 0.04187868881225586, 0.041288673400878904, 0.041150463104248046, 0.04121142578125, 0.041175521850585935, 0.04102963256835938, 0.040994815826416016, 0.04098358535766602, 0.04093231964111328, 0.041043968200683595, 0.04113817596435547, 0.04157030487060547, 0.04098057556152344, 0.04110233688354492, 0.04086057662963867, 0.04096409606933594, 0.04107827377319336, 0.040830848693847656, 0.0410015983581543, 0.04117628860473633, 0.041038623809814455, 0.046956832885742185, 0.04140531158447266, 0.0417224006652832, 0.041514495849609374, 0.04180038452148437, 0.04136563110351563, 0.04117708969116211, 0.041910049438476565, 0.04141052627563477, 0.04105651092529297, 0.04118937683105469, 0.040680736541748044, 0.04106496047973633, 0.04085721588134766, 0.04118163299560547, 0.04097964859008789, 0.04102825546264648, 0.04101772689819336, 0.041043006896972656, 0.041052032470703125, 0.04119635009765625, 0.04114838409423828, 0.041463680267333984, 0.04126345443725586, 0.041419872283935545, 0.04150979232788086, 0.041388031005859374, 0.04142627334594726, 0.04152288055419922, 0.04130710220336914, 0.041649246215820314, 0.04102371215820313, 0.04121049499511719, 0.041191390991210934, 0.04122544097900391, 0.04114316940307617]",tokens/s,24.22093285989263,,, 
4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1332.473856,1092.48512,0.0,706.740224,675.13344,s,1,8.1371455078125,8.1371455078125,0.0,8.1371455078125,8.1371455078125,8.1371455078125,8.1371455078125,[8.1371455078125],,kWh,2.9714326570774573e-05,3.2704560529400483e-06,9.588063225984822e-06,4.2572845849699444e-05,,MB,1467.031552,1411.252224,0.0,996.1472,942.733312,s,10,0.34609778976440425,0.03460977897644042,0.00032589257249064467,0.034487936019897455,0.034802233505249025,0.03516105213165283,0.03544810703277588,"[0.03551987075805664, 0.0345047378540039, 0.034662784576416014, 0.03441708755493164, 0.034471134185791015, 0.03472249603271484, 0.03436297607421875, 0.03439759826660156, 0.034639297485351564, 0.03439980697631836]",tokens/s,7396.753390833972,kWh,1.0936034999847267e-06,1.2060506430817918e-07,7.258214295955927e-07,1.9400299938884985e-06,tokens/kWh,131956722.73441839,MB,1499.901952,1421.737984,0.0,1004.535808,942.735872,s,10,16.051060913085937,1.6051060913085937,0.0070064622640252094,1.6025123291015624,1.6148860595703125,1.6156471923828126,1.6162560986328125,"[1.6147169189453126, 1.604986083984375, 1.6019478759765624, 1.6146070556640626, 1.5990687255859375, 1.5985028076171874, 1.5967911376953126, 
1.6164083251953125, 1.6030767822265626, 1.6009552001953125]",tokens/s,39.24974202087666,kWh,4.6795887377097884e-05,5.1612305944194436e-06,1.9379416876799288e-05,7.13365348483166e-05,tokens/kWh,883137.9339346571,,s,630,16.048917741775504,0.025474472605992875,0.0005350400110893431,0.025379327774047854,0.025704848289489747,0.025933857250213622,0.027355027198791508,"[0.025702016830444336, 0.025461727142333985, 0.025345760345458983, 0.025452831268310546, 0.025487360000610353, 0.026912256240844725, 0.025632543563842772, 0.02544304084777832, 0.025423871994018556, 0.025462623596191405, 0.025497503280639648, 0.025345888137817383, 0.025277952194213867, 0.025303968429565428, 0.026413055419921876, 0.025232767105102537, 0.025702783584594727, 0.025296255111694334, 0.025430719375610353, 0.02538015937805176, 0.02554150390625, 0.02627993583679199, 0.025515296936035155, 0.02544713592529297, 0.025475072860717773, 0.025475072860717773, 0.025427391052246093, 0.02541971206665039, 0.025414272308349608, 0.025745311737060548, 0.02560111999511719, 0.025483392715454103, 0.025702495574951172, 0.025430816650390625, 0.025601280212402343, 0.025610368728637697, 0.02543680000305176, 0.025390592575073243, 0.025432064056396485, 0.0253602237701416, 0.025371328353881836, 0.02536751937866211, 0.025573471069335937, 0.025748640060424804, 0.02594380760192871, 0.02581488037109375, 0.025514144897460938, 0.02540540885925293, 0.02552009582519531, 0.025536575317382813, 0.025569280624389647, 0.025554271697998048, 0.025498271942138672, 0.025357759475708008, 0.02536460876464844, 0.025274816513061522, 0.02528767967224121, 0.025475584030151367, 0.025311744689941407, 0.02794495964050293, 0.028762399673461916, 0.02575030326843262, 0.025561824798583984, 0.025538335800170897, 0.025420448303222657, 0.02537619209289551, 0.02526905632019043, 0.02522700881958008, 0.02529715156555176, 0.025479167938232423, 0.02526790428161621, 0.025250272750854494, 0.025344959259033205, 0.025461664199829103, 0.025441823959350587, 0.025354303359985352, 0.025338272094726562, 0.025261632919311522, 0.025412031173706055, 0.02540451240539551, 0.0254487361907959, 0.025500288009643556, 0.0255467529296875, 0.02543404769897461, 0.02544816017150879, 0.02550409507751465, 0.025434112548828124, 0.02551296043395996, 0.02551696014404297, 0.02539529609680176, 0.025397247314453125, 0.02536636734008789, 0.025514144897460938, 0.025487424850463868, 0.02551801681518555, 0.02572604751586914, 0.025740192413330077, 0.025574560165405272, 0.025921695709228514, 0.025517919540405273, 0.025453407287597655, 0.025389055252075195, 0.02542742347717285, 0.025446943283081055, 0.02535628890991211, 0.02524073600769043, 0.025308063507080078, 0.02527840042114258, 0.02543984031677246, 0.025175872802734374, 0.025302879333496092, 0.02513382339477539, 0.025462240219116212, 0.025391647338867188, 0.025655008316040038, 0.025346336364746095, 0.02543324851989746, 0.02558038330078125, 0.025638912200927736, 0.026037824630737304, 0.02588243293762207, 0.025987712860107422, 0.025761215209960938, 0.02600793647766113, 0.02561027145385742, 0.025382207870483398, 0.025543487548828125, 0.025419776916503906, 0.025357952117919923, 0.02531059265136719, 0.02571366310119629, 0.025265472412109375, 0.025178815841674803, 0.025357311248779296, 0.025382144927978516, 0.025556800842285156, 0.02583135986328125, 0.02529484748840332, 0.02628812789916992, 0.025679231643676758, 0.025442943572998047, 0.025333759307861328, 0.025200639724731445, 0.02520591926574707, 0.025205600738525392, 0.025280479431152345, 0.0252457275390625, 
0.025249792098999024, 0.025390176773071288, 0.025287584304809572, 0.025281856536865235, 0.025215679168701172, 0.025423871994018556, 0.025587135314941407, 0.02551251220703125, 0.02547711944580078, 0.025643104553222655, 0.025638816833496093, 0.025714496612548828, 0.025745599746704102, 0.02552134323120117, 0.025540416717529296, 0.02553343963623047, 0.025382911682128906, 0.025509376525878907, 0.02520457649230957, 0.025258079528808593, 0.02529657554626465, 0.025279359817504884, 0.02527027130126953, 0.02534320068359375, 0.025323360443115235, 0.02559052848815918, 0.025208608627319336, 0.02542223930358887, 0.025500864028930665, 0.025330495834350587, 0.02532761573791504, 0.02536016082763672, 0.025376991271972658, 0.025450496673583983, 0.025451711654663086, 0.025295679092407226, 0.025417728424072264, 0.025290143966674804, 0.025436416625976562, 0.025428319931030275, 0.025604095458984375, 0.02551398468017578, 0.02557583999633789, 0.02545686340332031, 0.025366207122802735, 0.025282880783081055, 0.025466880798339843, 0.025448287963867187, 0.025776287078857422, 0.025462783813476563, 0.025388639450073244, 0.025299039840698243, 0.025256256103515624, 0.025488800048828125, 0.025479040145874023, 0.025440992355346678, 0.025317216873168947, 0.025303199768066408, 0.025144832611083984, 0.025151744842529297, 0.02523366355895996, 0.02525152015686035, 0.025216480255126954, 0.0253919677734375, 0.025267648696899413, 0.025274944305419923, 0.02556096076965332, 0.028726783752441407, 0.026597312927246094, 0.02550435256958008, 0.025309152603149414, 0.025378496170043945, 0.025376991271972658, 0.025405023574829103, 0.025352832794189453, 0.02524569511413574, 0.025231359481811523, 0.025245344161987305, 0.02528281593322754, 0.025268320083618165, 0.02525388717651367, 0.02672172737121582, 0.032465118408203125, 0.02562409591674805, 0.02542265510559082, 0.025352352142333983, 0.025503135681152343, 0.025468767166137694, 0.025395807266235353, 0.02531532859802246, 0.02553830337524414, 0.025704704284667968, 0.025771743774414064, 0.025620351791381835, 0.025708959579467772, 0.025661216735839844, 0.02568623924255371, 0.025706144332885743, 0.025626976013183592, 0.02545254325866699, 0.025492639541625978, 0.025336671829223632, 0.025587711334228515, 0.025432064056396485, 0.02532147216796875, 0.025465280532836913, 0.025437599182128907, 0.02536467170715332, 0.025392959594726563, 0.025367136001586913, 0.025362432479858397, 0.025212928771972655, 0.025204736709594725, 0.025170112609863283, 0.02526598358154297, 0.025208736419677736, 0.02531337547302246, 0.025273632049560547, 0.025281152725219726, 0.025276512145996095, 0.025210176467895508, 0.025436864852905274, 0.02531942367553711, 0.025571327209472656, 0.025593759536743164, 0.025816959381103517, 0.025988895416259764, 0.025870527267456055, 0.025581823348999024, 0.02565657615661621, 0.025594623565673828, 0.02538230323791504, 0.025313888549804688, 0.02526963233947754, 0.02532032012939453, 0.025115808486938476, 0.02514182472229004, 0.025354272842407228, 0.025425920486450194, 0.02529635238647461, 0.025563776016235353, 0.02545033645629883, 0.025299135208129882, 0.025633983612060547, 0.02542201614379883, 0.02547315216064453, 0.02525369644165039, 0.02528518486022949, 0.02524720001220703, 0.02519705581665039, 0.02526825523376465, 0.025251840591430662, 0.02526348876953125, 0.025213312149047852, 0.02524403190612793, 0.02521683120727539, 0.025178176879882812, 0.02511199951171875, 0.025225791931152344, 0.02525372886657715, 0.02537424087524414, 0.025260671615600586, 0.02531328010559082, 0.025323040008544923, 
0.025338336944580077, 0.0257392635345459, 0.0257554874420166, 0.025830560684204102, 0.025458688735961913, 0.025470975875854493, 0.02536038398742676, 0.0256135368347168, 0.025262144088745116, 0.025160415649414063, 0.025206783294677734, 0.025061088562011717, 0.025247583389282225, 0.02520854377746582, 0.02543894386291504, 0.02536396789550781, 0.025352703094482423, 0.025434112548828124, 0.025204320907592774, 0.02538742446899414, 0.025425024032592773, 0.025215391159057618, 0.025301471710205078, 0.02533785629272461, 0.025217023849487305, 0.025233280181884764, 0.025215072631835936, 0.02526620864868164, 0.025196544647216795, 0.025273984909057617, 0.025311616897583007, 0.02531532859802246, 0.02536857604980469, 0.025487360000610353, 0.025422943115234374, 0.02539228820800781, 0.025396032333374022, 0.025421760559082032, 0.025409887313842774, 0.025854496002197264, 0.025827007293701174, 0.026374591827392577, 0.026085376739501953, 0.025448448181152345, 0.02523750305175781, 0.025397247314453125, 0.02535219192504883, 0.025333152770996094, 0.025301599502563478, 0.02531711959838867, 0.025315200805664063, 0.02520921516418457, 0.02540947151184082, 0.02562409591674805, 0.025391647338867188, 0.025182432174682617, 0.02525689506530762, 0.025495744705200194, 0.02529756736755371, 0.025143295288085937, 0.02532086372375488, 0.02525984001159668, 0.025228063583374025, 0.02530508804321289, 0.0252620792388916, 0.02533171272277832, 0.025298944473266603, 0.025406143188476563, 0.025316448211669923, 0.025238624572753908, 0.025203935623168944, 0.02528499221801758, 0.025243871688842772, 0.02529280090332031, 0.025282560348510744, 0.02532147216796875, 0.025389055252075195, 0.025630016326904297, 0.025774784088134765, 0.025628000259399413, 0.02544707107543945, 0.025625791549682617, 0.02558243179321289, 0.02553267288208008, 0.02556220817565918, 0.025463424682617187, 0.02536038398742676, 0.026041664123535157, 0.025814847946166994, 0.025342847824096678, 0.025204736709594725, 0.02537833595275879, 0.025223232269287108, 0.02518876838684082, 0.025419776916503906, 0.025472768783569334, 0.025338111877441408, 0.02532374382019043, 0.025327104568481446, 0.02529212760925293, 0.02530400085449219, 0.02513100814819336, 0.025274368286132814, 0.025097408294677735, 0.025108320236206055, 0.02517091178894043, 0.025069183349609375, 0.025061855316162108, 0.025101631164550782, 0.025161312103271483, 0.025199615478515625, 0.02513920021057129, 0.025134143829345704, 0.025008544921875, 0.02530758476257324, 0.025274463653564453, 0.025862144470214843, 0.025430015563964844, 0.025488384246826173, 0.0252807674407959, 0.025309696197509765, 0.02511644744873047, 0.02544816017150879, 0.025329824447631835, 0.02544086456298828, 0.025243648529052733, 0.025228607177734376, 0.02524435234069824, 0.02532966423034668, 0.025321184158325197, 0.026900896072387694, 0.027410976409912108, 0.026894336700439454, 0.029961952209472655, 0.03146575927734375, 0.02569215965270996, 0.02554368019104004, 0.025586687088012695, 0.025440256118774415, 0.025423871994018556, 0.025382911682128906, 0.025267520904541017, 0.025307104110717772, 0.02530112075805664, 0.025508447647094725, 0.02548531150817871, 0.025430015563964844, 0.025415679931640626, 0.02537436866760254, 0.025354047775268555, 0.025274911880493165, 0.025304128646850586, 0.026415552139282227, 0.025487871170043946, 0.02533171272277832, 0.02545052719116211, 0.02535215950012207, 0.025387008666992186, 0.025417600631713867, 0.02532364845275879, 0.02548940849304199, 0.02552422332763672, 0.02550579261779785, 0.025387008666992186, 
0.02537676811218262, 0.02522096061706543, 0.025196704864501953, 0.02532147216796875, 0.025523487091064452, 0.02517465591430664, 0.025368959426879882, 0.02561529541015625, 0.025964736938476562, 0.025549407958984374, 0.025452800750732422, 0.025403135299682616, 0.025482559204101564, 0.02531203269958496, 0.02533491134643555, 0.02534275245666504, 0.02534604835510254, 0.025232864379882813, 0.02531990432739258, 0.025262144088745116, 0.0254005126953125, 0.025145952224731444, 0.025207008361816406, 0.02527039909362793, 0.02533500862121582, 0.025234079360961913, 0.025247264862060546, 0.02526665687561035, 0.02548940849304199, 0.026857887268066406, 0.026024383544921877, 0.025548799514770508, 0.025509279251098634, 0.02538710403442383, 0.025266271591186523, 0.025329792022705078, 0.025376640319824218, 0.025557151794433595, 0.025571584701538086, 0.025365856170654295, 0.025225887298583983, 0.02522297668457031, 0.025278656005859376, 0.025133056640625, 0.02508153533935547, 0.025166240692138672, 0.02528179168701172, 0.026167455673217772, 0.027218048095703124, 0.02546726417541504, 0.025418880462646485, 0.02528278350830078, 0.026419872283935546, 0.025159679412841796, 0.025274368286132814, 0.02505523109436035, 0.02508799934387207, 0.025259328842163087, 0.025258367538452148, 0.025194400787353514, 0.025291168212890625, 0.02558361625671387, 0.025647327423095702, 0.025644607543945312, 0.025553024291992188, 0.025466911315917967, 0.02539116859436035, 0.02548524856567383, 0.025145408630371093, 0.025249792098999024, 0.025195615768432617, 0.025301504135131835, 0.025340320587158204, 0.02540451240539551, 0.02530191993713379, 0.025294176101684572, 0.025420448303222657, 0.02550169563293457, 0.025372671127319335, 0.02534566307067871, 0.02540787124633789, 0.02537833595275879, 0.025356735229492187, 0.025771488189697267, 0.025348672866821288, 0.025528480529785156, 0.025315168380737305, 0.025165632247924806, 0.02539129638671875, 0.025280160903930662, 0.02531942367553711, 0.02522550392150879, 0.025292448043823242, 0.025337631225585938, 0.025367103576660156, 0.025260032653808592, 0.025425920486450194, 0.025108480453491212, 0.02511859130859375, 0.025202943801879884, 0.02518003273010254, 0.02516377639770508, 0.025216640472412108, 0.025283136367797852, 0.025333568572998046, 0.025636831283569337, 0.02547715187072754, 0.02531113624572754, 0.025255136489868164, 0.025323551177978517, 0.025225919723510744, 0.02537433624267578, 0.025235488891601564, 0.0251909122467041, 0.025267999649047853, 0.025219295501708986, 0.025317375183105468, 0.02552422332763672, 0.025409536361694338, 0.02538230323791504, 0.025237247467041014, 0.02524857521057129, 0.025766143798828123, 0.02549737548828125, 0.025395200729370116, 0.025448192596435548, 0.025390752792358397, 0.02530531120300293, 0.025321855545043945, 0.02533785629272461, 0.025282560348510744, 0.02518230438232422, 0.025271392822265624, 0.025247808456420898, 0.02523651123046875, 0.02530636787414551, 0.025385120391845702, 0.02532383918762207, 0.025818527221679686, 0.026597984313964845, 0.025570304870605468, 0.02551910400390625, 0.025415359497070314, 0.025436223983764647, 0.025591360092163087, 0.02573801612854004, 0.025563039779663087, 0.025683744430541992, 0.025453887939453124, 0.025389984130859376, 0.025419776916503906, 0.025389055252075195, 0.02544630432128906, 0.026648096084594727, 0.025408063888549805]",tokens/s,39.25498342857743,,, 
4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4932.005888,7235.043328,0.0,6849.298432,6444.4416,s,1,11.2029736328125,11.2029736328125,0.0,11.2029736328125,11.2029736328125,11.2029736328125,11.2029736328125,[11.2029736328125],,kWh,0.0001200147982833793,1.3231355321371067e-05,3.949642048606261e-05,0.00017274257409081295,,MB,2817.835008,7587.364864,0.0,7172.25984,6822.66624,s,10,4.239782989501954,0.42397829895019534,0.006894182894289971,0.42635455322265625,0.42847293701171874,0.42895265502929686,0.42933642944335937,"[0.40464434814453126, 0.42081451416015625, 0.4279268798828125, 0.4277457580566406, 0.4232462158203125, 0.42646771240234377, 0.4248974609375, 0.42624139404296874, 0.4283663330078125, 0.429432373046875]",tokens/s,603.8044886586808,kWh,1.216049697065892e-05,1.3410824297559389e-06,8.067274972330118e-06,2.1568854372744977e-05,tokens/kWh,11868966.036670402,MB,2821.996544,7591.559168,0.0,7174.356992,6822.6688,s,10,29.656539306640624,2.9656539306640624,0.006324704514309121,2.9654089355468747,2.9716916259765624,2.9748336059570315,2.9773471899414066,"[2.970993408203125, 2.9779755859375, 2.958189697265625, 2.969535400390625, 2.970981689453125, 2.958642333984375, 2.96505224609375, 2.95945263671875, 2.95995068359375, 
2.965765625]",tokens/s,21.243206885536097,kWh,8.690473866934011e-05,9.585829167885213e-06,5.745736078067026e-05,0.00015394792861789556,tokens/kWh,409229.2800922858,,s,630,29.65389392852786,0.04706967290242513,0.0005883383677196449,0.046964672088623045,0.047423367309570315,0.04790367488861084,0.05023746257781983,"[0.04883459091186523, 0.047251903533935546, 0.047286079406738284, 0.047322689056396486, 0.04724371337890625, 0.04704230499267578, 0.04696460723876953, 0.046975360870361325, 0.0467918701171875, 0.047100383758544924, 0.04691798400878906, 0.04688019180297852, 0.04696707153320313, 0.04716921615600586, 0.047051361083984375, 0.0468785285949707, 0.04682566452026367, 0.04690486526489258, 0.046846431732177736, 0.04688281631469727, 0.04665091323852539, 0.046583518981933594, 0.0472270393371582, 0.047343902587890625, 0.04703420639038086, 0.04775369644165039, 0.04713676834106445, 0.047429889678955076, 0.04732083129882812, 0.0471912956237793, 0.04764080047607422, 0.05028659057617187, 0.047513214111328125, 0.04750428771972656, 0.04715315246582031, 0.04716134262084961, 0.04709081649780274, 0.047089729309082035, 0.04695273590087891, 0.046940414428710935, 0.046763648986816404, 0.0467503662109375, 0.04713843154907227, 0.04758975982666016, 0.04696831893920898, 0.04689561462402344, 0.04721459197998047, 0.04706304168701172, 0.047000640869140624, 0.04688483047485351, 0.04665852737426758, 0.04696659088134766, 0.04678879928588867, 0.046935550689697264, 0.047880702972412106, 0.04698931121826172, 0.046930942535400394, 0.047108448028564454, 0.04707600021362305, 0.046944255828857424, 0.04695040130615234, 0.047132671356201174, 0.0469749755859375, 0.04797894287109375, 0.04702057647705078, 0.04682329559326172, 0.05072233581542969, 0.04701436614990234, 0.046929920196533206, 0.046706687927246096, 0.04677836990356445, 0.04670598220825195, 0.04656403350830078, 0.04648268890380859, 0.04652313613891602, 0.046507518768310545, 0.046884384155273434, 0.04755759811401367, 0.04724531173706055, 0.04710128021240234, 0.047398880004882814, 0.04742214584350586, 0.04714700698852539, 0.04696473693847656, 0.04667391967773438, 0.04746060943603515, 0.04700748825073242, 0.04698931121826172, 0.04694800186157227, 0.04699094390869141, 0.04695014572143555, 0.04717452621459961, 0.047380256652832034, 0.04769820785522461, 0.04700156784057617, 0.04689875030517578, 0.047895294189453125, 0.04698089599609375, 0.04790038299560547, 0.04707718276977539, 0.04720483016967773, 0.047355903625488284, 0.047134719848632815, 0.04707942581176758, 0.047168544769287106, 0.0469780158996582, 0.04733484649658203, 0.05046531295776367, 0.04733900833129883, 0.0470060806274414, 0.04690732955932617, 0.04669283294677734, 0.04659379196166992, 0.04676300811767578, 0.04683673477172851, 0.04726124954223633, 0.05125980758666992, 0.047376415252685544, 0.048132095336914066, 0.046952449798583984, 0.04683366394042969, 0.0469667854309082, 0.04705440139770508, 0.04711635208129883, 0.04718425750732422, 0.04716921615600586, 0.04799881744384766, 0.047142494201660154, 0.04702457427978515, 0.047008865356445315, 0.046962783813476565, 0.04751222229003906, 0.04756086349487305, 0.04702550506591797, 0.04686710357666016, 0.04699308776855469, 0.04683193588256836, 0.04685574340820312, 0.047083072662353516, 0.04696358489990234, 0.04676812744140625, 0.046647296905517575, 0.0469667854309082, 0.04677632141113281, 0.046806270599365235, 0.04691020965576172, 0.04675961685180664, 0.046895423889160154, 0.04668569564819336, 0.046793216705322264, 0.04646092987060547, 0.04688896179199219, 
0.0473702392578125, 0.04724246215820312, 0.04739152145385742, 0.047195358276367186, 0.047209247589111325, 0.04690716934204102, 0.04687014389038086, 0.047166400909423825, 0.04704447937011719, 0.04714620971679687, 0.04703311920166016, 0.04698233413696289, 0.04688751983642578, 0.046747390747070315, 0.046751998901367185, 0.046860286712646484, 0.04691532897949219, 0.046989566802978514, 0.046641342163085936, 0.04689900970458984, 0.046892478942871095, 0.04683414459228516, 0.04717577743530273, 0.04844748687744141, 0.046868480682373044, 0.04700364685058594, 0.04661862564086914, 0.04665910339355469, 0.04663276672363281, 0.046620479583740236, 0.04668092727661133, 0.04669440078735351, 0.04653670501708984, 0.04692950439453125, 0.04668252944946289, 0.0467042236328125, 0.04651663970947266, 0.04796873474121094, 0.04732640075683594, 0.047149887084960936, 0.04680089569091797, 0.04698844909667969, 0.04705276870727539, 0.047690143585205076, 0.047154750823974606, 0.047086017608642575, 0.047247840881347654, 0.04845318222045898, 0.0469815673828125, 0.046978591918945316, 0.04685036849975586, 0.04679081726074219, 0.04693196868896484, 0.04677632141113281, 0.04689446258544922, 0.04723775863647461, 0.0469749755859375, 0.046837760925292966, 0.04668735885620117, 0.04673936080932617, 0.046998271942138674, 0.047046848297119144, 0.04690537643432617, 0.04692921447753906, 0.049492671966552736, 0.047709728240966795, 0.047315425872802734, 0.047187744140625, 0.04698748779296875, 0.04690911865234375, 0.04709203338623047, 0.047157470703125, 0.047314720153808595, 0.04730579376220703, 0.04732201766967774, 0.047419422149658205, 0.04737638473510742, 0.04729241561889649, 0.04710102462768555, 0.04699184036254883, 0.0474703369140625, 0.04703612899780273, 0.04689609527587891, 0.04694812774658203, 0.047001823425292966, 0.04674764633178711, 0.04678451156616211, 0.04700140762329102, 0.04691987228393555, 0.046604286193847655, 0.04671456146240234, 0.04662713623046875, 0.04681318283081055, 0.04680239868164063, 0.046870399475097656, 0.046742176055908205, 0.046927871704101565, 0.04763999938964844, 0.047906368255615235, 0.04734668731689453, 0.04835136032104492, 0.04719801712036133, 0.04655699157714844, 0.046760318756103515, 0.04671078491210937, 0.046908512115478515, 0.04682976150512695, 0.047082206726074216, 0.04672515106201172, 0.04676144027709961, 0.046555648803710936, 0.04658774566650391, 0.04659011077880859, 0.04680908966064453, 0.04678860855102539, 0.04716134262084961, 0.047097663879394534, 0.047388225555419924, 0.04710390472412109, 0.047045055389404296, 0.04696297454833984, 0.046911136627197265, 0.04730691146850586, 0.04716563034057617, 0.04742457580566406, 0.047166400909423825, 0.047271392822265626, 0.04681321716308594, 0.047052734375, 0.04670057678222656, 0.04696713638305664, 0.04723072052001953, 0.04739321517944336, 0.04699321746826172, 0.046930110931396485, 0.04732342529296875, 0.0468988151550293, 0.04703433609008789, 0.04699558258056641, 0.04771369552612305, 0.04687887954711914, 0.046854591369628905, 0.04710102462768555, 0.04719292831420899, 0.04716339111328125, 0.0472740478515625, 0.0470846061706543, 0.04691449737548828, 0.04697087860107422, 0.05094784164428711, 0.047239425659179685, 0.047325183868408206, 0.04712457656860351, 0.047273887634277346, 0.046989566802978514, 0.047013214111328125, 0.04733174514770508, 0.04740095901489258, 0.04763785552978515, 0.04803855895996094, 0.047607040405273436, 0.04709247970581055, 0.04699955368041992, 0.048228126525878906, 0.047874526977539064, 0.047347713470458984, 0.047042560577392575, 
0.046808895111083985, 0.04703392028808594, 0.04687116622924805, 0.046871711730957034, 0.04691654586791992, 0.04708956909179687, 0.04676607894897461, 0.04659609603881836, 0.0467250862121582, 0.04643948745727539, 0.04668310546875, 0.046742622375488284, 0.046987934112548826, 0.046925537109375, 0.046643009185791014, 0.04670947265625, 0.046637054443359374, 0.046843902587890625, 0.04723862457275391, 0.04715779113769531, 0.04708691024780273, 0.04694086456298828, 0.047081504821777344, 0.04695036697387695, 0.04658380889892578, 0.04677811050415039, 0.04685644912719727, 0.04692156982421875, 0.0466324462890625, 0.04678313446044922, 0.0466431999206543, 0.04651001739501953, 0.04674671936035156, 0.04670102310180664, 0.04695523071289062, 0.0470313606262207, 0.046746078491210936, 0.04702848052978516, 0.04690691375732422, 0.04684233474731445, 0.04727743911743164, 0.04706572723388672, 0.046876449584960934, 0.04679478454589844, 0.046880958557128906, 0.04686438369750977, 0.04672512054443359, 0.047026176452636716, 0.04678860855102539, 0.04694192123413086, 0.04648988723754883, 0.0470362548828125, 0.046427745819091794, 0.046679969787597655, 0.04658448028564453, 0.04706694412231445, 0.04699676895141602, 0.05070323181152344, 0.04725465774536133, 0.048147647857666016, 0.04719472122192383, 0.04689254379272461, 0.04940259170532227, 0.04820329666137695, 0.04682137680053711, 0.04858927917480469, 0.046769153594970705, 0.04686540985107422, 0.04672716903686523, 0.04710604858398437, 0.047116287231445314, 0.04683974456787109, 0.04688294219970703, 0.04710188674926758, 0.04740313720703125, 0.046698368072509766, 0.04686227035522461, 0.046980831146240236, 0.046846305847167966, 0.0471247673034668, 0.04684975814819336, 0.04699750518798828, 0.04821811294555664, 0.047644672393798826, 0.04725907135009766, 0.04689977645874024, 0.04693955230712891, 0.04681379318237305, 0.046827518463134765, 0.0471011848449707, 0.04670064163208008, 0.046908065795898436, 0.04717174530029297, 0.04700956726074219, 0.047235134124755856, 0.04709929656982422, 0.04716400146484375, 0.046893054962158204, 0.046911487579345705, 0.046960639953613284, 0.04672441482543945, 0.04697081756591797, 0.04696345520019531, 0.047144191741943356, 0.04689075088500977, 0.046696575164794925, 0.04666457748413086, 0.04674560165405273, 0.046639102935791016, 0.046491455078125, 0.046895294189453124, 0.046409694671630856, 0.0466657600402832, 0.04702822494506836, 0.04698316955566406, 0.046857921600341794, 0.04718380737304687, 0.04711568069458008, 0.0469749755859375, 0.04679779052734375, 0.04706073760986328, 0.04671257781982422, 0.04827900695800781, 0.046873119354248045, 0.046737407684326174, 0.04680633544921875, 0.04690950393676758, 0.04683020782470703, 0.04684726333618164, 0.04682534408569336, 0.046801025390625, 0.04665827178955078, 0.046779712677001956, 0.046897857666015626, 0.04652793502807617, 0.046594337463378904, 0.04693840026855469, 0.04695382308959961, 0.04668707275390625, 0.04675279998779297, 0.04661689758300781, 0.04696521759033203, 0.04669235229492188, 0.0468023681640625, 0.047843902587890626, 0.04658732986450195, 0.04678639984130859, 0.046663455963134766, 0.04698822402954102, 0.04740095901489258, 0.047624126434326175, 0.04709564971923828, 0.046827072143554686, 0.04663363265991211, 0.04674969482421875, 0.04673126220703125, 0.04671481704711914, 0.04680614471435547, 0.046715103149414065, 0.04664355087280273, 0.04657603073120117, 0.04663702392578125, 0.04658582305908203, 0.04667599868774414, 0.046604286193847655, 0.046976543426513674, 0.046530078887939454, 0.04714179229736328, 
0.04981148910522461, 0.04721884918212891, 0.047275871276855466, 0.04714905548095703, 0.04714905548095703, 0.04694800186157227, 0.046620128631591796, 0.04721088027954102, 0.04705535888671875, 0.046903297424316405, 0.04715315246582031, 0.04758937454223633, 0.047349472045898434, 0.04702787017822266, 0.04728895950317383, 0.04708572769165039, 0.0470546875, 0.04806803131103515, 0.046958942413330075, 0.04714672088623047, 0.0469222412109375, 0.0465530891418457, 0.046822559356689455, 0.046858272552490234, 0.04708000183105469, 0.046803199768066406, 0.04674710464477539, 0.04657001495361328, 0.04678041458129883, 0.04654694366455078, 0.04697427368164062, 0.04690915298461914, 0.04674595260620117, 0.046801151275634764, 0.04686467361450195, 0.047251167297363283, 0.04727817535400391, 0.04726816177368164, 0.04709708786010742, 0.04700233459472656, 0.04711446380615234, 0.04687235260009766, 0.04672431945800781, 0.04666470336914062, 0.04666729736328125, 0.04658406448364258, 0.04652032089233398, 0.046421791076660154, 0.046556480407714845, 0.04643231964111328, 0.04669116973876953, 0.04657145690917969, 0.046720638275146484, 0.046631359100341795, 0.04657478332519531, 0.04640169525146484, 0.0465252799987793, 0.04687785720825195, 0.04820019149780273, 0.046841537475585934, 0.04744240188598633, 0.046688255310058595, 0.047101951599121096, 0.04653055953979492, 0.04655440139770508, 0.0467279052734375, 0.04664524841308594, 0.04723081588745117, 0.047019168853759764, 0.04684288024902344, 0.046811134338378906, 0.04698223876953125, 0.046741825103759765, 0.04703702545166016, 0.047132671356201174, 0.0497064323425293, 0.050117183685302734, 0.04741734313964844, 0.04717158508300781, 0.04715017700195313, 0.047850784301757814, 0.047149791717529296, 0.04744796752929688, 0.04659603118896485, 0.046659553527832034, 0.04678879928588867, 0.04678876876831055, 0.04662870407104492, 0.04675897598266601, 0.046469886779785155, 0.04708713531494141, 0.048177886962890625, 0.04696697616577149, 0.04664681625366211, 0.04677804946899414, 0.04674614334106445, 0.04687849426269531, 0.046491870880126955, 0.04639516830444336, 0.046460990905761716, 0.05193318557739258, 0.04715945434570312, 0.04684185409545898, 0.04696883010864258, 0.04664041519165039, 0.047030078887939454, 0.04705168151855469, 0.04688070297241211, 0.04680710220336914, 0.047108097076416014, 0.047214462280273435, 0.04737635040283203, 0.04701007843017578, 0.047423233032226564, 0.0471544303894043, 0.047196224212646486, 0.047688350677490235, 0.047130783081054686, 0.04680089569091797, 0.04732723236083984, 0.04736819076538086, 0.04670259094238281, 0.04702822494506836, 0.04689715194702149, 0.04676403045654297, 0.04669440078735351, 0.04693955230712891, 0.04704259109497071, 0.04687222290039063, 0.04698409652709961, 0.04701318359375, 0.04688089752197266, 0.04699715042114258, 0.047917537689208985, 0.0472558708190918, 0.04729459381103516, 0.047026176452636716, 0.04698316955566406, 0.04720230484008789, 0.04696012878417969, 0.046594558715820314, 0.04695849609375, 0.046575294494628904]",tokens/s,21.245101959237918,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4928.905216,7235.043328,0.0,6849.298432,6444.4416,s,1,11.2213525390625,11.2213525390625,0.0,11.2213525390625,11.2213525390625,11.2213525390625,11.2213525390625,[11.2213525390625],,kWh,0.00011996994318748572,1.3224531050501209e-05,4.0386143420056975e-05,0.0001735806176580439,,MB,2726.62528,7566.393344,0.0,7151.28832,6822.66624,s,10,4.15484228515625,0.415484228515625,0.0054613066197689375,0.4171551971435547,0.4189053253173828,0.4200732681274414,0.4210076223754883,"[0.4011531372070313, 0.41598828125, 0.4104186706542969, 0.41617047119140627, 0.4186457824707031, 0.4169816589355469, 0.4173287353515625, 0.4185877990722656, 0.4212412109375, 0.4183265380859375]",tokens/s,616.1485380915552,kWh,1.2031164065666113e-05,1.3263660828945042e-06,7.972484155760462e-06,2.1330014304321078e-05,tokens/kWh,12001867.244324304,MB,2730.807296,7568.490496,0.0,7153.385472,6822.6688,s,10,26.847690917968755,2.684769091796875,0.0055925806121064105,2.6833587646484376,2.6903026611328125,2.6931192260742187,2.695372478027344,"[2.67469482421875, 2.68254638671875, 2.6896767578125, 2.680068115234375, 2.695935791015625, 2.68409716796875, 2.681876708984375, 2.688055908203125, 2.688118896484375, 2.682620361328125]",tokens/s,23.465705185780077,kWh,7.902008973309056e-05,8.716516295577857e-06,5.2343064096643795e-05,0.00014007967012531221,tokens/kWh,449744.06310095947,,s,630,26.8451715202331,0.042611383365449444,0.0005424468511794545,0.0424932804107666,0.04311258163452149,0.04349559078216553,0.044861261177062996,"[0.04348108673095703, 0.0427911376953125, 0.04249168014526367, 0.042584064483642575, 0.042269790649414066, 0.04307392120361328, 0.04260918426513672, 0.042534912109375, 0.0422457275390625, 0.04221583938598633, 0.04241350555419922, 0.04222963333129883, 0.042106689453125, 0.042062721252441405, 0.042024959564208986, 0.04236083221435547, 0.042434558868408204, 0.04216012954711914, 0.04205382537841797, 0.04226233673095703, 0.0420208625793457, 0.04212924957275391, 0.04227692794799805, 0.04228121566772461, 0.042639167785644534, 0.04214169692993164, 0.04192086410522461, 0.04236665725708008, 0.04286873626708984, 0.042561279296875, 0.042662174224853515, 0.042885089874267576, 0.042874881744384766, 0.04249599838256836, 0.042676223754882815, 0.04236492919921875, 0.04237007904052734, 0.0424719352722168, 0.042541534423828124, 0.04239382553100586, 0.04260831832885742, 0.04236707305908203, 0.04225606536865235, 0.04234822463989258, 0.04221132659912109, 0.04213209533691406, 0.04252259063720703, 0.04236483383178711, 0.042164352416992186, 0.04248371124267578, 0.042307777404785155, 0.04212911987304688, 0.04210697555541992, 0.04270425415039063, 0.044518016815185545, 0.042347553253173825, 0.043127296447753906, 0.04249843215942383, 0.042606689453125, 0.04220848083496094, 0.04226108932495117, 0.042525089263916016, 0.04231145477294922, 0.04319926452636719, 0.04237481689453125, 0.04215792083740234, 0.04269107055664063, 0.04200243377685547, 0.04230105590820313, 0.04222524642944336, 0.04234729766845703, 0.04255670547485352, 0.042482398986816404, 0.04228659057617187, 0.042221088409423825, 0.0421611213684082, 0.04209836959838867, 0.04223827362060547, 0.042702945709228515, 0.0430582389831543, 0.04268505477905273, 0.04257404708862305, 0.042461185455322265, 0.044144161224365236, 0.04297280120849609, 0.042482208251953125, 0.04250860977172852, 0.0425164794921875, 0.04217036819458008, 
0.04257791900634766, 0.0430807991027832, 0.0444609603881836, 0.042840065002441405, 0.04261068725585938, 0.04280934524536133, 0.043003807067871096, 0.042727519989013675, 0.04263731384277344, 0.04211916732788086, 0.042466625213623044, 0.04424774551391602, 0.04357104110717774, 0.04338668823242187, 0.04300601577758789, 0.04488447952270508, 0.04240771102905273, 0.04244275283813476, 0.042848255157470705, 0.042194942474365234, 0.04195475387573242, 0.042199615478515626, 0.04210393524169922, 0.04228799819946289, 0.042264575958251956, 0.04213151931762695, 0.0419672966003418, 0.04213116836547852, 0.04244655990600586, 0.04221651077270508, 0.0421209602355957, 0.04194713592529297, 0.04190617752075195, 0.042188800811767575, 0.042229759216308595, 0.042278911590576174, 0.041990142822265625, 0.043462497711181644, 0.04311552047729492, 0.04286374282836914, 0.0426321907043457, 0.04242809677124024, 0.042405376434326174, 0.042336959838867184, 0.04247929763793945, 0.04241849517822266, 0.042461280822753904, 0.04281756973266602, 0.0424508171081543, 0.0424683837890625, 0.0441005744934082, 0.04430207824707031, 0.04281779098510742, 0.04323907089233398, 0.0424984016418457, 0.04305920028686523, 0.04254105758666992, 0.042395648956298826, 0.04273788833618164, 0.04236470413208008, 0.04229241561889648, 0.04252915191650391, 0.04265980911254883, 0.04268489456176758, 0.04256108856201172, 0.0426234245300293, 0.04207001495361328, 0.04277657699584961, 0.04224739074707031, 0.04226764678955078, 0.04222473526000976, 0.042166976928710936, 0.042016769409179686, 0.04228710556030273, 0.04305670547485352, 0.04221782302856445, 0.042197086334228515, 0.04256563186645508, 0.0426550407409668, 0.0429288330078125, 0.04253696060180664, 0.042739616394042966, 0.042675838470458985, 0.042503936767578125, 0.04262547302246094, 0.04326019287109375, 0.043202560424804685, 0.04269875335693359, 0.04246940612792969, 0.04247449493408203, 0.04283833694458008, 0.04265635299682617, 0.042355838775634765, 0.042344799041748045, 0.042403968811035156, 0.04488006210327149, 0.04359196853637695, 0.042425662994384765, 0.04292063903808594, 0.04240719985961914, 0.04371708679199219, 0.04252444839477539, 0.04293782424926758, 0.0423836784362793, 0.04300182342529297, 0.04250447845458984, 0.042653408050537106, 0.04295564651489258, 0.04291526412963867, 0.042830207824707034, 0.04297727966308594, 0.044638175964355466, 0.0426042251586914, 0.042670177459716796, 0.041891616821289064, 0.04226816177368164, 0.04284515380859375, 0.04295884704589844, 0.04275609588623047, 0.04238131332397461, 0.042162174224853514, 0.04205712127685547, 0.042590721130371094, 0.04266403198242188, 0.043009342193603514, 0.0424780158996582, 0.04222723388671875, 0.04263008117675781, 0.042052417755126956, 0.04230857467651367, 0.042159358978271486, 0.04218342590332031, 0.04190534210205078, 0.04196764755249023, 0.04210768127441406, 0.04273945617675781, 0.04225254440307617, 0.04209356689453125, 0.04232499313354492, 0.04234620666503906, 0.042661857604980466, 0.042915393829345706, 0.04280192184448242, 0.042355743408203125, 0.04234543991088867, 0.042412033081054686, 0.0431530876159668, 0.04287315368652344, 0.04270512008666992, 0.043183902740478515, 0.04237516784667969, 0.042777984619140626, 0.04232051086425781, 0.04213142395019531, 0.04210195159912109, 0.042258750915527346, 0.042093120574951175, 0.042161376953125, 0.04286745452880859, 0.04227276611328125, 0.042280960083007815, 0.04212736129760742, 0.04197785568237305, 0.04362259292602539, 0.042737472534179685, 0.04239558410644531, 0.042443008422851564, 
0.04233420944213867, 0.042387454986572266, 0.042194942474365234, 0.04220310211181641, 0.04223161697387695, 0.04295087814331055, 0.042117183685302734, 0.042194881439208985, 0.04311225509643555, 0.042176223754882815, 0.04226892852783203, 0.042815711975097655, 0.04309718322753906, 0.04275423812866211, 0.04241084671020508, 0.04572313690185547, 0.04281100845336914, 0.04341632080078125, 0.042848255157470705, 0.04320380783081055, 0.04274051284790039, 0.04295065689086914, 0.04271321487426758, 0.0427737922668457, 0.042594913482666016, 0.04263452911376953, 0.04259708786010742, 0.04258784103393555, 0.04247097778320313, 0.04284697723388672, 0.042608638763427735, 0.04233193588256836, 0.044001502990722655, 0.04273356628417969, 0.04233132934570313, 0.04297811126708984, 0.04270284652709961, 0.04257791900634766, 0.04252671813964844, 0.04255670547485352, 0.04272611236572266, 0.042547359466552734, 0.04225827026367188, 0.042591552734375, 0.042906303405761716, 0.042307872772216794, 0.04274966430664062, 0.04252467346191406, 0.04252195358276367, 0.04318848037719727, 0.04424745559692383, 0.042719390869140624, 0.04321209716796875, 0.04221401596069336, 0.044238750457763674, 0.04322918319702149, 0.04285248184204102, 0.04261465454101562, 0.04329372787475586, 0.04327008056640625, 0.042504638671875, 0.04235308837890625, 0.04232191848754883, 0.0421069450378418, 0.04192998504638672, 0.042107711791992186, 0.043027999877929685, 0.042684768676757814, 0.04294793701171875, 0.04222012710571289, 0.04229676818847656, 0.04243929672241211, 0.042355838775634765, 0.04262591934204102, 0.042657344818115235, 0.04261113739013672, 0.042731166839599606, 0.04375996780395508, 0.04275199890136719, 0.04283123016357422, 0.04277030563354492, 0.04239382553100586, 0.042649280548095705, 0.042466400146484375, 0.042648670196533206, 0.04249615859985351, 0.04239798355102539, 0.042237728118896485, 0.04332537460327148, 0.04568259048461914, 0.042594974517822265, 0.04225219345092773, 0.042192256927490235, 0.04233078384399414, 0.04212275314331055, 0.04216883087158203, 0.042845504760742184, 0.04290409469604492, 0.04254870223999024, 0.04249260711669922, 0.042640480041503906, 0.04254812622070313, 0.042452991485595705, 0.04249379348754883, 0.04216179275512695, 0.04291843032836914, 0.04248271942138672, 0.042114017486572265, 0.04231987380981445, 0.04207926559448242, 0.04215907287597656, 0.04198108673095703, 0.04249686431884766, 0.04322886276245117, 0.04236729431152344, 0.04243251037597656, 0.04235059356689453, 0.042534912109375, 0.04246732711791992, 0.042676097869873045, 0.04373955154418945, 0.043121662139892575, 0.043507457733154294, 0.04274396896362305, 0.04268057632446289, 0.0425263671875, 0.04326950454711914, 0.042689247131347655, 0.04253081512451172, 0.04259635162353516, 0.04244841766357422, 0.042297119140625, 0.04225830459594727, 0.042723583221435546, 0.04578976058959961, 0.04301430511474609, 0.042608448028564457, 0.042706977844238284, 0.04278476715087891, 0.04270630264282226, 0.0427116813659668, 0.04232806396484375, 0.04253696060180664, 0.04231577682495117, 0.042291454315185543, 0.04238515090942383, 0.042370113372802734, 0.042372222900390624, 0.042161983489990236, 0.04223385620117188, 0.04338278579711914, 0.04221952056884765, 0.04230553436279297, 0.042369022369384765, 0.04255686569213867, 0.04239971160888672, 0.04243062210083008, 0.042805889129638675, 0.04250172805786133, 0.042447071075439456, 0.04234035110473633, 0.042684288024902345, 0.04248188781738281, 0.042260032653808594, 0.042346015930175784, 0.04199711990356445, 0.04228300857543945, 
0.04293225479125977, 0.042372833251953124, 0.04207436752319336, 0.04253900909423828, 0.042196990966796875, 0.04213759994506836, 0.04233011245727539, 0.042810497283935545, 0.042476032257080076, 0.04227091217041016, 0.04230752182006836, 0.04212742233276367, 0.04249414443969726, 0.042223617553710936, 0.042485759735107424, 0.04251881790161133, 0.04280662536621094, 0.04308339309692383, 0.04342192077636719, 0.042522945404052735, 0.043251712799072264, 0.04230352020263672, 0.04235817718505859, 0.04222784042358398, 0.04248620986938476, 0.042194942474365234, 0.04253606414794922, 0.04233446502685547, 0.042193534851074216, 0.04265715026855469, 0.042609153747558595, 0.04309382247924805, 0.042815807342529294, 0.04252671813964844, 0.042967041015625, 0.042256385803222656, 0.04228451156616211, 0.04204803085327148, 0.04243059158325195, 0.04226035308837891, 0.04216831970214844, 0.04278297424316406, 0.0421844482421875, 0.0424194221496582, 0.04298012924194336, 0.04206796646118164, 0.04220111846923828, 0.04217177581787109, 0.04222012710571289, 0.04216243362426758, 0.04224383926391601, 0.04244598388671875, 0.042492767333984376, 0.042872833251953124, 0.04280476760864258, 0.042952350616455075, 0.042815807342529294, 0.04332326507568359, 0.04262169647216797, 0.04267161560058594, 0.042842750549316404, 0.0424977912902832, 0.04222566223144531, 0.04213145446777344, 0.04234854507446289, 0.04246518325805664, 0.042689857482910154, 0.04297548675537109, 0.04600271987915039, 0.042618881225585936, 0.04464230346679687, 0.04281856155395508, 0.04283699035644531, 0.04296038436889649, 0.042842079162597656, 0.04273337554931641, 0.04286880111694336, 0.042721824645996095, 0.04302412796020508, 0.04249647903442383, 0.042683902740478515, 0.04448668670654297, 0.042670528411865236, 0.04475289535522461, 0.04336640167236328, 0.04247552108764648, 0.04246131134033203, 0.0424508171081543, 0.042487071990966796, 0.042648193359375, 0.04286431884765625, 0.04290544128417969, 0.04283577728271484, 0.042422111511230466, 0.04254608154296875, 0.042241630554199217, 0.04239811325073242, 0.04273152160644531, 0.0425082893371582, 0.04258816146850586, 0.042479007720947266, 0.042396255493164066, 0.04242227172851563, 0.04278611373901367, 0.04357718276977539, 0.04252143859863281, 0.04235244750976563, 0.04220099258422851, 0.042331775665283206, 0.0422529296875, 0.04339292907714844, 0.04215532684326172, 0.04209337615966797, 0.04261478424072265, 0.04272742462158203, 0.04399631881713867, 0.04320547103881836, 0.04233951950073242, 0.042844352722167967, 0.04209113693237305, 0.04233647918701172, 0.042374305725097657, 0.04301900863647461, 0.042717056274414064, 0.04220345687866211, 0.04234543991088867, 0.04308041763305664, 0.04471308898925781, 0.04316659164428711, 0.042543102264404296, 0.042347904205322265, 0.04228979110717773, 0.042216926574707034, 0.04213356781005859, 0.04237929534912109, 0.04250223922729492, 0.042371551513671876, 0.042418304443359374, 0.04247225570678711, 0.04254816055297852, 0.042229793548583985, 0.04238332748413086, 0.04211507034301758, 0.04235174560546875, 0.04335001754760742, 0.04281958389282227, 0.042567680358886716, 0.042509632110595705, 0.04236460876464844, 0.04236185455322266, 0.04249379348754883, 0.04287004852294922, 0.04481523132324219, 0.04289945602416992, 0.04248739242553711, 0.04288348770141601, 0.04275820922851563, 0.042862529754638674, 0.042343521118164064, 0.04246006393432617, 0.043055103302001956, 0.042643455505371096, 0.04248291015625, 0.042496799468994144, 0.04262319946289062, 0.043523872375488284, 0.04249216079711914, 
0.04308351898193359, 0.042528064727783206, 0.04239020919799805, 0.04230348968505859, 0.04284371185302734, 0.04507670211791992, 0.04291196823120117, 0.04274585723876953, 0.04274339294433594, 0.042299808502197264, 0.04235878372192383, 0.042401790618896484, 0.042132606506347654, 0.04220403289794922, 0.04217673492431641, 0.042595230102539065, 0.042146305084228515, 0.0425797119140625, 0.04238547134399414, 0.04227743911743164, 0.04235683059692383, 0.04227859115600586, 0.04277065658569336, 0.042031105041503904, 0.0425164794921875, 0.04228300857543945, 0.042227169036865235, 0.042803745269775394, 0.04237286376953125, 0.04243014526367188, 0.0423616943359375, 0.042411518096923825, 0.04285865783691406, 0.04223775863647461, 0.04201465606689453, 0.0419884147644043, 0.04173347091674805, 0.042107009887695314, 0.04190224075317383, 0.04233868789672852]",tokens/s,23.467907423320813,,, 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in 
get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in 
check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Qwen/Qwen-72B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-72B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4928.958464,7914.520576,0.0,7511.998464,6895.682048,s,1,12.6634599609375,12.6634599609375,0.0,12.6634599609375,12.6634599609375,12.6634599609375,12.6634599609375,[12.6634599609375],,kWh,0.00016121911061673017,1.7776517021757323e-05,6.666616444400697e-05,0.0002456617920824945,,MB,2726.592512,7931.297792,0.0,7514.095616,6822.141952,s,10,31.472917724609374,3.1472917724609375,0.003366596834724183,3.1480302734374996,3.1509873046875003,3.1513037109375,3.1515568359375,"[3.1400791015625, 3.1435078125, 3.145054931640625, 3.148227783203125, 3.14719140625, 3.147832763671875, 3.148430419921875, 3.1509169921875, 3.150056396484375, 3.1516201171875]",tokens/s,81.3397735284733,kWh,9.188008231208339e-05,1.0132780933891034e-05,6.097638211439715e-05,0.00016298924536037158,tokens/kWh,1570655.7781403325,MB,2730.76224,7933.394944,0.0,7516.192768,6822.144512,s,10,22.02518432617188,2.2025184326171874,0.004376447002694141,2.2021079101562497,2.2067664794921873,2.2092861938476562,2.2113019653320314,"[2.2046806640625, 2.200498779296875, 2.201718505859375, 2.199762451171875, 2.202800048828125, 2.201237548828125, 2.20620654296875, 2.1939765625, 2.211805908203125, 2.202497314453125]",tokens/s,28.60361986852431,kWh,6.443390710166567e-05,7.109094301964457e-06,4.2958034366408215e-05,0.0001145010357700383,tokens/kWh,550213.3633666686,,s,630,22.022617275238016,0.0349565353575207,0.00047840738005004686,0.034843774795532224,0.035331481552124025,0.035633221054077145,0.036680678405761724,"[0.035985408782958986, 0.034977790832519534, 0.035471359252929685, 0.03536896133422852, 0.03473123168945313, 0.03473683166503906, 0.03466559982299805, 0.0346383056640625, 0.03465193557739258, 0.034656993865966795, 0.03476275253295898, 0.03479292678833008, 0.03484899139404297, 0.03543276977539062, 0.03639849472045899, 0.03508623886108399, 0.03534918212890625, 0.03480985641479492, 0.03478268814086914, 0.03476076889038086, 0.03460758590698242, 0.034885215759277347, 0.03454159927368164, 0.03469350433349609, 0.03530956649780274, 0.034637630462646486, 0.03481766510009766, 0.034904640197753904, 0.03459481430053711, 0.036534271240234374, 0.034934078216552734, 0.03505535888671875, 0.03507295989990234, 0.034920448303222655, 0.03515801620483398, 0.03523516845703125, 0.03493132781982422, 0.034856670379638674, 0.03470947265625, 0.03476259231567383, 0.03484860610961914, 0.035246753692626955, 0.035454975128173825, 0.03522284698486328, 0.03469113540649414, 0.03473881530761719, 0.03483990478515625, 0.03522627258300781, 0.03464303970336914, 0.0350852165222168, 0.035334144592285156, 0.034547615051269534, 0.03467391967773437, 0.034571102142333984, 0.034534465789794924, 0.03469612884521484, 0.03470502471923828, 0.03481024169921875, 0.03553231811523438, 0.03563977432250977, 0.03563686370849609, 0.03494745635986328, 
0.034738174438476564, 0.03599359893798828, 0.034939071655273435, 0.035163745880126954, 0.03611248016357422, 0.03461644744873047, 0.0345296630859375, 0.03460681533813476, 0.03467766571044922, 0.03508838272094727, 0.03524732971191406, 0.034511646270751956, 0.0348873291015625, 0.03522390365600586, 0.03461529541015625, 0.03458572769165039, 0.034564128875732424, 0.03476361465454102, 0.03484467315673828, 0.03465840148925781, 0.034951072692871094, 0.035206401824951175, 0.03523619079589844, 0.034894241333007815, 0.03476070404052734, 0.03472102355957031, 0.034775489807128905, 0.03482444763183594, 0.03472745513916015, 0.035574302673339844, 0.03480937576293945, 0.034714080810546874, 0.03501055908203125, 0.03494876861572266, 0.03472780990600586, 0.03461167907714844, 0.03447808074951172, 0.03475046539306641, 0.03489382553100586, 0.03562876892089844, 0.034844833374023436, 0.03510489654541016, 0.03487865447998047, 0.03468576049804688, 0.0348807373046875, 0.03471644973754883, 0.03461119842529297, 0.03471152114868164, 0.034890846252441404, 0.03522041702270508, 0.03528486251831055, 0.03515987014770508, 0.03518886566162109, 0.03533023834228516, 0.03514502334594727, 0.035030750274658205, 0.03492144012451172, 0.03467673492431641, 0.03481804656982422, 0.03476070404052734, 0.03467168045043945, 0.035077056884765624, 0.03501055908203125, 0.03474137496948242, 0.03608803176879883, 0.03530342483520508, 0.03493478393554687, 0.03540582275390625, 0.034971649169921876, 0.03618406295776367, 0.03471542358398438, 0.03474249649047852, 0.03480966567993164, 0.03493868637084961, 0.034617183685302734, 0.03453596878051758, 0.03456409454345703, 0.034588001251220704, 0.0345975341796875, 0.03459686279296875, 0.03446579360961914, 0.034704574584960936, 0.03475129699707031, 0.03509280014038086, 0.0351426887512207, 0.035205726623535154, 0.035256385803222656, 0.03516211318969727, 0.034887680053710936, 0.035030303955078126, 0.034902175903320315, 0.034938495635986326, 0.03551123046875, 0.03480166244506836, 0.034947166442871096, 0.03527775955200195, 0.034870464324951174, 0.03483830261230469, 0.03464601516723633, 0.034527198791503906, 0.03461532974243164, 0.034781185150146485, 0.03480153656005859, 0.03562713623046875, 0.03540934371948242, 0.034875072479248044, 0.034915199279785154, 0.03480499267578125, 0.034628223419189454, 0.03468300628662109, 0.03465180969238281, 0.03452931213378906, 0.0347630729675293, 0.034463199615478515, 0.03481449508666992, 0.03508428955078125, 0.034772991180419925, 0.034951072692871094, 0.037179489135742184, 0.03506790542602539, 0.034723838806152346, 0.034539329528808595, 0.03456147384643555, 0.03491507339477539, 0.03461689758300781, 0.034508766174316405, 0.035624862670898434, 0.0362828483581543, 0.03541551971435547, 0.0350972785949707, 0.03495872116088867, 0.034810497283935544, 0.034928638458251955, 0.0348930549621582, 0.03487001419067383, 0.03487503814697265, 0.03479996871948242, 0.03479142379760742, 0.034883392333984374, 0.0346640625, 0.0348326416015625, 0.034750785827636715, 0.035255870819091796, 0.0348897590637207, 0.03466896057128906, 0.034713855743408205, 0.034600704193115235, 0.03614892959594727, 0.03477945709228516, 0.03505561447143555, 0.03488086318969726, 0.03477910232543945, 0.03474502563476563, 0.03475004959106445, 0.035461536407470705, 0.03520512008666992, 0.03515961456298828, 0.03508006286621094, 0.035324256896972654, 0.03530364990234375, 0.03495731353759766, 0.03480575942993164, 0.0348671989440918, 0.03467619323730469, 0.03464796829223633, 0.03465216064453125, 0.0345643196105957, 0.034777503967285156, 
0.03470745468139649, 0.034592769622802735, 0.034922462463378906, 0.03464604949951172, 0.034541088104248045, 0.03470998382568359, 0.0345984001159668, 0.034656768798828126, 0.03500969696044922, 0.03466463851928711, 0.03473065567016602, 0.03472800064086914, 0.03464771270751953, 0.03470979309082031, 0.03482828903198242, 0.03493379211425781, 0.035213695526123044, 0.0351748161315918, 0.03533135986328125, 0.03527478408813477, 0.034638591766357425, 0.03464767837524414, 0.03590361785888672, 0.035057952880859375, 0.03490198516845703, 0.034850784301757816, 0.034662464141845706, 0.03477827072143555, 0.034853729248046875, 0.034848480224609374, 0.03490576171875, 0.034712192535400394, 0.03466649627685547, 0.034895870208740236, 0.034606143951416014, 0.03463043212890625, 0.03469254302978515, 0.035060447692871095, 0.03477503967285156, 0.03487491226196289, 0.03496803283691406, 0.036502849578857424, 0.03643987274169922, 0.03541452789306641, 0.03531766510009766, 0.03526444625854492, 0.03511145782470703, 0.03478937530517578, 0.034797569274902344, 0.03475251388549805, 0.03501004791259766, 0.034859519958496094, 0.0351550407409668, 0.03481484985351563, 0.03477840042114258, 0.03511548614501953, 0.035856544494628904, 0.03505686569213867, 0.035638080596923825, 0.03476857757568359, 0.0347367057800293, 0.034848094940185544, 0.03498649597167969, 0.035342334747314456, 0.03471916961669922, 0.034699230194091796, 0.03474288177490235, 0.03474844741821289, 0.03471152114868164, 0.03476425552368164, 0.0348370246887207, 0.03463782501220703, 0.0346429443359375, 0.034915328979492184, 0.03525360107421875, 0.035105438232421876, 0.03475251388549805, 0.03473958587646484, 0.03453606414794922, 0.03455187225341797, 0.03517625427246094, 0.03471987152099609, 0.0347852783203125, 0.034772991180419925, 0.03471897506713867, 0.0361003532409668, 0.0353070068359375, 0.035112800598144533, 0.03518940734863281, 0.034928638458251955, 0.03488358306884766, 0.03571823883056641, 0.035246047973632816, 0.03518716812133789, 0.035332576751708984, 0.0353177604675293, 0.0354299201965332, 0.035440288543701175, 0.035291969299316404, 0.03502489471435547, 0.034830337524414064, 0.03481190490722656, 0.035648639678955076, 0.03485171127319336, 0.03476070404052734, 0.03461734390258789, 0.03465411376953125, 0.03465615844726563, 0.03476473617553711, 0.03470975875854492, 0.034590721130371094, 0.034904064178466795, 0.03490403366088867, 0.03483241653442383, 0.034799617767333986, 0.0347852783203125, 0.03473942565917969, 0.03461999893188476, 0.03463187026977539, 0.03458867263793945, 0.0347770881652832, 0.03475609588623047, 0.03491891098022461, 0.03532780838012695, 0.03489811325073242, 0.035380577087402346, 0.03482457733154297, 0.03484697723388672, 0.03473001480102539, 0.03453747177124023, 0.03466662216186524, 0.0348644790649414, 0.034882080078125, 0.03501667022705078, 0.03470748901367188, 0.03455788803100586, 0.034646080017089846, 0.03462144088745117, 0.03464601516723633, 0.034680831909179685, 0.0348037109375, 0.03467059326171875, 0.03492454528808594, 0.03506585693359375, 0.03497318267822266, 0.03514419174194336, 0.034928638458251955, 0.03495888137817383, 0.03632704162597656, 0.035671550750732424, 0.034973793029785157, 0.03521558380126953, 0.034815807342529294, 0.034933502197265626, 0.034705406188964845, 0.034774398803710936, 0.03463641738891601, 0.03459481430053711, 0.034977790832519534, 0.03477030563354492, 0.0352119026184082, 0.03761971282958984, 0.038153377532958985, 0.035025760650634764, 0.03485865783691406, 0.035078495025634766, 0.03514572906494141, 0.03504537582397461, 
0.03510067367553711, 0.03484380722045898, 0.034829151153564455, 0.03486016082763672, 0.034660320281982425, 0.034820926666259765, 0.03492179107666016, 0.034945022583007815, 0.03477993774414063, 0.034648063659667966, 0.03477484893798828, 0.03476089477539063, 0.03462963104248047, 0.03474998474121094, 0.03482988739013672, 0.0346951675415039, 0.034702239990234376, 0.03458867263793945, 0.03491836929321289, 0.034739871978759766, 0.03480620956420898, 0.03493011093139648, 0.035182239532470704, 0.03478204727172852, 0.03461491012573242, 0.03454195022583008, 0.03491839981079101, 0.03486908721923828, 0.035520671844482425, 0.035094528198242186, 0.03496352005004883, 0.034893760681152346, 0.03477679824829102, 0.034643550872802735, 0.034842945098876955, 0.034982078552246096, 0.03529337692260742, 0.03521887969970703, 0.03504496002197265, 0.034871646881103516, 0.03510335922241211, 0.03489177703857422, 0.034840576171875, 0.03600396728515625, 0.03500431823730469, 0.034840576171875, 0.03472793579101562, 0.03457024002075195, 0.034653888702392575, 0.03478982543945312, 0.03479500961303711, 0.034774784088134766, 0.03480435180664063, 0.03499568176269531, 0.03483087921142578, 0.03473350524902344, 0.03484729766845703, 0.034661598205566406, 0.034736545562744144, 0.03490854263305664, 0.034947071075439456, 0.03518668746948242, 0.03469107055664063, 0.03476448059082031, 0.034926239013671874, 0.0346610221862793, 0.034531326293945314, 0.03462115097045899, 0.03463401412963867, 0.03462521743774414, 0.03460895919799805, 0.034602783203125, 0.03496623992919922, 0.03514700698852539, 0.034909950256347654, 0.03480473709106445, 0.034920448303222655, 0.03475609588623047, 0.03467721557617188, 0.03521868896484375, 0.03507894515991211, 0.03478297424316406, 0.034842529296875, 0.03480815887451172, 0.03475251388549805, 0.03486038589477539, 0.03465692901611328, 0.034551807403564457, 0.03472556686401367, 0.03471596908569336, 0.034912254333496096, 0.03475251388549805, 0.03442611312866211, 0.03482905578613281, 0.03481804656982422, 0.034729984283447264, 0.034813087463378904, 0.034796382904052736, 0.03490582275390625, 0.03486080169677734, 0.034781185150146485, 0.03482396697998047, 0.03491712188720703, 0.03484374237060547, 0.03487836837768555, 0.03495116806030273, 0.036740478515625, 0.03566656112670898, 0.0354015998840332, 0.03515552139282226, 0.035377662658691404, 0.034926239013671874, 0.034933151245117186, 0.03538489532470703, 0.035367359161376954, 0.035530750274658206, 0.0350590705871582, 0.034961406707763674, 0.03498236846923828, 0.0351704330444336, 0.0348078384399414, 0.034912254333496096, 0.03519596862792969, 0.03508319854736328, 0.03496659088134765, 0.0347841911315918, 0.03479129409790039, 0.03497356796264649, 0.03483059310913086, 0.034797569274902344, 0.03489756774902344, 0.03475436782836914, 0.03497014236450195, 0.03495935821533203, 0.03501465606689453, 0.035383296966552735, 0.035059711456298825, 0.0352250862121582, 0.0351104621887207, 0.03494105529785156, 0.03483260726928711, 0.03476105499267578, 0.03556288146972656, 0.03533071899414063, 0.035008319854736326, 0.03479750442504883, 0.03506633758544922, 0.03495116806030273, 0.034723838806152346, 0.03475151824951172, 0.034759647369384764, 0.03463782501220703, 0.034815231323242185, 0.03511920166015625, 0.035004161834716795, 0.03738467025756836, 0.03505401611328125, 0.035003841400146486, 0.03467935943603516, 0.03474406433105469, 0.0347691535949707, 0.03486105728149414, 0.03476070404052734, 0.03492454528808594, 0.0347911376953125, 0.03469302368164062, 0.0374799690246582, 0.03521558380126953, 
0.03495980834960938, 0.03609395217895508, 0.035046527862548825, 0.03466665649414063, 0.03494908905029297, 0.034930526733398436, 0.03467964935302734, 0.0348834228515625, 0.034826400756835935, 0.03462508773803711, 0.034859519958496094, 0.034581790924072264, 0.03448700714111328, 0.034549182891845706, 0.0345912971496582, 0.034629695892333986, 0.03450259017944336, 0.03458627319335938, 0.03467628860473633, 0.03448912048339844, 0.03463372802734375, 0.034543617248535156, 0.03479347229003906, 0.03466649627685547, 0.03467257690429688, 0.034553920745849606, 0.03496345520019531, 0.035225601196289064, 0.03556147384643555, 0.03510003280639649, 0.0349024658203125, 0.034909534454345706, 0.03479843139648438, 0.035026016235351565, 0.03523651123046875, 0.034656513214111326, 0.034715648651123046, 0.034590721130371094, 0.03490611267089844, 0.03489791870117188, 0.034710655212402346, 0.03477388763427734, 0.034678783416748044, 0.03480607986450195, 0.034934463500976565, 0.03531161499023437, 0.03506790542602539, 0.03502284622192383, 0.035018527984619144, 0.03487558364868164, 0.04124444961547852, 0.03507952117919922, 0.0349211196899414, 0.034828544616699215, 0.03481135940551758, 0.035275104522705075, 0.03563916778564453, 0.03488380813598633, 0.03471334457397461, 0.03457263946533203, 0.034697216033935545, 0.03480169677734375, 0.034786880493164064, 0.034785694122314456]",tokens/s,28.60695402940886,,, 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in 
_wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1873.276928,2868.772864,0.0,2466.250752,2401.696256,s,1,9.4162490234375,9.4162490234375,0.0,9.4162490234375,9.4162490234375,9.4162490234375,9.4162490234375,[9.4162490234375],,kWh,6.37608176333136e-05,7.0157127547170014e-06,2.3509463251969454e-05,9.428599364000006e-05,,MB,1736.4992,3097.362432,0.0,2680.160256,2582.175744,s,10,5.979624511718749,0.597962451171875,0.001965094992385114,0.5974742736816406,0.59926318359375,0.601207763671875,0.6027634277343751,"[0.60315234375, 0.5960966796875, 0.5971708984375, 0.5978729858398437, 0.5971138916015625, 0.5977776489257812, 0.59714892578125, 0.595760009765625, 0.5988310546875, 0.5987000732421875]",tokens/s,428.12052746505447,kWh,1.78251366845603e-05,1.9657964771226416e-06,1.1811022520706348e-05,3.160195568238929e-05,tokens/kWh,8100764.477138364,MB,1740.726272,3097.362432,0.0,2680.160256,2582.178304,s,10,15.8285029296875,1.5828502929687498,0.004267978351672285,1.5823958740234376,1.5883071655273437,1.5897179748535155,1.590846622314453,"[1.5854661865234374, 1.5820706787109375, 1.5827210693359375, 1.58799365234375, 1.5834609375, 1.5911287841796875, 
1.5783160400390626, 1.5814571533203126, 1.580028564453125, 1.57585986328125]",tokens/s,39.80161628667924,kWh,4.601011039127364e-05,5.074990299936751e-06,2.3680755872691997e-05,7.476585656390239e-05,tokens/kWh,842630.6190467288,,s,630,15.826387191772465,0.025121249510749937,0.0004887902048017488,0.025012351989746093,0.025385139083862304,0.025788841438293453,0.027186098136901864,"[0.026068511962890624, 0.025132896423339844, 0.025063968658447264, 0.026560415267944337, 0.02613680076599121, 0.02512233543395996, 0.024955360412597657, 0.02486793518066406, 0.024933280944824218, 0.024969215393066405, 0.024944639205932616, 0.024795135498046874, 0.024751583099365235, 0.024850303649902344, 0.025238176345825196, 0.025843711853027345, 0.024798879623413084, 0.02511087989807129, 0.024862335205078124, 0.024783199310302734, 0.024833248138427733, 0.024877887725830078, 0.02511257553100586, 0.02517148780822754, 0.025317855834960937, 0.025251295089721678, 0.02511311912536621, 0.025057279586791992, 0.025614336013793947, 0.025081567764282227, 0.025092384338378907, 0.02497030448913574, 0.024935327529907226, 0.024962432861328127, 0.02495075225830078, 0.025403295516967773, 0.028533376693725587, 0.0252192325592041, 0.025112384796142577, 0.025067712783813475, 0.024954687118530272, 0.02499193572998047, 0.024970272064208984, 0.025101280212402342, 0.025358367919921875, 0.025063039779663086, 0.024949087142944335, 0.02492403221130371, 0.02502659225463867, 0.025004127502441405, 0.025216543197631835, 0.025017824172973633, 0.025103008270263672, 0.025252191543579102, 0.025297183990478516, 0.025032415390014648, 0.02485219192504883, 0.02502889633178711, 0.024813568115234375, 0.024897695541381836, 0.024903520584106446, 0.024921695709228517, 0.02510492706298828, 0.02609974479675293, 0.0251231689453125, 0.025026559829711914, 0.025092384338378907, 0.02487887954711914, 0.024841440200805663, 0.02502668762207031, 0.025059167861938476, 0.02504368019104004, 0.025255296707153322, 0.028104639053344725, 0.02525257682800293, 0.025059328079223633, 0.025044992446899415, 0.026322240829467772, 0.024949440002441405, 0.025116672515869142, 0.02491360092163086, 0.024899904251098632, 0.02525142478942871, 0.025001407623291016, 0.0249783992767334, 0.02511052894592285, 0.024911264419555663, 0.024959808349609376, 0.02489673614501953, 0.02515001678466797, 0.024890560150146485, 0.02489411163330078, 0.024919328689575197, 0.02496099281311035, 0.02488003158569336, 0.024952415466308595, 0.02504672050476074, 0.02494870376586914, 0.024873727798461913, 0.02489958381652832, 0.02501024055480957, 0.02495017623901367, 0.02502614402770996, 0.025199487686157228, 0.02500204849243164, 0.02499171257019043, 0.02492624092102051, 0.024997888565063478, 0.02491187286376953, 0.02498150444030762, 0.024939680099487306, 0.024918880462646485, 0.024981632232666015, 0.025108352661132812, 0.024999935150146483, 0.025003711700439454, 0.024840511322021485, 0.02490563201904297, 0.02497545623779297, 0.025571327209472656, 0.02531059265136719, 0.025491296768188478, 0.025204896926879883, 0.025043231964111328, 0.024926816940307617, 0.02501580810546875, 0.026233503341674805, 0.025104255676269532, 0.02490332794189453, 0.02490937614440918, 0.025149471282958986, 0.024902399063110352, 0.024874528884887694, 0.025047391891479493, 0.025186559677124024, 0.025141248703002928, 0.02511484718322754, 0.025235231399536134, 0.025453760147094728, 0.025190303802490235, 0.025150367736816406, 0.025130655288696287, 0.025028799057006838, 0.025157791137695312, 0.02508595275878906, 0.025069280624389647, 
0.025071903228759764, 0.02500102424621582, 0.024914880752563477, 0.02491529655456543, 0.02524745559692383, 0.024991775512695314, 0.024986528396606447, 0.025388256072998047, 0.024916767120361328, 0.02500601577758789, 0.025028671264648437, 0.024878143310546875, 0.02487731170654297, 0.024910528182983397, 0.025142656326293946, 0.02538559913635254, 0.025333759307861328, 0.02511052894592285, 0.024997119903564454, 0.024864639282226562, 0.02502911949157715, 0.02503654479980469, 0.024877695083618163, 0.024799232482910157, 0.025111583709716796, 0.025074655532836915, 0.02494825553894043, 0.02486729621887207, 0.024993343353271483, 0.025333696365356446, 0.025356864929199217, 0.0262872314453125, 0.025506624221801756, 0.02507161521911621, 0.0249036808013916, 0.025085504531860352, 0.025407487869262696, 0.02513145637512207, 0.02556723213195801, 0.024856576919555663, 0.025316415786743166, 0.02504185676574707, 0.02486409568786621, 0.026052127838134764, 0.025238431930541993, 0.02532352066040039, 0.025628671646118165, 0.02575564765930176, 0.02536038398742676, 0.025272319793701172, 0.025029727935791016, 0.025209760665893553, 0.02509974479675293, 0.024994367599487304, 0.02507539176940918, 0.024834335327148436, 0.024963071823120117, 0.025036800384521486, 0.02488934326171875, 0.025024511337280272, 0.027244543075561522, 0.025034751892089844, 0.025028032302856447, 0.024998464584350587, 0.024919807434082033, 0.02500534439086914, 0.02495996856689453, 0.02490777587890625, 0.02486425590515137, 0.024863231658935548, 0.024924095153808595, 0.025073375701904297, 0.025056640625, 0.025078752517700195, 0.025161184310913087, 0.025068063735961914, 0.025100288391113282, 0.025089632034301756, 0.02502697563171387, 0.0250098876953125, 0.024908063888549804, 0.025430015563964844, 0.024976543426513672, 0.024953695297241212, 0.025292928695678712, 0.02547488021850586, 0.025071680068969728, 0.02502771186828613, 0.025246591567993165, 0.02499945640563965, 0.024852960586547852, 0.025047040939331053, 0.025653247833251954, 0.02535353660583496, 0.025129152297973634, 0.024952447891235352, 0.02498633575439453, 0.024935647964477538, 0.0249496955871582, 0.024959999084472655, 0.025020608901977538, 0.026678079605102538, 0.027440799713134765, 0.025185951232910155, 0.025006784439086913, 0.02505523109436035, 0.02621366310119629, 0.025089183807373048, 0.025001504898071288, 0.02492883110046387, 0.024929056167602538, 0.024933311462402345, 0.024782848358154298, 0.024917440414428713, 0.02485728073120117, 0.02491526412963867, 0.024781375885009765, 0.024971263885498047, 0.024714624404907227, 0.024787200927734374, 0.024739936828613283, 0.024835840225219726, 0.025436256408691408, 0.024856639862060548, 0.025098495483398438, 0.024844032287597656, 0.024750463485717772, 0.024860671997070313, 0.02480531120300293, 0.024829919815063477, 0.024800544738769532, 0.024980287551879882, 0.02495692825317383, 0.025058303833007813, 0.025010751724243163, 0.02491231918334961, 0.024975360870361327, 0.025126911163330077, 0.024979007720947265, 0.025239999771118165, 0.025701440811157227, 0.02581599998474121, 0.02546892738342285, 0.025311040878295898, 0.025167200088500978, 0.02542064094543457, 0.025429183959960938, 0.02564735984802246, 0.02530156707763672, 0.025300991058349608, 0.025292287826538085, 0.025216703414916993, 0.025197376251220704, 0.02519011116027832, 0.025218463897705077, 0.025225791931152344, 0.025131328582763672, 0.025081823348999024, 0.025149311065673828, 0.025235456466674806, 0.02677689552307129, 0.025920352935791015, 0.025191648483276367, 0.025053983688354493, 
0.025271360397338866, 0.024908096313476562, 0.024871551513671875, 0.02490118408203125, 0.02491187286376953, 0.02654502487182617, 0.029574527740478515, 0.03106649589538574, 0.025227231979370116, 0.024893728256225586, 0.025056352615356447, 0.024998336791992187, 0.02523311996459961, 0.0249781436920166, 0.02501452827453613, 0.024917760848999024, 0.025149471282958986, 0.025150911331176758, 0.024808000564575196, 0.024967168807983397, 0.024774656295776368, 0.024922111511230468, 0.025181215286254884, 0.02491663932800293, 0.024795455932617186, 0.025007968902587892, 0.025325727462768555, 0.02510643196105957, 0.024928255081176756, 0.02504528045654297, 0.025065183639526367, 0.025324928283691407, 0.02503446388244629, 0.02500899124145508, 0.025112640380859374, 0.02506342315673828, 0.025108480453491212, 0.024997888565063478, 0.025034751892089844, 0.02494278335571289, 0.024796543121337892, 0.024953279495239258, 0.024991743087768553, 0.02486662483215332, 0.024833984375, 0.024893024444580077, 0.02514396858215332, 0.025488704681396485, 0.02488185691833496, 0.025022464752197264, 0.024898719787597657, 0.024871360778808593, 0.024890943527221678, 0.024959840774536134, 0.024990720748901366, 0.024940736770629884, 0.02491267204284668, 0.024836095809936523, 0.024891424179077148, 0.024884767532348633, 0.024957183837890626, 0.02487318420410156, 0.024963071823120117, 0.024874208450317382, 0.02493929672241211, 0.02771353530883789, 0.026127744674682617, 0.02523129653930664, 0.026104480743408202, 0.02502377510070801, 0.024784927368164063, 0.024815807342529295, 0.024750879287719726, 0.02477027130126953, 0.024791040420532227, 0.02473574447631836, 0.024848543167114257, 0.02483593559265137, 0.024791040420532227, 0.024886655807495116, 0.02507391929626465, 0.02517030334472656, 0.025225215911865235, 0.025251552581787108, 0.02521731185913086, 0.025317375183105468, 0.025249120712280273, 0.025318048477172853, 0.025316640853881835, 0.025178112030029298, 0.024873695373535155, 0.024817792892456055, 0.025474943161010743, 0.0249487361907959, 0.02489139175415039, 0.024869951248168945, 0.02485753631591797, 0.024995840072631836, 0.024810880661010743, 0.024860416412353516, 0.024830848693847656, 0.02508576011657715, 0.024880735397338868, 0.02495510482788086, 0.024935903549194335, 0.02505206489562988, 0.024987648010253907, 0.02494054412841797, 0.025035776138305665, 0.025074752807617187, 0.02493382453918457, 0.024965631484985353, 0.025036800384521486, 0.025089792251586914, 0.024930559158325195, 0.02502835273742676, 0.024987903594970703, 0.025144704818725584, 0.02504153633117676, 0.02502195167541504, 0.025030399322509767, 0.025091903686523438, 0.024934431076049805, 0.02515558433532715, 0.02510723114013672, 0.025386816024780275, 0.02532111930847168, 0.025423871994018556, 0.025237184524536133, 0.025285343170166015, 0.0253023681640625, 0.02611417579650879, 0.025190784454345704, 0.02572675132751465, 0.025030879974365233, 0.02490707206726074, 0.024945344924926758, 0.024995840072631836, 0.024958976745605467, 0.02482585525512695, 0.02506764793395996, 0.0251144962310791, 0.02523750305175781, 0.025634815216064453, 0.025679519653320312, 0.025379039764404296, 0.025188480377197266, 0.025030656814575194, 0.025460063934326174, 0.025098911285400392, 0.02494838333129883, 0.024951328277587892, 0.024952224731445313, 0.025037216186523437, 0.02530508804321289, 0.025032991409301757, 0.024915679931640625, 0.025470495223999023, 0.024922592163085937, 0.024778751373291014, 0.02488630485534668, 0.025095136642456054, 0.025016096115112303, 0.024927871704101563, 
0.024924768447875976, 0.025021535873413086, 0.02483430480957031, 0.024794815063476562, 0.02489228820800781, 0.02510857582092285, 0.024995359420776367, 0.024985631942749022, 0.02510198402404785, 0.025166624069213866, 0.025479167938232423, 0.024958623886108398, 0.025022272109985352, 0.025068063735961914, 0.02491708755493164, 0.025026912689208983, 0.025017919540405272, 0.025197568893432616, 0.02518016052246094, 0.025183744430541992, 0.025376256942749024, 0.025385087966918945, 0.024989984512329103, 0.024896095275878907, 0.025228511810302733, 0.025012447357177735, 0.024871488571166993, 0.024938495635986328, 0.02491801643371582, 0.024929279327392577, 0.02612713623046875, 0.025067327499389648, 0.024938688278198243, 0.02491596794128418, 0.024802560806274413, 0.025023231506347655, 0.025023839950561524, 0.02490140724182129, 0.02509094429016113, 0.025075712203979493, 0.025233407974243165, 0.024946687698364257, 0.024928255081176756, 0.02485641670227051, 0.025150880813598633, 0.024992511749267577, 0.024922111511230468, 0.02498543930053711, 0.024903839111328124, 0.02485820770263672, 0.02501059150695801, 0.025012224197387696, 0.02704300880432129, 0.02643561553955078, 0.025226272583007813, 0.025017471313476564, 0.02504528045654297, 0.024861024856567382, 0.024849536895751954, 0.024918912887573242, 0.024931840896606446, 0.024855039596557618, 0.02496678352355957, 0.02490140724182129, 0.024992351531982423, 0.024911680221557618, 0.025001888275146485, 0.02496441650390625, 0.025082847595214845, 0.025206783294677734, 0.025075712203979493, 0.025016223907470703, 0.025236576080322266, 0.025199615478515625, 0.025200639724731445, 0.025204736709594725, 0.025153535842895508, 0.025014272689819338, 0.025083904266357423, 0.02514886474609375, 0.025319040298461912, 0.024953792572021485, 0.024936384201049804, 0.025030752182006837, 0.024858240127563477, 0.02491222381591797, 0.024891647338867187, 0.02483078384399414, 0.024808095932006835, 0.024972959518432616, 0.025064064025878907, 0.024968576431274415, 0.024967103958129882, 0.026248544692993165, 0.02521718406677246, 0.024978111267089844, 0.025241439819335937, 0.025196575164794923, 0.024989824295043945, 0.024864767074584963, 0.024750080108642578, 0.024819072723388673, 0.02467430305480957, 0.02474457550048828, 0.024696832656860353, 0.02476032066345215, 0.024788415908813477, 0.024740415573120116, 0.024723455429077147, 0.02478489685058594, 0.024925247192382812, 0.02623583984375, 0.02606198310852051, 0.025071680068969728, 0.024990495681762696, 0.02492416000366211, 0.024872608184814453, 0.02475004768371582, 0.0248590087890625, 0.02491379165649414, 0.02479497528076172, 0.024876703262329103, 0.025067487716674806, 0.024748863220214842, 0.02483977508544922, 0.024903936386108397, 0.02489958381652832, 0.024819456100463867, 0.024748287200927734, 0.024860479354858397, 0.02496940803527832, 0.02496512031555176, 0.02502012825012207, 0.02478108787536621, 0.024911487579345703, 0.024777088165283203, 0.024854528427124024, 0.024778751373291014, 0.025047040939331053, 0.025221120834350585, 0.024993759155273437, 0.025012256622314454, 0.025001279830932616, 0.025047744750976562, 0.02490777587890625, 0.02485830307006836, 0.025003679275512697, 0.026179359436035155, 0.025241920471191406, 0.025096767425537108, 0.025187519073486327, 0.02501910400390625, 0.02511267280578613, 0.02523535919189453, 0.02500822448730469, 0.025040895462036132]",tokens/s,39.80693713392234,,, 
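Note: each benchmark run above is a single CSV record; the quoted traceback and per-token latency fields contain embedded newlines, which is why one record spans several physical lines. A minimal sketch of reading the export back, assuming the full file (header row included) is saved as benchmark.csv; the file name and the failure heuristic are illustrative assumptions, not part of the export itself:

import csv

# csv.reader honors quoted fields with embedded newlines, so each benchmark
# run (config columns, latency arrays, and any traceback) comes back as one
# record even though it spans many physical lines in the raw file.
with open("benchmark.csv", newline="") as f:
    rows = list(csv.reader(f))

header, records = rows[0], rows[1:]
for rec in records:
    config_name = rec[0]  # first column, e.g. "4bit-awq-exllama-v1-eager"
    # Heuristic (assumption): treat a run as failed if any field carries a
    # Python traceback, as in the trust_remote_code / missing-package /
    # FlashAttention-on-T4 failures recorded above.
    failed = any("Traceback (most recent call last)" in field for field in rec)
    print(config_name, "FAILED" if failed else "ok")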
4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,2003.31264,2893.938688,0.0,2491.416576,2425.650176,s,1,9.8557216796875,9.8557216796875,0.0,9.8557216796875,9.8557216796875,9.8557216796875,9.8557216796875,[9.8557216796875],,kWh,8.047809139990629e-05,8.870251118258786e-06,3.0060857381997153e-05,0.00011940919990016223,,MB,1887.596544,3122.528256,0.0,2705.32608,2606.129664,s,10,0.5324016342163086,0.05324016342163086,0.0005294748916496864,0.0530576171875,0.053332573318481444,0.05407847805023193,0.05467520183563233,"[0.05482438278198242, 0.05316681671142578, 0.05303376007080078, 0.05302435302734375, 0.053063392639160156, 0.053088191986083985, 0.053052574157714846, 0.053059009552001955, 0.053056224822998044, 0.05303292846679687]",tokens/s,4808.399966255366,kWh,1.640555728347229e-06,1.8092390329804798e-07,1.0873494965956369e-06,2.9088291282409137e-06,tokens/kWh,88007919.58337323,MB,1895.641088,3122.528256,0.0,2705.32608,2606.132224,s,10,16.011645385742188,1.6011645385742188,0.0059864062672753465,1.5998549194335938,1.60557568359375,1.6110897216796876,1.6155009521484376,"[1.599410888671875, 1.60240234375, 1.616603759765625, 1.604350341796875, 1.5983360595703124, 1.5984710693359374, 1.6002989501953124, 1.5938203125, 1.5953896484375, 1.60256201171875]",tokens/s,39.3463622771082,kWh,4.6297584437072773e-05,5.10626206484206e-06,2.1918030018804966e-05,7.33218765207198e-05,tokens/kWh,859225.1452020193,,s,630,16.009391553878785,0.025411732625204422,0.0005982240681103797,0.025288527488708495,0.025634194374084474,0.025967753887176512,0.028124326572418222,"[0.025695903778076172, 0.02551897621154785, 0.02524883270263672, 0.025279296875, 0.025108480453491212, 0.025154592514038086, 0.025564128875732423, 0.027891424179077147, 0.025476800918579103, 0.02543382453918457, 0.0253121280670166, 0.02524563217163086, 0.025499711990356444, 0.025418943405151367, 0.025631551742553712, 0.025414815902709963, 0.025271135330200194, 0.02529280090332031, 0.02533171272277832, 0.025225215911865235, 0.025239551544189453, 0.025272319793701172, 0.025169919967651368, 0.025192447662353516, 0.025227264404296876, 0.025261951446533204, 0.02516371154785156, 0.02512505531311035, 0.02518016052246094, 0.025243040084838866, 0.025114656448364258, 0.025124671936035157, 0.025164159774780273, 0.02516927909851074, 0.02531817626953125, 0.02506979179382324, 0.025169919967651368, 0.025366527557373047, 0.029633760452270508, 0.025369375228881837, 0.025218080520629883, 0.025290912628173828, 0.025192352294921876, 0.025349023818969727, 0.025222431182861327, 0.025204832077026368, 0.02523423957824707, 0.025270303726196288, 0.025230943679809572, 0.02527574348449707, 0.02551628875732422, 0.025245344161987305, 0.02569932746887207, 0.0253306884765625, 0.025293760299682617, 0.025149440765380858, 0.025001247406005858, 0.025102815628051757, 0.02502681541442871, 0.025100288391113282, 0.025217023849487305, 0.025236768722534178, 
0.02520547294616699, 0.02554265594482422, 0.02511257553100586, 0.025161727905273438, 0.025243999481201172, 0.02504003143310547, 0.02506188774108887, 0.024989696502685548, 0.02513862419128418, 0.024982080459594727, 0.025010175704956054, 0.024981664657592773, 0.024982847213745118, 0.025133535385131835, 0.027760704040527343, 0.02729078483581543, 0.025296960830688477, 0.025160480499267578, 0.025156768798828125, 0.025047903060913087, 0.025210880279541017, 0.025151487350463866, 0.025423871994018556, 0.025092096328735353, 0.02529280090332031, 0.02545254325866699, 0.025441503524780272, 0.025580320358276367, 0.025391103744506836, 0.02549350357055664, 0.025484800338745117, 0.025702911376953123, 0.025404863357543946, 0.026673728942871094, 0.025393056869506835, 0.02532771110534668, 0.025298944473266603, 0.025233375549316407, 0.025408960342407225, 0.02527497673034668, 0.02515135955810547, 0.025171072006225585, 0.025236480712890624, 0.02526985549926758, 0.02520515251159668, 0.025167871475219726, 0.0251943359375, 0.025155744552612304, 0.025229312896728515, 0.025124864578247072, 0.025176319122314453, 0.0251975040435791, 0.025285440444946287, 0.02528236770629883, 0.025432256698608397, 0.030107648849487304, 0.02565875244140625, 0.02561907196044922, 0.025447904586791994, 0.025244192123413087, 0.02522915267944336, 0.025315488815307617, 0.025261407852172853, 0.025186975479125975, 0.025955583572387694, 0.025694976806640624, 0.025894912719726562, 0.025545984268188476, 0.025426687240600584, 0.025288703918457032, 0.025059200286865233, 0.025186431884765624, 0.025080863952636718, 0.02514224052429199, 0.025210752487182617, 0.02548748779296875, 0.025277568817138673, 0.025594751358032228, 0.025195680618286132, 0.025057119369506838, 0.025094623565673827, 0.02507436752319336, 0.02521072006225586, 0.02512281608581543, 0.025114368438720704, 0.025172479629516603, 0.025212671279907225, 0.025161727905273438, 0.025348384857177733, 0.025317184448242186, 0.025466175079345704, 0.025391712188720703, 0.02532371139526367, 0.026152767181396485, 0.028811264038085937, 0.025618431091308593, 0.0256059513092041, 0.025667776107788087, 0.025594976425170897, 0.025624895095825197, 0.02564156723022461, 0.025620479583740235, 0.025647104263305662, 0.025837568283081053, 0.02587238311767578, 0.026068992614746093, 0.02570217514038086, 0.025509376525878907, 0.025408224105834962, 0.026011392593383788, 0.025387264251708983, 0.025372671127319335, 0.025506912231445314, 0.025575551986694336, 0.025542560577392577, 0.02531603240966797, 0.025229503631591797, 0.02532966423034668, 0.025322784423828126, 0.025450719833374023, 0.032772544860839845, 0.02821945571899414, 0.025351455688476562, 0.02533788871765137, 0.025342655181884766, 0.02549964714050293, 0.02531942367553711, 0.02546499252319336, 0.025316127777099608, 0.025470975875854493, 0.025618175506591796, 0.026377504348754882, 0.02766876792907715, 0.025335935592651366, 0.025422399520874023, 0.025288703918457032, 0.025496864318847658, 0.025374528884887695, 0.02524972724914551, 0.025252832412719726, 0.025530080795288086, 0.025391103744506836, 0.0251824951171875, 0.02530303955078125, 0.025292287826538085, 0.0252541446685791, 0.0252391357421875, 0.02523366355895996, 0.025340320587158204, 0.02566489601135254, 0.02526448059082031, 0.02537900733947754, 0.02518025588989258, 0.025267967224121092, 0.02525004768371582, 0.02526598358154297, 0.025281824111938477, 0.02706025505065918, 0.026303359985351563, 0.025394880294799804, 0.025389375686645507, 0.025206783294677734, 0.025302848815917968, 0.025411231994628906, 
0.02514793586730957, 0.025290752410888673, 0.02524336051940918, 0.025104671478271483, 0.025118656158447265, 0.025348031997680664, 0.025210752487182617, 0.02544076728820801, 0.025498655319213866, 0.025643295288085937, 0.02558361625671387, 0.025350559234619142, 0.025214239120483397, 0.02533452796936035, 0.025521440505981444, 0.025470848083496093, 0.025481664657592773, 0.025651615142822267, 0.02565318489074707, 0.025552192687988282, 0.025488128662109377, 0.025476255416870118, 0.02538115119934082, 0.025403968811035155, 0.025389055252075195, 0.02539846420288086, 0.025625375747680663, 0.025335199356079103, 0.02545929527282715, 0.025298944473266603, 0.025107839584350585, 0.02523366355895996, 0.025208799362182618, 0.02520515251159668, 0.025226688385009764, 0.025414207458496093, 0.025235456466674806, 0.025227264404296876, 0.0252620792388916, 0.025378080368041993, 0.02513564872741699, 0.025145536422729493, 0.02537411117553711, 0.025321056365966797, 0.02521776008605957, 0.02515551948547363, 0.025210784912109374, 0.025432512283325194, 0.025509855270385743, 0.025183616638183595, 0.02526470375061035, 0.025374528884887695, 0.025964832305908202, 0.0257392635345459, 0.025717824935913087, 0.025485536575317384, 0.025527008056640627, 0.025556095123291017, 0.02538175964355469, 0.025494815826416016, 0.02526896095275879, 0.02532352066040039, 0.02530508804321289, 0.02524166488647461, 0.025597183227539063, 0.025338560104370116, 0.025349920272827148, 0.02534543991088867, 0.025332639694213867, 0.025200544357299806, 0.02524569511413574, 0.025212928771972655, 0.025244768142700196, 0.027155584335327148, 0.02607027244567871, 0.025436704635620117, 0.025175647735595705, 0.025191936492919922, 0.025168800354003908, 0.025168960571289062, 0.025131967544555663, 0.025374176025390625, 0.025238048553466796, 0.025235456466674806, 0.025393152236938478, 0.025168127059936523, 0.025128192901611328, 0.02519910430908203, 0.025399295806884766, 0.025506080627441405, 0.025261695861816407, 0.025187999725341796, 0.025243616104125975, 0.02505206489562988, 0.025180511474609375, 0.02511631965637207, 0.025069568634033205, 0.02514739227294922, 0.025118495941162108, 0.025108352661132812, 0.025069568634033205, 0.025104736328125, 0.02511193656921387, 0.02510643196105957, 0.025172767639160157, 0.025239423751831056, 0.025106399536132813, 0.025146944046020508, 0.025222591400146484, 0.02514227294921875, 0.0250797119140625, 0.025276512145996095, 0.02516912078857422, 0.025144096374511718, 0.02506547164916992, 0.025517471313476564, 0.025223167419433593, 0.02512073516845703, 0.028326271057128906, 0.027314559936523437, 0.025970144271850584, 0.02543657684326172, 0.025278560638427733, 0.025194303512573242, 0.025183744430541992, 0.025163423538208007, 0.025260992050170898, 0.025364479064941405, 0.025312511444091797, 0.025336576461791993, 0.025903232574462892, 0.025840543746948243, 0.02548198318481445, 0.025417951583862303, 0.025483264923095703, 0.02544156837463379, 0.025220863342285155, 0.0252807674407959, 0.025402048110961913, 0.025298303604125976, 0.025290752410888673, 0.025502368927001952, 0.02533900833129883, 0.025418624877929688, 0.02534147262573242, 0.025507488250732423, 0.025278656005859376, 0.025342496871948242, 0.02532975959777832, 0.02521903991699219, 0.025303104400634765, 0.025441600799560548, 0.02555286407470703, 0.025430463790893556, 0.025386272430419923, 0.025357023239135742, 0.02529280090332031, 0.025280511856079102, 0.025449888229370117, 0.025229087829589845, 0.02510214424133301, 0.026866687774658202, 0.030999839782714842, 0.025486047744750977, 
0.0255098876953125, 0.025425920486450194, 0.02543833541870117, 0.025233280181884764, 0.025274368286132814, 0.025214975357055663, 0.025243648529052733, 0.025284608840942382, 0.025290752410888673, 0.025306304931640624, 0.02527724838256836, 0.02518806457519531, 0.02523369598388672, 0.025163455963134764, 0.025506111145019533, 0.025190399169921874, 0.025181856155395508, 0.025149791717529298, 0.02534604835510254, 0.025208127975463866, 0.025189056396484374, 0.025163232803344728, 0.02518275260925293, 0.025135103225708007, 0.02513715171813965, 0.025185983657836915, 0.025231679916381835, 0.025156991958618164, 0.0251278076171875, 0.025197376251220704, 0.025316287994384765, 0.025288703918457032, 0.025200639724731445, 0.02533897590637207, 0.025233728408813477, 0.02514739227294922, 0.02514512062072754, 0.025082687377929687, 0.025253055572509765, 0.025131839752197266, 0.025149440765380858, 0.02525542449951172, 0.02544223976135254, 0.02548182487487793, 0.025532384872436524, 0.025343040466308593, 0.025340480804443358, 0.025631103515625, 0.02531705665588379, 0.025278783798217772, 0.025376096725463867, 0.0261441593170166, 0.02527440071105957, 0.025195199966430663, 0.02515558433532715, 0.025253536224365235, 0.025055679321289062, 0.0252271671295166, 0.02516752052307129, 0.02514364814758301, 0.025059328079223633, 0.025280511856079102, 0.025100288391113282, 0.02512879943847656, 0.0250533447265625, 0.025161792755126953, 0.024991519927978517, 0.025030336380004882, 0.025196895599365235, 0.025091392517089844, 0.02508083152770996, 0.025111520767211914, 0.02516054344177246, 0.02512281608581543, 0.025390176773071288, 0.025323936462402344, 0.025184352874755858, 0.025313407897949218, 0.025409824371337892, 0.025491455078125, 0.02546233558654785, 0.025997695922851564, 0.025737279891967772, 0.02579167938232422, 0.0257807674407959, 0.025739423751831053, 0.025604223251342772, 0.025590816497802735, 0.025490400314331054, 0.025484928131103514, 0.025264511108398436, 0.025429471969604493, 0.025245376586914062, 0.025328479766845702, 0.025339231491088868, 0.02528118324279785, 0.02532966423034668, 0.02530851173400879, 0.02538256072998047, 0.02528358459472656, 0.025224992752075195, 0.025288351058959962, 0.025184831619262694, 0.02517740821838379, 0.02507574462890625, 0.02513350486755371, 0.025102432250976563, 0.025124895095825196, 0.025047008514404296, 0.02510041618347168, 0.02513007926940918, 0.02519321632385254, 0.025313440322875976, 0.025350143432617187, 0.025329408645629884, 0.02508448028564453, 0.025065088272094728, 0.025321823120117187, 0.02506857681274414, 0.025413856506347657, 0.025455392837524415, 0.025329311370849608, 0.025128576278686525, 0.025762144088745116, 0.02523936080932617, 0.025205087661743165, 0.02519392013549805, 0.02509903907775879, 0.025174016952514647, 0.025383968353271485, 0.025176383972167968, 0.025203104019165038, 0.025132896423339844, 0.025135520935058595, 0.025044992446899415, 0.025012224197387696, 0.025266176223754884, 0.02526367950439453, 0.025434560775756836, 0.02516377639770508, 0.02511052894592285, 0.025072736740112303, 0.02505731201171875, 0.02516262435913086, 0.025179359436035158, 0.025211679458618165, 0.025145343780517578, 0.025186304092407227, 0.02535420799255371, 0.025499679565429687, 0.025447807312011718, 0.02554944038391113, 0.02536038398742676, 0.025591808319091795, 0.025355520248413085, 0.02524236869812012, 0.02545894432067871, 0.025395040512084962, 0.025574752807617188, 0.025397823333740233, 0.02540243148803711, 0.025406015396118163, 0.025338239669799804, 0.025480991363525392, 
0.025458911895751953, 0.025350143432617187, 0.02523750305175781, 0.025312799453735352, 0.025380447387695314, 0.02534079933166504, 0.025208448410034178, 0.02526166343688965, 0.025227807998657228, 0.026618112564086915, 0.025951295852661132, 0.025381824493408204, 0.02536857604980469, 0.025469247817993163, 0.025408607482910156, 0.02522137641906738, 0.025229440689086915, 0.025231424331665038, 0.02548579216003418, 0.025302303314208983, 0.02534649658203125, 0.025491039276123048, 0.025372480392456053, 0.0253439998626709, 0.025209056854248048, 0.02516035270690918, 0.025243520736694336, 0.025201791763305663, 0.0251013126373291, 0.025354175567626952, 0.025824384689331056, 0.02570745658874512, 0.0256014404296875, 0.02563337516784668, 0.025589759826660157, 0.025593536376953125, 0.025575328826904296, 0.025360639572143556, 0.02527177619934082, 0.025402048110961913, 0.025210464477539062, 0.025207199096679688, 0.02609110450744629, 0.025101728439331054, 0.025172992706298827, 0.02527248001098633, 0.026192928314208986, 0.025987903594970704, 0.026122175216674804, 0.02550726318359375, 0.025381504058837892, 0.025298656463623045, 0.02522755241394043, 0.025204736709594725, 0.02523116874694824, 0.025241792678833006, 0.02521116828918457, 0.025183359146118165, 0.02522377586364746, 0.025177568435668946, 0.025087968826293945, 0.027775552749633788, 0.025396608352661134, 0.025347776412963867, 0.025791423797607422, 0.025227136611938476, 0.026009088516235353, 0.025468896865844727, 0.025338560104370116, 0.025184223175048828, 0.02534604835510254, 0.025255935668945313, 0.025114240646362303, 0.02519798469543457, 0.025417856216430664, 0.02538972854614258]",tokens/s,39.35190153103366,,, 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1884.442624,2726.166528,0.0,2340.421632,2284.9536,s,1,8.997029296875,8.997029296875,0.0,8.997029296875,8.997029296875,8.997029296875,8.997029296875,[8.997029296875],,kWh,5.472776924999986e-05,6.02971539508736e-06,1.8143347847943136e-05,7.890083249303036e-05,,MB,1889.91488,3095.26528,0.0,2680.160256,2578.243584,s,10,0.9053141708374024,0.09053141708374024,0.0006031909810320035,0.09047865676879882,0.09108743820190429,0.09139106254577636,0.09163396202087402,"[0.09169468688964844, 0.09040755462646484, 0.09054975891113282, 0.09013510131835938, 0.0901319351196289, 0.0893485107421875, 0.0909727325439453, 0.09101996612548828, 0.09028189086914062, 0.09077203369140625]",tokens/s,2827.7476289054853,kWh,2.904308311922205e-06,3.202918962106098e-07,1.9210273894063624e-06,5.1456275975391775e-06,tokens/kWh,49750976.95030016,MB,1897.271296,3097.362432,0.0,2680.160256,2578.246144,s,10,21.222098876953126,2.1222098876953126,0.005431712058323578,2.122501220703125,2.1283552246093747,2.1293604736328122,2.130164672851562,"[2.12583056640625, 2.13036572265625, 2.118667236328125, 2.12681005859375, 2.1281318359375, 2.125207275390625, 2.117091796875, 2.119795166015625, 2.116600341796875, 
2.113598876953125]",tokens/s,29.68603641198611,kWh,6.148651316557946e-05,6.78166453646145e-06,2.923144197699034e-05,9.749961967903123e-05,tokens/kWh,646156.3666340035,,s,630,21.219990070342984,0.03368252392117939,0.0005180384088976986,0.03357417678833008,0.03403684692382813,0.0343295316696167,0.035695087661743174,"[0.03395391845703125, 0.03391020965576172, 0.0335425910949707, 0.033420352935791015, 0.03341743850708008, 0.033339969635009764, 0.03593436813354492, 0.03538534545898438, 0.03534236907958985, 0.03378515243530274, 0.03394831848144531, 0.033596542358398436, 0.03364134216308594, 0.033568767547607424, 0.03362566375732422, 0.03329446411132812, 0.033559169769287106, 0.0332421760559082, 0.03341292953491211, 0.03332793426513672, 0.033697120666503905, 0.03333740615844726, 0.03364723205566406, 0.03371004867553711, 0.03337011337280273, 0.0335617904663086, 0.03327260971069336, 0.033351806640625, 0.03367440032958984, 0.03380710220336914, 0.03360710525512695, 0.033421886444091796, 0.033753089904785157, 0.033336830139160153, 0.03326377487182617, 0.033378654479980466, 0.03335948944091797, 0.03369395065307617, 0.034352542877197266, 0.033761791229248043, 0.03375062561035156, 0.03390473556518555, 0.03370652770996094, 0.0335175666809082, 0.03338415908813477, 0.033589534759521485, 0.03392716979980469, 0.03365491104125977, 0.03358911895751953, 0.03340288162231445, 0.0337770881652832, 0.03341289520263672, 0.033481502532958986, 0.034000736236572265, 0.034286880493164064, 0.034859905242919924, 0.03395379257202148, 0.03377151870727539, 0.03393740844726562, 0.03392921447753906, 0.033939456939697264, 0.03360960006713867, 0.033640159606933596, 0.03417212677001953, 0.03378255844116211, 0.03426057434082031, 0.035445152282714845, 0.034202880859375, 0.03406105422973633, 0.03390259170532227, 0.03376323318481445, 0.03384912109375, 0.034791488647460934, 0.040416927337646485, 0.03389247894287109, 0.03338063812255859, 0.033624256134033206, 0.03367139053344727, 0.033457950592041014, 0.03347455978393555, 0.03341427230834961, 0.03370687866210938, 0.03341107177734375, 0.033562625885009766, 0.03353148651123047, 0.03360611343383789, 0.03338550567626953, 0.03378691101074219, 0.03509958267211914, 0.033559135437011715, 0.0335068473815918, 0.03342959976196289, 0.03345625686645508, 0.03336662292480469, 0.03351907348632813, 0.03414233779907227, 0.03390095901489258, 0.03416454315185547, 0.033547649383544924, 0.033418209075927734, 0.034223968505859376, 0.033645950317382815, 0.03384320068359375, 0.033360511779785156, 0.03338179016113281, 0.033368480682373046, 0.03328019332885742, 0.03347251129150391, 0.033390113830566406, 0.03338288116455078, 0.033933311462402346, 0.03376947021484375, 0.033546241760253906, 0.03331891250610351, 0.03342131042480469, 0.033594879150390625, 0.03324979019165039, 0.03337356948852539, 0.033731201171875, 0.0334881591796875, 0.03326780700683594, 0.03354483032226562, 0.03349440002441406, 0.033514110565185544, 0.03381190490722656, 0.03408534240722656, 0.033882110595703126, 0.03405414581298828, 0.03382620620727539, 0.03349769592285156, 0.033258689880371096, 0.03319686508178711, 0.03421593475341797, 0.03517030334472656, 0.03380428695678711, 0.03401881790161133, 0.03353855895996094, 0.03362201690673828, 0.03385958480834961, 0.03361177444458008, 0.03357817459106445, 0.03356156921386719, 0.03367424011230469, 0.03336880111694336, 0.033403007507324216, 0.03356860733032226, 0.0336341438293457, 0.03346464157104492, 0.03488742446899414, 0.033263744354248045, 0.0334666862487793, 0.03337343978881836, 0.033740447998046874, 
0.03370630264282227, 0.033358177185058596, 0.03321059036254883, 0.033210399627685544, 0.03347251129150391, 0.033587200164794925, 0.033701408386230466, 0.033436126708984375, 0.03356572723388672, 0.03352070236206055, 0.033717758178710935, 0.03362860870361328, 0.03356639862060547, 0.034035518646240236, 0.034095584869384764, 0.033898494720458985, 0.03365273666381836, 0.033761249542236325, 0.033754463195800784, 0.03339260864257813, 0.03349168014526367, 0.033467487335205076, 0.03348368072509766, 0.03360934448242187, 0.033773952484130856, 0.0335882568359375, 0.033352672576904295, 0.03328764724731445, 0.033363967895507815, 0.033301025390625, 0.03355196762084961, 0.033810272216796874, 0.033497665405273436, 0.03331216049194336, 0.03330643081665039, 0.03345235061645508, 0.03384652709960938, 0.03365964889526367, 0.033537921905517576, 0.03373004913330078, 0.034011390686035155, 0.03383871841430664, 0.033901214599609375, 0.0340022087097168, 0.033905281066894534, 0.03394784164428711, 0.03457360076904297, 0.033869758605957034, 0.03360847854614258, 0.03339878463745117, 0.03368700790405273, 0.03366761779785156, 0.03414992141723633, 0.033882591247558595, 0.03365385437011719, 0.03359632110595703, 0.03387721633911133, 0.03490486526489258, 0.03376892852783203, 0.03371990585327148, 0.03334854507446289, 0.033354782104492185, 0.03355542373657226, 0.03384323120117187, 0.03343273544311524, 0.0333504638671875, 0.03336508941650391, 0.03363318252563476, 0.033552383422851564, 0.03341104125976563, 0.03347868728637695, 0.03426630401611328, 0.03460588836669922, 0.033770881652832034, 0.033877696990966794, 0.03387004852294922, 0.03356143951416016, 0.03372019195556641, 0.03354019165039063, 0.0335660171508789, 0.03360163116455078, 0.03357923126220703, 0.033939361572265625, 0.034086719512939456, 0.03361219024658203, 0.03351363372802734, 0.033882110595703126, 0.03376332855224609, 0.033524993896484376, 0.033538814544677734, 0.033685504913330076, 0.03374476623535156, 0.034005119323730466, 0.03395142364501953, 0.033753406524658205, 0.03379836654663086, 0.03353782272338867, 0.03352339172363281, 0.03371654510498047, 0.0355090560913086, 0.03386483383178711, 0.034153278350830076, 0.033907871246337894, 0.03383987045288086, 0.03378409576416016, 0.03354793548583984, 0.03346454238891602, 0.033699840545654294, 0.03488083267211914, 0.033831073760986326, 0.033958431243896484, 0.033896446228027344, 0.0335093765258789, 0.03393260955810547, 0.03719852828979492, 0.033754497528076174, 0.03352844619750977, 0.03340083312988281, 0.03334873580932617, 0.0335860481262207, 0.03379987335205078, 0.033571136474609374, 0.03357263946533203, 0.03351919937133789, 0.03338304138183594, 0.03340499114990234, 0.03329446411132812, 0.03372422409057617, 0.034974945068359374, 0.03356099319458008, 0.033382625579833985, 0.03358736038208008, 0.03346022415161133, 0.03352576065063476, 0.0335964469909668, 0.033600479125976565, 0.033333087921142576, 0.03349724960327148, 0.03358857727050781, 0.03384905624389648, 0.035771072387695314, 0.033949951171875, 0.033861534118652344, 0.03361391830444336, 0.03372851181030274, 0.03383705520629883, 0.03351321411132813, 0.033470977783203126, 0.03335961532592773, 0.03346217727661133, 0.03329814529418945, 0.03350566482543945, 0.03333100891113281, 0.03337839889526367, 0.033357921600341796, 0.03349887847900391, 0.03373900985717773, 0.033750431060791015, 0.03376393508911133, 0.03373231887817383, 0.03364278411865235, 0.03353615951538086, 0.03430140686035156, 0.03382044982910156, 0.03344607925415039, 0.03336246490478516, 0.033177055358886716, 
0.03323699188232422, 0.03337270355224609, 0.03385500717163086, 0.033736801147460936, 0.03385382461547851, 0.03364659118652344, 0.03352371215820313, 0.03328335952758789, 0.033262302398681644, 0.03352787017822265, 0.03345606231689453, 0.03361727905273437, 0.033632896423339845, 0.03362211227416992, 0.033519519805908206, 0.033500255584716795, 0.03405491256713867, 0.0341723518371582, 0.03451772689819336, 0.03386368179321289, 0.03366323089599609, 0.03383257675170898, 0.033685504913330076, 0.03384332656860352, 0.03393740844726562, 0.033933311462402346, 0.033849342346191406, 0.033931262969970705, 0.03421184158325195, 0.03393900680541992, 0.033765022277832034, 0.03358534240722656, 0.035864608764648434, 0.033993377685546874, 0.0336280632019043, 0.03389142227172852, 0.03349711990356445, 0.03343244934082031, 0.03333555221557617, 0.03740646362304687, 0.03360358428955078, 0.033570335388183596, 0.03392150497436523, 0.03332505416870117, 0.03330867385864258, 0.03347868728637695, 0.03329430389404297, 0.03350479888916016, 0.033617439270019533, 0.03338243103027344, 0.03354512023925781, 0.03379216003417969, 0.03344294357299805, 0.03336403274536133, 0.033196544647216795, 0.033302688598632814, 0.03326300811767578, 0.03348313522338867, 0.03427372741699219, 0.0341319694519043, 0.03401897430419922, 0.03379439926147461, 0.03402751922607422, 0.03391094589233398, 0.033982303619384764, 0.03452108764648437, 0.03420345687866211, 0.033786048889160154, 0.033683456420898435, 0.034587905883789065, 0.03358591842651367, 0.03323904037475586, 0.03353961563110352, 0.03381705474853516, 0.03417702484130859, 0.03369308853149414, 0.03343011093139649, 0.03383295822143555, 0.0336363525390625, 0.03323904037475586, 0.033156288146972655, 0.033067840576171875, 0.032985088348388675, 0.03317964935302734, 0.033170528411865234, 0.033391521453857424, 0.03325132751464844, 0.03312025451660156, 0.03317119979858398, 0.03348880004882813, 0.033656192779541017, 0.03401007843017578, 0.033795551300048826, 0.03371177673339844, 0.03355532836914062, 0.03345347213745117, 0.03369539260864258, 0.033422271728515626, 0.034105472564697266, 0.034829280853271485, 0.033598369598388675, 0.03379203033447266, 0.03331273651123047, 0.03348204803466797, 0.03339452743530273, 0.03321123123168945, 0.03321855926513672, 0.033495040893554685, 0.033527328491210935, 0.033288673400878904, 0.0331525764465332, 0.0332108154296875, 0.03319993591308594, 0.03335391998291016, 0.033587200164794925, 0.03340288162231445, 0.03356671905517578, 0.03324662399291992, 0.03345673751831055, 0.03358899307250977, 0.03347439956665039, 0.03467766571044922, 0.03364659118652344, 0.033658878326416015, 0.033447391510009764, 0.033516159057617186, 0.03344579315185547, 0.033283329010009764, 0.03338713455200195, 0.033732734680175784, 0.0336363525390625, 0.03487859344482422, 0.03331366348266602, 0.03356684875488281, 0.0338348159790039, 0.0338903694152832, 0.03338643264770508, 0.03337366485595703, 0.03343215942382813, 0.0336212158203125, 0.03366582489013672, 0.03382502365112305, 0.033489696502685545, 0.03350012969970703, 0.03371212768554688, 0.033408447265625, 0.03353241729736328, 0.033476673126220706, 0.03400447845458984, 0.033518081665039064, 0.033883518218994144, 0.033634944915771486, 0.03354787063598633, 0.03382268905639649, 0.03340127944946289, 0.03343891143798828, 0.034060577392578124, 0.0332845458984375, 0.033296192169189456, 0.03377385711669922, 0.03382886505126953, 0.03400908660888672, 0.03383647918701172, 0.03446227264404297, 0.03399679946899414, 0.03407462310791016, 0.03357491302490234, 
0.03341721725463867, 0.03350444793701172, 0.03352659225463867, 0.033398303985595706, 0.03387580871582031, 0.03329497528076172, 0.03321241760253906, 0.03352764892578125, 0.033357982635498044, 0.03348070526123047, 0.033568767547607424, 0.033155326843261716, 0.03404880142211914, 0.03366179275512695, 0.033822624206542966, 0.03343996810913086, 0.03351472091674805, 0.0338768310546875, 0.03353011322021485, 0.033421054840087894, 0.03384265518188476, 0.03372806549072266, 0.0337072639465332, 0.03375241470336914, 0.034194881439208985, 0.03392979049682617, 0.03330710220336914, 0.033428577423095705, 0.03345436859130859, 0.03323526382446289, 0.03340924835205078, 0.03340233612060547, 0.03343779373168945, 0.03346886444091797, 0.033527809143066405, 0.03348428726196289, 0.03354880142211914, 0.03371212768554688, 0.033465953826904295, 0.03365315246582031, 0.03378176116943359, 0.03349094390869141, 0.03398608016967773, 0.03391455841064453, 0.03353216171264648, 0.03357344055175781, 0.03358259201049805, 0.03356719970703125, 0.03361382293701172, 0.03349708938598633, 0.03359743881225586, 0.03376233673095703, 0.03366191864013672, 0.03375455856323242, 0.033653312683105466, 0.033489086151123046, 0.03332470321655273, 0.03326512145996094, 0.03328691101074219, 0.03335692977905273, 0.03329312133789063, 0.03592396926879883, 0.033540287017822266, 0.03381843185424805, 0.03346022415161133, 0.03359539031982422, 0.03349647903442383, 0.0334502067565918, 0.03343017578125, 0.03340259170532227, 0.033538047790527346, 0.033570816040039066, 0.03372032165527344, 0.03374691009521484, 0.033445537567138674, 0.03346265411376953, 0.03330854415893555, 0.03319123077392578, 0.033237342834472654, 0.033523681640625, 0.034617664337158204, 0.03385343933105469, 0.03358083343505859, 0.033683582305908205, 0.033393985748291014, 0.033473217010498046, 0.03350742340087891, 0.03343564987182617, 0.03381452941894531, 0.03425484848022461, 0.03469107055664063, 0.033683231353759766, 0.03325564956665039, 0.03330252838134766, 0.03340889739990234, 0.03336140823364258, 0.03435379028320312, 0.033576961517333984, 0.033500545501708986, 0.03330419158935547, 0.03370844650268555, 0.03374899291992187, 0.03341577529907227, 0.03349676895141602, 0.03321811294555664, 0.033321182250976564, 0.033510017395019534, 0.03323836898803711, 0.03317929458618164, 0.03328732681274414, 0.033091327667236325, 0.03346636962890625, 0.033197856903076174, 0.03306927871704102, 0.03307110214233398, 0.0329697265625, 0.03308031845092774, 0.03314601516723633, 0.0335799674987793, 0.03334249496459961, 0.03346694564819336, 0.03321084976196289, 0.03345801544189453, 0.03319807815551758, 0.03350886535644531, 0.03335628890991211, 0.03342272186279297, 0.03353868865966797, 0.033462272644042966, 0.03341689682006836, 0.03355270385742187, 0.03352937698364258, 0.03368592071533203, 0.03367532730102539, 0.03355839920043945, 0.03368153762817383, 0.03391603088378906, 0.03409164810180664, 0.03398681640625, 0.03391033554077148, 0.033857982635498045, 0.03376128005981445, 0.03392102432250976]",tokens/s,29.688986559917662,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL 
CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Qwen/Qwen-14B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-14B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1873.338368,2726.166528,0.0,2340.421632,2285.568,s,1,9.063833984375,9.063833984375,0.0,9.063833984375,9.063833984375,9.063833984375,9.063833984375,[9.063833984375],,kWh,5.510223944167289e-05,6.066721217396817e-06,1.834918134602237e-05,7.951814200509209e-05,,MB,1885.425664,3074.29376,0.0,2659.188736,2578.857984,s,10,0.8214106674194336,0.08214106674194335,0.0005961036135130625,0.08192385864257812,0.08286011810302733,0.08317255554199218,0.08342250549316406,"[0.08348499298095703, 0.081864990234375, 0.08228569793701172, 0.08188909149169922, 0.0816435546875, 0.08126528167724609, 0.08279068756103515, 0.08187372589111327, 0.08195862579345703, 0.08235401916503907]",tokens/s,3116.589668895544,kWh,2.5691205952116485e-06,2.832432537892525e-07,1.7038512656136275e-06,4.556215114614528e-06,tokens/kWh,56186987.128604546,MB,1891.495936,3074.29376,0.0,2659.188736,2578.860544,s,10,14.398817260742188,1.4398817260742187,0.004958434987129725,1.4392574462890626,1.447031005859375,1.4481477661132813,1.4490411743164062,"[1.4406300048828125, 1.434330810546875, 1.4405054931640624, 1.4344842529296875, 1.4492645263671875, 1.4467828369140625, 1.4374737548828125, 1.4380093994140626, 1.442972412109375, 1.43436376953125]",tokens/s,43.75359368700861,kWh,4.217546460186228e-05,4.651574179034385e-06,2.2620195971387362e-05,6.944723475228402e-05,tokens/kWh,907163.5497758681,,s,630,14.39678635978698,0.022852041840931724,0.0003568118922736099,0.022764607429504397,0.02317021083831787,0.02331412467956543,0.024296263008117675,"[0.02330624008178711, 0.023214080810546874, 0.023127679824829103, 0.022868352890014647, 0.02287526321411133, 0.02271321678161621, 0.022900224685668946, 0.022841856002807616, 0.022890495300292968, 0.022740991592407226, 0.022740991592407226, 0.02263859176635742, 0.022953983306884765, 0.024163904190063475, 0.022731199264526367, 0.022720672607421874, 0.023037151336669923, 0.022917984008789062, 0.02290255928039551, 0.022664640426635744, 0.022630495071411134, 0.0225960636138916, 0.022624256134033204, 0.022605823516845702, 0.022959552764892577, 0.02291302490234375, 0.022722463607788086, 0.022648735046386717, 0.02254719924926758, 0.02266422462463379, 0.02259462356567383, 0.022544288635253908, 0.022558719635009765, 0.022511615753173828, 0.02251571273803711, 0.022637823104858398, 0.022618879318237306, 0.022761472702026365, 0.023982080459594726, 0.022803775787353514, 0.022747840881347656, 0.022896608352661132, 0.02281270408630371, 0.022892095565795897, 0.022739391326904296, 0.022740320205688478, 0.022591968536376954, 0.02277555274963379, 0.02271072006225586, 0.022967391967773438, 0.022743967056274413, 0.022888288497924805, 0.02282307243347168, 0.022794240951538085, 0.023015424728393553, 0.02292531204223633, 0.022951936721801756, 0.023162879943847657, 0.023175008773803712, 0.023201471328735353, 0.0232391357421875, 0.02311100769042969, 
0.023132831573486327, 0.023607295989990236, 0.023154687881469727, 0.022974464416503908, 0.023109535217285156, 0.022962272644042967, 0.022874111175537108, 0.022853631973266602, 0.022601728439331056, 0.02265449523925781, 0.022830944061279296, 0.022751871109008788, 0.02269388771057129, 0.022602848052978516, 0.022734943389892577, 0.022564672470092775, 0.022615039825439453, 0.022642688751220705, 0.022599679946899414, 0.022601728439331056, 0.022573055267333983, 0.02257267189025879, 0.022608255386352537, 0.022540224075317382, 0.022564447402954102, 0.022810432434082033, 0.022550336837768553, 0.022403936386108398, 0.02289993667602539, 0.022554975509643554, 0.022610368728637694, 0.02249715232849121, 0.02267763137817383, 0.022603296279907228, 0.023376352310180665, 0.023937023162841797, 0.022900127410888673, 0.02266582489013672, 0.02309529685974121, 0.02263382339477539, 0.02333679962158203, 0.02268844795227051, 0.022694271087646486, 0.022656160354614256, 0.02260028839111328, 0.022640640258789063, 0.022665216445922853, 0.022562816619873048, 0.02268569564819336, 0.022677343368530275, 0.022907039642333985, 0.02292313575744629, 0.02267763137817383, 0.022686752319335937, 0.02268880081176758, 0.02267305564880371, 0.02265433692932129, 0.022592416763305666, 0.022651968002319337, 0.02264790344238281, 0.022799903869628907, 0.022696256637573242, 0.022841344833374022, 0.022972415924072266, 0.02332057571411133, 0.023004512786865234, 0.02308105659484863, 0.022900960922241212, 0.02267296028137207, 0.022723360061645506, 0.022579200744628908, 0.022703136444091797, 0.02258633613586426, 0.022877504348754883, 0.022833248138427735, 0.02280508804321289, 0.023190847396850588, 0.022878911972045897, 0.023000288009643554, 0.02287593650817871, 0.02294828796386719, 0.02276639938354492, 0.022634239196777345, 0.02270604705810547, 0.02262236785888672, 0.022797855377197265, 0.022761024475097657, 0.022745983123779297, 0.022718463897705078, 0.022834880828857422, 0.023057727813720702, 0.02322115135192871, 0.023064672470092775, 0.024192352294921875, 0.02289945602416992, 0.022981983184814453, 0.0228089599609375, 0.022820064544677734, 0.02289683151245117, 0.022952735900878905, 0.022902496337890627, 0.023081247329711913, 0.02269536018371582, 0.02267305564880371, 0.02262518310546875, 0.022650239944458007, 0.022743679046630858, 0.022617504119873046, 0.022766176223754882, 0.022603776931762694, 0.02264473533630371, 0.022566911697387695, 0.022732799530029296, 0.02261337661743164, 0.02263212776184082, 0.022710752487182618, 0.02277833557128906, 0.022863872528076173, 0.023385215759277343, 0.023568511962890625, 0.02315068817138672, 0.023067295074462892, 0.022882303237915038, 0.022703296661376954, 0.02269401550292969, 0.02282361602783203, 0.02266316795349121, 0.023252223968505858, 0.023040767669677734, 0.022791231155395508, 0.02274995231628418, 0.02287990379333496, 0.023253536224365233, 0.02429283142089844, 0.023678911209106444, 0.023144287109375, 0.023059200286865235, 0.022900447845458985, 0.02280067253112793, 0.022575008392333985, 0.02268172836303711, 0.022857215881347655, 0.022618112564086915, 0.022560224533081055, 0.022723583221435546, 0.02267955207824707, 0.022591487884521484, 0.022595264434814452, 0.0226297607421875, 0.02259644889831543, 0.022601823806762695, 0.022585344314575196, 0.022640640258789063, 0.0227259521484375, 0.02264726448059082, 0.022657247543334962, 0.022740991592407226, 0.022614015579223632, 0.022568960189819336, 0.022681600570678712, 0.022753055572509766, 0.022816991806030272, 0.022703487396240233, 0.02266406440734863, 
0.02301260757446289, 0.02271286392211914, 0.02254025650024414, 0.022747135162353514, 0.022640640258789063, 0.022642688751220705, 0.022556671142578123, 0.022700031280517577, 0.022605823516845702, 0.022558816909790037, 0.022492927551269533, 0.022648000717163087, 0.022488256454467774, 0.022603551864624025, 0.022591487884521484, 0.022847488403320314, 0.022751232147216797, 0.022718463897705078, 0.022722560882568358, 0.022769664764404295, 0.022746463775634766, 0.022837919235229494, 0.02267136001586914, 0.022924383163452147, 0.02270915222167969, 0.02268681526184082, 0.023005439758300782, 0.022917215347290038, 0.022784095764160156, 0.022790143966674805, 0.02271455955505371, 0.02287123107910156, 0.023140064239501955, 0.023116287231445314, 0.02326531219482422, 0.02319753646850586, 0.023275487899780272, 0.023236255645751953, 0.023026592254638673, 0.02304140853881836, 0.023163328170776366, 0.023069087982177734, 0.02308627128601074, 0.02308140754699707, 0.023187007904052735, 0.023093856811523438, 0.022855648040771483, 0.022858976364135742, 0.02274182319641113, 0.022697984695434572, 0.022577152252197266, 0.02288640022277832, 0.022724607467651366, 0.022951936721801756, 0.022956031799316406, 0.023022783279418944, 0.023573312759399414, 0.024803327560424804, 0.023193599700927735, 0.02308095932006836, 0.02301923179626465, 0.023277856826782226, 0.022867040634155275, 0.02291529655456543, 0.022753984451293945, 0.02299830436706543, 0.022605791091918945, 0.0227108154296875, 0.02313238334655762, 0.023003135681152344, 0.022687488555908204, 0.022687999725341797, 0.02259926414489746, 0.0227291202545166, 0.0225996150970459, 0.022642751693725587, 0.022828767776489258, 0.022923839569091796, 0.023021215438842772, 0.02313590431213379, 0.023095584869384764, 0.023380096435546876, 0.023178560256958008, 0.023359743118286133, 0.02295187187194824, 0.022950399398803712, 0.0230501766204834, 0.023136192321777344, 0.022833280563354492, 0.02343734359741211, 0.0229703369140625, 0.023347200393676756, 0.02265497589111328, 0.02270604705810547, 0.022612287521362306, 0.02261974334716797, 0.02269308853149414, 0.02268057632446289, 0.02309225654602051, 0.022971359252929688, 0.02299612808227539, 0.02297964859008789, 0.02306025505065918, 0.022947935104370116, 0.022902687072753905, 0.023011007308959962, 0.023017791748046874, 0.022914207458496094, 0.022950719833374024, 0.022945823669433593, 0.022846624374389647, 0.022655839920043944, 0.02261305618286133, 0.022715328216552734, 0.02251910400390625, 0.0226693115234375, 0.02268956756591797, 0.022920320510864258, 0.0232957763671875, 0.023027584075927733, 0.023058559417724608, 0.022851232528686524, 0.02287446403503418, 0.022798336029052735, 0.022697984695434572, 0.022863872528076173, 0.023183359146118163, 0.022656864166259765, 0.022663007736206053, 0.0225830078125, 0.024297664642333985, 0.02456332778930664, 0.022782751083374023, 0.022773920059204103, 0.023102848052978516, 0.022749759674072265, 0.02260367965698242, 0.022648576736450196, 0.022552831649780274, 0.02266339111328125, 0.02285136032104492, 0.022675455093383787, 0.023369184494018556, 0.027099679946899415, 0.022959264755249023, 0.022993343353271484, 0.022665632247924804, 0.022634496688842775, 0.022593311309814453, 0.02287811279296875, 0.022604095458984376, 0.022819936752319334, 0.02443059158325195, 0.023160831451416015, 0.022771711349487304, 0.022631744384765624, 0.022549184799194336, 0.022883520126342774, 0.022606655120849608, 0.02257475280761719, 0.022870367050170898, 0.022595584869384764, 0.022812416076660156, 0.02279609680175781, 
0.023015871047973632, 0.022781631469726563, 0.022753599166870118, 0.02266111946105957, 0.02265907287597656, 0.022833023071289062, 0.022780031204223634, 0.023003135681152344, 0.02277724838256836, 0.022905439376831056, 0.022841087341308595, 0.022879871368408203, 0.02278175926208496, 0.02260598373413086, 0.022723199844360352, 0.02260585594177246, 0.022583072662353515, 0.022679391860961913, 0.022517791748046877, 0.02262175941467285, 0.022733600616455078, 0.023412736892700195, 0.022902143478393554, 0.02274105644226074, 0.02274336051940918, 0.022692096710205077, 0.022577152252197266, 0.02250067138671875, 0.02242176055908203, 0.022827455520629883, 0.022576671600341797, 0.022782432556152345, 0.02265907287597656, 0.022591487884521484, 0.022734848022460938, 0.02256598472595215, 0.022879135131835936, 0.023007232666015624, 0.02375881576538086, 0.023265247344970704, 0.023119487762451173, 0.02296467208862305, 0.022713600158691408, 0.022976640701293946, 0.022910783767700196, 0.022847391128540038, 0.022795167922973633, 0.02287798309326172, 0.02270844841003418, 0.02253139114379883, 0.022761951446533202, 0.02323686408996582, 0.023046911239624022, 0.02279347229003906, 0.022700639724731447, 0.02259984016418457, 0.022700000762939453, 0.022736448287963867, 0.022726144790649414, 0.022688735961914064, 0.02265497589111328, 0.02298419189453125, 0.022839616775512696, 0.023123519897460938, 0.022891136169433595, 0.022918176651000977, 0.022997983932495115, 0.022986751556396484, 0.022747135162353514, 0.022697984695434572, 0.0227061767578125, 0.022763519287109374, 0.022705535888671875, 0.022639232635498045, 0.02268569564819336, 0.022689632415771484, 0.02297238349914551, 0.02311187171936035, 0.023090944290161133, 0.02312828826904297, 0.023048511505126955, 0.02297417640686035, 0.023127904891967775, 0.022936992645263672, 0.022860544204711914, 0.022837247848510742, 0.022830080032348633, 0.023161056518554688, 0.023271392822265625, 0.02283603286743164, 0.022788095474243163, 0.022702144622802733, 0.022748287200927735, 0.022761951446533202, 0.022668800354003905, 0.022754144668579102, 0.0227491512298584, 0.022880287170410157, 0.022791391372680665, 0.02267011260986328, 0.022618175506591797, 0.022575040817260743, 0.022762527465820314, 0.022615264892578125, 0.0227509765625, 0.022871679306030273, 0.022673791885375976, 0.022769664764404295, 0.02290892791748047, 0.022771232604980467, 0.02261859130859375, 0.022595584869384764, 0.02263859176635742, 0.022665216445922853, 0.023291999816894532, 0.02279376029968262, 0.022663007736206053, 0.02258188819885254, 0.023052288055419923, 0.02292736053466797, 0.02284339141845703, 0.02328371238708496, 0.02270412826538086, 0.022656383514404296, 0.022726816177368166, 0.0232425594329834, 0.022790559768676756, 0.02283113670349121, 0.022839519500732423, 0.02280348777770996, 0.022646976470947267, 0.022635103225708008, 0.022710079193115233, 0.022965887069702148, 0.02261846351623535, 0.02280828857421875, 0.022607648849487304, 0.022762399673461914, 0.022521535873413087, 0.02252150344848633, 0.02265519905090332, 0.022608320236206056, 0.022642688751220705, 0.02274835205078125, 0.022588224411010743, 0.02288755226135254, 0.022896575927734374, 0.02255062484741211, 0.022502239227294923, 0.022720479965209962, 0.0231561279296875, 0.02340518379211426, 0.022872064590454103, 0.023001087188720702, 0.02309107208251953, 0.022840768814086913, 0.022733407974243162, 0.023006879806518554, 0.02271891212463379, 0.022826400756835938, 0.022814720153808594, 0.023011680603027343, 0.023013311386108397, 0.02328812789916992, 
0.024694976806640626, 0.02316988754272461, 0.023188447952270506, 0.023222272872924804, 0.02332784080505371, 0.023372383117675782, 0.023240095138549806, 0.02325984001159668, 0.022927583694458006, 0.022947839736938477, 0.02262419128417969, 0.022700096130371095, 0.02268329620361328, 0.024984096527099608, 0.023173120498657225, 0.02271980857849121, 0.02259987258911133, 0.022620704650878905, 0.02258121681213379, 0.022646047592163085, 0.022587968826293946, 0.02282716751098633, 0.02262015914916992, 0.02251568031311035, 0.02238057518005371, 0.022542335510253905, 0.022575103759765625, 0.022820863723754883, 0.02270518493652344, 0.022614816665649413, 0.02266921615600586, 0.022584768295288087, 0.022614879608154295, 0.0224849910736084, 0.022548479080200197, 0.022464607238769533, 0.022465919494628905, 0.022610336303710937, 0.022765695571899416, 0.023004703521728516, 0.02275302314758301, 0.022612703323364257, 0.022595584869384764, 0.022487039566040038, 0.02259708786010742, 0.022601856231689452, 0.022606239318847657, 0.0226562557220459, 0.022920127868652343, 0.02264454460144043, 0.022593536376953126, 0.02246873664855957, 0.02246847915649414, 0.02243174362182617, 0.022596799850463867, 0.02282579231262207, 0.023119232177734376, 0.024163040161132812, 0.024217504501342774, 0.022918272018432616, 0.02284556770324707, 0.02284329605102539, 0.022633567810058593, 0.022798080444335938, 0.022793792724609376, 0.022515296936035156, 0.022854496002197265, 0.022592927932739256, 0.02265763282775879, 0.022820512771606447, 0.022786399841308595, 0.022644128799438477, 0.022788864135742187, 0.023066463470458983, 0.022759424209594727, 0.022757375717163086]",tokens/s,43.75976584327959,,, 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1331.912704,1134.42816,0.0,731.906048,703.86944,s,1,8.2872080078125,8.2872080078125,0.0,8.2872080078125,8.2872080078125,8.2872080078125,8.2872080078125,[8.2872080078125],,kWh,3.365705040835867e-05,3.7050469971872254e-06,1.1948898447966538e-05,4.931099585351243e-05,,MB,1440.903168,1415.446528,0.0,998.244352,942.610432,s,10,1.6360735626220704,0.16360735626220704,0.0015673730336839388,0.16323302459716799,0.16449686126708984,0.16624446334838866,0.16764254501342774,"[0.1679920654296875, 0.1635787811279297, 0.16235653686523438, 0.16255337524414062, 0.16410850524902343, 0.16258714294433593, 0.1633115234375, 0.16315452575683595, 0.1638372802734375, 0.1625938262939453]",tokens/s,1564.7218184353453,kWh,5.011777495904831e-06,5.525887407350414e-07,3.3431476839322225e-06,8.907513920572097e-06,tokens/kWh,28739781.07502728,MB,1460.719616,1423.835136,0.0,1006.63296,942.612992,s,10,12.867068847656249,1.286706884765625,0.006537605388276944,1.286470458984375,1.290857531738281,1.2968565612792968,1.3016557849121093,"[1.3028555908203125, 1.2794200439453125, 1.2859237060546875, 1.2891888427734375, 1.2780086669921875, 1.2895244140625, 1.283924072265625, 1.2881405029296875, 1.2870172119140626, 
1.2830657958984375]",tokens/s,48.9622001295777,kWh,3.756512897701354e-05,4.1432230528568845e-06,1.633381250207251e-05,5.804216453194295e-05,tokens/kWh,1085417.8252661228,,s,630,12.864919296264643,0.020420506819467697,0.0005158661601812248,0.02032806396484375,0.0206540922164917,0.020869258785247802,0.021655140323638918,"[0.021448703765869142, 0.02096352005004883, 0.020688608169555665, 0.020700576782226563, 0.0205544319152832, 0.02051705551147461, 0.020436800003051758, 0.020557823181152343, 0.02047974395751953, 0.020523263931274415, 0.020549184799194337, 0.02049398422241211, 0.02053945541381836, 0.02058518409729004, 0.02044108772277832, 0.020490175247192384, 0.020371200561523438, 0.02040575981140137, 0.02124064064025879, 0.028610208511352538, 0.02205427169799805, 0.020640735626220704, 0.020434431076049805, 0.0203822078704834, 0.02041791915893555, 0.020454015731811524, 0.020444799423217773, 0.020243967056274414, 0.02034284782409668, 0.02031494331359863, 0.0203305606842041, 0.020251808166503907, 0.020364063262939453, 0.02020265579223633, 0.020259679794311522, 0.021005727767944335, 0.020633344650268556, 0.02104140853881836, 0.020721792221069336, 0.020767200469970704, 0.020760831832885743, 0.02069887924194336, 0.02069081687927246, 0.02065420722961426, 0.02086297607421875, 0.020801504135131835, 0.020647968292236328, 0.020412416458129884, 0.020329952239990234, 0.020289663314819337, 0.020617216110229493, 0.020429407119750977, 0.02046918487548828, 0.020351360321044922, 0.020361215591430663, 0.020377695083618166, 0.020334495544433593, 0.02026723289489746, 0.020319807052612306, 0.020297119140625, 0.020286176681518556, 0.020242528915405275, 0.020248575210571287, 0.021166080474853514, 0.020770463943481445, 0.02044144058227539, 0.020653055191040038, 0.02039740753173828, 0.020457183837890625, 0.020240320205688476, 0.02022400093078613, 0.020232032775878907, 0.020123807907104493, 0.02013148880004883, 0.02006844711303711, 0.020172800064086914, 0.02062089538574219, 0.02048476791381836, 0.020443071365356447, 0.020385856628417968, 0.020291519165039063, 0.020236352920532226, 0.020191232681274415, 0.020365312576293947, 0.020238176345825195, 0.02039619255065918, 0.020215808868408205, 0.020209280014038086, 0.020188863754272462, 0.020112064361572264, 0.020235424041748048, 0.020394784927368164, 0.02021491241455078, 0.020095935821533205, 0.019978239059448243, 0.020036832809448242, 0.02012240028381348, 0.020137983322143553, 0.020076671600341798, 0.02017215919494629, 0.020168256759643555, 0.02016556739807129, 0.020154367446899413, 0.02015135955810547, 0.020102079391479493, 0.02017241668701172, 0.020234432220458985, 0.020183231353759764, 0.02014787292480469, 0.020142431259155272, 0.020153472900390625, 0.02020400047302246, 0.020327871322631835, 0.020204511642456055, 0.02040809631347656, 0.020506784439086913, 0.020516319274902342, 0.02045369529724121, 0.02051417541503906, 0.020493215560913085, 0.02047350311279297, 0.02057046318054199, 0.020579744338989257, 0.020471807479858398, 0.020412288665771484, 0.0203721923828125, 0.02164531135559082, 0.020772863388061523, 0.020719615936279297, 0.020520959854125977, 0.020397375106811524, 0.02037388801574707, 0.02028371238708496, 0.020473567962646485, 0.02039129638671875, 0.02041539192199707, 0.020457311630249022, 0.02050377655029297, 0.02053215980529785, 0.02040012741088867, 0.020312063217163084, 0.02019327926635742, 0.020346431732177733, 0.02046816062927246, 0.020340736389160157, 0.020251808166503907, 0.020613983154296876, 0.02037455940246582, 0.020266176223754883, 
0.02028726387023926, 0.02023423957824707, 0.020299776077270508, 0.020229280471801756, 0.02030041694641113, 0.020246463775634764, 0.02033897590637207, 0.020273151397705077, 0.020637504577636717, 0.020615360260009766, 0.02065407943725586, 0.02032640075683594, 0.02043408012390137, 0.02078748893737793, 0.020953088760375976, 0.020277503967285157, 0.020272832870483398, 0.02037379264831543, 0.02064009666442871, 0.020610912322998047, 0.02037980842590332, 0.02029545593261719, 0.020218080520629882, 0.020160512924194338, 0.02012774467468262, 0.020166048049926756, 0.020320831298828126, 0.02033270454406738, 0.02051878356933594, 0.020555776596069338, 0.020332544326782227, 0.020387615203857422, 0.02040553665161133, 0.0202490234375, 0.02016489601135254, 0.020219615936279297, 0.020181503295898438, 0.02032640075683594, 0.02023423957824707, 0.02026633644104004, 0.02116579246520996, 0.020466432571411133, 0.020719263076782228, 0.021655168533325195, 0.020652416229248047, 0.02061311912536621, 0.020402368545532228, 0.020274911880493164, 0.020316255569458007, 0.020262239456176757, 0.020616960525512696, 0.02039904022216797, 0.020315488815307616, 0.020119808197021485, 0.0203001594543457, 0.020354944229125975, 0.020095071792602538, 0.020135744094848633, 0.020166784286499023, 0.02018035125732422, 0.020791648864746094, 0.020451711654663085, 0.020288671493530273, 0.020259679794311522, 0.020358335494995116, 0.021211135864257814, 0.02032313537597656, 0.020983808517456053, 0.020423776626586915, 0.020392864227294923, 0.02042470359802246, 0.020550975799560545, 0.020320831298828126, 0.020441312789916993, 0.020434783935546874, 0.020380895614624025, 0.020693599700927736, 0.02039628791809082, 0.020538400650024415, 0.02032918357849121, 0.020414400100708007, 0.02025913619995117, 0.020428800582885744, 0.02025062370300293, 0.020364383697509765, 0.02027961540222168, 0.020314239501953126, 0.020321887969970705, 0.02026963233947754, 0.020289791107177733, 0.020828224182128905, 0.020534496307373046, 0.020362016677856445, 0.02027622413635254, 0.020313087463378905, 0.020178239822387697, 0.02035091209411621, 0.020431615829467772, 0.020899839401245117, 0.020817920684814452, 0.020717567443847656, 0.020502527236938475, 0.02036262321472168, 0.021294591903686523, 0.020624031066894533, 0.020502784729003905, 0.02024380874633789, 0.020306047439575196, 0.020547391891479493, 0.020349023818969726, 0.021544832229614258, 0.021641984939575195, 0.020708768844604493, 0.0204902400970459, 0.020227807998657227, 0.020482719421386717, 0.020273216247558595, 0.020238496780395507, 0.020159904479980468, 0.020292064666748048, 0.020199552536010742, 0.0201744327545166, 0.02008131217956543, 0.020201215744018553, 0.020078336715698242, 0.020172639846801756, 0.020125856399536134, 0.020160768508911135, 0.020125696182250977, 0.020147584915161134, 0.020173280715942384, 0.02015862464904785, 0.020076543807983398, 0.02001919937133789, 0.020105215072631837, 0.020131616592407225, 0.020168479919433595, 0.020213216781616212, 0.020206560134887697, 0.020174400329589844, 0.020149887084960936, 0.02021868705749512, 0.020114879608154296, 0.020166912078857423, 0.020094560623168944, 0.020097055435180665, 0.020140735626220704, 0.02007151985168457, 0.020111488342285155, 0.020089120864868165, 0.020136447906494142, 0.020543296813964843, 0.020261056900024416, 0.020211008071899413, 0.020265663146972656, 0.020297727584838866, 0.020154367446899413, 0.020264959335327147, 0.02026905632019043, 0.020170751571655272, 0.020126976013183594, 0.02008140754699707, 0.02011292839050293, 0.020156896591186524, 
0.02044927978515625, 0.02022604751586914, 0.021082752227783202, 0.020555103302001953, 0.02044175910949707, 0.020290719985961915, 0.02022604751586914, 0.020349760055541993, 0.020270463943481445, 0.022669504165649414, 0.02171504020690918, 0.020412736892700196, 0.020404287338256836, 0.02056921577453613, 0.021221696853637697, 0.020709375381469726, 0.02098643112182617, 0.020874399185180664, 0.020628320693969728, 0.02064748764038086, 0.02062995147705078, 0.0203787841796875, 0.020384607315063478, 0.020397632598876954, 0.020242656707763672, 0.02029305648803711, 0.020252704620361328, 0.020261024475097655, 0.020492095947265625, 0.02037366485595703, 0.020206207275390624, 0.020184480667114257, 0.020318815231323242, 0.020221952438354493, 0.020166656494140626, 0.020313472747802735, 0.02024844741821289, 0.02013670349121094, 0.020240447998046876, 0.020217792510986328, 0.020719263076782228, 0.022945152282714844, 0.02054390335083008, 0.020480575561523436, 0.020434303283691405, 0.020265600204467774, 0.02021171188354492, 0.020178207397460936, 0.020161344528198243, 0.020125055313110353, 0.020163103103637694, 0.020152320861816408, 0.02020742416381836, 0.020209856033325195, 0.020257888793945314, 0.020138240814208983, 0.020189855575561525, 0.020162687301635743, 0.02010099220275879, 0.020166528701782226, 0.02013148880004883, 0.020189664840698243, 0.02023219108581543, 0.020385791778564453, 0.020533248901367186, 0.021355295181274415, 0.020562175750732423, 0.020446048736572266, 0.02038582420349121, 0.020601728439331054, 0.02045315170288086, 0.020453407287597657, 0.0204139518737793, 0.020374048233032228, 0.020278528213500978, 0.02024937629699707, 0.020119871139526367, 0.02037276840209961, 0.020209535598754883, 0.02031184005737305, 0.020221855163574217, 0.020184032440185545, 0.02025267219543457, 0.020187135696411132, 0.02023219108581543, 0.020497983932495117, 0.020522655487060545, 0.020331296920776367, 0.020377887725830077, 0.02041206359863281, 0.02020153617858887, 0.020326208114624024, 0.020297439575195312, 0.020283424377441406, 0.02033420753479004, 0.020281503677368164, 0.020302143096923828, 0.020187488555908205, 0.020254623413085936, 0.020153568267822265, 0.02027199935913086, 0.020264352798461914, 0.020324960708618164, 0.0203220157623291, 0.02020902442932129, 0.020302560806274413, 0.020842687606811523, 0.020682752609252928, 0.020385536193847656, 0.020438880920410157, 0.020442752838134765, 0.020459775924682618, 0.020396255493164064, 0.02048646354675293, 0.020322303771972656, 0.020399232864379883, 0.020353919982910158, 0.02026700782775879, 0.020624383926391602, 0.020212736129760742, 0.02045961570739746, 0.020313087463378905, 0.020458303451538085, 0.020217792510986328, 0.020799007415771485, 0.02037619209289551, 0.020321887969970705, 0.02031043243408203, 0.021655071258544923, 0.020768960952758788, 0.0204737606048584, 0.020318239212036134, 0.020253023147583007, 0.020208223342895508, 0.020236192703247072, 0.020346015930175782, 0.02032480049133301, 0.02039776039123535, 0.02017967987060547, 0.021534048080444335, 0.02716476821899414, 0.020584800720214843, 0.020494560241699218, 0.02046156883239746, 0.020316160202026368, 0.020307968139648438, 0.020395519256591797, 0.020253183364868164, 0.020358272552490234, 0.020147071838378907, 0.02026905632019043, 0.02027519989013672, 0.02017020797729492, 0.020165088653564454, 0.02016009521484375, 0.02018492889404297, 0.020177536010742188, 0.020170080184936524, 0.020112031936645507, 0.020307680130004883, 0.02018332862854004, 0.020180063247680666, 0.020416831970214842, 0.020302431106567383, 
0.020297151565551758, 0.020275775909423827, 0.02023423957824707, 0.020161760330200194, 0.020363584518432617, 0.020285919189453126, 0.02032640075683594, 0.020303871154785155, 0.02024038314819336, 0.02026700782775879, 0.020227903366088866, 0.02010745620727539, 0.020176895141601564, 0.020091936111450194, 0.02029257583618164, 0.020525056838989256, 0.020389888763427736, 0.020421791076660156, 0.020394847869873046, 0.020369695663452148, 0.02028483200073242, 0.02026460838317871, 0.020328256607055666, 0.020254751205444337, 0.020246463775634764, 0.02030476760864258, 0.02023324775695801, 0.021510208129882812, 0.02067849540710449, 0.020504831314086914, 0.020381343841552733, 0.020228288650512696, 0.020262912750244142, 0.020202816009521483, 0.020272096633911132, 0.020253536224365234, 0.020521856307983397, 0.020289535522460937, 0.020363359451293944, 0.020418207168579103, 0.020424959182739257, 0.020410367965698242, 0.020550975799560545, 0.020279520034790038, 0.0202511043548584, 0.020420095443725587, 0.020455680847167968, 0.020660320281982423, 0.02055491256713867, 0.020318368911743163, 0.020420927047729492, 0.02038755226135254, 0.020447423934936523, 0.020402816772460936, 0.020471807479858398, 0.02046771240234375, 0.020527103424072265, 0.020559871673583984, 0.02046566390991211, 0.020496192932128905, 0.020496576309204102, 0.020465248107910155, 0.02033091163635254, 0.020548608779907225, 0.020402368545532228, 0.020464448928833007, 0.020404224395751954, 0.020428800582885744, 0.02039916801452637, 0.020461631774902345, 0.020343679428100586, 0.020410367965698242, 0.020374719619750976, 0.020691360473632812, 0.020449695587158204, 0.020424352645874024, 0.020498176574707032, 0.02055788803100586, 0.020470016479492186, 0.0203973445892334, 0.020291936874389647, 0.02024025535583496, 0.02038412857055664, 0.020263328552246093, 0.02032431983947754, 0.02043654441833496, 0.02019375991821289, 0.020219871520996094, 0.020276927947998048, 0.02029952049255371, 0.02127347183227539, 0.02045657539367676, 0.020428895950317383, 0.020470464706420898, 0.02039596748352051, 0.02036137580871582, 0.020264352798461914, 0.020304288864135742, 0.02035526466369629, 0.020262815475463866, 0.020504671096801756, 0.02032601547241211, 0.02021209526062012, 0.020088287353515626, 0.020215999603271483, 0.020248128890991212, 0.020170879364013673, 0.02025334358215332, 0.020148223876953125, 0.0201376953125, 0.020228384017944336, 0.02032655906677246, 0.020199264526367187, 0.020179168701171875, 0.020626911163330076, 0.020532896041870117, 0.020318880081176757, 0.021472768783569338, 0.020466175079345703, 0.020346879959106445, 0.020307136535644532, 0.020349760055541993, 0.02030112075805664, 0.020297536849975584, 0.020411392211914063, 0.020383615493774414, 0.02042265510559082, 0.02026630401611328, 0.020425216674804687, 0.020144319534301756, 0.020235584259033202, 0.02021855926513672, 0.020481536865234375, 0.020277759552001954, 0.020332351684570312, 0.020687040328979493, 0.02050204849243164, 0.0204366397857666, 0.020390687942504884, 0.020214879989624023, 0.020317119598388673, 0.02026438331604004, 0.020236255645751954, 0.020238431930541992, 0.020125951766967774, 0.02037580871582031, 0.020323488235473634, 0.020294496536254883, 0.020264799118041993, 0.020490400314331053, 0.02030735969543457, 0.02044918441772461, 0.02049839973449707]",tokens/s,48.970381041016054,,, 
4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1815.461888,2726.166528,0.0,2340.421632,2284.9536,s,1,8.9574482421875,8.9574482421875,0.0,8.9574482421875,8.9574482421875,8.9574482421875,8.9574482421875,[8.9574482421875],,kWh,5.367731299584193e-05,5.913659765264593e-06,1.7436125059999696e-05,7.702709782110622e-05,,MB,1869.135872,3074.29376,0.0,2659.188736,2578.243584,s,10,0.867650177001953,0.08676501770019532,0.000773407254944715,0.08667796707153319,0.08762082824707032,0.08804329376220703,0.08838126617431641,"[0.08690713500976563, 0.08689100646972656, 0.08646492767333984, 0.0858563232421875, 0.08720556640625, 0.0859746551513672, 0.08752694702148438, 0.08601936340332031, 0.08633849334716796, 0.08846575927734375]",tokens/s,2950.4978709803654,kWh,2.766945632665395e-06,3.0514450902842144e-07,1.8437069256980732e-06,4.915797067391889e-06,tokens/kWh,52077007.34803168,MB,1869.135872,3074.29376,0.0,2659.188736,2578.246144,s,10,19.146637451171877,1.9146637451171877,0.011301228472434203,1.913807861328125,1.9276986450195313,1.9331595642089843,1.937528299560547,"[1.9156181640625, 1.912956298828125, 1.9003673095703124, 1.9045096435546875, 1.90773681640625, 1.902903076171875, 1.926485107421875, 1.914659423828125, 1.9227811279296876, 1.9386204833984375]",tokens/s,32.90394992889159,kWh,5.563000600484007e-05,6.135745735107591e-06,2.664109363990547e-05,8.840684537985314e-05,tokens/kWh,712614.5009395047,,s,630,19.14460872840882,0.03038826782287113,0.0007234683028099959,0.030255920410156248,0.03073218173980713,0.031015743923187254,0.03407565299987794,"[0.03131177520751953, 0.030664800643920898, 0.03520307159423828, 0.03065372848510742, 0.030611295700073243, 0.030196319580078124, 0.030083648681640623, 0.03027529525756836, 0.030078975677490235, 0.029912160873413085, 0.030002080917358398, 0.029929471969604493, 0.030229631423950194, 0.029987936019897462, 0.029931455612182616, 0.029923168182373047, 0.029874208450317383, 0.030164960861206055, 0.030480512619018553, 0.0299866886138916, 0.029970432281494142, 0.029997055053710937, 0.030099456787109374, 0.030091264724731445, 0.03005673599243164, 0.030113504409790038, 0.029945440292358398, 0.030134687423706053, 0.02996019172668457, 0.03015065574645996, 0.030107616424560547, 0.029972768783569335, 0.030318336486816408, 0.0304167366027832, 0.030783647537231444, 0.03091609573364258, 0.035322368621826174, 0.030679040908813477, 0.03056230354309082, 0.030902080535888672, 0.03106425666809082, 0.030373888015747072, 0.030257152557373046, 0.03032268714904785, 0.03073843193054199, 0.030212095260620117, 0.030185728073120116, 0.03023139190673828, 0.03021023941040039, 0.030355648040771486, 0.030321184158325194, 0.030220352172851562, 0.030166208267211916, 0.030034496307373048, 0.029980863571166992, 0.02996019172668457, 0.030097312927246093, 0.030177375793457032, 0.03019366455078125, 0.030057695388793944, 0.030202655792236327, 0.030129888534545898, 
0.029929759979248047, 0.03020454406738281, 0.030666751861572264, 0.03029097557067871, 0.03070380783081055, 0.03528579330444336, 0.030455104827880858, 0.030141120910644532, 0.030113792419433592, 0.02995609664916992, 0.02999087905883789, 0.029904319763183595, 0.029889215469360353, 0.029865407943725587, 0.029892704010009766, 0.03087958335876465, 0.03003446388244629, 0.029879871368408202, 0.02979270362854004, 0.030517248153686522, 0.030285823822021486, 0.029928735733032227, 0.029960927963256837, 0.029855743408203125, 0.029884191513061525, 0.02980179214477539, 0.030073312759399413, 0.030021600723266602, 0.029870559692382812, 0.029923072814941408, 0.030043840408325195, 0.030249536514282225, 0.03006035232543945, 0.030240575790405275, 0.03001593589782715, 0.030482080459594725, 0.030482751846313477, 0.0303884162902832, 0.03040643119812012, 0.03416463851928711, 0.030421215057373045, 0.030309471130371093, 0.030669599533081054, 0.029965599060058593, 0.02997881507873535, 0.030038751602172852, 0.030435199737548827, 0.030123008728027343, 0.030056480407714845, 0.02996681594848633, 0.029935903549194336, 0.031692960739135745, 0.0308240966796875, 0.02993356704711914, 0.03300592041015625, 0.03059529685974121, 0.03008639907836914, 0.02997302436828613, 0.030101503372192383, 0.03001875114440918, 0.029865983963012696, 0.030020416259765623, 0.029902528762817383, 0.03022060775756836, 0.030474239349365235, 0.03008675193786621, 0.030038143157958986, 0.03004355239868164, 0.030053247451782228, 0.030293184280395506, 0.03030918312072754, 0.030457855224609375, 0.030130176544189452, 0.03021824073791504, 0.030044160842895507, 0.030289920806884765, 0.02998684883117676, 0.030154720306396484, 0.02998476791381836, 0.02997657585144043, 0.02993356704711914, 0.03062777519226074, 0.033207454681396485, 0.03037686347961426, 0.030099456787109374, 0.030291967391967774, 0.030130176544189452, 0.030120992660522462, 0.030149599075317383, 0.03004022407531738, 0.029928863525390623, 0.030130624771118164, 0.030015487670898438, 0.029896703720092774, 0.029857791900634766, 0.03009903907775879, 0.03018179130554199, 0.030070783615112305, 0.030256128311157225, 0.030487552642822265, 0.03047145652770996, 0.030456031799316406, 0.03052128028869629, 0.030450239181518554, 0.030162559509277344, 0.030374111175537108, 0.03026905632019043, 0.03028374481201172, 0.030171327590942383, 0.029970720291137697, 0.029884511947631837, 0.029822975158691405, 0.02983135986328125, 0.030303167343139648, 0.029903743743896486, 0.030191423416137696, 0.029925567626953125, 0.029925247192382813, 0.02993779182434082, 0.029846656799316407, 0.029797088623046874, 0.029905055999755858, 0.03002511978149414, 0.02990870475769043, 0.029741952896118164, 0.029900800704956054, 0.02975030326843262, 0.030450464248657227, 0.03009328079223633, 0.03020185661315918, 0.030230560302734376, 0.030623743057250977, 0.030846975326538087, 0.030287872314453124, 0.03023052787780762, 0.029988960266113283, 0.030389503479003908, 0.03021824073791504, 0.03071199989318848, 0.03054640007019043, 0.03017087936401367, 0.029972736358642577, 0.02998636817932129, 0.030101951599121095, 0.029908992767333983, 0.03019385528564453, 0.03011564826965332, 0.0307957763671875, 0.03363225555419922, 0.03045123291015625, 0.030062976837158202, 0.029886560440063478, 0.029875423431396483, 0.03005014419555664, 0.029732864379882814, 0.029802623748779296, 0.030144672393798828, 0.030309024810791015, 0.030101184844970704, 0.030513376235961915, 0.030206207275390626, 0.030058624267578125, 0.03012284851074219, 0.030307199478149412, 
0.030391935348510743, 0.03055859184265137, 0.03021824073791504, 0.03032678413391113, 0.030005247116088866, 0.030019584655761718, 0.030283775329589844, 0.030568384170532228, 0.030246976852416993, 0.030029823303222656, 0.029940927505493164, 0.030065248489379883, 0.02995631980895996, 0.029865983963012696, 0.02999091148376465, 0.029902624130249023, 0.029792543411254882, 0.029917119979858398, 0.0299803524017334, 0.029927743911743163, 0.029878271102905272, 0.030273536682128906, 0.030058496475219725, 0.029890560150146486, 0.03021824073791504, 0.030665983200073244, 0.030718816757202148, 0.030121984481811522, 0.030002880096435546, 0.03103340721130371, 0.033857791900634766, 0.03058073616027832, 0.030611583709716797, 0.030371360778808594, 0.030103776931762697, 0.030047359466552733, 0.029998079299926757, 0.030050336837768556, 0.03028166389465332, 0.030166240692138673, 0.030081855773925782, 0.03003392028808594, 0.02995609664916992, 0.02995974349975586, 0.02979270362854004, 0.030054399490356445, 0.029808639526367187, 0.029828895568847658, 0.03017318344116211, 0.02998294448852539, 0.029832639694213868, 0.029817407608032226, 0.029865983963012696, 0.029843456268310548, 0.029781055450439454, 0.0299399356842041, 0.02986992073059082, 0.029985504150390627, 0.03246211242675781, 0.031136640548706053, 0.030453760147094725, 0.03020342445373535, 0.030339584350585938, 0.03020182418823242, 0.03024208068847656, 0.030605152130126954, 0.030180223464965822, 0.03019366455078125, 0.030424352645874023, 0.030532543182373046, 0.030474239349365235, 0.030431135177612305, 0.030556032180786133, 0.030543712615966796, 0.030432416915893556, 0.03039334487915039, 0.03018547248840332, 0.030085119247436523, 0.030394367218017578, 0.030138368606567382, 0.02995814323425293, 0.03002764892578125, 0.03009548759460449, 0.0299085750579834, 0.02994972801208496, 0.03000998306274414, 0.02998271942138672, 0.03016703987121582, 0.030264543533325194, 0.030841695785522462, 0.030266719818115233, 0.03018614387512207, 0.03036755180358887, 0.03056195259094238, 0.030717632293701173, 0.030475103378295898, 0.030267391204833984, 0.030693376541137695, 0.030116960525512694, 0.030052671432495116, 0.030077375411987305, 0.030036384582519532, 0.030911808013916017, 0.030424703598022462, 0.03040947151184082, 0.030190656661987305, 0.03003625679016113, 0.029911775588989258, 0.030266719818115233, 0.030053184509277343, 0.030111583709716797, 0.029929407119750978, 0.029988927841186522, 0.02984671974182129, 0.029796480178833008, 0.030168960571289063, 0.029913471221923827, 0.029922975540161132, 0.02978499221801758, 0.02991619110107422, 0.029889375686645507, 0.029871456146240233, 0.030012319564819336, 0.029734655380249022, 0.02998201560974121, 0.030210752487182618, 0.030482431411743165, 0.030494592666625978, 0.030535808563232424, 0.03064131164550781, 0.030482463836669922, 0.03110972785949707, 0.03037001609802246, 0.030300159454345704, 0.03021980857849121, 0.03008780860900879, 0.02997763252258301, 0.03011667251586914, 0.030240768432617186, 0.030332927703857423, 0.02996428871154785, 0.030267391204833984, 0.030004480361938476, 0.030285856246948243, 0.03005308723449707, 0.030312448501586913, 0.030093311309814453, 0.030059711456298828, 0.030029823303222656, 0.030061119079589842, 0.030049983978271484, 0.03018400001525879, 0.030482656478881837, 0.030108160018920898, 0.029911264419555664, 0.029943296432495117, 0.029930240631103517, 0.030545856475830076, 0.030586687088012696, 0.030253055572509766, 0.03083263969421387, 0.03097395133972168, 0.030612960815429687, 0.030591520309448242, 
0.030724096298217773, 0.03056844711303711, 0.03053753662109375, 0.030810304641723633, 0.030956703186035155, 0.03114031982421875, 0.03072991943359375, 0.031105567932128906, 0.030613664627075196, 0.030406656265258788, 0.030397983551025392, 0.030439903259277343, 0.030258975982666015, 0.030347583770751953, 0.030586784362792968, 0.030254783630371093, 0.03046841621398926, 0.03030406379699707, 0.03047238349914551, 0.030509056091308592, 0.030431232452392577, 0.030637760162353516, 0.03046966361999512, 0.030611360549926758, 0.03056329536437988, 0.030674848556518555, 0.03057811164855957, 0.031255104064941405, 0.03072755241394043, 0.030642175674438478, 0.030542463302612306, 0.030749759674072265, 0.030511871337890625, 0.030478527069091797, 0.0306932487487793, 0.03053990364074707, 0.030418943405151368, 0.030398464202880858, 0.03080601692199707, 0.03345107269287109, 0.030911136627197265, 0.030699424743652344, 0.030644607543945313, 0.030533632278442382, 0.030451360702514647, 0.03036400032043457, 0.030156511306762696, 0.03014255905151367, 0.03024224090576172, 0.030518016815185546, 0.030035615921020508, 0.03074787139892578, 0.030255712509155274, 0.03024627113342285, 0.030230400085449218, 0.031019775390625, 0.030821664810180664, 0.030661344528198242, 0.030596416473388673, 0.030800319671630858, 0.030392608642578124, 0.03023561668395996, 0.030394527435302736, 0.030368064880371092, 0.030167007446289064, 0.030331167221069336, 0.030324800491333008, 0.030313919067382813, 0.030378015518188476, 0.032140033721923825, 0.03243417739868164, 0.030515199661254884, 0.03052297592163086, 0.03035113525390625, 0.030174911499023436, 0.03018351936340332, 0.03010755157470703, 0.030270399093627928, 0.030093311309814453, 0.030235807418823243, 0.030079839706420898, 0.029999103546142578, 0.030000831604003905, 0.030144832611083985, 0.030093151092529295, 0.029978527069091796, 0.03016643142700195, 0.03023766326904297, 0.030399999618530273, 0.03051353645324707, 0.030296031951904296, 0.030457887649536133, 0.03069340705871582, 0.030334367752075195, 0.03044118309020996, 0.030397024154663086, 0.030191776275634765, 0.03009654426574707, 0.030284832000732422, 0.0303656005859375, 0.030470144271850585, 0.030598463058471678, 0.03053433609008789, 0.03040870475769043, 0.030487871170043944, 0.030222816467285155, 0.03014473533630371, 0.03015065574645996, 0.030236671447753907, 0.03013145637512207, 0.03012268829345703, 0.03009542465209961, 0.03016867256164551, 0.03020636749267578, 0.030791839599609374, 0.03048556709289551, 0.0302806396484375, 0.030522527694702147, 0.03082703971862793, 0.03071436882019043, 0.03073148727416992, 0.0312838077545166, 0.031129119873046875, 0.03084851264953613, 0.030581727981567382, 0.030592735290527345, 0.030361888885498046, 0.030363264083862303, 0.030346656799316408, 0.030431264877319335, 0.030435712814331054, 0.03033760070800781, 0.0304005126953125, 0.03035935974121094, 0.030451072692871093, 0.032964862823486325, 0.030819904327392577, 0.030445695877075196, 0.03034815979003906, 0.03024665641784668, 0.030200063705444338, 0.030283775329589844, 0.030238719940185548, 0.03037593650817871, 0.030203903198242187, 0.03017523193359375, 0.030181631088256836, 0.030344959259033202, 0.03017523193359375, 0.030151744842529297, 0.03055302429199219, 0.030760959625244142, 0.03081126403808594, 0.03063692855834961, 0.03122790336608887, 0.030709920883178712, 0.0304617919921875, 0.030464000701904297, 0.03042723274230957, 0.03016694450378418, 0.03042918395996094, 0.030662015914916994, 0.030378719329833985, 0.030392223358154297, 
0.030392416000366212, 0.030322591781616212, 0.03059507179260254, 0.030416288375854493, 0.030370399475097655, 0.030697471618652345, 0.030209024429321288, 0.030323711395263672, 0.030494720458984374, 0.03026700782775879, 0.030513023376464842, 0.030271968841552734, 0.030170976638793947, 0.03163545608520508, 0.0309403190612793, 0.03078144073486328, 0.030785472869873046, 0.03797699356079102, 0.03121558380126953, 0.030369823455810546, 0.03101081657409668, 0.030719999313354493, 0.0305930233001709, 0.030521343231201172, 0.03015884780883789, 0.03013542366027832, 0.03031328010559082, 0.030214208602905274, 0.030219583511352538, 0.030257856369018555, 0.030187328338623046, 0.030257343292236328, 0.030252031326293945, 0.03431935882568359, 0.03082444763183594, 0.030621536254882814, 0.030634143829345702, 0.030383424758911134, 0.03018547248840332, 0.030357568740844727, 0.03056870460510254, 0.030332447052001953, 0.0302478084564209, 0.03024892807006836, 0.032051071166992184, 0.03662556838989258, 0.030493663787841796, 0.030223648071289064, 0.030218015670776366, 0.03026220893859863, 0.03035955238342285, 0.030261119842529296, 0.030183231353759766, 0.030658687591552734, 0.030578880310058593, 0.030502111434936523, 0.03046444892883301, 0.030470272064208985, 0.03047657585144043, 0.030534879684448242, 0.030484895706176757, 0.030650848388671874, 0.030492223739624024, 0.030523679733276368, 0.030636032104492186, 0.030291967391967774, 0.030240928649902344, 0.0304167366027832, 0.030240768432617186, 0.0304815673828125, 0.03063894462585449, 0.030337024688720703, 0.0303636474609375, 0.030306304931640625, 0.03019980812072754, 0.030480384826660156]",tokens/s,32.90743670645715,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4941.602816,8008.892416,0.0,7606.370304,6988.678144,s,1,13.3713427734375,13.3713427734375,0.0,13.3713427734375,13.3713427734375,13.3713427734375,13.3713427734375,[13.3713427734375],,kWh,0.00018294973103747907,2.017153415573581e-05,7.5896171828016e-05,0.0002790174370212309,,MB,2827.485184,8025.669632,0.0,7608.467456,6915.138048,s,10,2.2782510833740237,0.22782510833740233,0.0009812805669034662,0.22797914886474607,0.22904386138916016,0.2292695960998535,0.2294501838684082,"[0.2289936981201172, 0.22843994140625, 0.2273548126220703, 0.2282374725341797, 0.22668386840820312, 0.22804515075683593, 0.2263667907714844, 0.22949533081054688, 0.2267208709716797, 0.22791314697265624]",tokens/s,1123.6689488187205,kWh,6.764692793938758e-06,7.457813216538286e-07,4.486771266182748e-06,1.1997245381775336e-05,tokens/kWh,21338231.556794036,MB,2831.712256,8027.766784,0.0,7610.564608,6915.140608,s,10,22.272522705078124,2.2272522705078126,0.003266796051850058,2.228244995117188,2.229776123046875,2.2301958007812503,2.23053154296875,"[2.230615478515625, 2.22771826171875, 2.228529541015625, 2.228916015625, 2.22796044921875, 2.2278330078125, 2.222165283203125, 2.229682861328125, 2.229218505859375, 
2.21988330078125]",tokens/s,28.28597408305078,kWh,6.51667211252244e-05,7.188038335167434e-06,4.259429417641186e-05,0.0001149490536368037,tokens/kWh,548068.887970636,,s,630,22.270004325866687,0.03534921321566143,0.0005071031318993858,0.03523891258239746,0.03575773277282715,0.036034547615051274,0.03764153347015382,"[0.03697884750366211, 0.03560281753540039, 0.03671807861328125, 0.03536921691894531, 0.03530339050292969, 0.03513363265991211, 0.035229633331298825, 0.03535001754760742, 0.035236064910888674, 0.03513363265991211, 0.034969856262207034, 0.035092479705810545, 0.03508019256591797, 0.03507529449462891, 0.03522777557373047, 0.035308128356933595, 0.03508844757080078, 0.03516211318969727, 0.035092479705810545, 0.03515532684326172, 0.03509446334838867, 0.03508035278320312, 0.03507577514648438, 0.03513756942749023, 0.035066143035888675, 0.03506175994873047, 0.035374977111816405, 0.035469024658203126, 0.035394046783447264, 0.03510019302368164, 0.035111839294433594, 0.035143680572509765, 0.03495446395874023, 0.03498003387451172, 0.03509056091308594, 0.035757537841796874, 0.03601212692260742, 0.0353633918762207, 0.03499795150756836, 0.03524470520019531, 0.03534841537475586, 0.035243423461914065, 0.035336864471435546, 0.035485118865966794, 0.0362213134765625, 0.03594588851928711, 0.035705631256103515, 0.0356715202331543, 0.035672607421875, 0.03546112060546875, 0.03529024124145508, 0.035160415649414065, 0.035121696472167965, 0.03508019256591797, 0.03520444869995117, 0.03518697738647461, 0.03523622512817383, 0.03513043212890625, 0.04012860870361328, 0.03559075164794922, 0.03530163192749024, 0.03498128128051758, 0.03505606460571289, 0.03601635360717773, 0.03534262466430664, 0.03566806411743164, 0.035573375701904296, 0.03545542526245117, 0.03538323211669922, 0.03532185745239258, 0.035140960693359376, 0.0351566390991211, 0.03501670455932617, 0.03509779357910156, 0.03508115386962891, 0.03503910446166992, 0.035089664459228516, 0.03519359970092773, 0.03523788833618164, 0.035211265563964846, 0.03524515151977539, 0.03494294357299805, 0.03491068649291992, 0.034820575714111325, 0.034869247436523435, 0.03506694412231445, 0.03582867050170899, 0.035545215606689454, 0.035613983154296876, 0.03536137771606445, 0.035399681091308595, 0.035192832946777344, 0.035194881439208986, 0.035427936553955076, 0.03597296142578125, 0.03533808135986328, 0.03514380645751953, 0.03527740859985352, 0.035178497314453126, 0.03512115097045899, 0.035160064697265625, 0.035037185668945314, 0.0355693130493164, 0.034782718658447266, 0.03500323104858399, 0.03520716857910156, 0.0353177604675293, 0.035100223541259766, 0.03508268737792969, 0.04004009628295899, 0.03506972885131836, 0.035245697021484376, 0.034869598388671874, 0.034992416381835936, 0.035081790924072265, 0.03484473419189453, 0.03481657409667969, 0.036650142669677734, 0.03794940948486328, 0.035369983673095705, 0.035053569793701174, 0.03495468902587891, 0.0354268798828125, 0.03516416168212891, 0.03488358306884766, 0.03533184051513672, 0.035719295501708985, 0.0352946548461914, 0.03581203079223633, 0.035036415100097654, 0.0351383056640625, 0.035102718353271486, 0.03500236892700195, 0.03506790542602539, 0.03504127883911133, 0.035017921447753904, 0.03520719909667969, 0.035138336181640625, 0.03509574508666992, 0.03525305557250977, 0.03562700653076172, 0.03516211318969727, 0.03567001724243164, 0.03533606338500977, 0.03650348663330078, 0.03595487976074219, 0.03537919998168945, 0.035434497833251956, 0.03539164733886719, 0.035342174530029295, 0.03525632095336914, 0.03522355270385742, 
0.03552175903320313, 0.035375038146972654, 0.03525900650024414, 0.035119327545166015, 0.03546054458618164, 0.035533374786376956, 0.035648895263671876, 0.03604134368896485, 0.03565881729125977, 0.03562089538574219, 0.03559516906738281, 0.0356495361328125, 0.03551596832275391, 0.035332542419433594, 0.035198974609375, 0.03525750350952148, 0.035203487396240234, 0.03560287857055664, 0.03596227264404297, 0.03550064086914063, 0.03558399963378906, 0.03575807952880859, 0.03541718292236328, 0.035428638458251956, 0.035492511749267576, 0.03543215942382812, 0.03529344177246094, 0.03517161560058594, 0.03512518310546875, 0.0350398063659668, 0.034971263885498045, 0.034968158721923825, 0.03519692611694336, 0.0350162239074707, 0.035090110778808595, 0.035070270538330076, 0.03498031997680664, 0.03608575820922852, 0.03528607940673828, 0.03523680114746094, 0.03601408004760742, 0.03520211029052735, 0.035769279479980466, 0.03610009765625, 0.03620982360839844, 0.03525513458251953, 0.03525004959106445, 0.035119232177734376, 0.03497100830078125, 0.03502252960205078, 0.03493974304199219, 0.03513353729248047, 0.03537510299682617, 0.03534438323974609, 0.03535257720947266, 0.03546844863891602, 0.035168830871582034, 0.03500377655029297, 0.03523372650146484, 0.035181697845458985, 0.03579888153076172, 0.03505286407470703, 0.03507020950317383, 0.03508883285522461, 0.034961406707763674, 0.0349791374206543, 0.035111614227294925, 0.03501372909545898, 0.03528777694702148, 0.035625152587890625, 0.03570892715454101, 0.03636764907836914, 0.03560111999511719, 0.035471359252929685, 0.035663230895996094, 0.03540195083618164, 0.03545539093017578, 0.03530246353149414, 0.035285823822021486, 0.03521331024169922, 0.03519705581665039, 0.03541123199462891, 0.03510345458984375, 0.035350528717041016, 0.03529916763305664, 0.035428638458251956, 0.03517955017089844, 0.035084606170654294, 0.035020641326904293, 0.03622768020629883, 0.03511852645874024, 0.03519718551635742, 0.035203487396240234, 0.035100318908691405, 0.035272289276123046, 0.035453697204589844, 0.03521484756469727, 0.03590777587890625, 0.035764545440673826, 0.03594550323486328, 0.038355808258056644, 0.03559628677368164, 0.03549091339111328, 0.03593699264526367, 0.03842220687866211, 0.03570534515380859, 0.03542739105224609, 0.03530438232421875, 0.035227649688720705, 0.03528460693359375, 0.03672127914428711, 0.035116798400878904, 0.035198974609375, 0.03503055953979492, 0.03495366287231445, 0.0350882568359375, 0.035246238708496094, 0.03504537582397461, 0.03522969436645508, 0.03533004760742187, 0.03562892913818359, 0.0355022087097168, 0.03529241561889648, 0.035140350341796876, 0.03526271820068359, 0.035096321105957035, 0.03523788833618164, 0.034938880920410156, 0.03492620849609375, 0.03500041580200195, 0.03489616012573242, 0.03491635131835937, 0.03480575942993164, 0.03493622589111328, 0.03507036972045898, 0.03504899215698242, 0.034949790954589846, 0.03493833541870117, 0.03485340881347656, 0.034918304443359374, 0.034990177154541016, 0.034895870208740236, 0.03509862518310547, 0.03492979049682617, 0.0359944953918457, 0.03497552108764648, 0.03501475143432617, 0.0351126708984375, 0.035552993774414066, 0.036026241302490235, 0.03636067199707031, 0.03557001495361328, 0.03546316909790039, 0.03540582275390625, 0.03531980895996094, 0.03527679824829102, 0.035165279388427735, 0.03522000122070312, 0.03535436630249023, 0.03531635284423828, 0.03531161499023437, 0.03508582305908203, 0.03511347198486328, 0.03609356689453125, 0.03517683029174805, 0.03497945785522461, 0.03508591842651367, 
0.034843135833740234, 0.03521974563598633, 0.03500761413574219, 0.03504012680053711, 0.03543040084838867, 0.034969600677490234, 0.03531161499023437, 0.035448833465576174, 0.03697868728637695, 0.035261695861816406, 0.03519289779663086, 0.03530207824707031, 0.03533004760742187, 0.03541718292236328, 0.03502988815307617, 0.035043361663818356, 0.03523123168945313, 0.035229503631591795, 0.03528384017944336, 0.035450206756591794, 0.03513993453979492, 0.03516019058227539, 0.035760128021240234, 0.03521865463256836, 0.035379806518554685, 0.03510905456542969, 0.03511500930786133, 0.03517030334472656, 0.0352088623046875, 0.03521980667114258, 0.0352147216796875, 0.035246017456054685, 0.03589599990844727, 0.03547340774536133, 0.035225601196289064, 0.035417823791503905, 0.03545116806030273, 0.035366878509521485, 0.035254302978515624, 0.035261470794677736, 0.03518767929077148, 0.03528499221801758, 0.03523993682861328, 0.03523788833618164, 0.03527679824829102, 0.03520275115966797, 0.03524639892578125, 0.03539731216430664, 0.03534422302246094, 0.03537558364868164, 0.035244033813476565, 0.035364192962646486, 0.03509110260009766, 0.03525151824951172, 0.03513801574707031, 0.03543881607055664, 0.03875430297851563, 0.03551180648803711, 0.035356929779052734, 0.036256542205810545, 0.035355838775634765, 0.03522806549072265, 0.035125022888183595, 0.035199615478515626, 0.03507718276977539, 0.03500649642944336, 0.03512617492675781, 0.034953216552734374, 0.03498982238769531, 0.03514585494995117, 0.03496726226806641, 0.035088798522949216, 0.03521535873413086, 0.035299072265625, 0.03513983917236328, 0.03504265594482422, 0.035084030151367185, 0.03515606307983398, 0.03497856140136719, 0.035034175872802734, 0.03509683227539063, 0.03501919937133789, 0.03585184097290039, 0.035412384033203126, 0.03512969589233399, 0.035563518524169925, 0.03530271911621094, 0.03539539337158203, 0.03577945709228516, 0.03581542587280274, 0.03555737686157227, 0.03536422348022461, 0.035428897857666015, 0.03535472106933594, 0.03530886459350586, 0.03528297424316406, 0.035263137817382814, 0.03539353561401367, 0.03532588958740234, 0.03527619171142578, 0.03529081726074219, 0.035033409118652346, 0.03498358535766601, 0.03505561447143555, 0.035082527160644535, 0.03488134384155273, 0.03493571090698242, 0.034772865295410155, 0.035133377075195316, 0.03516435241699219, 0.03560198211669922, 0.03547401428222656, 0.035288928985595706, 0.0359153938293457, 0.03535491180419922, 0.03522265625, 0.035248767852783205, 0.03563555145263672, 0.03582751846313476, 0.03544278335571289, 0.035165470123291014, 0.03501715087890625, 0.03598214340209961, 0.03551852798461914, 0.03540070343017578, 0.03514873504638672, 0.03512435150146485, 0.035138431549072265, 0.0352624626159668, 0.035244033813476565, 0.03504457473754883, 0.035310367584228515, 0.03572268676757812, 0.03575769424438477, 0.035951553344726564, 0.035403583526611326, 0.03543264007568359, 0.03563520050048828, 0.035620159149169925, 0.03582432174682617, 0.03553279876708984, 0.035536865234375, 0.0354648323059082, 0.03519529724121094, 0.035250175476074216, 0.035251487731933595, 0.03530416107177734, 0.035089534759521486, 0.03550912094116211, 0.03552175903320313, 0.03568112182617188, 0.03515097427368164, 0.03514790344238281, 0.03519539260864258, 0.03504355239868164, 0.03511088180541992, 0.03509657669067383, 0.03499212646484375, 0.03495116806030273, 0.03531340789794922, 0.03532556915283203, 0.03594099044799805, 0.03523311996459961, 0.03536323165893555, 0.0354447021484375, 0.035340576171875, 0.03525571060180664, 
0.03510726547241211, 0.03532556915283203, 0.03534627151489258, 0.03533075332641602, 0.035229057312011716, 0.035041919708251955, 0.035298881530761717, 0.03508063888549805, 0.03495683288574219, 0.0352196159362793, 0.036243648529052735, 0.037287647247314454, 0.03649782562255859, 0.03519631958007813, 0.035258495330810546, 0.035299808502197265, 0.03502431869506836, 0.03494150543212891, 0.03606937789916992, 0.03540895843505859, 0.035234142303466796, 0.03512790298461914, 0.035127262115478515, 0.03541561508178711, 0.03733747100830078, 0.03538905715942383, 0.03528691101074219, 0.03776572799682617, 0.036132865905761716, 0.03539971160888672, 0.03511497497558594, 0.035108863830566404, 0.03532185745239258, 0.035284126281738284, 0.035035999298095706, 0.03525763320922851, 0.035133216857910154, 0.035178688049316405, 0.035207870483398435, 0.03502700805664063, 0.035133342742919925, 0.035151710510253904, 0.03575014495849609, 0.03526860809326172, 0.035334144592285156, 0.035366912841796876, 0.035108863830566404, 0.03507120132446289, 0.0351317138671875, 0.03510524749755859, 0.03500556945800781, 0.035301567077636715, 0.03533280181884765, 0.035194881439208986, 0.03549798583984375, 0.03528704071044922, 0.035525856018066404, 0.035449630737304685, 0.035059711456298825, 0.03512940979003906, 0.03521897506713867, 0.035418529510498044, 0.03526422500610352, 0.035123489379882813, 0.03519606399536133, 0.03523376083374023, 0.03523452758789063, 0.03500252914428711, 0.03518259048461914, 0.0353546257019043, 0.03518259048461914, 0.03545292663574219, 0.035865825653076173, 0.03650214385986328, 0.03532537460327148, 0.035316448211669925, 0.035151744842529295, 0.03541398239135742, 0.035322017669677734, 0.03523583984375, 0.035399070739746095, 0.03641753768920898, 0.03570479965209961, 0.03554921722412109, 0.03671244812011719, 0.03520832061767578, 0.03533609771728516, 0.03556367874145508, 0.03533087921142578, 0.03500777435302734, 0.03493312072753906, 0.035161727905273436, 0.034945022583007815, 0.03504816055297852, 0.03508224105834961, 0.03495305633544922, 0.03508035278320312, 0.03515929412841797, 0.034946815490722656, 0.03510985565185547, 0.034883392333984374, 0.03534249496459961, 0.03495123291015625, 0.034811328887939454, 0.03501113510131836, 0.03499008178710938, 0.0352845458984375, 0.035178943634033205, 0.035143680572509765, 0.035102558135986325, 0.03540339279174805, 0.03551881790161133, 0.03524563217163086, 0.03515046310424805, 0.03545292663574219, 0.035342334747314456, 0.03503865432739258, 0.035025440216064456, 0.03497312164306641, 0.03488790512084961, 0.03493824005126953, 0.03503779220581055, 0.03576428985595703, 0.0353397445678711, 0.0353678092956543, 0.03508428955078125, 0.03512649536132813, 0.03511785507202148, 0.03506537628173828, 0.03515030288696289, 0.03510476684570313, 0.03507814407348633, 0.03510681533813476, 0.0350063362121582, 0.03503116989135742, 0.035378273010253904, 0.0353985595703125, 0.03510063934326172, 0.03504742431640625, 0.035288864135742185, 0.035520767211914064, 0.03554828643798828, 0.035533695220947265, 0.03549174499511719]",tokens/s,28.289172771657363,,, 
4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in 
load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1335.390208,1090.387968,0.0,704.643072,675.01056,s,1,8.3370830078125,8.3370830078125,0.0,8.3370830078125,8.3370830078125,8.3370830078125,8.3370830078125,[8.3370830078125],,kWh,2.9616197099979518e-05,3.259706843113159e-06,9.505007604049265e-06,4.238091154714194e-05,,MB,1474.650112,1409.155072,0.0,994.050048,942.610432,s,10,0.36204310226440434,0.03620431022644043,0.00018234259229563208,0.036226686477661134,0.03637332420349121,0.03643242206573486,0.03647970035552978,"[0.03636019134521484, 0.036205150604248046, 0.03624822235107422, 0.03614905548095703, 0.03630147171020508, 0.036323871612548825, 0.036491519927978514, 0.03618054580688477, 0.03591107177734375, 0.035872001647949216]",tokens/s,7070.981283688156,kWh,1.1809472198926354e-06,1.3023749249972036e-07,7.823079735163356e-07,2.0934926859086914e-06,tokens/kWh,122283684.92669554,MB,1507.405824,1419.640832,0.0,1002.438656,942.612992,s,10,20.57725756835938,2.057725756835938,0.008056546975848011,2.056117797851562,2.0680069335937503,2.06867314453125,2.06920611328125,"[2.05745068359375, 2.05325146484375, 2.061344482421875, 2.06785888671875, 2.054784912109375, 2.06933935546875, 2.066822265625, 2.0522314453125, 2.05098486328125, 2.043189208984375]",tokens/s,30.616324741384368,kWh,5.976107262510744e-05,6.591389742891236e-06,2.37547949894796e-05,9.010725735747827e-05,tokens/kWh,699166.7691100959,,s,630,20.575081758499163,0.03265885993412563,0.0004303863106755912,0.032572320938110355,0.032998672866821295,0.03336543674468994,0.03458794296264649,"[0.03266121673583984, 0.03255158233642578, 0.03260172653198242, 0.033242881774902346, 0.032719104766845704, 0.032419998168945315, 0.03231702423095703, 0.03265510559082031, 0.0326429443359375, 0.0328485107421875, 0.03285606384277344, 0.034393630981445315, 
0.03280547332763672, 0.032669567108154295, 0.03262803268432617, 0.03234259033203125, 0.03242361450195313, 0.03238540649414062, 0.032325695037841794, 0.032694271087646484, 0.03235635375976562, 0.03241971206665039, 0.032395553588867185, 0.03240534210205078, 0.03231897735595703, 0.03244083023071289, 0.03246387100219727, 0.03245977783203125, 0.03243212890625, 0.03260211181640625, 0.03281884765625, 0.03292195129394531, 0.03296051025390625, 0.033148929595947264, 0.03304652786254883, 0.03314211273193359, 0.032919776916503905, 0.03289295959472656, 0.03280527877807617, 0.03263187026977539, 0.032573631286621094, 0.032462974548339844, 0.03274406433105469, 0.03238092803955078, 0.03246281433105469, 0.032444446563720704, 0.03239321517944336, 0.03252019119262695, 0.03245391845703125, 0.03278921508789062, 0.0325937614440918, 0.032581119537353515, 0.03244713592529297, 0.03240140914916992, 0.0322652473449707, 0.032426433563232424, 0.03324367904663086, 0.03293500900268555, 0.0329983024597168, 0.03288063812255859, 0.032645118713378905, 0.032474143981933594, 0.032355297088623045, 0.03251225662231445, 0.03254272079467774, 0.03251116943359375, 0.032605247497558595, 0.0324769287109375, 0.0324169921875, 0.03258857727050781, 0.03259392166137695, 0.03268198394775391, 0.03269836807250977, 0.032673408508300784, 0.032465152740478516, 0.03236428833007812, 0.03246323013305664, 0.032500862121582035, 0.032370849609375, 0.03229516983032227, 0.03237526321411133, 0.0361082878112793, 0.03354828643798828, 0.03281510543823242, 0.032571392059326174, 0.03291286468505859, 0.03264342498779297, 0.03269395065307617, 0.03247792053222656, 0.03238662338256836, 0.032476734161376954, 0.03248400115966797, 0.03250579071044922, 0.032529727935791015, 0.03262335968017578, 0.03243622589111328, 0.03244236755371094, 0.03271820831298828, 0.03280140686035156, 0.03264899063110351, 0.032655582427978516, 0.03249107360839844, 0.032540321350097656, 0.0324202880859375, 0.03245414352416992, 0.0322589111328125, 0.032266239166259765, 0.03226774215698242, 0.03234207916259765, 0.032542430877685546, 0.032576255798339844, 0.03232294464111328, 0.03245119857788086, 0.032464897155761716, 0.032352256774902347, 0.03257324981689453, 0.03274969482421875, 0.03304044723510742, 0.0326448974609375, 0.03241100692749024, 0.03244489669799805, 0.03214579010009765, 0.03231308746337891, 0.03236684799194336, 0.03222937774658203, 0.03275507354736328, 0.03257430267333984, 0.032605438232421874, 0.033055488586425784, 0.03293360137939453, 0.03284323120117188, 0.03295929718017578, 0.0327325439453125, 0.032628383636474606, 0.032568096160888675, 0.03247635269165039, 0.03238399887084961, 0.032539936065673826, 0.03251798248291016, 0.032661441802978516, 0.032524288177490236, 0.03248223876953125, 0.0324956169128418, 0.032615966796875, 0.0327125129699707, 0.03282412719726562, 0.03351945495605469, 0.033312511444091794, 0.03314201736450195, 0.033119232177734374, 0.03305673599243164, 0.03284761428833008, 0.03290083312988281, 0.032729278564453124, 0.03270284652709961, 0.03295142364501953, 0.032557151794433595, 0.03245340728759766, 0.03276611328125, 0.03258060836791992, 0.03280966567993164, 0.032761409759521486, 0.032561439514160156, 0.03266304016113281, 0.03260723114013672, 0.032427841186523435, 0.03244441604614258, 0.0325591049194336, 0.032519966125488284, 0.03245072174072266, 0.03286742401123047, 0.03250479888916016, 0.03249948883056641, 0.032680160522460935, 0.03245040130615234, 0.03240332794189453, 0.03241952133178711, 0.0335816650390625, 0.032794273376464844, 0.032600479125976564, 
0.03259939193725586, 0.0328155517578125, 0.032497825622558596, 0.0324587516784668, 0.032456958770751956, 0.03265475082397461, 0.032632225036621096, 0.03261318588256836, 0.03401116943359375, 0.03272499084472656, 0.03286332702636719, 0.03268832015991211, 0.032483871459960935, 0.03274579238891601, 0.032720767974853515, 0.03281510543823242, 0.03264716720581055, 0.03249900817871094, 0.032411327362060545, 0.03239833450317383, 0.03240755081176758, 0.032525310516357424, 0.03286240005493164, 0.033589183807373045, 0.03270060729980469, 0.03259795379638672, 0.03268294525146485, 0.033441856384277345, 0.03280783843994141, 0.03263164901733399, 0.032538623809814454, 0.032366592407226565, 0.03241331100463867, 0.03239491271972656, 0.03391315078735352, 0.032519966125488284, 0.03255299377441406, 0.03254044723510742, 0.0325968017578125, 0.03260006332397461, 0.032985088348388675, 0.03346156692504883, 0.032778369903564454, 0.03265708923339844, 0.03262489700317383, 0.03253631973266601, 0.032475360870361326, 0.03243280029296875, 0.032229087829589845, 0.03224540710449219, 0.03250444793701172, 0.03239321517944336, 0.03256079864501953, 0.03260041427612305, 0.03234787368774414, 0.03249913787841797, 0.03283827209472656, 0.03343324661254883, 0.033828609466552736, 0.0330873908996582, 0.03286518478393555, 0.0340684814453125, 0.03288063812255859, 0.03283967971801758, 0.032688129425048826, 0.034977664947509764, 0.03268832015991211, 0.0326368637084961, 0.03274515151977539, 0.03305503845214844, 0.03514908981323242, 0.032841503143310545, 0.032919742584228515, 0.03272313690185547, 0.03242326354980469, 0.03237491226196289, 0.03252096176147461, 0.032489025115966796, 0.03246931076049805, 0.032544353485107425, 0.03252880096435547, 0.03248537445068359, 0.03242803192138672, 0.03237068939208984, 0.03235369491577148, 0.03258972930908203, 0.03257823944091797, 0.03256694412231445, 0.03264736175537109, 0.032553119659423826, 0.032487422943115234, 0.03247923278808594, 0.03247459030151367, 0.032487422943115234, 0.03240399932861328, 0.03275775909423828, 0.03261785507202149, 0.03254131317138672, 0.03274691009521485, 0.032479297637939455, 0.032589599609375, 0.03244518280029297, 0.032473087310791016, 0.032408702850341795, 0.03285491180419922, 0.032527359008789065, 0.03247824096679688, 0.032511775970458984, 0.03242374420166016, 0.03233001708984375, 0.03228681564331055, 0.032456703186035156, 0.032440319061279296, 0.032425567626953124, 0.03241401672363281, 0.03269142532348633, 0.03288358306884766, 0.03323289489746094, 0.03327385711669922, 0.03288678359985352, 0.032571392059326174, 0.03243996810913086, 0.03228409576416016, 0.03252931213378906, 0.03246249771118164, 0.03246067047119141, 0.03280358505249024, 0.03250985717773437, 0.032970367431640626, 0.032575294494628905, 0.03247552108764649, 0.03243737411499024, 0.0336044807434082, 0.0344719352722168, 0.03289907073974609, 0.032778240203857424, 0.032662593841552734, 0.032475711822509766, 0.032521759033203125, 0.03341804885864258, 0.03246492767333985, 0.032456703186035156, 0.03226419067382812, 0.032474529266357424, 0.03247679901123047, 0.032680545806884766, 0.032898529052734375, 0.03293686294555664, 0.03311820983886719, 0.033109409332275394, 0.032955230712890624, 0.03266124725341797, 0.03278803253173828, 0.032649055480957034, 0.03271692657470703, 0.03319356918334961, 0.03319420623779297, 0.03281782531738281, 0.03289487838745117, 0.032632225036621096, 0.03271955108642578, 0.03276595306396484, 0.03298275375366211, 0.032835712432861326, 0.03273334503173828, 0.03278793716430664, 0.03285251235961914, 
0.03295846557617187, 0.032964286804199217, 0.03284409713745117, 0.032939712524414064, 0.03249555206298828, 0.032635265350341794, 0.03249580764770508, 0.03253750228881836, 0.03265756988525391, 0.032913951873779296, 0.033001697540283204, 0.03285504150390625, 0.0326558723449707, 0.03259648132324219, 0.03262249755859375, 0.032371936798095705, 0.03251276779174805, 0.032893054962158205, 0.03299123382568359, 0.03331619262695312, 0.03296937561035156, 0.03329846572875977, 0.0346558723449707, 0.03385174560546875, 0.0330665283203125, 0.03291388702392578, 0.03299737548828125, 0.032908512115478517, 0.03270940780639649, 0.03265945434570312, 0.03291328048706055, 0.032747520446777346, 0.03262054443359375, 0.0323807373046875, 0.03436563110351563, 0.03393475341796875, 0.03280070495605469, 0.032733856201171876, 0.032696319580078126, 0.032617694854736326, 0.032639774322509765, 0.03252819061279297, 0.032696640014648434, 0.032554878234863284, 0.03250307083129883, 0.03250864028930664, 0.032449600219726565, 0.032516639709472654, 0.03289948654174805, 0.032839649200439455, 0.03293801498413086, 0.03280822372436523, 0.03271343994140625, 0.03246675109863281, 0.032465087890625, 0.0332718734741211, 0.03272492980957031, 0.03271177673339844, 0.03270339202880859, 0.03274342346191406, 0.0327720947265625, 0.033107967376708985, 0.03276985549926758, 0.03274361419677734, 0.03280892944335938, 0.03263190460205078, 0.03260307312011719, 0.032538623809814454, 0.032655361175537106, 0.033164512634277346, 0.03340572738647461, 0.03301580810546875, 0.03290099334716797, 0.03284415817260742, 0.0328139533996582, 0.03276214218139648, 0.032836254119873044, 0.032795585632324216, 0.032819904327392575, 0.032524608612060545, 0.03250790405273438, 0.03277164840698242, 0.032538623809814454, 0.03256159973144531, 0.03478492736816406, 0.03300592041015625, 0.033159168243408206, 0.032785472869873045, 0.03269113540649414, 0.03264265441894531, 0.03247555160522461, 0.03241779327392578, 0.03249084854125977, 0.03267359924316406, 0.03373148727416992, 0.032478622436523434, 0.032463455200195314, 0.03260956954956055, 0.03266838455200195, 0.032769054412841794, 0.032693214416503906, 0.032780288696289066, 0.03315507125854492, 0.03298092651367188, 0.032576736450195314, 0.03259888076782227, 0.03250995254516602, 0.03246470260620117, 0.03261385726928711, 0.03247792053222656, 0.03236374282836914, 0.03238787078857422, 0.03243209457397461, 0.03234614562988281, 0.03237068939208984, 0.03241779327392578, 0.032646305084228514, 0.03259888076782227, 0.0323870735168457, 0.032517375946044924, 0.032285438537597654, 0.032452415466308594, 0.03237033462524414, 0.03290505599975586, 0.032639678955078126, 0.03259539031982422, 0.032440414428710936, 0.03235273742675781, 0.03270156860351563, 0.032955265045166014, 0.03249728012084961, 0.032528415679931644, 0.03341328048706055, 0.03272457504272461, 0.032662113189697264, 0.03280281448364258, 0.03234611129760742, 0.03229471969604492, 0.03228899383544922, 0.03258774566650391, 0.032478271484375, 0.03253139114379883, 0.03264492797851563, 0.03245484924316406, 0.03228575897216797, 0.03227884674072266, 0.032381439208984376, 0.03253401565551758, 0.0324901123046875, 0.03243212890625, 0.032462398529052736, 0.03249123382568359, 0.03253526306152344, 0.03238844680786133, 0.03259664154052734, 0.03264508819580078, 0.03250783920288086, 0.03253014373779297, 0.03254876708984375, 0.032506240844726565, 0.0327720947265625, 0.0328059196472168, 0.032639328002929686, 0.033288833618164065, 0.032612350463867186, 0.032557056427001956, 0.032567230224609375, 
0.03400505447387695, 0.03259392166137695, 0.032628734588623046, 0.03277743911743164, 0.032713630676269534, 0.03270751953125, 0.03285811233520508, 0.03263379287719727, 0.032522239685058595, 0.0324771842956543, 0.03247091293334961, 0.032442337036132814, 0.03216310501098633, 0.03231833648681641, 0.03230944061279297, 0.032048961639404294, 0.032073406219482424, 0.03234540939331055, 0.03235942459106445, 0.03273318481445313, 0.032376766204833984, 0.03233100891113281, 0.03209043121337891, 0.03270502471923828, 0.03242803192138672, 0.03227033615112305, 0.032220863342285154, 0.032713024139404294, 0.03505152130126953, 0.032704513549804685, 0.03229849624633789, 0.03232364654541016, 0.03249308776855469, 0.0322754898071289, 0.0321923828125, 0.03274444961547852, 0.03434108734130859, 0.03372623825073242, 0.03245366287231445, 0.03221295928955078, 0.03255283355712891, 0.03235443115234375, 0.032176128387451174, 0.032177440643310545, 0.032158432006835935, 0.03226528167724609, 0.03219142532348633, 0.03206540679931641, 0.032061569213867186, 0.03194220733642578, 0.032155967712402346, 0.03233564758300781, 0.03234348678588867, 0.03218697738647461, 0.032099201202392576, 0.032266239166259765, 0.032390113830566405, 0.032499713897705076, 0.03237228775024414, 0.032343681335449216, 0.03209664154052734, 0.03209865570068359, 0.032261856079101564, 0.03218431854248047, 0.0324345588684082, 0.03240755081176758, 0.03247513580322266, 0.0322949104309082, 0.03214281463623047, 0.03195136070251465, 0.032049182891845704, 0.03216793441772461, 0.03200783920288086, 0.032110942840576175, 0.0321638412475586, 0.032046112060546875, 0.03233433532714844, 0.03217660903930664, 0.03220684814453125, 0.03230310440063477, 0.03225600051879883, 0.032408992767333986, 0.032514175415039065, 0.032849727630615236, 0.03406409454345703, 0.03299833679199219, 0.03280835342407227, 0.03252080154418945, 0.032922752380371095, 0.032682880401611325, 0.03249084854125977, 0.03212883377075195, 0.03240841674804688, 0.03269811248779297, 0.03248358535766602, 0.032688129425048826, 0.032205825805664064, 0.03222630310058594, 0.03220479965209961, 0.034635326385498044, 0.03276230239868164, 0.032456703186035156, 0.032595966339111326, 0.03232563018798828, 0.0324312629699707, 0.03269875335693359, 0.032397567749023436, 0.032384639739990236, 0.03218697738647461, 0.03210444641113281, 0.03218000030517578, 0.03207190322875977, 0.03257753753662109, 0.032713760375976564, 0.0324536018371582, 0.032368640899658206]",tokens/s,30.619562410232458,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,2002.956288,2893.938688,0.0,2491.416576,2425.650176,s,1,9.79341015625,9.79341015625,0.0,9.79341015625,9.79341015625,9.79341015625,9.79341015625,[9.79341015625],,kWh,7.93095858916028e-05,8.741023310982786e-06,2.9021412105978417e-05,0.000117072021308564,,MB,1888.833536,3122.528256,0.0,2705.32608,2606.129664,s,10,0.5053277778625488,0.05053277778625489,0.00021223708086301135,0.050472480773925785,0.050604894256591794,0.050875902938842775,0.05109270988464355,"[0.05114691162109375, 0.05043532943725586, 0.050361473083496096, 0.050409183502197266, 0.05054316711425781, 0.05051561737060547, 0.05046745681762695, 0.05042646408081055, 0.05054467010498047, 0.05047750473022461]",tokens/s,5066.01875485327,kWh,1.565066808310683e-06,1.7259836253930734e-07,1.0360569785775413e-06,2.773722149427532e-06,tokens/kWh,92294752.75771073,MB,1892.421632,3122.528256,0.0,2705.32608,2606.132224,s,10,13.822185791015624,1.3822185791015626,0.006799971190165407,1.3808309326171875,1.3901560058593752,1.3932618408203126,1.3957465087890626,"[1.39636767578125, 1.3749127197265625, 1.3760465087890625, 1.387364013671875, 1.37486865234375, 1.383943115234375, 1.377555419921875, 1.3894658203125, 1.3786932373046874, 1.3829686279296876]",tokens/s,45.57889826726956,kWh,4.0268814141270745e-05,4.4412566015858645e-06,1.975434854262072e-05,6.446441928547731e-05,tokens/kWh,977283.2936105077,,s,630,13.819991048812868,0.02193649372827439,0.00043687826244430834,0.021862095832824707,0.022189871788024902,0.022343362712860106,0.02305553339004517,"[0.02214796829223633, 0.021800960540771484, 0.02215116882324219, 0.026427104949951173, 0.027838815689086915, 0.022355903625488283, 0.022111583709716796, 0.02212879943847656, 0.021903871536254883, 0.021921056747436524, 0.021850847244262697, 0.022378496170043945, 0.022223871231079103, 0.021935359954833984, 0.022040319442749024, 0.02191974449157715, 0.022181888580322266, 0.022005760192871093, 0.021770240783691407, 0.021855264663696288, 0.02172003173828125, 0.021749536514282228, 0.021659360885620118, 0.02212505531311035, 0.021800960540771484, 0.02177577590942383, 0.021829343795776366, 0.02189606475830078, 0.021755903244018555, 0.022208511352539064, 0.022095455169677734, 0.022088096618652343, 0.022048191070556642, 0.02207596778869629, 0.02208527946472168, 0.022004064559936524, 0.022188032150268554, 0.022115583419799804, 0.02213750457763672, 0.021983327865600585, 0.02189926338195801, 0.021917247772216798, 0.021817440032958983, 0.021868288040161134, 0.021862144470214843, 0.021787328720092772, 0.021837984085083008, 0.021815296173095702, 0.02188287925720215, 0.02198255920410156, 0.021967519760131837, 0.02188902473449707, 0.02181724739074707, 0.02206729507446289, 0.022007711410522462, 0.02193561553955078, 0.02258799934387207, 0.02228643226623535, 0.022245119094848633, 0.022134048461914062, 0.021944351196289062, 0.022069503784179687, 0.022240991592407226, 0.021989376068115234, 0.02188902473449707, 0.021775871276855468, 0.02176665687561035, 0.021781728744506835, 0.02169481658935547, 0.021692991256713867, 0.021614463806152343, 0.02179996871948242, 0.021692768096923828, 0.02179539108276367, 0.021726432800292968, 0.02171785545349121, 0.021673055648803712, 0.02168012809753418, 0.02166262435913086, 0.021714815139770506, 0.021589279174804688, 0.02170921516418457, 0.021624927520751954, 0.021598560333251953, 0.02154473686218262, 0.021637344360351564, 0.02167184066772461, 0.021776479721069338, 0.021664800643920897, 0.02195964813232422, 0.021725183486938478, 
0.021703872680664062, 0.021732351303100587, 0.022619968414306642, 0.021983232498168945, 0.021751808166503905, 0.021749183654785155, 0.022121023178100586, 0.022111839294433593, 0.02211062431335449, 0.02202828788757324, 0.02192355155944824, 0.02200147247314453, 0.022529760360717774, 0.022291488647460937, 0.02228384017944336, 0.021913087844848633, 0.021971263885498048, 0.021797216415405274, 0.021807104110717773, 0.021764095306396485, 0.021792768478393554, 0.02172313690185547, 0.021691648483276368, 0.021735679626464843, 0.02177689552307129, 0.021805055618286134, 0.021888799667358398, 0.021641120910644532, 0.02189254379272461, 0.021658048629760743, 0.021619136810302735, 0.02173673629760742, 0.021739871978759765, 0.021819871902465822, 0.02180678367614746, 0.021948415756225585, 0.02182252883911133, 0.02166169548034668, 0.021717952728271483, 0.021618528366088866, 0.021667360305786133, 0.02172972869873047, 0.02173091125488281, 0.021747615814208983, 0.02183033561706543, 0.021657407760620116, 0.021665023803710937, 0.021651647567749024, 0.021621343612670898, 0.021624448776245118, 0.021684736251831056, 0.021775680541992186, 0.02216009521484375, 0.023000255584716797, 0.022175647735595702, 0.023139232635498046, 0.02207116889953613, 0.02205820846557617, 0.022188608169555663, 0.02182284736633301, 0.021704767227172853, 0.02193027114868164, 0.021809791564941405, 0.021786239624023436, 0.021734975814819337, 0.021775167465209962, 0.0218023681640625, 0.02170070457458496, 0.02168886375427246, 0.02192793655395508, 0.021702016830444336, 0.021856607437133788, 0.021714399337768555, 0.021698688507080077, 0.021731647491455078, 0.021692800521850585, 0.02175584030151367, 0.021759456634521484, 0.02171900749206543, 0.02175654411315918, 0.02172313690185547, 0.02185625648498535, 0.021652639389038084, 0.021650272369384764, 0.021749887466430664, 0.021821311950683594, 0.02172313690185547, 0.021727231979370116, 0.02184601593017578, 0.021786624908447266, 0.02199091148376465, 0.02193459129333496, 0.02186240005493164, 0.021794815063476563, 0.021780479431152345, 0.021878143310546876, 0.022051456451416016, 0.021951904296875, 0.022714368820190428, 0.022306495666503907, 0.022245695114135742, 0.02264678382873535, 0.022562559127807618, 0.022329599380493163, 0.022362112045288086, 0.02205001640319824, 0.022214431762695313, 0.021949600219726563, 0.02200150489807129, 0.022155263900756835, 0.022046720504760742, 0.021945375442504883, 0.0219303035736084, 0.022002336502075195, 0.02194384002685547, 0.022745567321777342, 0.021952512741088868, 0.022038528442382813, 0.021796512603759765, 0.021837472915649414, 0.022022848129272462, 0.021780223846435548, 0.0220380802154541, 0.021897920608520506, 0.021823488235473632, 0.021911359786987303, 0.02199519920349121, 0.021914112091064454, 0.02189936065673828, 0.02185206413269043, 0.02243174362182617, 0.02194819259643555, 0.02169059181213379, 0.02168217658996582, 0.021794815063476563, 0.021762048721313477, 0.021857280731201172, 0.022254592895507814, 0.021991424560546875, 0.021809152603149414, 0.02182499122619629, 0.021869087219238283, 0.02307811164855957, 0.021805856704711916, 0.021780479431152345, 0.021805055618286134, 0.02173244857788086, 0.021828767776489257, 0.021810943603515626, 0.022226112365722656, 0.022012191772460936, 0.022346271514892577, 0.021807104110717773, 0.02179177665710449, 0.021828575134277342, 0.021607872009277343, 0.02160611152648926, 0.021697376251220705, 0.02170649528503418, 0.0220633602142334, 0.022786048889160155, 0.022200031280517576, 0.021909503936767577, 0.02186031913757324, 
0.021876352310180664, 0.02175222396850586, 0.02161033630371094, 0.021837984085083008, 0.02168422317504883, 0.02171241569519043, 0.0216759033203125, 0.0215947208404541, 0.021650911331176758, 0.02168681526184082, 0.02155254364013672, 0.021570144653320314, 0.021677919387817383, 0.021759807586669924, 0.021782880783081056, 0.021925888061523437, 0.021772287368774415, 0.021704704284667968, 0.02224332809448242, 0.02196892738342285, 0.02178371238708496, 0.021826143264770507, 0.021768415451049804, 0.02188902473449707, 0.021766143798828123, 0.02194384002685547, 0.02186204719543457, 0.02198374366760254, 0.022022464752197265, 0.02208768081665039, 0.02198659133911133, 0.02231983947753906, 0.021898752212524415, 0.021981695175170898, 0.021811199188232423, 0.02189926338195801, 0.021769664764404298, 0.02189961624145508, 0.021784799575805664, 0.021851871490478517, 0.021936000823974608, 0.021807519912719727, 0.021669248580932617, 0.021901439666748047, 0.021741855621337892, 0.021983455657958985, 0.02186649513244629, 0.021751808166503905, 0.02191062355041504, 0.021715871810913084, 0.021796863555908205, 0.021622175216674804, 0.021833984375, 0.021620576858520507, 0.021532415390014648, 0.021750528335571288, 0.02169241523742676, 0.021706304550170898, 0.02176438331604004, 0.021911712646484376, 0.02237455940246582, 0.022095584869384767, 0.022066560745239258, 0.022346464157104493, 0.0220446720123291, 0.022634496688842775, 0.0221693115234375, 0.02214121627807617, 0.022200191497802734, 0.022188159942626955, 0.02205695915222168, 0.022114303588867186, 0.021923999786376953, 0.021988544464111328, 0.02210678482055664, 0.021988960266113283, 0.022064735412597656, 0.022012704849243163, 0.02202841567993164, 0.022048736572265627, 0.022104000091552733, 0.022103424072265624, 0.021922304153442384, 0.021876863479614258, 0.02168832015991211, 0.021651456832885742, 0.022515359878540038, 0.02364246368408203, 0.02205695915222168, 0.021716991424560548, 0.02164233589172363, 0.021658655166625976, 0.021749631881713867, 0.021752864837646484, 0.02167087936401367, 0.021778432846069336, 0.02161622428894043, 0.02169206428527832, 0.021639904022216796, 0.021640607833862305, 0.02166441535949707, 0.021669984817504883, 0.021757823944091797, 0.021952415466308595, 0.02174166488647461, 0.021762048721313477, 0.021813247680664064, 0.021873727798461914, 0.021887935638427735, 0.02181065559387207, 0.02170697593688965, 0.021827104568481446, 0.02204342460632324, 0.02188870429992676, 0.021924480438232422, 0.022038143157958986, 0.021915712356567384, 0.0219238395690918, 0.021968511581420897, 0.021862144470214843, 0.022126399993896484, 0.021928991317749023, 0.021909280776977538, 0.02225424003601074, 0.022105152130126954, 0.021823680877685547, 0.021803775787353517, 0.02183888053894043, 0.021733728408813477, 0.021676671981811522, 0.021760000228881835, 0.022015552520751953, 0.02183942413330078, 0.021650304794311525, 0.021784351348876952, 0.021687519073486327, 0.021816320419311523, 0.021702655792236326, 0.0216812801361084, 0.021799520492553712, 0.02175209617614746, 0.021880224227905275, 0.021758560180664063, 0.021699712753295897, 0.021797760009765625, 0.021833471298217774, 0.02212188720703125, 0.021855072021484376, 0.021766143798828123, 0.021939903259277343, 0.022118719100952148, 0.0218603515625, 0.02200531196594238, 0.022007648468017577, 0.022194784164428712, 0.021903072357177734, 0.021827167510986328, 0.021680831909179688, 0.0217741756439209, 0.021767391204833984, 0.0217260799407959, 0.021912895202636718, 0.02187923240661621, 0.02227356719970703, 0.021975839614868164, 
0.021833311080932616, 0.02189967918395996, 0.022032384872436524, 0.02211020851135254, 0.022214656829833986, 0.02183782386779785, 0.02189731216430664, 0.02188604736328125, 0.02179974365234375, 0.02191564750671387, 0.021788671493530275, 0.02191360092163086, 0.021775583267211913, 0.021778751373291015, 0.02178915214538574, 0.021813247680664064, 0.021835424423217772, 0.021661088943481444, 0.021728191375732422, 0.02180918312072754, 0.021749727249145506, 0.022112255096435548, 0.02183318328857422, 0.02251638412475586, 0.02738809585571289, 0.022435520172119142, 0.02186240005493164, 0.021962656021118163, 0.0221345272064209, 0.021985631942749023, 0.02219375991821289, 0.022243392944335937, 0.023173471450805665, 0.02209587287902832, 0.022127904891967774, 0.021881568908691407, 0.02214694404602051, 0.021756032943725585, 0.021745599746704102, 0.021952192306518556, 0.021964351654052736, 0.022176479339599608, 0.02186476707458496, 0.021886751174926757, 0.021799200057983397, 0.021869375228881837, 0.022107040405273438, 0.022300575256347658, 0.022149215698242186, 0.022347583770751953, 0.021880256652832032, 0.02194918441772461, 0.021770240783691407, 0.021861536026000977, 0.021908319473266602, 0.021933855056762694, 0.02186467170715332, 0.02181865692138672, 0.021713632583618164, 0.021811199188232423, 0.021729280471801758, 0.02183087921142578, 0.02174198341369629, 0.021931520462036135, 0.02179747200012207, 0.02180534362792969, 0.02182143974304199, 0.021794559478759766, 0.021879039764404296, 0.02186172866821289, 0.02186307144165039, 0.02182963180541992, 0.021843103408813475, 0.021833887100219728, 0.022350528717041015, 0.02199724769592285, 0.0221781120300293, 0.022070560455322265, 0.021854719161987304, 0.021756128311157228, 0.021659456253051757, 0.021794240951538087, 0.021661888122558592, 0.021848127365112303, 0.022196544647216796, 0.021946367263793946, 0.02176799964904785, 0.02177030372619629, 0.02168844795227051, 0.02176742362976074, 0.021759775161743163, 0.0217807674407959, 0.021961408615112303, 0.021791744232177734, 0.021711872100830077, 0.021558496475219728, 0.021635135650634765, 0.021598688125610353, 0.021565696716308595, 0.021702016830444336, 0.021626720428466795, 0.02166044807434082, 0.021939264297485352, 0.02164182472229004, 0.021686176300048828, 0.02161017608642578, 0.021956415176391603, 0.02169046401977539, 0.02174857521057129, 0.02167398452758789, 0.021618688583374023, 0.02169593620300293, 0.021756479263305664, 0.02181888008117676, 0.021864959716796875, 0.021803007125854493, 0.02184339141845703, 0.02210668754577637, 0.022353055953979493, 0.022055776596069335, 0.02199247932434082, 0.022059999465942382, 0.0220644474029541, 0.02224608039855957, 0.022469888687133788, 0.022008127212524414, 0.021987775802612304, 0.021946367263793946, 0.02189116859436035, 0.0220849609375, 0.02206342315673828, 0.021920000076293945, 0.02188083267211914, 0.0218787841796875, 0.021815231323242187, 0.02170403289794922, 0.021830368041992187, 0.02185830307006836, 0.021700607299804688, 0.022214656829833986, 0.022144447326660155, 0.022147296905517578, 0.02216111946105957, 0.021932672500610352, 0.022024192810058595, 0.02199897575378418, 0.022090047836303712, 0.022617183685302734, 0.022173791885375976, 0.02213692855834961, 0.02205708885192871, 0.021946016311645507, 0.02220332717895508, 0.021927711486816406, 0.021917919158935546, 0.021977088928222657, 0.021987327575683592, 0.02191564750671387, 0.021796863555908205, 0.021790624618530274, 0.021790815353393556, 0.02223308753967285, 0.02218943977355957, 0.022256032943725586, 
0.022339807510375977, 0.022082687377929688, 0.022053760528564455, 0.021956607818603514, 0.021997087478637694, 0.021942304611206054, 0.0217379207611084, 0.02188483238220215, 0.021735456466674803, 0.021874591827392577, 0.0219465274810791, 0.021827232360839843, 0.02195043182373047, 0.02191391944885254, 0.021767295837402344, 0.021816255569458008, 0.021948415756225585, 0.021782527923583983, 0.021796863555908205, 0.02181318473815918, 0.022140575408935548, 0.021999359130859375, 0.021971359252929687, 0.02195859146118164, 0.021915712356567384, 0.02187884712219238, 0.021821632385253906, 0.021787872314453127, 0.021856767654418945, 0.0217541446685791, 0.021861536026000977, 0.02175382423400879, 0.021736288070678712, 0.02175939178466797, 0.02176473617553711, 0.02192793655395508, 0.02189107131958008, 0.022022144317626953, 0.022005760192871093, 0.021916799545288086, 0.021801855087280272, 0.021811199188232423, 0.021907039642333984, 0.022175615310668945, 0.02205900764465332, 0.022172191619873046]",tokens/s,45.586136617224284,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 613, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 419, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 626, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Qwen/Qwen-14B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-14B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 205, in run_text_generation_memory_tracking _ = backend.generate(self.inputs, self.config.generate_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 454, in generate return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 335, in forward attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query_states.dtype) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/functional.py"", line 1890, in softmax ret = input.softmax(dim, dtype=dtype) RuntimeError: CUDA error: an illegal memory access was encountered CUDA kernel errors might be asynchronously reported at some other API call, so the stacktrace below might be incorrect. For debugging consider passing CUDA_LAUNCH_BLOCKING=1 Compile with `TORCH_USE_CUDA_DSA` to enable device-side assertions. 
" 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File 
""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking 
context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4356.325376,5355.012096,0.0,4959.76448,4769.731072,s,1,11.4017490234375,11.4017490234375,0.0,11.4017490234375,11.4017490234375,11.4017490234375,11.4017490234375,[11.4017490234375],,kWh,0.00013048237477916112,1.438593316229139e-05,5.61742116059992e-05,0.0002010425195474517,,MB,1614.225408,5373.886464,0.0,4966.055936,4251.027456,s,10,30.64483813476562,3.0644838134765626,0.0036675148485847946,3.063621826171875,3.069316064453125,3.0702912109375,3.071071328125,"[3.0587421875, 3.061697021484375, 3.060749755859375, 3.063556396484375, 3.063182861328125, 3.063687255859375, 3.065486572265625, 3.067370361328125, 3.069099365234375, 3.071266357421875]",tokens/s,83.53772301690701,kWh,8.943002240083321e-05,9.863613228052827e-06,5.943954755160009e-05,0.00015873318318048612,tokens/kWh,1612769.2702345515,MB,1623.69536,5388.566528,0.0,4980.736,4251.030016,s,10,17.700633666992186,1.7700633666992185,0.011951188198258642,1.7681280517578126,1.7857709716796875,1.78604716796875,1.7862681249999999,"[1.7573978271484374, 1.7863233642578125, 1.7594388427734375, 1.769110595703125, 1.7671455078125, 1.7812459716796876, 1.78075, 1.7857095947265624, 1.7613912353515624, 1.7521207275390625]",tokens/s,35.591946133251284,kWh,5.177968356166691e-05,5.712164755921341e-06,3.399308274999995e-05,9.148493106758823e-05,tokens/kWh,688638.0004315266,,s,630,17.697427032470696,0.028091154019794774,0.00044194637218125394,0.028012063980102538,0.028631215286254882,0.028905468654632567,0.029727254791259778,"[0.02891379165649414, 0.02809449577331543, 0.02785273551940918, 0.027840736389160157, 0.027873056411743164, 0.028383264541625975, 0.02812460708618164, 0.02760918426513672, 0.027482559204101562, 0.02770534324645996, 0.027692256927490236, 0.0276582088470459, 0.027659072875976562, 0.02769430351257324, 0.027609567642211914, 0.027683135986328124, 0.0275230712890625, 0.027706880569458008, 0.02744576072692871, 0.02772332763671875, 0.027998336791992186, 0.028228960037231444, 0.02785804748535156, 0.027723487854003907, 0.027793119430541992, 0.027801279067993165, 0.02767283248901367, 0.028082656860351562, 0.027867136001586915, 0.02798371124267578, 0.027897151947021484, 0.027850719451904298, 0.027609983444213868, 0.027845792770385742, 0.0277922248840332, 0.028086271286010742, 0.02776425552368164, 
0.028457439422607422, 0.028084224700927734, 0.02800230407714844, 0.028049407958984376, 0.028853376388549803, 0.0283284797668457, 0.02793916893005371, 0.02778892707824707, 0.027827808380126953, 0.027752256393432616, 0.027748992919921875, 0.027691551208496094, 0.027758464813232422, 0.028183807373046876, 0.027753087997436525, 0.02780985641479492, 0.027869056701660157, 0.02777510452270508, 0.02793267250061035, 0.027922367095947264, 0.02795667266845703, 0.02782476806640625, 0.027813695907592775, 0.02814188766479492, 0.027807680130004883, 0.027857952117919922, 0.02893212890625, 0.028485151290893556, 0.028633663177490234, 0.027979776382446288, 0.02846339225769043, 0.0281395206451416, 0.02815180778503418, 0.027888832092285157, 0.028161024093627928, 0.028318912506103515, 0.028555295944213868, 0.02916409683227539, 0.029050336837768555, 0.028887615203857422, 0.02832793617248535, 0.028341888427734375, 0.028437055587768555, 0.03174515151977539, 0.02854572868347168, 0.028524511337280272, 0.028352672576904298, 0.02841791915893555, 0.02835456085205078, 0.028272640228271483, 0.028082176208496092, 0.028717056274414062, 0.028194015502929687, 0.028404767990112306, 0.028171360015869142, 0.027986591339111327, 0.02813065528869629, 0.027932607650756835, 0.02813759994506836, 0.027994720458984376, 0.02803887939453125, 0.028079456329345703, 0.028302175521850586, 0.028263679504394533, 0.02794313621520996, 0.02788800048828125, 0.027924736022949218, 0.029859071731567384, 0.027860767364501954, 0.028046144485473632, 0.027957408905029298, 0.028078079223632812, 0.02777292823791504, 0.029067264556884766, 0.027924543380737306, 0.02824799919128418, 0.02815795135498047, 0.028106752395629882, 0.028688032150268553, 0.027980127334594727, 0.02788937568664551, 0.028080448150634766, 0.028110815048217774, 0.028233728408813476, 0.028241119384765624, 0.028221952438354493, 0.028178911209106445, 0.028578624725341797, 0.028363679885864256, 0.02889529609680176, 0.027979776382446288, 0.027797504425048827, 0.02793168067932129, 0.02800761604309082, 0.02787708854675293, 0.027920448303222656, 0.027716928482055665, 0.02784326362609863, 0.027768863677978515, 0.027829376220703125, 0.027705312728881836, 0.02788047981262207, 0.027766624450683595, 0.02780364799499512, 0.028837888717651368, 0.02819891166687012, 0.027962911605834962, 0.028819232940673827, 0.027800256729125977, 0.027590240478515625, 0.027742624282836914, 0.02776678466796875, 0.027869184494018553, 0.028011743545532226, 0.028205888748168945, 0.027889631271362306, 0.02776268768310547, 0.027844608306884764, 0.028138656616210938, 0.0276713924407959, 0.028010496139526365, 0.027757728576660156, 0.028791519165039064, 0.02778329658508301, 0.02773334312438965, 0.02774399948120117, 0.028031488418579102, 0.02765987205505371, 0.028775232315063477, 0.027822080612182616, 0.027731359481811522, 0.027783775329589845, 0.027726079940795897, 0.027663776397705078, 0.027732095718383788, 0.027752960205078125, 0.027639007568359374, 0.027714271545410157, 0.027803232192993164, 0.0277193603515625, 0.02822400093078613, 0.028035072326660155, 0.027743520736694335, 0.028711616516113283, 0.027727903366088866, 0.02776268768310547, 0.02779955291748047, 0.027830272674560546, 0.027696672439575194, 0.027822559356689452, 0.02776268768310547, 0.027813343048095702, 0.028882944107055664, 0.02792780876159668, 0.027771615982055665, 0.027654176712036134, 0.028198080062866213, 0.027670623779296875, 0.027709951400756837, 0.027369760513305665, 0.02842367935180664, 0.027683263778686525, 0.028046432495117186, 0.027644031524658202, 
0.027810592651367188, 0.027657535552978514, 0.027867040634155273, 0.02771206474304199, 0.027621599197387697, 0.027637760162353517, 0.027621471405029296, 0.027719776153564454, 0.027795263290405273, 0.027897855758666993, 0.028192768096923827, 0.028078304290771485, 0.028222688674926756, 0.029454912185668945, 0.028165311813354493, 0.028102912902832032, 0.02813395118713379, 0.03039414405822754, 0.028463327407836914, 0.028282880783081055, 0.028079391479492188, 0.02828300857543945, 0.028143936157226563, 0.028454719543457033, 0.028395999908447267, 0.02828019142150879, 0.028281471252441407, 0.028419776916503905, 0.028193088531494142, 0.028223487854003908, 0.028331712722778322, 0.028254528045654297, 0.028102655410766602, 0.028251903533935547, 0.027816255569458007, 0.027825824737548827, 0.028032352447509765, 0.028013504028320313, 0.027824127197265625, 0.02815590476989746, 0.027962495803833008, 0.028038015365600587, 0.02787062454223633, 0.027761247634887694, 0.027799903869628908, 0.028177600860595703, 0.027949663162231447, 0.02779532814025879, 0.028861536026000976, 0.027757471084594726, 0.027672063827514647, 0.028893184661865235, 0.02804096031188965, 0.027818239212036133, 0.027549503326416015, 0.02763590431213379, 0.027712543487548827, 0.027618303298950195, 0.027487199783325197, 0.027613664627075197, 0.027631200790405274, 0.027694015502929686, 0.02769932746887207, 0.02761100769042969, 0.027546911239624022, 0.027595775604248047, 0.027571935653686524, 0.027672256469726562, 0.02752067184448242, 0.027574079513549805, 0.02766524887084961, 0.02769865608215332, 0.02757891273498535, 0.02769264030456543, 0.027590623855590822, 0.027783103942871094, 0.02808451271057129, 0.027777280807495117, 0.027693023681640627, 0.027731935501098634, 0.02806719970703125, 0.02794767951965332, 0.028611808776855468, 0.027798303604125975, 0.027725887298583985, 0.027930559158325194, 0.028256256103515624, 0.027977088928222656, 0.028012384414672853, 0.028053407669067384, 0.027884384155273438, 0.028081407546997072, 0.028401439666748046, 0.028492799758911135, 0.028358655929565428, 0.02847871971130371, 0.02901683235168457, 0.02899488067626953, 0.028805728912353515, 0.02839366340637207, 0.028420000076293944, 0.028188671112060547, 0.028719104766845704, 0.02873958396911621, 0.028622655868530272, 0.029308223724365236, 0.0283384952545166, 0.028434335708618166, 0.028303327560424803, 0.02832771110534668, 0.028350400924682616, 0.02822243118286133, 0.02804524803161621, 0.027739328384399416, 0.028654848098754883, 0.02865023994445801, 0.029125728607177735, 0.02809561538696289, 0.02805491256713867, 0.028078144073486327, 0.02784707260131836, 0.027811552047729494, 0.027813568115234374, 0.027914016723632813, 0.028040000915527344, 0.028327871322631835, 0.028016031265258787, 0.02797590446472168, 0.028124927520751953, 0.029278656005859376, 0.028863807678222657, 0.028034143447875977, 0.02821299171447754, 0.02889472007751465, 0.029096479415893554, 0.02796678352355957, 0.028449024200439453, 0.028123584747314453, 0.028194816589355468, 0.02824563217163086, 0.02827712059020996, 0.028674047470092775, 0.028508159637451173, 0.028220895767211915, 0.02783695983886719, 0.02800764846801758, 0.027996448516845702, 0.028170751571655273, 0.02792448043823242, 0.027895904541015624, 0.028034143447875977, 0.02949305534362793, 0.028134624481201173, 0.028165376663208008, 0.028636991500854494, 0.030010080337524413, 0.028289215087890625, 0.0283441276550293, 0.028223167419433592, 0.028038848876953126, 0.02791881561279297, 0.027716896057128907, 0.03000819206237793, 
0.028020736694335937, 0.028078079223632812, 0.027809791564941407, 0.027641855239868163, 0.027797632217407227, 0.027848575592041017, 0.027898048400878905, 0.027797504425048827, 0.027954463958740235, 0.027801759719848634, 0.029243776321411133, 0.027975679397583008, 0.028692480087280273, 0.027920127868652344, 0.02894236755371094, 0.028241888046264648, 0.028228832244873048, 0.028179391860961914, 0.02814959907531738, 0.028023807525634766, 0.02810316848754883, 0.02838313674926758, 0.02950614356994629, 0.028439807891845702, 0.028461824417114256, 0.02812656021118164, 0.02822153663635254, 0.028150304794311524, 0.028332063674926758, 0.028278783798217775, 0.028182079315185547, 0.028453311920166015, 0.028145471572875978, 0.02837494468688965, 0.02873948860168457, 0.028363008499145508, 0.02808582305908203, 0.028239839553833006, 0.02825276756286621, 0.02800217628479004, 0.027832447052001955, 0.028016639709472657, 0.028630943298339845, 0.028277984619140627, 0.02803606414794922, 0.02798896026611328, 0.027826175689697266, 0.02797260856628418, 0.027801536560058595, 0.02824185562133789, 0.02801465606689453, 0.02879302406311035, 0.02797916793823242, 0.028219808578491212, 0.028251935958862304, 0.028311071395874025, 0.02832044792175293, 0.02893414306640625, 0.02856342315673828, 0.028114431381225585, 0.02801513671875, 0.027940927505493166, 0.02784454345703125, 0.028116640090942384, 0.0279553279876709, 0.028065343856811524, 0.02841667175292969, 0.028339744567871094, 0.028281312942504883, 0.02815590476989746, 0.02870681571960449, 0.02856550407409668, 0.028286527633666993, 0.0282891845703125, 0.028306751251220702, 0.028072864532470702, 0.028325183868408203, 0.02885273551940918, 0.028256256103515624, 0.02843846321105957, 0.02826576042175293, 0.028363487243652345, 0.028344575881958007, 0.028318880081176757, 0.028208831787109374, 0.028289119720458986, 0.028437568664550782, 0.02824678421020508, 0.02832067108154297, 0.028248064041137694, 0.028434528350830077, 0.02906105613708496, 0.02861609649658203, 0.02878121566772461, 0.028423423767089843, 0.028299871444702147, 0.028389312744140624, 0.028280704498291016, 0.028043615341186524, 0.028250080108642578, 0.02807360076904297, 0.02818035125732422, 0.027961280822753905, 0.029817567825317384, 0.028209184646606444, 0.028290912628173827, 0.028184288024902342, 0.028250463485717775, 0.029091680526733398, 0.02824355125427246, 0.028440351486206054, 0.028168735504150392, 0.02803936004638672, 0.028182783126831056, 0.029053760528564454, 0.028545568466186524, 0.02835433578491211, 0.028125823974609374, 0.028067840576171874, 0.028043264389038085, 0.027972896575927733, 0.02799603271484375, 0.02810761642456055, 0.027914239883422853, 0.027835424423217774, 0.027948127746582032, 0.02888096046447754, 0.027917631149291994, 0.02905753517150879, 0.03016214370727539, 0.028048160552978516, 0.02813132858276367, 0.02800390434265137, 0.027938400268554688, 0.028130144119262696, 0.028333568572998048, 0.028324287414550783, 0.0281146240234375, 0.02813171195983887, 0.027932575225830078, 0.028979583740234374, 0.028151552200317384, 0.028030975341796875, 0.027975679397583008, 0.028110847473144532, 0.02778726387023926, 0.02762112045288086, 0.02750694465637207, 0.027594335556030275, 0.027789215087890624, 0.027894271850585937, 0.0277708797454834, 0.027836063385009765, 0.02775059127807617, 0.028353824615478515, 0.028119871139526367, 0.028079999923706054, 0.028123327255249023, 0.02812518310546875, 0.028954784393310548, 0.028184288024902342, 0.02806387138366699, 0.02814771270751953, 0.02777635192871094, 
0.028213375091552733, 0.02784908866882324, 0.027944864273071288, 0.02789811134338379, 0.027973024368286133, 0.027779647827148438, 0.027711135864257812, 0.02789414405822754, 0.027864992141723634, 0.027743711471557617, 0.02779404830932617, 0.028248064041137694, 0.027873279571533204, 0.02773606491088867, 0.028008575439453124, 0.02814761543273926, 0.028110815048217774, 0.028051456451416015, 0.027992223739624022, 0.027863967895507814, 0.027939775466918945, 0.02779292869567871, 0.02776316833496094, 0.027680192947387695, 0.027564607620239257, 0.027855968475341795, 0.027627487182617188, 0.027800512313842774, 0.027803232192993164, 0.027931039810180663, 0.027853919982910157, 0.028773279190063478, 0.02780499267578125, 0.028055295944213868, 0.027765695571899413, 0.02790399932861328, 0.028033023834228517, 0.027856767654418944, 0.027827327728271484, 0.02893619155883789, 0.028039167404174805, 0.028060863494873047, 0.027712320327758787, 0.027669504165649415, 0.027613920211791994, 0.027615520477294923, 0.02752012825012207, 0.027776927947998048, 0.02780620765686035, 0.028094655990600587, 0.027680288314819335, 0.02775321578979492, 0.027572223663330078, 0.02772991943359375, 0.0276378231048584, 0.027629247665405275, 0.028608640670776366, 0.0277544002532959, 0.02771171188354492, 0.027745311737060546, 0.027654176712036134, 0.027795839309692382, 0.027605567932128906, 0.027891712188720705, 0.027557472229003906, 0.027641855239868163, 0.027713951110839845, 0.027770975112915038, 0.02774131202697754, 0.0277225284576416, 0.027547040939331056, 0.027664480209350587, 0.02821990394592285, 0.027889663696289063, 0.02773324775695801, 0.027615936279296874, 0.02763987159729004, 0.029056608200073244, 0.0278573112487793, 0.027719808578491212, 0.02780556869506836, 0.0284117431640625, 0.02786729621887207, 0.02778726387023926, 0.02810425567626953, 0.027940704345703126, 0.027720544815063478, 0.027725215911865234, 0.02790575981140137, 0.02770172882080078, 0.027637216567993166, 0.027591360092163085, 0.027842559814453126, 0.027639808654785155, 0.027661983489990234, 0.027859296798706055, 0.027723392486572265, 0.02768499183654785, 0.027684415817260742, 0.027714111328125, 0.027533439636230467, 0.027594207763671875]",tokens/s,35.59839511382615,,, 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: 
Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File 
""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise 
ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpb54f2zm9/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1174, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 894, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 507, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 436, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, 
worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 
'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 115009 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpd5b7wzyr/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained 
self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 
4085, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 96.12 MiB is free. Process 294659 has 14.64 GiB memory in use. Of the allocated memory 14.23 GiB is allocated by PyTorch, and 323.24 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4088, in from_pretrained hf_quantizer.postprocess_model(model) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model return self._process_model_after_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 456, in post_init_awq_exllama_modules model = exllama_post_init(model) File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 133, in exllama_post_init submodule.post_init() File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllama.py"", line 73, in post_init self.q4 = exl_ext.make_q4( RuntimeError: scales and qweight have incompatible shapes " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 30641 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp5778qlk9/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp870ld_h3/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmponlpp3k7/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in 
benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track 
self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report 
= Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File 
""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1039, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 816, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 551, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 384, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, 
prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File 
""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, 
q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = 
decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1069, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 881, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 479, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", 
line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1069, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 881, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 479, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return 
super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 3 more times] File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 805, in _apply param_applied = fn(param) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.55 GiB is free. Process 226000 has 13.19 GiB memory in use. Of the allocated memory 13.09 GiB is allocated by PyTorch, and 880.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File 
""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1682, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp57p77u1h/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new " 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v1-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,818.782208,666.763264,0.0,281.018368,267.897856,s,1,9.9607578125,9.9607578125,0.0,9.9607578125,9.9607578125,9.9607578125,9.9607578125,[9.9607578125],,kWh,1.567963637916563e-05,1.7217451032221594e-06,5.177226364000333e-06,2.257860784638812e-05,,MB,1258.692608,775.815168,0.0,360.710144,344.082944,s,20,0.18849174308776856,0.009424587154388427,0.00017202687475052685,0.009424431800842285,0.00963621416091919,0.009659278249740602,0.009853571195602416,"[0.009902144432067871, 0.00945910358428955, 0.009083231925964356, 0.009213919639587403, 0.009414431571960449, 0.009205408096313476, 0.009635071754455566, 0.009280063629150391, 0.009461919784545898, 0.00937168025970459, 0.009451552391052246, 0.009408096313476562, 0.009466431617736816, 0.00940988826751709, 0.009335455894470214, 0.009314784049987793, 0.009478143692016602, 0.009646495819091798, 0.00943443202972412, 0.009519488334655761]",tokens/s,27162.99354086796,kWh,2.7342758612152674e-07,3.015425269405162e-08,1.444531510721645e-07,4.4803498988774285e-07,tokens/kWh,571383944.9551517,MB,1305.595904,800.980992,0.0,385.875968,344.085504,s,20,9.925915557861327,0.49629577789306634,0.0062812143268559315,0.49699862670898437,0.5028985565185546,0.5048428009033202,0.5079149468994141,"[0.49627203369140627, 0.49030471801757813, 0.4806847229003906, 0.4902862548828125, 0.4877281494140625, 0.4932890014648437, 0.4922030944824219, 0.49249868774414063, 0.4988995971679687, 0.49724258422851564, 0.5086829833984375, 0.5004189453125, 0.5002186279296875, 0.49303341674804685, 0.4967546691894531, 0.4986956481933594, 0.5046406860351562, 0.5001802673339844, 0.5011764831542969, 0.5027049865722656]",tokens/s,126.9404311023057,kWh,1.384743345415634e-05,1.5269064078200278e-06,5.419665056832591e-06,2.0794004918808956e-05,tokens/kWh,3029719.3948922334,,s,1260,9.9131855726242,0.0078676075973208,0.00023638380468430666,0.00785155200958252,0.008028537654876709,0.008138702487945557,0.008642678308486938,"[0.007751743793487548, 0.007950719833374023, 0.007987648010253907, 0.007874752044677734, 0.007868224143981934, 0.007972479820251465, 0.007837759971618652, 0.007741119861602783, 0.007770624160766602, 0.007738560199737549, 0.007738431930541992, 0.007800704002380371, 0.00774886417388916, 0.007791359901428222, 0.007802656173706055, 0.008380640029907226, 0.008136608123779298, 0.007882688045501709, 0.00781328010559082, 0.007837696075439453, 0.007776256084442139, 0.007773312091827392, 0.007656320095062256, 0.00792739200592041, 0.007700895786285401, 0.007708672046661377, 0.007796256065368652, 0.007798816204071045, 0.00769871997833252, 0.007645343780517578, 0.007632224082946777, 0.007653439998626709, 0.007635712146759033, 0.007699391841888428, 0.0077051520347595215, 0.007724575996398926, 0.007686975955963135, 0.007655360221862793, 0.0077432641983032225, 0.007776544094085693, 0.0077396159172058105, 
0.007771743774414063, 0.007803232192993164, 0.007784287929534912, 0.00778220796585083, 0.007808224201202392, 0.008080351829528808, 0.007919392108917237, 0.007771488189697266, 0.0077645120620727535, 0.007715487957000732, 0.0076992959976196285, 0.007695136070251465, 0.007749023914337158, 0.007875232219696045, 0.007622879981994629, 0.0077432641983032225, 0.010675999641418457, 0.008836319923400879, 0.008473600387573242, 0.007852255821228027, 0.007802144050598144, 0.007853792190551758, 0.007475391864776611, 0.007786687850952149, 0.007816832065582275, 0.009025312423706055, 0.007889344215393066, 0.007876383781433105, 0.007747583866119385, 0.007733248233795166, 0.007700479984283447, 0.007645088195800781, 0.00771065616607666, 0.007710879802703858, 0.007688191890716553, 0.007686143875122071, 0.007819168090820313, 0.007768159866333008, 0.007741439819335938, 0.007677951812744141, 0.007693408012390136, 0.007859104156494141, 0.008040063858032227, 0.007939680099487305, 0.007766111850738526, 0.007801472187042236, 0.007785568237304688, 0.007734111785888672, 0.00777455997467041, 0.007778079986572266, 0.007683455944061279, 0.007600671768188476, 0.007518303871154785, 0.007825407981872558, 0.007848959922790527, 0.007762944221496582, 0.007762944221496582, 0.007715424060821533, 0.007779744148254395, 0.007789567947387695, 0.00778172779083252, 0.007725215911865234, 0.007998112201690675, 0.007712768077850342, 0.007633823871612549, 0.007582848072052002, 0.007650752067565918, 0.007685664176940918, 0.00776688003540039, 0.00773529577255249, 0.007860223770141601, 0.007770112037658691, 0.007858176231384278, 0.007906720161437989, 0.007905888080596923, 0.007824543952941895, 0.00779699182510376, 0.007811679840087891, 0.007759871959686279, 0.007768064022064209, 0.007921696186065674, 0.007663584232330322, 0.007544672012329102, 0.0075634241104125975, 0.007553023815155029, 0.007182047843933105, 0.007563551902770996, 0.007708255767822266, 0.00761897611618042, 0.007480959892272949, 0.007475103855133057, 0.007479551792144775, 0.007548575878143311, 0.007531199932098389, 0.007548031806945801, 0.007780960083007812, 0.007708864212036133, 0.0076269121170043945, 0.007614272117614746, 0.007628992080688477, 0.007812928199768067, 0.008046943664550782, 0.007753215789794922, 0.007604383945465088, 0.007668000221252441, 0.007632607936859131, 0.007577600002288819, 0.007540287971496582, 0.007569856166839599, 0.007599775791168213, 0.007729536056518555, 0.0075833601951599125, 0.0077064957618713376, 0.0076620478630065915, 0.0076267518997192385, 0.007780352115631104, 0.007791967868804932, 0.008004256248474121, 0.007771872043609619, 0.008028448104858399, 0.00773529577255249, 0.007679999828338623, 0.007605247974395752, 0.007655519962310791, 0.007608320236206055, 0.007578527927398682, 0.0077209601402282715, 0.007492800235748291, 0.007862912178039552, 0.007463103771209717, 0.007479199886322022, 0.007544928073883057, 0.007765696048736572, 0.007828959941864014, 0.007607135772705078, 0.007727104187011719, 0.007636703968048096, 0.007593920230865478, 0.0075381760597229, 0.007505951881408691, 0.007485919952392578, 0.007498079776763916, 0.007444479942321777, 0.0074304318428039555, 0.007403200149536133, 0.007501855850219726, 0.007376448154449463, 0.007389279842376709, 0.007279295921325684, 0.0076054401397705074, 0.007661791801452636, 0.007797215938568115, 0.007771168231964111, 0.00769532823562622, 0.007614048004150391, 0.007974527835845947, 0.007584159851074219, 0.007534719944000244, 0.007524415969848632, 0.007428192138671875, 0.007534175872802734, 
0.008026432037353516, 0.007681375980377198, 0.007671584129333496, 0.007748223781585693, 0.0077719039916992185, 0.00796118402481079, 0.007829376220703125, 0.007793791770935058, 0.007776768207550049, 0.007825920104980469, 0.007780384063720703, 0.007783423900604248, 0.007792768001556397, 0.008041472434997558, 0.007960415840148926, 0.00798083209991455, 0.007825632095336914, 0.007755775928497314, 0.007755775928497314, 0.007793824195861817, 0.007836512088775635, 0.007890560150146485, 0.007764351844787598, 0.007634943962097168, 0.007734975814819336, 0.007739327907562256, 0.008081567764282227, 0.007827680110931396, 0.007800608158111572, 0.007780576229095459, 0.007776256084442139, 0.00778380823135376, 0.007788288116455078, 0.007735360145568848, 0.0077545599937438965, 0.007841760158538819, 0.007843008041381836, 0.007871327877044677, 0.008163423538208007, 0.007800735950469971, 0.007772160053253174, 0.007725056171417236, 0.007841472148895263, 0.007768383979797363, 0.0076267199516296385, 0.007725344181060791, 0.008019712448120117, 0.007733471870422363, 0.007783872127532959, 0.0076902079582214355, 0.007381120204925537, 0.007655968189239502, 0.007655424118041992, 0.00764518404006958, 0.0076388797760009765, 0.007559328079223633, 0.007620448112487793, 0.007657824039459228, 0.007601952075958252, 0.0076349759101867675, 0.007659520149230957, 0.0076574721336364745, 0.007650496006011963, 0.007773151874542236, 0.00778223991394043, 0.007776544094085693, 0.0077636799812316895, 0.007623775959014893, 0.00772976016998291, 0.007686463832855225, 0.0076984319686889645, 0.007929855823516846, 0.007704576015472412, 0.007714816093444824, 0.007650303840637207, 0.007628992080688477, 0.007598911762237549, 0.007675903797149658, 0.007780352115631104, 0.007624896049499512, 0.007708479881286621, 0.007725247859954834, 0.007816864013671874, 0.007967103958129882, 0.008640447616577148, 0.007753568172454834, 0.00775548791885376, 0.007842080116271972, 0.007766079902648926, 0.0078058881759643555, 0.007764575958251953, 0.008028032302856446, 0.008020511627197266, 0.007685376167297363, 0.007594751834869385, 0.007586944103240967, 0.007566207885742187, 0.007593696117401123, 0.007737631797790527, 0.007759871959686279, 0.00774348783493042, 0.007659232139587403, 0.007698912143707276, 0.007716095924377441, 0.007649312019348145, 0.0076555519104003905, 0.007752096176147461, 0.007731264114379883, 0.00774944019317627, 0.007742847919464111, 0.008160063743591309, 0.007890751838684082, 0.007677408218383789, 0.007706975936889649, 0.007731840133666992, 0.007707583904266358, 0.007781248092651367, 0.00782147216796875, 0.007884160041809082, 0.007881408214569091, 0.007847424030303956, 0.007942080020904541, 0.007718815803527832, 0.007684288024902344, 0.007602752208709717, 0.007673215866088867, 0.007717184066772461, 0.007847231864929199, 0.007776351928710937, 0.007692863941192627, 0.007961984157562257, 0.007815839767456055, 0.007735072135925293, 0.007741759777069092, 0.007891039848327636, 0.007701471805572509, 0.007800864219665527, 0.007797567844390869, 0.007666944026947021, 0.00769920015335083, 0.007787807941436768, 0.007858911991119384, 0.007863391876220703, 0.007898015975952149, 0.00787660789489746, 0.007829504013061523, 0.007817215919494629, 0.008156543731689453, 0.007967167854309083, 0.007793856143951416, 0.007795711994171142, 0.007825407981872558, 0.00780083179473877, 0.007722815990447998, 0.007856575965881348, 0.007902463912963868, 0.007842207908630371, 0.007815264225006103, 0.007791744232177734, 0.007840640068054199, 0.00800972843170166, 
0.007820608139038086, 0.00796947193145752, 0.008325119972229005, 0.00799129581451416, 0.007772255897521973, 0.0077406721115112305, 0.007758304119110108, 0.007739232063293457, 0.007758175849914551, 0.007764224052429199, 0.007736288070678711, 0.007938848018646241, 0.007763967990875244, 0.007809023857116699, 0.007857696056365967, 0.007493535995483399, 0.007813407897949218, 0.007786592006683349, 0.007823423862457275, 0.007792096138000488, 0.007821599960327148, 0.0078065600395202635, 0.007751552104949952, 0.007932159900665283, 0.007731232166290283, 0.0077767038345336915, 0.007765120029449463, 0.00775820779800415, 0.007748095989227295, 0.007910912036895751, 0.007776768207550049, 0.007698272228240967, 0.007657631874084473, 0.007725056171417236, 0.007741504192352295, 0.007775360107421875, 0.007904064178466797, 0.0076697921752929684, 0.007622464179992676, 0.007572896003723144, 0.007676671981811523, 0.007665664196014404, 0.007763967990875244, 0.007972256183624267, 0.0078096318244934085, 0.0077844481468200685, 0.007854400157928466, 0.007777440071105957, 0.007748000144958496, 0.007939648151397706, 0.007925951957702636, 0.007897471904754639, 0.007867712020874024, 0.00782150411605835, 0.007774720191955567, 0.007825407981872558, 0.00800972843170166, 0.00802406406402588, 0.007929855823516846, 0.007864287853240967, 0.007851647853851318, 0.007877024173736572, 0.008005632400512695, 0.007833759784698486, 0.007802720069885254, 0.007817311763763428, 0.007785439968109131, 0.007787231922149658, 0.00798473596572876, 0.007976607799530029, 0.007819488048553466, 0.0077441601753234866, 0.007680191993713379, 0.007727104187011719, 0.007788064002990723, 0.00792409610748291, 0.007714399814605713, 0.007686431884765625, 0.007303071975708008, 0.007745535850524903, 0.007796576023101806, 0.007782656192779541, 0.007755008220672608, 0.00780947208404541, 0.007815392017364501, 0.008054783821105957, 0.007786496162414551, 0.007798655986785889, 0.00773747205734253, 0.007988800048828125, 0.008068639755249024, 0.008047072410583497, 0.007823616027832032, 0.007778783798217773, 0.00781820821762085, 0.007788544178009033, 0.007768415927886963, 0.0077554559707641605, 0.00778934383392334, 0.007750879764556884, 0.007845791816711425, 0.007697216033935547, 0.007647232055664062, 0.007677696228027344, 0.007691840171813965, 0.007725664138793945, 0.007749951839447021, 0.0076911039352416995, 0.007732160091400146, 0.007737631797790527, 0.007748415946960449, 0.007928991794586181, 0.007657408237457275, 0.007687039852142334, 0.007744575977325439, 0.00777942419052124, 0.007893727779388428, 0.007987040042877197, 0.007966944217681885, 0.007822624206542969, 0.007766751766204834, 0.007914495944976807, 0.007797599792480469, 0.007776415824890136, 0.0076934719085693356, 0.007736095905303955, 0.007716544151306152, 0.008190336227416993, 0.008193056106567382, 0.007650207996368408, 0.007737088203430176, 0.007716608047485351, 0.00772544002532959, 0.0077560958862304685, 0.007782271862030029, 0.007844128131866456, 0.0080763521194458, 0.007854368209838867, 0.007854656219482422, 0.007954239845275879, 0.007911424160003662, 0.007473152160644531, 0.00778547191619873, 0.00780185604095459, 0.007833600044250488, 0.007801951885223389, 0.00779366397857666, 0.007754687786102295, 0.007709504127502442, 0.007730751991271973, 0.007850016117095947, 0.007842591762542725, 0.007735007762908935, 0.007811071872711181, 0.0077742719650268554, 0.007841567993164062, 0.007848159790039062, 0.007884607791900635, 0.0078089919090271, 0.007964767932891846, 0.007751008033752441, 0.007674655914306641, 
0.007786464214324951, 0.007851967811584473, 0.007824960231781006, 0.007803423881530762, 0.007796735763549805, 0.007800320148468018, 0.007832064151763915, 0.007825407981872558, 0.007809216022491455, 0.007881824016571046, 0.0077998719215393066, 0.007825056076049804, 0.007868351936340333, 0.008574848175048827, 0.008003680229187012, 0.010539104461669922, 0.008538111686706543, 0.007961952209472656, 0.007873184204101562, 0.007873568058013916, 0.00806163215637207, 0.007903520107269288, 0.00792188787460327, 0.007986464023590089, 0.00793446397781372, 0.008103872299194336, 0.0079301438331604, 0.007866144180297852, 0.007824607849121094, 0.007875455856323241, 0.007888448238372803, 0.008034111976623536, 0.008016544342041016, 0.007849152088165284, 0.007768928050994873, 0.007950463771820068, 0.007907040119171142, 0.007796735763549805, 0.007772160053253174, 0.00785203218460083, 0.0076938881874084476, 0.007786943912506104, 0.007426047801971435, 0.00774348783493042, 0.007830656051635743, 0.007815231800079345, 0.007792960166931153, 0.007888991832733155, 0.008004192352294923, 0.007827263832092285, 0.007704576015472412, 0.007731200218200684, 0.00800767993927002, 0.007845888137817383, 0.007861472129821778, 0.007858016014099122, 0.007848896026611329, 0.008083456039428711, 0.008000736236572265, 0.007860640048980712, 0.007936384201049805, 0.007832831859588623, 0.0078087677955627445, 0.007816192150115966, 0.007878655910491944, 0.007900767803192139, 0.007808703899383545, 0.007862336158752441, 0.007914175987243652, 0.00783561611175537, 0.0077432641983032225, 0.007717152118682861, 0.007948224067687987, 0.007907328128814697, 0.007839744091033935, 0.007947519779205322, 0.007825503826141358, 0.007880832195281982, 0.007866911888122559, 0.007841856002807616, 0.007745183944702148, 0.007794976234436035, 0.007802879810333252, 0.00785747194290161, 0.007862143993377686, 0.007911488056182862, 0.008119296073913575, 0.00798089599609375, 0.008193951606750488, 0.007927135944366455, 0.007954463958740235, 0.007877503871917725, 0.00786624002456665, 0.00791539192199707, 0.007907328128814697, 0.0078113279342651365, 0.00787660789489746, 0.007886591911315919, 0.007888063907623291, 0.008149824142456054, 0.008122367858886719, 0.007947999954223633, 0.008018207550048829, 0.0079584641456604, 0.007968575954437255, 0.007532864093780517, 0.007907328128814697, 0.007970560073852538, 0.00796889591217041, 0.007964799880981446, 0.007954080104827881, 0.007963168144226073, 0.007966527938842774, 0.007917215824127197, 0.007928160190582276, 0.007939712047576904, 0.007991680145263672, 0.00821065616607666, 0.00800540828704834, 0.007927807807922363, 0.008005632400512695, 0.007944191932678223, 0.007929440021514893, 0.007928224086761474, 0.00795628786087036, 0.007963840007781983, 0.008000672340393067, 0.007931263923645019, 0.007914207935333251, 0.008148703575134277, 0.008003616333007812, 0.007886720180511475, 0.007927968025207519, 0.007959807872772217, 0.007975647926330567, 0.007882751941680909, 0.00817801570892334, 0.010360608100891113, 0.010517919540405273, 0.008614368438720703, 0.008756511688232422, 0.008071904182434081, 0.008026111602783203, 0.008181088447570801, 0.007952735900878907, 0.007874656200408935, 0.007891168117523193, 0.007894495964050292, 0.007887392044067383, 0.0077578239440917966, 0.00790511989593506, 0.008175423622131348, 0.008030240058898925, 0.007935647964477538, 0.007881216049194336, 0.008376480102539063, 0.007923711776733398, 0.007840991973876953, 0.007885695934295655, 0.00785158395767212, 0.007866528034210205, 0.008052000045776367, 
0.007932831764221191, 0.007935999870300293, 0.007962624073028564, 0.008053759574890136, 0.007961664199829101, 0.007952320098876952, 0.007648255825042725, 0.007997663974761962, 0.008001983642578125, 0.008210816383361817, 0.007911136150360107, 0.007876863956451416, 0.007855455875396728, 0.007868800163269043, 0.007910783767700196, 0.007933856010437012, 0.00788108777999878, 0.007887519836425782, 0.007860191822052002, 0.007921311855316162, 0.007944511890411376, 0.007921696186065674, 0.007913311958312989, 0.007860576152801514, 0.007891839981079101, 0.008057791709899903, 0.007929855823516846, 0.007966720104217529, 0.007928127765655518, 0.007950079917907716, 0.007885983943939209, 0.00792633581161499, 0.007923871994018554, 0.007925280094146728, 0.007967264175415038, 0.007921535968780518, 0.00792623996734619, 0.007880224227905273, 0.007849472045898438, 0.00789353609085083, 0.007860256195068359, 0.007941279888153076, 0.00798966407775879, 0.00786687994003296, 0.008120384216308594, 0.007963776111602783, 0.007940639972686767, 0.007905504226684571, 0.007845376014709473, 0.007872640132904053, 0.007825856208801269, 0.008057056427001953, 0.007819039821624756, 0.007843679904937744, 0.007872672080993652, 0.007911424160003662, 0.007862400054931641, 0.007798655986785889, 0.007808032035827636, 0.008375167846679688, 0.007939871788024902, 0.007885119915008544, 0.00835587215423584, 0.008120256423950195, 0.008001024246215821, 0.007961120128631592, 0.007960383892059325, 0.007956160068511962, 0.007887360095977783, 0.007483839988708496, 0.007885312080383301, 0.007958271980285644, 0.007952640056610108, 0.007931615829467773, 0.007842080116271972, 0.007817215919494629, 0.007810304164886475, 0.00783232021331787, 0.007848031997680664, 0.007900767803192139, 0.007846208095550537, 0.007871679782867432, 0.007819519996643067, 0.007938623905181885, 0.007970399856567383, 0.00907100772857666, 0.008561823844909668, 0.008676192283630372, 0.00783961582183838, 0.008645888328552245, 0.008262656211853027, 0.007962495803833007, 0.00787660789489746, 0.007788544178009033, 0.007946335792541503, 0.007883903980255126, 0.007906079769134522, 0.007938047885894776, 0.007809023857116699, 0.007897088050842285, 0.008021023750305176, 0.007889056205749511, 0.007818367958068848, 0.007780288219451904, 0.008171263694763184, 0.00786787223815918, 0.007832096099853516, 0.007831552028656007, 0.0078023362159729, 0.00784000015258789, 0.007776544094085693, 0.007650623798370362, 0.008938176155090332, 0.007749184131622315, 0.007756031990051269, 0.007972447872161866, 0.007864927768707276, 0.007900800228118897, 0.00780511999130249, 0.007835936069488526, 0.007810976028442383, 0.007792960166931153, 0.007780032157897949, 0.007797791957855225, 0.0077905597686767575, 0.007783423900604248, 0.007767680168151855, 0.007759520053863525, 0.007700704097747803, 0.008069439888000489, 0.007850175857543945, 0.007807263851165772, 0.007395008087158203, 0.008137951850891114, 0.007900896072387696, 0.00783945608139038, 0.00794652795791626, 0.007747583866119385, 0.007823200225830078, 0.007846335887908935, 0.007878367900848388, 0.007829504013061523, 0.007745535850524903, 0.007831456184387207, 0.007958623886108398, 0.007925280094146728, 0.007885280132293702, 0.0078080959320068356, 0.008002464294433593, 0.007922016143798827, 0.007837344169616699, 0.007868351936340333, 0.0077355198860168456, 0.0078438081741333, 0.007838784217834472, 0.007856959819793702, 0.007829504013061523, 0.00799129581451416, 0.007853663921356202, 0.007756127834320069, 0.007899551868438721, 0.007893824100494384, 
0.0077012481689453125, 0.00773302412033081, 0.007612256050109864, 0.007669568061828613, 0.007720704078674316, 0.007647776126861573, 0.007724639892578125, 0.007766816139221191, 0.007733248233795166, 0.007731200218200684, 0.007710720062255859, 0.007875616073608399, 0.007928927898406983, 0.007683072090148926, 0.007723904132843017, 0.007783967971801758, 0.00778278398513794, 0.0075838398933410645, 0.007766272068023682, 0.007739359855651855, 0.007903007984161377, 0.00788479995727539, 0.007826623916625976, 0.00788588809967041, 0.007796703815460205, 0.007875904083251953, 0.007940576076507569, 0.00781328010559082, 0.007886688232421874, 0.007817215919494629, 0.007821280002593994, 0.007796768188476562, 0.007927968025207519, 0.008147040367126464, 0.007846399784088135, 0.007897088050842285, 0.007845888137817383, 0.007766016006469726, 0.007703968048095703, 0.007587423801422119, 0.007671135902404785, 0.007683455944061279, 0.007752096176147461, 0.007745503902435303, 0.00771676778793335, 0.007831552028656007, 0.007751904010772705, 0.00783561611175537, 0.0077783999443054195, 0.007810527801513672, 0.007827712059020995, 0.007735487937927246, 0.008138688087463378, 0.00829030418395996, 0.008269696235656739, 0.008027680397033691, 0.007850624084472657, 0.007804768085479736, 0.007818528175354004, 0.007841760158538819, 0.007916287899017334, 0.007853119850158692, 0.007976895809173584, 0.007887872219085693, 0.007849984169006348, 0.00790118408203125, 0.008028160095214844, 0.007966720104217529, 0.007815392017364501, 0.0077346558570861815, 0.007821311950683594, 0.008034720420837402, 0.007981056213378907, 0.007880896091461182, 0.007906911849975586, 0.008603872299194336, 0.007825407981872558, 0.0077844481468200685, 0.007774208068847656, 0.007753727912902832, 0.007872320175170899, 0.007911871910095215, 0.007865983963012696, 0.007923999786376952, 0.00790057611465454, 0.007929664134979248, 0.007819903850555419, 0.007747680187225342, 0.00782531213760376, 0.008069120407104492, 0.007948287963867188, 0.00783564805984497, 0.007805215835571289, 0.007863552093505859, 0.007674464225769043, 0.00783574390411377, 0.007732480049133301, 0.007788415908813477, 0.0076542081832885744, 0.007757952213287353, 0.007722335815429688, 0.007651135921478271, 0.007764768123626709, 0.008146944046020508, 0.007796448230743408, 0.0077785921096801755, 0.007710207939147949, 0.007680511951446533, 0.007724991798400879, 0.007708127975463867, 0.007797344207763672, 0.007825056076049804, 0.008092000007629394, 0.007767199993133545, 0.007766208171844482, 0.0078076481819152836, 0.007925119876861573, 0.007885536193847657, 0.007925663948059082, 0.007879776000976562, 0.007857183933258056, 0.00783894395828247, 0.007948959827423096, 0.007931327819824219, 0.007918144226074219, 0.008026111602783203, 0.007995327949523926, 0.007909440040588378, 0.008082655906677247, 0.007940608024597168, 0.00786848020553589, 0.007968992233276366, 0.007849984169006348, 0.007825407981872558, 0.007847551822662353, 0.007854720115661622, 0.008065983772277832, 0.007895872116088868, 0.007858176231384278, 0.007792960166931153, 0.007828224182128906, 0.007781280040740967, 0.007798816204071045, 0.008163328170776368, 0.00805900764465332, 0.007966591835021972, 0.008510623931884766, 0.007973440170288086, 0.007959871768951417, 0.007958623886108398, 0.008242112159729004, 0.007917376041412353, 0.007958655834197997, 0.007954432010650634, 0.008032511711120606, 0.008041664123535157, 0.008031999588012696, 0.008216480255126953, 0.008050687789916992, 0.007739776134490966, 0.00800556755065918, 0.00804867172241211, 
0.007942240238189697, 0.007880512237548829, 0.00796447992324829, 0.007917888164520263, 0.007876543998718262, 0.007954495906829834, 0.007952159881591796, 0.00802019214630127, 0.007897088050842285, 0.007942143917083741, 0.007998496055603028, 0.007945184230804442, 0.008032223701477052, 0.007960383892059325, 0.007971039772033691, 0.007993343830108643, 0.007870463848114014, 0.008017279624938966, 0.007854015827178954, 0.007850592136383056, 0.008245344161987305, 0.008472319602966308, 0.008016127586364746, 0.007931903839111328, 0.008031295776367188, 0.00806156826019287, 0.00793836784362793, 0.007978655815124512, 0.007946815967559815, 0.007859712123870849, 0.00787279987335205, 0.007851520061492919, 0.007920127868652344, 0.007956448078155518, 0.007933887958526611, 0.008036224365234376, 0.007923935890197754, 0.008507391929626466, 0.009132063865661622, 0.008123711585998535, 0.00785807991027832, 0.0077868480682373046, 0.0077992000579833985, 0.008316287994384766, 0.007967455863952637, 0.007943295955657959, 0.007949088096618652, 0.007940095901489258, 0.007974271774291991, 0.007921311855316162, 0.008067359924316406, 0.007920479774475098, 0.00796239995956421, 0.008015487670898437, 0.007930111885070801, 0.00795468807220459, 0.007939519882202149, 0.008122783660888672, 0.007997600078582764, 0.008259519577026368, 0.00778710412979126, 0.008102335929870606, 0.007964704036712646, 0.0080381441116333, 0.008144927978515624, 0.007974559783935548, 0.00789638376235962, 0.007895584106445313, 0.007920095920562744, 0.007933951854705811, 0.00792799997329712, 0.007882559776306152, 0.00800972843170166, 0.008028063774108887, 0.00790447998046875, 0.007943039894104003, 0.008058879852294922, 0.00786636781692505, 0.00788259220123291, 0.007857984066009521, 0.008251744270324707, 0.00801587200164795, 0.007927711963653564, 0.007948383808135987, 0.007874815940856934, 0.00790502405166626, 0.008025535583496094, 0.007960864067077637, 0.007858304023742676, 0.007800992012023926, 0.00786566400527954, 0.007840447902679444, 0.007806975841522217, 0.007870463848114014, 0.007981056213378907, 0.007874112129211425, 0.008157631874084473, 0.007821311950683594, 0.00785148811340332, 0.008138976097106933, 0.007854400157928466, 0.007925055980682372, 0.00785481595993042, 0.00784169578552246, 0.007844128131866456, 0.007920703887939452, 0.007860159873962403, 0.007833471775054932, 0.00803932762145996, 0.007872511863708496, 0.007886847972869874, 0.007876319885253906, 0.007891232013702393, 0.00791983985900879, 0.007925536155700684, 0.007907392024993897, 0.007822912216186523, 0.00775820779800415, 0.008126784324645996, 0.007957312107086181, 0.007955296039581299, 0.007961952209472656, 0.007866975784301757, 0.007639296054840088, 0.007886591911315919, 0.007909120082855225, 0.008029567718505859, 0.007986303806304931, 0.00792252779006958, 0.007827712059020995, 0.007923999786376952, 0.007901567935943604, 0.007899168014526368, 0.007907296180725098, 0.007851871967315674, 0.008071552276611329, 0.007862048149108887, 0.00790937614440918, 0.007963903903961182, 0.008185728073120117, 0.007985151767730713, 0.007963103771209717, 0.007950751781463624, 0.008029343605041504, 0.007920095920562744, 0.00791590404510498, 0.007934239864349366, 0.007939807891845703, 0.00794163179397583, 0.007883456230163574, 0.007945759773254395, 0.007866847991943359, 0.00784169578552246, 0.007917471885681152, 0.007872511863708496, 0.007857183933258056, 0.008112704277038574, 0.008010144233703614, 0.007972864151000977, 0.007903232097625732, 0.008398431777954102, 0.007965248107910156, 
0.007906559944152833, 0.007873119831085205, 0.007808864116668701, 0.007753536224365234, 0.007838047981262208, 0.007889920234680176, 0.008000191688537597, 0.00787056016921997, 0.007885216236114502, 0.007898719787597656, 0.008046560287475586, 0.00808140754699707, 0.007970367908477783, 0.00796947193145752, 0.007944191932678223, 0.007997439861297608, 0.00806015968322754, 0.007901152133941651, 0.007918367862701416, 0.00799503993988037, 0.008001824378967286, 0.008231103897094727, 0.007920512199401855, 0.007953407764434815, 0.007579648017883301, 0.007931519985198974, 0.007973599910736083, 0.007849631786346435, 0.00790118408203125, 0.007907328128814697, 0.007911424160003662, 0.00789692783355713, 0.007941376209259034, 0.007855264186859131, 0.007894783973693848, 0.007927199840545655, 0.007901887893676757, 0.007935904026031494, 0.008166720390319824, 0.007991168022155761, 0.007973152160644531, 0.007979551792144775, 0.007950431823730468, 0.008002943992614746, 0.007892704010009766, 0.007871295928955078, 0.007890944004058837, 0.007847040176391602, 0.007911968231201171, 0.008026016235351563, 0.00787500810623169, 0.008122719764709473, 0.007947936058044433, 0.007949471950531006, 0.007879519939422607, 0.008004672050476074, 0.00791648006439209, 0.007864511966705323, 0.007882559776306152, 0.007895040035247802, 0.008163552284240722, 0.00797059202194214, 0.007894783973693848, 0.007868192195892333, 0.00805072021484375, 0.007987360000610351, 0.007965983867645264, 0.007911680221557617, 0.008321791648864747, 0.00799129581451416, 0.007878655910491944, 0.008871935844421386, 0.008065024375915527, 0.007989247798919678, 0.007955520153045655, 0.007904191970825196, 0.008085536003112794, 0.008021183967590332, 0.007886943817138671, 0.007991360187530518, 0.007971456050872803, 0.008005632400512695, 0.008065024375915527, 0.007914912223815919, 0.007952991962432862, 0.007969120025634765, 0.008130208015441894]",tokens/s,127.10344124693454,,, 4bit-awq-gemm-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in 
run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,829.5424,666.763264,0.0,281.018368,267.897856,s,1,9.342271484375,9.342271484375,0.0,9.342271484375,9.342271484375,9.342271484375,9.342271484375,[9.342271484375],,kWh,1.5016974204096793e-05,1.6486958501310273e-06,4.468059130013913e-06,2.113372918424173e-05,,MB,1275.092992,775.815168,0.0,360.710144,344.082944,s,17,0.1984738245010376,0.011674930853002212,0.00012155092994916372,0.011631551742553712,0.011845523071289063,0.011906130981445313,0.011956983032226563,"[0.011890239715576172, 0.011589376449584961, 0.011517727851867676, 0.01165113639831543, 0.011643424034118652, 0.011567423820495606, 0.011741472244262694, 0.01159500789642334, 0.011721759796142579, 0.01174847984313965, 0.011631551742553712, 0.011630240440368653, 0.011516863822937012, 0.011815711975097657, 0.011626336097717285, 0.011617376327514648, 0.011969696044921875]",tokens/s,21927.324728794392,kWh,3.46575781165836e-07,3.821283882236332e-08,1.76286100384812e-07,5.610747203730113e-07,tokens/kWh,456267214.8725702,MB,1321.725952,800.980992,0.0,385.875968,344.085504,s,17,10.393686706542969,0.6113933356789981,0.005754476209730111,0.6100719604492187,0.6189729248046875,0.6208252807617188,0.6239396166992187,"[0.606656005859375, 0.6119292602539063, 0.6042606201171875, 0.6183868408203125, 0.6058283081054687, 0.6123331909179688, 0.614412109375, 0.6066995239257813, 0.61985205078125, 0.6126442260742188, 0.6086817016601562, 0.6047228393554688, 0.6080897216796874, 0.6066796875, 0.617720458984375, 0.6247182006835937, 0.6100719604492187]",tokens/s,103.04332141604681,kWh,1.787342686809745e-05,1.9711463866607767e-06,6.7763011610275024e-06,2.662087441578573e-05,tokens/kWh,2366563.8857693593,,s,1071,10.384504063606254,0.009696082225589413,0.00035216620922298475,0.009645248413085937,0.009893888473510743,0.00997436761856079,0.010774205017089844,"[0.009189375877380371, 0.009754624366760254, 0.009770943641662597, 0.009658432006835938, 0.009676511764526368, 0.009609279632568359, 0.009821503639221192, 0.009870207786560059, 0.00984067153930664, 0.009973823547363281, 0.009696672439575196, 0.009723679542541504, 0.009728768348693847, 0.009621503829956055, 0.009596128463745118, 0.009769760131835937, 0.009706687927246094, 0.00976796817779541, 0.009590047836303711, 0.009509568214416503, 0.009713472366333008, 0.009582015991210937, 0.009648415565490722, 0.009545472145080567, 0.009441280364990234, 0.009672960281372071, 0.009565631866455078, 0.009590880393981933, 0.009507136344909668, 0.009526752471923828, 0.009528287887573242, 0.009676799774169922, 0.00956230354309082, 0.009477952003479003, 0.009590784072875976, 0.009506048202514649, 
0.009603839874267578, 0.009652223587036133, 0.00951318359375, 0.009459487915039063, 0.0094901123046875, 0.009576512336730956, 0.009629952430725097, 0.009623167991638183, 0.009496288299560547, 0.009599552154541016, 0.009846272468566895, 0.009658047676086426, 0.009731264114379882, 0.009500672340393066, 0.009496288299560547, 0.009623616218566895, 0.009553983688354492, 0.009551199913024902, 0.009523743629455567, 0.009650176048278808, 0.009727007865905761, 0.009709856033325196, 0.009537983894348144, 0.009515423774719239, 0.00962883186340332, 0.00960377597808838, 0.009621184349060058, 0.009272128105163575, 0.009645248413085937, 0.00994803237915039, 0.009725664138793945, 0.009555808067321777, 0.009536959648132325, 0.009503680229187012, 0.00961740779876709, 0.009580544471740723, 0.009494112014770508, 0.009464320182800292, 0.009450431823730468, 0.00961638355255127, 0.009498080253601075, 0.009482751846313477, 0.009529343605041504, 0.010004480361938477, 0.010034560203552246, 0.009808575630187989, 0.0099749116897583, 0.009718591690063477, 0.009607423782348632, 0.00968671989440918, 0.009640000343322753, 0.009792736053466797, 0.009859871864318848, 0.009842623710632324, 0.010161888122558594, 0.009824640274047851, 0.009697504043579101, 0.009674495697021484, 0.009741375923156739, 0.009891776084899903, 0.00983356761932373, 0.009633695602416992, 0.009598976135253906, 0.009664223670959473, 0.009820063591003418, 0.009543359756469727, 0.009742912292480468, 0.009541760444641114, 0.009801728248596191, 0.010285183906555176, 0.01017955207824707, 0.010494879722595215, 0.00959648036956787, 0.009684896469116211, 0.00962553596496582, 0.009723679542541504, 0.009708191871643066, 0.009633952140808106, 0.009657631874084473, 0.009552032470703124, 0.009560640335083009, 0.009605119705200196, 0.00955948829650879, 0.009650752067565918, 0.00981606388092041, 0.009644031524658203, 0.009653280258178711, 0.009539615631103516, 0.009613280296325683, 0.009548992156982421, 0.00930025577545166, 0.009467840194702148, 0.009644096374511718, 0.00956822395324707, 0.009531423568725586, 0.009484160423278808, 0.00950864028930664, 0.009724255561828613, 0.009563808441162109, 0.009947392463684082, 0.00947824001312256, 0.009486335754394531, 0.00986467170715332, 0.009607711791992187, 0.009539584159851074, 0.00952841567993164, 0.009689311981201171, 0.009617888450622558, 0.009573727607727052, 0.00947216033935547, 0.009540127754211427, 0.009606399536132813, 0.009634495735168457, 0.009535712242126465, 0.00945359992980957, 0.009485600471496582, 0.009594911575317383, 0.00961353588104248, 0.009582655906677246, 0.009462176322937011, 0.009457792282104492, 0.009666208267211915, 0.009620863914489746, 0.00957151985168457, 0.009473535537719726, 0.00947379207611084, 0.00966697597503662, 0.009641440391540527, 0.009515551567077637, 0.009485407829284668, 0.009501536369323731, 0.009476160049438477, 0.00976041603088379, 0.009585311889648437, 0.009477952003479003, 0.00957033634185791, 0.009543519973754883, 0.009691136360168457, 0.009590784072875976, 0.009474047660827637, 0.009454751968383789, 0.009513824462890626, 0.009619135856628418, 0.009578816413879395, 0.009529312133789063, 0.009492671966552735, 0.009637727737426757, 0.00970751953125, 0.010018815994262695, 0.00992460823059082, 0.009612735748291015, 0.009559935569763183, 0.009699680328369141, 0.009365792274475098, 0.0095065279006958, 0.009698431968688965, 0.00973299217224121, 0.009682656288146973, 0.009619359970092773, 0.009623007774353027, 0.009876383781433105, 0.0097423677444458, 0.009866239547729493, 
0.009910271644592286, 0.009839903831481934, 0.009850560188293457, 0.009954879760742188, 0.009824704170227051, 0.009689087867736817, 0.009713664054870605, 0.009840831756591797, 0.009895872116088868, 0.010772352218627929, 0.009795488357543946, 0.00981545639038086, 0.009998623847961425, 0.009918880462646485, 0.009818047523498534, 0.009744607925415039, 0.010112832069396973, 0.009916447639465331, 0.01075609588623047, 0.011275808334350585, 0.012282336235046386, 0.00995900821685791, 0.009867615699768066, 0.009721952438354492, 0.010032511711120606, 0.009628128051757813, 0.009833791732788085, 0.009695327758789063, 0.009572575569152832, 0.009560799598693848, 0.009494015693664551, 0.00962998390197754, 0.009527327537536621, 0.009482208251953126, 0.009523360252380372, 0.00944547176361084, 0.009576288223266602, 0.009629823684692384, 0.009451295852661133, 0.009480192184448242, 0.009658176422119141, 0.00977286434173584, 0.009823712348937988, 0.009778079986572265, 0.00961945629119873, 0.009625184059143066, 0.00974022388458252, 0.009654335975646973, 0.009479616165161133, 0.00951097583770752, 0.009530207633972169, 0.009611519813537597, 0.009524703979492188, 0.009158687591552734, 0.009694815635681153, 0.009689184188842773, 0.009546015739440918, 0.009574399948120118, 0.00941875171661377, 0.009520959854125977, 0.009660608291625976, 0.009586367607116699, 0.009528639793395997, 0.009598048210144042, 0.0095763521194458, 0.009624640464782714, 0.009620160102844237, 0.009554176330566407, 0.009489503860473633, 0.009655391693115235, 0.009626943588256836, 0.009623776435852051, 0.009515328407287598, 0.009410528182983399, 0.009688639640808105, 0.009811840057373046, 0.009665087699890136, 0.009621503829956055, 0.009603296279907227, 0.009633567810058594, 0.009762847900390625, 0.009655743598937989, 0.009546272277832031, 0.009595999717712403, 0.00971072006225586, 0.009642016410827637, 0.009780447959899902, 0.0094901762008667, 0.00945622444152832, 0.009463392257690429, 0.00951580810546875, 0.009541088104248047, 0.009731967926025391, 0.009581024169921876, 0.009735967636108398, 0.009736415863037109, 0.009608287811279297, 0.009515904426574706, 0.009446751594543458, 0.009607872009277344, 0.009809951782226562, 0.009732064247131347, 0.009496576309204101, 0.009592831611633301, 0.009457663536071777, 0.009682208061218263, 0.009738975524902344, 0.009464127540588379, 0.009467583656311035, 0.009684576034545898, 0.009809856414794921, 0.009712096214294433, 0.009656096458435058, 0.009702816009521484, 0.00968569564819336, 0.009787008285522461, 0.009203840255737304, 0.009463744163513184, 0.009595232009887695, 0.009732416152954101, 0.009742624282836914, 0.009493856430053712, 0.009491423606872558, 0.009663264274597167, 0.009694016456604004, 0.009676511764526368, 0.009545696258544923, 0.00948799991607666, 0.009542783737182618, 0.009612223625183106, 0.009492511749267579, 0.009499232292175292, 0.009484383583068847, 0.009721856117248535, 0.009614784240722657, 0.009511712074279786, 0.009666336059570313, 0.00947542381286621, 0.00954751968383789, 0.009750880241394044, 0.009508959770202637, 0.009488863945007324, 0.010655903816223144, 0.009685855865478515, 0.00961843204498291, 0.009529343605041504, 0.009543487548828125, 0.009507007598876953, 0.009754624366760254, 0.009985119819641113, 0.00979417610168457, 0.009787679672241211, 0.009656064033508301, 0.009971967697143555, 0.00980345630645752, 0.009740608215332031, 0.009735936164855958, 0.009750783920288086, 0.009898207664489745, 0.009860544204711915, 0.009732447624206542, 0.009680031776428222, 
0.009715871810913086, 0.009841312408447265, 0.009947168350219726, 0.009830400466918946, 0.009722880363464356, 0.00971673583984375, 0.009901439666748047, 0.0098023681640625, 0.00973414421081543, 0.009690719604492188, 0.009851296424865723, 0.010255711555480957, 0.009865856170654297, 0.009746463775634765, 0.009711039543151856, 0.009835071563720703, 0.010170720100402831, 0.00944761562347412, 0.009756863594055176, 0.009870528221130371, 0.00993552017211914, 0.009701215744018554, 0.009914560317993164, 0.009599072456359863, 0.009789471626281738, 0.009943167686462402, 0.00962060832977295, 0.009505536079406738, 0.009599167823791504, 0.009785344123840332, 0.00983017635345459, 0.009697600364685059, 0.009680607795715331, 0.009793439865112304, 0.009871456146240234, 0.009836864471435548, 0.009748096466064454, 0.009586943626403808, 0.009680671691894531, 0.009869440078735352, 0.009985055923461914, 0.009833120346069335, 0.009582816123962402, 0.009615360260009765, 0.01023904037475586, 0.009720767974853516, 0.009688896179199219, 0.009613504409790039, 0.00974847984313965, 0.009906432151794434, 0.009709312438964843, 0.009689344406127929, 0.00961900806427002, 0.009912511825561524, 0.00988268756866455, 0.009660384178161622, 0.009622495651245117, 0.00955504035949707, 0.009743264198303223, 0.009811967849731444, 0.009714079856872558, 0.009559647560119629, 0.00948361587524414, 0.009691807746887207, 0.009815775871276855, 0.009818079948425294, 0.009727935791015625, 0.009655743598937989, 0.009679295539855958, 0.010055871963500976, 0.00985696029663086, 0.009693568229675292, 0.009778528213500977, 0.009806495666503906, 0.00992255973815918, 0.00982425594329834, 0.00972771167755127, 0.009548064231872558, 0.00961353588104248, 0.009716544151306152, 0.009814623832702637, 0.010125311851501465, 0.009623167991638183, 0.009661919593811035, 0.009608096122741699, 0.009471103668212891, 0.009505151748657226, 0.009626111984252929, 0.009656479835510254, 0.009620512008666992, 0.00962399959564209, 0.009548159599304199, 0.009635552406311036, 0.009554207801818848, 0.009539584159851074, 0.009483455657958984, 0.009558912277221679, 0.0095927677154541, 0.009547967910766602, 0.009488191604614258, 0.009504768371582031, 0.010526720046997071, 0.009848128318786621, 0.009781951904296876, 0.009472224235534668, 0.009600799560546875, 0.00955510425567627, 0.009636927604675292, 0.00959977626800537, 0.009507840156555175, 0.009607168197631836, 0.009750592231750487, 0.009701312065124512, 0.009736191749572755, 0.009477952003479003, 0.009549311637878418, 0.00952790355682373, 0.009620896339416504, 0.009649151802062989, 0.009457344055175782, 0.009433216094970703, 0.009602944374084472, 0.009558239936828613, 0.009926688194274903, 0.009656255722045898, 0.00947542381286621, 0.009482208251953126, 0.010091103553771973, 0.009596384048461915, 0.009466303825378417, 0.009517056465148926, 0.009534527778625488, 0.009690048217773438, 0.009650176048278808, 0.009512576103210449, 0.00946390438079834, 0.009562399864196778, 0.009579584121704102, 0.009577152252197265, 0.0095, 0.009472928047180175, 0.009644319534301758, 0.009760479927062988, 0.009241439819335938, 0.009441280364990234, 0.009650176048278808, 0.00967238426208496, 0.00967302417755127, 0.00960921573638916, 0.00953264045715332, 0.009691935539245605, 0.009728063583374023, 0.0095927677154541, 0.00956169605255127, 0.009637727737426757, 0.009565855979919434, 0.009702303886413573, 0.009556991577148437, 0.01195900821685791, 0.010778528213500976, 0.009812447547912597, 0.009869215965270996, 0.009605119705200196, 
0.009535231590270997, 0.00948572826385498, 0.009679488182067871, 0.00966089630126953, 0.009579808235168456, 0.009588288307189942, 0.009619647979736329, 0.00994700813293457, 0.009788160324096679, 0.009655391693115235, 0.009470784187316894, 0.00949443244934082, 0.009617695808410645, 0.00960921573638916, 0.009455615997314454, 0.009622655868530273, 0.009816415786743164, 0.009843551635742187, 0.009792384147644042, 0.00985366439819336, 0.009494624137878417, 0.009489472389221192, 0.009690112113952636, 0.00953775978088379, 0.00948310375213623, 0.009431551933288575, 0.009437472343444823, 0.009637791633605957, 0.009934975624084472, 0.009558176040649414, 0.009588640213012695, 0.009744383811950684, 0.009699328422546387, 0.009534496307373047, 0.00959177589416504, 0.009453568458557129, 0.009444704055786133, 0.009632415771484375, 0.00954911994934082, 0.009566911697387695, 0.013919872283935547, 0.015174176216125488, 0.009745792388916015, 0.009273664474487305, 0.00953324794769287, 0.00955731201171875, 0.009509568214416503, 0.009463808059692384, 0.009461503982543945, 0.009647616386413574, 0.00958131217956543, 0.009652223587036133, 0.00950614356994629, 0.009491104125976563, 0.0095283203125, 0.009566368103027344, 0.009519935607910156, 0.009447456359863281, 0.009490464210510254, 0.009662431716918945, 0.009992192268371582, 0.009885215759277343, 0.009724384307861328, 0.009913951873779296, 0.009679264068603515, 0.009897983551025391, 0.00966214370727539, 0.009511199951171875, 0.009633472442626953, 0.010129311561584472, 0.009885343551635742, 0.00991926383972168, 0.00971116828918457, 0.009581088066101074, 0.009788607597351074, 0.009679840087890626, 0.009618207931518555, 0.009657312393188476, 0.009650176048278808, 0.010064096450805663, 0.009748448371887207, 0.009706591606140137, 0.009521599769592284, 0.009677087783813477, 0.009659839630126952, 0.009711999893188477, 0.009629887580871582, 0.009474176406860352, 0.009485631942749024, 0.009668767929077149, 0.009592255592346192, 0.009599871635437011, 0.009674847602844238, 0.011787808418273926, 0.011426527976989747, 0.00961903953552246, 0.009600255966186523, 0.009749407768249512, 0.009846624374389648, 0.00988486385345459, 0.009595392227172851, 0.0095927677154541, 0.009538240432739258, 0.00964361572265625, 0.009624992370605469, 0.009578880310058593, 0.009393856048583985, 0.009942527770996093, 0.009821151733398438, 0.009717599868774414, 0.009636863708496094, 0.009559040069580077, 0.009665568351745606, 0.009747424125671388, 0.0096112642288208, 0.009543583869934083, 0.009623680114746094, 0.009753984451293945, 0.009849344253540039, 0.009633152008056641, 0.009611871719360352, 0.009635968208312988, 0.00975216007232666, 0.009630144119262695, 0.009487456321716308, 0.009470208168029785, 0.009468544006347656, 0.009897664070129394, 0.009718079566955567, 0.00973414421081543, 0.009725888252258301, 0.00960313606262207, 0.009795583724975587, 0.009657983779907226, 0.009666144371032715, 0.009562751770019532, 0.009558208465576172, 0.009770560264587402, 0.009703392028808594, 0.00964566421508789, 0.009630144119262695, 0.009687264442443848, 0.009934080123901368, 0.009608127593994141, 0.009619071960449218, 0.009506175994873047, 0.009450495719909668, 0.009644031524658203, 0.00966256046295166, 0.009612768173217774, 0.009540032386779786, 0.00982812786102295, 0.0102608003616333, 0.009715616226196289, 0.009526623725891113, 0.009499296188354493, 0.00954319953918457, 0.009625439643859863, 0.009499423980712891, 0.009475008010864257, 0.009450400352478027, 0.009653887748718261, 0.009660256385803223, 
0.009676544189453126, 0.009695615768432617, 0.009548192024230956, 0.009562111854553223, 0.00989417552947998, 0.009570143699645995, 0.009207584381103516, 0.00982857608795166, 0.009713248252868652, 0.009533984184265137, 0.009475968360900878, 0.009492480278015136, 0.009552960395812988, 0.009835455894470215, 0.009527296066284179, 0.009451519966125489, 0.0096112642288208, 0.009558303833007813, 0.009881535530090331, 0.009526592254638672, 0.009497056007385254, 0.009525247573852539, 0.009622847557067871, 0.009591487884521485, 0.009637087821960448, 0.009673503875732422, 0.00960707187652588, 0.009658592224121094, 0.009751999855041504, 0.009574848175048828, 0.009658016204833985, 0.009576800346374512, 0.0096112642288208, 0.009660415649414063, 0.009492447853088379, 0.009500576019287109, 0.009565888404846191, 0.009648384094238282, 0.009632160186767578, 0.009637663841247559, 0.009517056465148926, 0.009577664375305175, 0.009691007614135742, 0.009823103904724122, 0.00959494400024414, 0.009523072242736817, 0.009511103630065918, 0.009648063659667968, 0.009665535926818849, 0.009552000045776367, 0.009506815910339356, 0.009538432121276856, 0.009565631866455078, 0.009723808288574219, 0.009626463890075683, 0.009509951591491698, 0.009489248275756836, 0.00958454418182373, 0.00954310417175293, 0.009452256202697754, 0.009608927726745606, 0.00956214427947998, 0.009598912239074707, 0.009621184349060058, 0.00951750373840332, 0.009492287635803223, 0.009508799552917481, 0.00962172794342041, 0.009698464393615722, 0.009191967964172364, 0.00944428825378418, 0.00965833568572998, 0.009563039779663086, 0.00951033592224121, 0.009546303749084472, 0.009463808059692384, 0.009699328422546387, 0.009808192253112793, 0.009612992286682128, 0.009580544471740723, 0.009484288215637206, 0.009770560264587402, 0.009615263938903808, 0.00981174373626709, 0.00956060791015625, 0.009519455909729004, 0.00971945571899414, 0.009756640434265136, 0.009634048461914062, 0.009532704353332519, 0.009427264213562011, 0.009602815628051757, 0.009685152053833008, 0.00970742416381836, 0.00945798397064209, 0.009437472343444823, 0.009651840209960938, 0.009820575714111329, 0.009653823852539063, 0.00951363182067871, 0.009468768119812012, 0.009628767967224122, 0.009928511619567872, 0.009537535667419434, 0.009522303581237793, 0.009484831809997558, 0.009697279930114745, 0.009578144073486328, 0.009538240432739258, 0.009506815910339356, 0.00975881576538086, 0.009712736129760742, 0.009599712371826172, 0.00954377555847168, 0.009532511711120606, 0.00968387222290039, 0.00977280044555664, 0.00963203239440918, 0.009640128135681152, 0.009645855903625489, 0.00991164779663086, 0.00994371223449707, 0.009883135795593261, 0.009720319747924805, 0.009617600440979004, 0.009727359771728516, 0.009886143684387207, 0.009846015930175782, 0.009751296043395996, 0.009709088325500489, 0.009814496040344239, 0.009814016342163086, 0.009440287590026856, 0.0096527042388916, 0.009904800415039062, 0.009802944183349609, 0.009695903778076172, 0.009623552322387695, 0.009596927642822266, 0.00976860809326172, 0.00966486358642578, 0.009651552200317383, 0.009599648475646972, 0.009693183898925782, 0.009807807922363281, 0.009625120162963867, 0.009624095916748046, 0.009521408081054688, 0.009608960151672364, 0.009666560173034668, 0.00954543972015381, 0.009552160263061523, 0.009439455986022948, 0.009472064018249512, 0.009671839714050293, 0.009554495811462402, 0.009738112449645996, 0.009517215728759765, 0.009555839538574218, 0.009911744117736817, 0.009849151611328126, 0.009646047592163086, 0.009623647689819336, 
0.009703712463378906, 0.009779199600219727, 0.009744383811950684, 0.009520447731018067, 0.00945644760131836, 0.009576319694519043, 0.009728320121765137, 0.009735456466674805, 0.009544095993041991, 0.009445376396179199, 0.009718912124633789, 0.009675647735595703, 0.009662816047668457, 0.009522080421447754, 0.009489343643188477, 0.009569567680358887, 0.00964793586730957, 0.00965500831604004, 0.009549856185913086, 0.009480223655700684, 0.009587743759155273, 0.009766143798828125, 0.00961734390258789, 0.009501824378967285, 0.009413215637207031, 0.009523200035095216, 0.0096461763381958, 0.009586688041687011, 0.009502623558044434, 0.009410655975341797, 0.009502880096435547, 0.00984447956085205, 0.00922265625, 0.009613216400146484, 0.009791616439819337, 0.009930720329284668, 0.009779199600219727, 0.009730048179626465, 0.009719712257385254, 0.009977055549621581, 0.009950079917907716, 0.009771007537841797, 0.009615360260009765, 0.009623807907104492, 0.010088191986083985, 0.009803775787353516, 0.009906175613403321, 0.009692416191101075, 0.009905983924865722, 0.00982102394104004, 0.009762911796569825, 0.009677887916564942, 0.009808832168579102, 0.009724191665649414, 0.009803392410278321, 0.009783391952514648, 0.009783295631408692, 0.009606528282165527, 0.009649087905883789, 0.009901760101318359, 0.009752096176147461, 0.009664992332458495, 0.009666463851928712, 0.009715583801269531, 0.010000831604003906, 0.009819519996643067, 0.009777215957641602, 0.00965452766418457, 0.009705504417419434, 0.009891903877258301, 0.009737695693969727, 0.009996512413024902, 0.009689151763916016, 0.009662848472595215, 0.009776448249816895, 0.009771583557128906, 0.009557408332824707, 0.009527199745178223, 0.009771552085876465, 0.009658528327941894, 0.009682944297790527, 0.010372735977172852, 0.010145695686340332, 0.01073744010925293, 0.010043744087219239, 0.009765215873718262, 0.009740511894226074, 0.009727775573730469, 0.010061823844909668, 0.009893888473510743, 0.009819904327392577, 0.009729791641235351, 0.009603584289550781, 0.009861120223999023, 0.009719840049743652, 0.009246111869812012, 0.009519712448120117, 0.009950336456298827, 0.009614208221435546, 0.009629695892333985, 0.009612799644470215, 0.009647904396057129, 0.009836864471435548, 0.009748512268066407, 0.009640447616577149, 0.009559840202331542, 0.009700511932373047, 0.009785759925842285, 0.009785599708557129, 0.009582240104675293, 0.009701343536376953, 0.009697600364685059, 0.009699551582336426, 0.009663935661315917, 0.009655072212219238, 0.009532928466796875, 0.009628064155578613, 0.009678848266601562, 0.00959488010406494, 0.00955084800720215, 0.010144512176513671, 0.011502047538757325, 0.014032671928405762, 0.012222463607788087, 0.010391712188720703, 0.009938783645629883, 0.009939295768737793, 0.009801376342773437, 0.009705568313598633, 0.009777055740356446, 0.00974847984313965, 0.009893888473510743, 0.009789440155029297, 0.00966857624053955, 0.009698944091796875, 0.009762720108032226, 0.0098472957611084, 0.00991427230834961, 0.009697376251220703, 0.009654272079467773, 0.009902079582214356, 0.009880831718444824, 0.009980671882629395, 0.009682944297790527, 0.01065120029449463, 0.009967552185058594, 0.009897472381591797, 0.009751551628112793, 0.009644160270690918, 0.009789312362670898, 0.01002291202545166, 0.009893823623657226, 0.00978335952758789, 0.00972390365600586, 0.009652511596679687, 0.009903840065002441, 0.009826144218444825, 0.009802975654602052, 0.009430975914001464, 0.01019273567199707, 0.009759552001953125, 0.009620927810668945, 
0.009652959823608398, 0.009641983985900878, 0.009805024147033692, 0.009695167541503907, 0.009602911949157715, 0.009587200164794921, 0.009689599990844726, 0.009913887977600098, 0.009722335815429688, 0.00964185619354248, 0.009602975845336915, 0.009801952362060547, 0.00991971206665039, 0.009694272041320801, 0.009655839920043945, 0.009558079719543457, 0.009853055953979492, 0.0096910400390625, 0.009795680046081542, 0.009574367523193359, 0.009512991905212402, 0.009703616142272949, 0.009916288375854492, 0.009625120162963867, 0.00950755214691162, 0.009590399742126466, 0.009924639701843261, 0.009740320205688477, 0.009731743812561035, 0.009556575775146485, 0.009561856269836426, 0.009618623733520508, 0.009614208221435546, 0.009590304374694825, 0.0095830078125, 0.009531040191650391, 0.009580063819885253, 0.009654848098754883, 0.009593024253845214, 0.009564224243164063, 0.00956822395324707, 0.009674943923950196, 0.009981823921203612, 0.009751615524291992, 0.009604000091552734, 0.00974233627319336, 0.009660415649414063, 0.009740159988403321, 0.009666080474853516, 0.00956867218017578, 0.009774592399597168, 0.009782143592834473, 0.009751392364501952, 0.009581472396850586, 0.00947824001312256, 0.00960262393951416, 0.009590751647949218, 0.009721376419067384, 0.009522239685058593]",tokens/s,103.13443891398221,,, 4bit-awq-exllama-v1-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-exllama-v2-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemv-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
" 4bit-awq-gemm-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,831.627264,666.763264,0.0,281.018368,267.897856,s,1,9.456859375,9.456859375,0.0,9.456859375,9.456859375,9.456859375,9.456859375,[9.456859375],,kWh,1.515592843337193e-05,1.6645277530026877e-06,4.411392417996707e-06,2.1231848604371323e-05,,MB,1281.880064,775.815168,0.0,360.710144,344.082944,s,17,0.19688054466247562,0.011581208509557387,0.0001569116910684609,0.011544095993041991,0.011799289894104004,0.011808019256591798,0.01183460189819336,"[0.011492735862731933, 0.011729632377624512, 0.011448672294616699, 0.011544095993041991, 0.0117990083694458, 0.011444160461425781, 0.011365728378295898, 0.01184124755859375, 0.011717727661132812, 0.011452447891235352, 0.011799712181091309, 0.011585280418395997, 0.011751935958862305, 0.011616800308227539, 0.011386719703674317, 0.011502559661865235, 0.011402079582214356]",tokens/s,22104.774280571506,kWh,3.351567573430541e-07,3.696187562701572e-08,1.7318485598622225e-07,5.453034889562921e-07,tokens/kWh,469463345.0630998,MB,1328.508928,800.980992,0.0,385.875968,344.085504,s,17,10.18960107421875,0.5993882984834559,0.004308318420695943,0.5982415161132812,0.6067464233398437,0.6075535156249999,0.6085300390625,"[0.600537353515625, 0.5964866943359375, 0.5984871826171875, 0.5980602416992188, 0.6072483520507812, 0.600726806640625, 0.5982415161132812, 0.5997461547851562, 0.5964443969726563, 0.608774169921875, 0.5976922607421875, 0.5994452514648437, 0.6064118041992187, 0.593465087890625, 0.5981964111328125, 0.5924757690429687, 0.59716162109375]",tokens/s,105.10715701224005,kWh,1.728407195907644e-05,1.9061257727718128e-06,6.556412030955048e-06,2.5746609762803296e-05,tokens/kWh,2446924.1030334607,,s,1071,10.180634686470036,0.009505727998571458,0.0002507588389683319,0.00946390438079834,0.009692511558532714,0.009804975986480714,0.010341026878356932,"[0.009095871925354004, 0.009379839897155762, 0.009383935928344727, 0.009555456161499023, 0.009564672470092773, 0.00943289566040039, 0.009510751724243163, 0.009402463912963867, 0.009549920082092284, 0.009519264221191407, 0.009406463623046875, 0.009377792358398437, 0.009360544204711914, 0.009511775970458984, 0.009488384246826171, 0.00943289566040039, 0.009744576454162598, 0.009398271560668945, 0.009601280212402344, 0.009498656272888184, 0.00943280029296875, 0.009393759727478027, 0.009543871879577637, 0.009563776016235351, 0.009521439552307128, 0.009813376426696777, 0.009401280403137207, 0.009474047660827637, 0.009801728248596191, 0.009537216186523437, 0.009436703681945801, 0.009383071899414062, 0.009371199607849122, 0.009584447860717774, 0.009521408081054688, 0.009469056129455566, 0.009385024070739746, 0.009381759643554687, 0.0096210880279541, 0.009668224334716797, 0.00940944004058838, 0.009743904113769531, 0.009404704093933106, 0.009936351776123047, 0.009531935691833497, 0.0094268798828125, 0.00940447998046875, 0.010045727729797364, 
0.009795424461364746, 0.009583519935607911, 0.009571295738220215, 0.00935267162322998, 0.009429408073425292, 0.009939104080200196, 0.009660544395446778, 0.009614656448364258, 0.009519647598266602, 0.009520671844482421, 0.009735967636108398, 0.009519392013549805, 0.009578911781311036, 0.009400320053100587, 0.009373600006103516, 0.009205727577209473, 0.00951689624786377, 0.009370016098022462, 0.009479904174804687, 0.010134719848632812, 0.009613439559936524, 0.009423616409301758, 0.00929702377319336, 0.00942579174041748, 0.009560064315795898, 0.009578495979309083, 0.009453568458557129, 0.009356767654418945, 0.009398112297058106, 0.009577152252197265, 0.0094551362991333, 0.009496383666992188, 0.00944809627532959, 0.009291775703430176, 0.009447423934936524, 0.009587807655334473, 0.00934943962097168, 0.009380031585693359, 0.009314271926879883, 0.009484864234924317, 0.00985308837890625, 0.009464960098266601, 0.009380191802978515, 0.009368127822875976, 0.009402048110961915, 0.009500672340393066, 0.009383935928344727, 0.009364576339721679, 0.010115455627441407, 0.009418432235717773, 0.009568991661071778, 0.009421055793762206, 0.009348575592041015, 0.009455743789672852, 0.009431327819824219, 0.00964140796661377, 0.00942956829071045, 0.009298144340515136, 0.009361184120178223, 0.009392127990722657, 0.009480511665344239, 0.009492159843444824, 0.009324543952941895, 0.00932863998413086, 0.009514399528503418, 0.009488991737365723, 0.009427007675170898, 0.009385600090026855, 0.00928604793548584, 0.009338784217834472, 0.009526816368103028, 0.009363936424255372, 0.0095665283203125, 0.00945529556274414, 0.00935321617126465, 0.0096211519241333, 0.009445728302001953, 0.009344415664672851, 0.009158432006835937, 0.009526944160461426, 0.009392416000366211, 0.009338591575622559, 0.009471615791320801, 0.009402655601501464, 0.00963651180267334, 0.009449376106262206, 0.00935859203338623, 0.009312607765197755, 0.009326720237731933, 0.009450048446655273, 0.010537055969238282, 0.010383071899414063, 0.009459424018859863, 0.009506208419799805, 0.009816160202026367, 0.009491231918334961, 0.009431039810180664, 0.009308287620544434, 0.009398143768310547, 0.009494527816772461, 0.009345279693603516, 0.009315232276916503, 0.009374815940856934, 0.009326335906982421, 0.009462911605834961, 0.009566944122314453, 0.00934342384338379, 0.009401503562927247, 0.009464384078979492, 0.009807647705078125, 0.009564384460449219, 0.009467040061950684, 0.009487199783325195, 0.01010905647277832, 0.009643903732299805, 0.009533439636230469, 0.009414655685424805, 0.009324383735656739, 0.009335968017578125, 0.00940351963043213, 0.009443552017211913, 0.009383040428161621, 0.009335552215576172, 0.009449248313903808, 0.009424256324768067, 0.009366144180297851, 0.009475968360900878, 0.009320735931396484, 0.009319680213928223, 0.009478464126586914, 0.009484576225280763, 0.00943513584136963, 0.00946729564666748, 0.009390975952148438, 0.009672032356262208, 0.009548159599304199, 0.009549823760986328, 0.009545727729797364, 0.009486335754394531, 0.009441503524780273, 0.009611040115356446, 0.00912230396270752, 0.01011747169494629, 0.009572128295898437, 0.009502719879150391, 0.009423968315124511, 0.009310463905334472, 0.00929856014251709, 0.009498144149780273, 0.009386431694030762, 0.009345088005065918, 0.009518272399902343, 0.009363424301147462, 0.009480511665344239, 0.009407008171081543, 0.00940550422668457, 0.009340991973876953, 0.009501855850219726, 0.009424223899841308, 0.009488896369934082, 0.009402112007141113, 0.00932256031036377, 0.009555968284606933, 
0.009492544174194336, 0.00961638355255127, 0.00971878433227539, 0.009326592445373535, 0.009540608406066894, 0.009364288330078125, 0.009511103630065918, 0.009447423934936524, 0.0093306884765625, 0.009354656219482421, 0.009363840103149414, 0.009396512031555176, 0.009354623794555664, 0.009517727851867675, 0.009317791938781739, 0.009394335746765136, 0.009511296272277831, 0.009424799919128419, 0.009392191886901855, 0.009312255859375, 0.00932249641418457, 0.009477919578552246, 0.009404191970825196, 0.009394623756408692, 0.009758560180664063, 0.009648320198059081, 0.01009763240814209, 0.00961843204498291, 0.009656319618225098, 0.00942300796508789, 0.009473728179931641, 0.009805888175964355, 0.009733856201171875, 0.009664575576782226, 0.009507136344909668, 0.009463839530944824, 0.009588064193725586, 0.009466496467590332, 0.009480192184448242, 0.009518272399902343, 0.009517120361328126, 0.009338624000549316, 0.00984921646118164, 0.00957427215576172, 0.009646080017089843, 0.009691136360168457, 0.009600159645080566, 0.009530207633972169, 0.009559455871582032, 0.00969206428527832, 0.009715392112731933, 0.009715583801269531, 0.009592960357666016, 0.009555968284606933, 0.00962764835357666, 0.00978873634338379, 0.010662591934204102, 0.009619071960449218, 0.009605504035949708, 0.009713312149047852, 0.009869664192199708, 0.009684096336364746, 0.009833344459533691, 0.009645600318908692, 0.009842752456665039, 0.009759424209594726, 0.009651519775390626, 0.00961315155029297, 0.009550399780273437, 0.009735648155212401, 0.009787872314453125, 0.009698783874511719, 0.009638496398925781, 0.009542816162109375, 0.009756575584411621, 0.009601568222045899, 0.009478560447692871, 0.009545727729797364, 0.009327648162841796, 0.009465984344482422, 0.009604288101196288, 0.009492128372192384, 0.009665663719177247, 0.009410943984985352, 0.009441791534423828, 0.009613311767578125, 0.00942188835144043, 0.009470911979675293, 0.009424127578735352, 0.009616127967834473, 0.009706656455993652, 0.009788352012634277, 0.00956345558166504, 0.009559712409973144, 0.00958902359008789, 0.009665504455566406, 0.009569984436035156, 0.009510272026062012, 0.009605759620666504, 0.009571840286254883, 0.009742239952087402, 0.009653120040893555, 0.009484000205993652, 0.009464127540588379, 0.009210111618041993, 0.009608960151672364, 0.009365568161010742, 0.009436703681945801, 0.00950710391998291, 0.009506752014160156, 0.009983551979064942, 0.009368191719055176, 0.00932470417022705, 0.009518943786621093, 0.009491935729980468, 0.009579039573669434, 0.009330464363098144, 0.009311712265014649, 0.0093603515625, 0.009529120445251464, 0.009402463912963867, 0.009503999710083007, 0.009337504386901856, 0.009508288383483887, 0.009687616348266601, 0.009508959770202637, 0.010251872062683106, 0.009745792388916015, 0.009546688079833984, 0.009652223587036133, 0.009467904090881347, 0.009383935928344727, 0.009353311538696289, 0.009411487579345703, 0.009595904350280762, 0.009476287841796875, 0.009410367965698243, 0.009568256378173828, 0.009584639549255371, 0.009675968170166015, 0.00956704044342041, 0.009458911895751954, 0.009408543586730957, 0.009489151954650879, 0.00975376033782959, 0.009503199577331542, 0.009443712234497071, 0.009425056457519532, 0.009529184341430665, 0.009652223587036133, 0.009629023551940919, 0.009545375823974609, 0.009651040077209473, 0.0095250244140625, 0.009633407592773438, 0.009640159606933594, 0.00950096035003662, 0.009434816360473632, 0.009453791618347168, 0.009666912078857422, 0.009601152420043946, 0.009481216430664062, 0.009464703559875488, 
0.009705151557922363, 0.009559679985046386, 0.00949728012084961, 0.009493632316589356, 0.00910688018798828, 0.009525983810424805, 0.00939417552947998, 0.009385151863098145, 0.009384767532348633, 0.00936723232269287, 0.00946003246307373, 0.00946895980834961, 0.009385120391845704, 0.009363264083862304, 0.009588543891906737, 0.009692511558532714, 0.009628031730651855, 0.00948476791381836, 0.009428192138671875, 0.009476896286010741, 0.009572192192077637, 0.009461952209472655, 0.009549599647521972, 0.009343168258666993, 0.009330559730529785, 0.009559552192687988, 0.0094967679977417, 0.009444128036499024, 0.009344736099243163, 0.00930729579925537, 0.009501312255859375, 0.009832768440246583, 0.00948412799835205, 0.009346112251281739, 0.00931935977935791, 0.009521151542663574, 0.009459168434143066, 0.009386015892028808, 0.00942950439453125, 0.009326784133911133, 0.009424511909484862, 0.009533632278442382, 0.009396224021911622, 0.009315967559814453, 0.009395615577697754, 0.009374784469604493, 0.009520768165588379, 0.009416511535644531, 0.009359264373779297, 0.009366239547729492, 0.00945907211303711, 0.00965999984741211, 0.009663359642028808, 0.009623007774353027, 0.009583135604858399, 0.009592320442199707, 0.010054143905639648, 0.009676799774169922, 0.009449472427368164, 0.009484288215637206, 0.009403583526611328, 0.00963260841369629, 0.009631711959838868, 0.00970582389831543, 0.009548640251159668, 0.009546560287475586, 0.009631008148193359, 0.00917529582977295, 0.009428319931030273, 0.009637632369995118, 0.009875967979431152, 0.009672863960266113, 0.009590559959411622, 0.009520992279052735, 0.009713888168334961, 0.00980406379699707, 0.009547648429870605, 0.009476096153259277, 0.009433088302612304, 0.009520383834838867, 0.009544192314147949, 0.009515551567077637, 0.009610336303710937, 0.009441920280456543, 0.009907487869262696, 0.009652928352355957, 0.009613344192504883, 0.009472000122070312, 0.009416704177856445, 0.00974847984313965, 0.009697279930114745, 0.009448960304260253, 0.009402688026428224, 0.009337023735046386, 0.009426719665527343, 0.009543904304504395, 0.00942080020904541, 0.009391136169433593, 0.009339967727661132, 0.009432991981506348, 0.009651424407958985, 0.00946985626220703, 0.009436191558837891, 0.009463647842407226, 0.009451519966125489, 0.009774911880493163, 0.009615551948547364, 0.009659680366516113, 0.009478495597839356, 0.009473983764648437, 0.009853376388549804, 0.009532511711120606, 0.009366432189941406, 0.009342335700988769, 0.009419391632080078, 0.00951296043395996, 0.009496416091918945, 0.009416031837463378, 0.009433279991149903, 0.009429183959960938, 0.00952086353302002, 0.009478879928588866, 0.009380191802978515, 0.009379551887512208, 0.009403871536254884, 0.009486368179321289, 0.009425567626953126, 0.009375519752502441, 0.009317760467529296, 0.009438976287841797, 0.009225279808044433, 0.00939516830444336, 0.009373023986816406, 0.009573023796081543, 0.009625663757324219, 0.009547743797302246, 0.009393759727478027, 0.009389792442321777, 0.009407135963439941, 0.009537792205810547, 0.009527039527893067, 0.009836159706115722, 0.009572416305541993, 0.009595232009887695, 0.009701343536376953, 0.009713760375976562, 0.009539744377136231, 0.009393919944763183, 0.009593055725097657, 0.00950217628479004, 0.009470272064208984, 0.00939247989654541, 0.009335776329040528, 0.00937827205657959, 0.009516799926757812, 0.009449088096618652, 0.009394432067871094, 0.00931619167327881, 0.009282560348510742, 0.009528479576110839, 0.009441503524780273, 0.00939625644683838, 0.00937731170654297, 
0.009321279525756836, 0.009458751678466796, 0.009665472030639648, 0.009550080299377442, 0.009284543991088866, 0.0093787202835083, 0.00973136043548584, 0.009498687744140625, 0.009411135673522949, 0.00932419204711914, 0.009337183952331542, 0.009506015777587891, 0.009470751762390137, 0.00941875171661377, 0.009333024024963379, 0.0093340482711792, 0.009521599769592284, 0.009594016075134278, 0.009647040367126465, 0.009369407653808594, 0.009314399719238281, 0.0094269437789917, 0.009537599563598632, 0.00937059211730957, 0.009377023696899415, 0.009291487693786621, 0.009449567794799805, 0.009539487838745118, 0.009420576095581055, 0.00932271957397461, 0.009263487815856933, 0.00960921573638916, 0.009475647926330566, 0.009394463539123534, 0.00935747241973877, 0.009449119567871095, 0.009769375801086427, 0.009476032257080078, 0.009441280364990234, 0.009416223526000977, 0.009525440216064452, 0.009643808364868164, 0.009518943786621093, 0.009411423683166505, 0.009398079872131348, 0.009385984420776367, 0.009618751525878906, 0.009441632270812989, 0.010017439842224122, 0.009952960014343261, 0.011828736305236816, 0.009712127685546875, 0.00958198356628418, 0.009389920234680176, 0.009373791694641113, 0.009466496467590332, 0.009541664123535156, 0.009514368057250977, 0.009714143753051758, 0.009417119979858399, 0.009508383750915528, 0.009555904388427735, 0.009430368423461915, 0.009372608184814453, 0.00933193588256836, 0.009425472259521485, 0.009803168296813965, 0.009403200149536132, 0.009346272468566895, 0.009353343963623046, 0.009379520416259765, 0.009538335800170899, 0.009427136421203613, 0.009367456436157226, 0.009390336036682129, 0.009350720405578614, 0.009492511749267579, 0.009425439834594727, 0.00936627197265625, 0.0093787841796875, 0.009329664230346679, 0.009465855598449707, 0.009438207626342773, 0.009334912300109863, 0.009486495971679687, 0.009470879554748535, 0.009483424186706543, 0.009393823623657227, 0.009349120140075684, 0.010323007583618165, 0.011121600151062012, 0.013594464302062988, 0.011669055938720703, 0.009194175720214843, 0.009594271659851075, 0.00976694393157959, 0.009408576011657715, 0.009323360443115235, 0.009529120445251464, 0.009531264305114745, 0.009482239723205567, 0.009431167602539063, 0.009674624443054199, 0.009414655685424805, 0.009486111640930175, 0.009422271728515626, 0.009442079544067382, 0.009361151695251466, 0.009570783615112304, 0.009612159729003906, 0.009638848304748536, 0.009405695915222168, 0.009326335906982421, 0.009425375938415527, 0.009449888229370117, 0.009414143562316894, 0.009513567924499512, 0.009457311630249023, 0.009321184158325195, 0.00948799991607666, 0.009489760398864747, 0.009398943901062011, 0.009494751930236816, 0.009541407585144043, 0.009537535667419434, 0.009578495979309083, 0.009459327697753907, 0.009617792129516602, 0.009473983764648437, 0.009515071868896484, 0.00951296043395996, 0.009406656265258788, 0.009369024276733398, 0.009406847953796387, 0.009586912155151367, 0.009504544258117675, 0.009474047660827637, 0.009400447845458985, 0.00954476833343506, 0.009491264343261719, 0.009539104461669922, 0.009417183876037597, 0.009353440284729004, 0.009398048400878906, 0.009564064025878906, 0.00953164768218994, 0.009459456443786622, 0.009426527976989747, 0.009353631973266602, 0.00942636775970459, 0.009627743721008301, 0.00940614414215088, 0.009517951965332032, 0.009359359741210938, 0.009430496215820313, 0.009869855880737305, 0.009156319618225097, 0.009464096069335938, 0.009791040420532226, 0.009619647979736329, 0.009512639999389649, 0.009361984252929688, 
0.009363455772399902, 0.009522624015808105, 0.00974835205078125, 0.009449952125549317, 0.009404767990112304, 0.009334752082824707, 0.009656160354614259, 0.00950614356994629, 0.009386719703674317, 0.009396224021911622, 0.010231072425842286, 0.009434911727905273, 0.009821120262145997, 0.009703424453735352, 0.009486271858215331, 0.00934124755859375, 0.009414400100708008, 0.00951251220703125, 0.009423135757446289, 0.009379615783691406, 0.009295455932617188, 0.00937241554260254, 0.00951910400390625, 0.009406496047973632, 0.009385919570922852, 0.009394240379333497, 0.009318400382995605, 0.009644031524658203, 0.009408512115478516, 0.009355551719665527, 0.009321311950683593, 0.009499520301818847, 0.009549823760986328, 0.009473119735717773, 0.009396767616271973, 0.009376031875610352, 0.009381695747375489, 0.009428640365600587, 0.009384160041809081, 0.009349184036254883, 0.00956230354309082, 0.009417023658752442, 0.009721920013427735, 0.00940009593963623, 0.009515007972717286, 0.00950102424621582, 0.009335743904113769, 0.009493215560913086, 0.009572352409362793, 0.009502559661865235, 0.009830559730529785, 0.009795231819152832, 0.010041695594787597, 0.009670656204223632, 0.009580032348632812, 0.009773088455200194, 0.009487135887145996, 0.009527039527893067, 0.009675904273986817, 0.009655424118041992, 0.009725695610046386, 0.009773216247558593, 0.009582143783569336, 0.009449760437011718, 0.00948201560974121, 0.009781696319580079, 0.0097871675491333, 0.009613311767578125, 0.009508864402770996, 0.009312383651733399, 0.00934832000732422, 0.009451552391052246, 0.009533568382263184, 0.00939065647125244, 0.009715968132019043, 0.009828031539916992, 0.010008576393127442, 0.010983424186706543, 0.009539679527282715, 0.009383040428161621, 0.009498687744140625, 0.009567135810852051, 0.009400320053100587, 0.009342816352844239, 0.009258784294128419, 0.009368991851806641, 0.009838975906372071, 0.009648159980773926, 0.009377440452575684, 0.009357151985168457, 0.009374015808105469, 0.009622079849243164, 0.009461759567260742, 0.010822719573974609, 0.01092089557647705, 0.009492032051086426, 0.009546175956726074, 0.00947753620147705, 0.009337183952331542, 0.009478400230407715, 0.009537535667419434, 0.009586688041687011, 0.009454784393310546, 0.00938588809967041, 0.009325152397155761, 0.011346528053283692, 0.009561375617980957, 0.009478943824768066, 0.009337887763977051, 0.009494848251342773, 0.009450079917907715, 0.009412320137023925, 0.009427136421203613, 0.009625120162963867, 0.009414112091064453, 0.009567008018493653, 0.009614815711975097, 0.009716064453125, 0.009517312049865723, 0.009398240089416504, 0.009132063865661622, 0.009420991897583008, 0.009336864471435547, 0.009312064170837402, 0.009522432327270508, 0.009527359962463379, 0.009419360160827638, 0.009396512031555176, 0.009334783554077148, 0.009447423934936524, 0.009482272148132324, 0.009367679595947266, 0.009348959922790527, 0.009277664184570312, 0.009361184120178223, 0.009497792243957519, 0.009421536445617676, 0.009334688186645507, 0.009324735641479492, 0.00936451244354248, 0.009485247611999512, 0.00941481590270996, 0.009635711669921875, 0.009348447799682618, 0.009513759613037109, 0.009524895668029785, 0.009405856132507323, 0.00933561611175537, 0.009323807716369628, 0.009392864227294922, 0.009564224243164063, 0.009447360038757324, 0.009431039810180664, 0.009560064315795898, 0.009371647834777832, 0.00946566390991211, 0.009399871826171875, 0.009353856086730956, 0.00932249641418457, 0.009342335700988769, 0.009437824249267578, 0.009594847679138183, 
0.009358847618103027, 0.009312352180480958, 0.009307616233825684, 0.009466719627380372, 0.00949068832397461, 0.009361280441284179, 0.00932249641418457, 0.009326911926269532, 0.009445055961608887, 0.00943126392364502, 0.009603103637695313, 0.009302847862243653, 0.00941379165649414, 0.009537311553955078, 0.009393888473510742, 0.009384223937988281, 0.009287360191345215, 0.009308639526367188, 0.009529312133789063, 0.00946777629852295, 0.009574399948120118, 0.009119423866271972, 0.00946390438079834, 0.009413408279418946, 0.00938268756866455, 0.009382304191589355, 0.009345536231994628, 0.009662464141845703, 0.009542847633361816, 0.009546560287475586, 0.00941055965423584, 0.009395487785339355, 0.009473055839538573, 0.009506496429443359, 0.00933289623260498, 0.00956332778930664, 0.009657312393188476, 0.009543647766113281, 0.009691967964172363, 0.00944595241546631, 0.009355392456054688, 0.009455327987670898, 0.009505056381225585, 0.009529279708862306, 0.009378080368041991, 0.00941260814666748, 0.009293120384216308, 0.009423551559448242, 0.009848640441894532, 0.009543295860290527, 0.0094234561920166, 0.00937980842590332, 0.009393247604370117, 0.01002943992614746, 0.009455391883850098, 0.009318816184997558, 0.009352767944335937, 0.009349984169006348, 0.009487296104431152, 0.009862144470214844, 0.009469951629638672, 0.009412511825561524, 0.009443072319030762, 0.009546079635620118, 0.009488160133361817, 0.00942307186126709, 0.009390080451965332, 0.009416543960571289, 0.009553471565246581, 0.009504639625549316, 0.010117216110229492, 0.009423744201660157, 0.009402112007141113, 0.009490431785583496, 0.009551872253417968, 0.009469568252563476, 0.009574624061584472, 0.00954793643951416, 0.009516575813293456, 0.009454208374023438, 0.009565407752990723, 0.009323103904724121, 0.009335040092468262, 0.009569664001464844, 0.00916988754272461, 0.009335295677185058, 0.009382176399230956, 0.009532447814941405, 0.009413056373596192, 0.009312383651733399, 0.009363103866577148, 0.00936780834197998, 0.009427488327026367, 0.009628671646118164, 0.009400351524353028, 0.009310463905334472, 0.009296159744262695, 0.009521568298339844, 0.00935968017578125, 0.00948192024230957, 0.00950492763519287, 0.009264991760253907, 0.009452927589416503, 0.009504575729370118, 0.009314271926879883, 0.0093787202835083, 0.00932259178161621, 0.009440287590026856, 0.009421600341796874, 0.009338560104370118, 0.009287263870239258, 0.009427712440490723, 0.009383935928344727, 0.00943228816986084, 0.009350175857543946, 0.00926796817779541, 0.009230560302734375, 0.009442079544067382, 0.009477248191833496, 0.00939743995666504, 0.009418432235717773, 0.009337023735046386, 0.009527296066284179, 0.009550880432128907, 0.009392383575439454, 0.009316576004028321, 0.00952556800842285, 0.009467807769775391, 0.009468000411987304, 0.009366815567016601, 0.009263615608215332, 0.009381695747375489, 0.00955020809173584, 0.00940886402130127, 0.009422528266906737, 0.009293824195861817, 0.009334112167358398, 0.009405088424682618, 0.009556991577148437, 0.009393471717834473, 0.009320063591003418, 0.009346367835998536, 0.009310015678405762, 0.009593184471130372, 0.009400639533996582, 0.00933897590637207, 0.00935251235961914, 0.009214624404907227, 0.009366527557373047, 0.009411616325378418, 0.009297056198120117, 0.00955180835723877, 0.009775232315063477, 0.009545696258544923, 0.009832287788391113, 0.009356224060058593, 0.009397600173950195, 0.009697952270507813, 0.009570303916931153, 0.009469951629638672, 0.009399871826171875, 0.009430527687072754, 0.01003001594543457, 
0.009696991920471191, 0.009527584075927735, 0.00930611228942871, 0.00935644817352295, 0.009529631614685058, 0.009424896240234374, 0.009357888221740723, 0.009371616363525391, 0.009310239791870118, 0.009882752418518066, 0.009755647659301758, 0.009498751640319825, 0.00940550422668457, 0.009429696083068848, 0.009537471771240234, 0.009473407745361328, 0.009366208076477051, 0.009344415664672851, 0.009388287544250489, 0.009533791542053223, 0.009535552024841309, 0.009416319847106934, 0.009361920356750488, 0.009355072021484376, 0.009443327903747559, 0.009494272232055664, 0.009406720161437988, 0.009283424377441406, 0.009314623832702637, 0.009454463958740235, 0.009427616119384766, 0.009351391792297364, 0.009309439659118653, 0.009540448188781738, 0.009616512298583985, 0.009515487670898437, 0.009374112129211425, 0.009545280456542968, 0.00932703971862793, 0.009539648056030274, 0.009573727607727052, 0.009417183876037597, 0.00937996768951416, 0.009437055587768554, 0.009445504188537597, 0.009563743591308594, 0.009388447761535645]",tokens/s,105.19972801139293,,, 4bit-awq-exllama-v1-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,1,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,818.761728,666.763264,0.0,281.018368,267.897856,s,1,9.6127890625,9.6127890625,0.0,9.6127890625,9.6127890625,9.6127890625,9.6127890625,[9.6127890625],,kWh,1.5340605758380355e-05,1.684966244752296e-06,4.485003588003411e-06,2.1510575591136063e-05,,MB,1293.713408,775.815168,0.0,360.710144,344.082944,s,17,0.19929152011871337,0.011723030595218432,0.00017550656666153505,0.011736255645751953,0.011830508995056152,0.011941561889648436,0.012225598754882811,"[0.011556703567504883, 0.011852800369262695, 0.01165500831604004, 0.012296607971191406, 0.011654720306396485, 0.011747424125671386, 0.011815648078918457, 0.011750432014465332, 0.011542304039001465, 0.011568096160888672, 0.011508992195129394, 0.011736255645751953, 0.011606687545776368, 0.011798336029052734, 0.011660096168518066, 0.011787487983703613, 0.01175391960144043]",tokens/s,21837.35663919676,kWh,3.389843792683431e-07,3.7383830919613954e-08,1.7524888197212272e-07,5.516170921600798e-07,tokens/kWh,464090042.9635501,MB,1340.616704,800.980992,0.0,385.875968,344.085504,s,17,10.346817932128907,0.6086363489487592,0.004081359464565873,0.6081692504882813,0.6140361572265625,0.6153832641601563,0.6167835375976563,"[0.6021505126953125, 0.6121931762695313, 0.6075858764648437, 0.6171336059570313, 0.6110765380859375, 0.6134298095703125, 0.6087356567382812, 0.6066644897460938, 0.6040718383789062, 0.6028499755859374, 0.6093318481445312, 0.6074606323242188, 0.6149456787109375, 0.610459716796875, 0.6056370849609375, 0.6081692504882813, 
0.6049222412109375]",tokens/s,103.51008464876281,kWh,1.7502282571707792e-05,1.9301981164942858e-06,6.671194503675413e-06,2.610367519187749e-05,tokens/kWh,2413453.2603900656,,s,1071,10.337577745437633,0.009652266802462764,0.0002793436939253724,0.009605952262878418,0.009832703590393066,0.009952672004699707,0.010569689559936523,"[0.009246720314025878, 0.009492480278015136, 0.009587967872619629, 0.009625951766967773, 0.00948038387298584, 0.009437408447265625, 0.009380096435546876, 0.009451264381408692, 0.00961945629119873, 0.009547455787658692, 0.009471391677856445, 0.009444512367248536, 0.009514880180358887, 0.009760640144348144, 0.009658080101013183, 0.009522720336914063, 0.009569024085998535, 0.009502719879150391, 0.009599167823791504, 0.009481599807739258, 0.009419039726257324, 0.009432671546936035, 0.009445216178894043, 0.00961404800415039, 0.00959603214263916, 0.009689151763916016, 0.009473152160644531, 0.009473631858825684, 0.009562208175659179, 0.009506303787231446, 0.009425408363342285, 0.009428480148315429, 0.009484800338745117, 0.00954691219329834, 0.009776191711425781, 0.009570112228393554, 0.009591936111450195, 0.009501536369323731, 0.009590784072875976, 0.009550944328308105, 0.009458592414855957, 0.00946787166595459, 0.009451456069946289, 0.009544863700866699, 0.009529760360717773, 0.009499168395996094, 0.009446559906005859, 0.00945251178741455, 0.009814047813415527, 0.00961638355255127, 0.009514847755432128, 0.009420000076293945, 0.009431167602539063, 0.009788991928100586, 0.00963167953491211, 0.009772768020629883, 0.009619808197021485, 0.00954377555847168, 0.009746432304382324, 0.009658016204833985, 0.00959727954864502, 0.009674176216125489, 0.009787967681884766, 0.009571711540222167, 0.009710495948791503, 0.009647680282592773, 0.009765088081359863, 0.009891263961791992, 0.009765376091003418, 0.009709407806396484, 0.009668064117431641, 0.009931391716003417, 0.00983568000793457, 0.009927424430847168, 0.009672160148620606, 0.00967750358581543, 0.009793536186218262, 0.009790816307067872, 0.009778911590576173, 0.00964684772491455, 0.009731295585632325, 0.0098787202835083, 0.00986092758178711, 0.010000351905822755, 0.009872896194458008, 0.009746623992919921, 0.009909791946411133, 0.009774111747741699, 0.009719552040100098, 0.00968723201751709, 0.009654335975646973, 0.009864768028259278, 0.009830112457275391, 0.009860960006713868, 0.009723775863647461, 0.009732864379882812, 0.009805695533752441, 0.00968511962890625, 0.009649375915527343, 0.009489184379577636, 0.009512672424316406, 0.009666144371032715, 0.009736895561218262, 0.009506239891052246, 0.009456192016601563, 0.009506431579589844, 0.00973043155670166, 0.009613311767578125, 0.009514399528503418, 0.009471615791320801, 0.009550944328308105, 0.009834367752075196, 0.009731967926025391, 0.009717439651489258, 0.009628095626831056, 0.009779199600219727, 0.009751615524291992, 0.009669631958007812, 0.009584575653076173, 0.009526592254638672, 0.009877728462219239, 0.009754143714904785, 0.009709919929504394, 0.009574015617370606, 0.009472991943359375, 0.009533439636230469, 0.009245216369628906, 0.009492256164550782, 0.009497152328491212, 0.009524895668029785, 0.009725184440612792, 0.009601792335510255, 0.009461119651794433, 0.009466496467590332, 0.009492480278015136, 0.009727871894836426, 0.009594271659851075, 0.009478879928588866, 0.009418623924255371, 0.00948646354675293, 0.009674752235412597, 0.00961740779876709, 0.009672032356262208, 0.009475744247436524, 0.009504128456115722, 0.009823871612548827, 0.009622559547424317, 
0.009511551856994628, 0.009496576309204101, 0.009682751655578614, 0.009902624130249024, 0.009727168083190917, 0.009654560089111329, 0.009723679542541504, 0.00974675178527832, 0.009849280357360839, 0.0096561918258667, 0.009716896057128907, 0.009523263931274414, 0.009556608200073242, 0.009721792221069336, 0.009554112434387207, 0.009728416442871094, 0.009459456443786622, 0.009524864196777344, 0.009759231567382813, 0.009581727981567383, 0.009642720222473145, 0.009619359970092773, 0.009625696182250976, 0.009736191749572755, 0.009988096237182617, 0.00963584041595459, 0.009529343605041504, 0.009703424453735352, 0.0097194242477417, 0.009574784278869628, 0.009478143692016602, 0.009414655685424805, 0.009476096153259277, 0.009686847686767579, 0.009533599853515625, 0.009561727523803711, 0.009508352279663086, 0.009721759796142579, 0.009860095977783203, 0.009682975769042968, 0.010481344223022462, 0.010158111572265625, 0.00943887996673584, 0.0096812162399292, 0.009511072158813477, 0.009471872329711913, 0.009734304428100585, 0.009632672309875488, 0.009535840034484863, 0.009599360466003418, 0.009420831680297852, 0.009564607620239259, 0.00956390380859375, 0.009576448440551758, 0.009489919662475586, 0.009490528106689454, 0.009916831970214843, 0.00960921573638916, 0.00951296043395996, 0.009473247528076172, 0.009476799964904786, 0.009701760292053222, 0.00969279956817627, 0.009592703819274902, 0.009694751739501952, 0.009558015823364258, 0.009747136116027831, 0.00966163158416748, 0.009605952262878418, 0.009549823760986328, 0.009809472084045411, 0.009810367584228515, 0.009676799774169922, 0.009608863830566406, 0.009676192283630371, 0.009574527740478516, 0.009734975814819335, 0.009830400466918946, 0.010014240264892577, 0.010157631874084474, 0.011146016120910644, 0.013964447975158691, 0.012447711944580078, 0.01030246353149414, 0.009984000205993653, 0.00994268798828125, 0.010029727935791016, 0.00958835220336914, 0.009551936149597168, 0.009682592391967773, 0.009793984413146972, 0.009635807991027832, 0.009566143989562988, 0.009544960021972656, 0.009715776443481445, 0.009731103897094726, 0.009610176086425781, 0.00948089599609375, 0.009460927963256836, 0.00957753562927246, 0.009650239944458008, 0.00950767993927002, 0.009380831718444824, 0.009412704467773437, 0.009477952003479003, 0.009158495903015137, 0.009529408454895019, 0.009542240142822265, 0.009654272079467773, 0.00971951961517334, 0.009549440383911133, 0.009474720001220703, 0.009445376396179199, 0.009584639549255371, 0.009649824142456055, 0.009798239707946778, 0.009596672058105469, 0.009766207695007325, 0.009726079940795898, 0.010025535583496094, 0.00960921573638916, 0.009539711952209472, 0.009449343681335449, 0.00964748764038086, 0.009683232307434082, 0.009508288383483887, 0.009677727699279786, 0.009474047660827637, 0.009592672348022462, 0.009600735664367676, 0.009597567558288575, 0.009482048034667968, 0.009409983634948731, 0.009524959564208985, 0.009593695640563964, 0.00955292797088623, 0.00944422435760498, 0.009492927551269532, 0.009580224037170411, 0.009697279930114745, 0.009748448371887207, 0.009555968284606933, 0.009485407829284668, 0.00970844841003418, 0.00992460823059082, 0.009859071731567384, 0.009941056251525878, 0.009711551666259765, 0.010062144279479981, 0.009659104347229004, 0.00966486358642578, 0.01037929630279541, 0.00955196762084961, 0.009730112075805665, 0.009656767845153808, 0.009828351974487304, 0.009617216110229492, 0.009944352149963379, 0.009785728454589844, 0.009744928359985352, 0.009611519813537597, 0.009711359977722168, 
0.010567999839782715, 0.009848575592041015, 0.010419808387756347, 0.009861120223999023, 0.009693535804748535, 0.009805824279785156, 0.009312031745910644, 0.00964425563812256, 0.009631008148193359, 0.009685312271118164, 0.009695424079895019, 0.009539967536926269, 0.009531231880187988, 0.009574655532836915, 0.009710399627685547, 0.009804544448852539, 0.009645919799804687, 0.009570655822753906, 0.009579680442810058, 0.009877632141113282, 0.009822303771972657, 0.009659008026123047, 0.009613311767578125, 0.00973628807067871, 0.009746335983276367, 0.009676799774169922, 0.00956726360321045, 0.009653216361999511, 0.009587008476257325, 0.009692095756530761, 0.009629631996154785, 0.00966329574584961, 0.009600768089294434, 0.009555456161499023, 0.009683520317077636, 0.009669024467468262, 0.009738271713256835, 0.009755871772766113, 0.00977286434173584, 0.010734527587890626, 0.009806976318359376, 0.009625439643859863, 0.009460800170898438, 0.009622655868530273, 0.009837408065795899, 0.009950655937194824, 0.009770815849304199, 0.009667103767395019, 0.009734111785888672, 0.00978486442565918, 0.009811871528625489, 0.009643903732299805, 0.009593567848205566, 0.00964358425140381, 0.010018752098083497, 0.01009436798095703, 0.009575136184692383, 0.009543680191040039, 0.00946774387359619, 0.009635487556457519, 0.009603584289550781, 0.009472000122070312, 0.009486335754394531, 0.00988684844970703, 0.01015078353881836, 0.011622112274169922, 0.009646240234375, 0.009554304122924805, 0.009793503761291504, 0.009446335792541503, 0.009766912460327149, 0.009708767890930175, 0.00989299201965332, 0.009944992065429687, 0.010946175575256348, 0.010964896202087402, 0.009625215530395508, 0.009662528038024903, 0.0096976318359375, 0.009554112434387207, 0.009562496185302735, 0.009571423530578613, 0.00975926399230957, 0.009846783638000489, 0.010115008354187011, 0.00959500789642334, 0.009514880180358887, 0.009623007774353027, 0.00955247974395752, 0.00943513584136963, 0.009491456031799317, 0.009501567840576172, 0.009788895606994629, 0.009898655891418457, 0.010524991989135743, 0.009747615814208984, 0.009607263565063476, 0.009648192405700683, 0.009543456077575684, 0.009503040313720703, 0.009505120277404785, 0.009506752014160156, 0.009584639549255371, 0.009623552322387695, 0.009567296028137207, 0.009413279533386231, 0.009471872329711913, 0.0096014404296875, 0.009489600181579589, 0.009492287635803223, 0.009458687782287598, 0.009500800132751465, 0.009602944374084472, 0.009588095664978027, 0.009544063568115234, 0.009486623764038086, 0.00947993564605713, 0.009602432250976562, 0.009508864402770996, 0.009477151870727538, 0.009478015899658203, 0.009499711990356445, 0.009700480461120606, 0.009541407585144043, 0.009496095657348633, 0.009618016242980957, 0.009502623558044434, 0.00965830421447754, 0.009653440475463867, 0.009431615829467773, 0.009541376113891602, 0.00955996799468994, 0.009276512145996094, 0.00957919979095459, 0.009490431785583496, 0.009488320350646973, 0.009611455917358399, 0.009514880180358887, 0.009510560035705566, 0.009476672172546387, 0.009461536407470703, 0.009607040405273437, 0.009496319770812988, 0.009488767623901367, 0.009431103706359863, 0.00944035243988037, 0.009567135810852051, 0.009547295570373536, 0.009480607986450196, 0.009457599639892579, 0.009373087882995606, 0.00958892822265625, 0.009518848419189454, 0.009514847755432128, 0.009687935829162598, 0.009494527816772461, 0.00960867214202881, 0.009918368339538575, 0.009523679733276367, 0.009504768371582031, 0.00952297592163086, 0.009986495971679688, 0.009679007530212402, 
0.009635904312133788, 0.009573727607727052, 0.009637951850891113, 0.009830623626708985, 0.009723999977111816, 0.0096212797164917, 0.009566783905029297, 0.009598624229431153, 0.009747967720031739, 0.009734496116638184, 0.009602687835693359, 0.009646047592163086, 0.009736512184143066, 0.00976483154296875, 0.009621631622314454, 0.009611295700073243, 0.009520511627197265, 0.0096878080368042, 0.00995468807220459, 0.010031744003295899, 0.00964633560180664, 0.009684351921081543, 0.00983238410949707, 0.009865471839904785, 0.009684800148010254, 0.009613183975219727, 0.009691743850708008, 0.009793631553649903, 0.009729439735412598, 0.009752799987792968, 0.009528608322143554, 0.009636223793029785, 0.00929747200012207, 0.009471615791320801, 0.009560895919799804, 0.00943017578125, 0.009586943626403808, 0.009627488136291504, 0.009515071868896484, 0.009598848342895507, 0.0094519681930542, 0.009564543724060058, 0.009628031730651855, 0.009647744178771973, 0.009588095664978027, 0.009534272193908691, 0.009643456459045411, 0.009683327674865723, 0.009649760246276855, 0.009533856391906738, 0.009615360260009765, 0.009558015823364258, 0.009607168197631836, 0.009494527816772461, 0.009475104331970215, 0.009431327819824219, 0.009509568214416503, 0.009586688041687011, 0.009475135803222656, 0.009470080375671387, 0.009464032173156738, 0.009599583625793457, 0.009697279930114745, 0.009582592010498046, 0.009555968284606933, 0.009480192184448242, 0.00956982421875, 0.00998681640625, 0.009672415733337403, 0.009605119705200196, 0.009518207550048828, 0.009689151763916016, 0.009762687683105469, 0.009521951675415039, 0.009480640411376954, 0.009441280364990234, 0.009610976219177246, 0.009671680450439453, 0.009708352088928222, 0.009613504409790039, 0.009596927642822266, 0.009881183624267579, 0.01000492763519287, 0.009770079612731934, 0.009603967666625976, 0.009476096153259277, 0.00960307216644287, 0.009578495979309083, 0.009478176116943359, 0.009461728096008301, 0.009446847915649413, 0.009799648284912109, 0.009545536041259765, 0.009433888435363769, 0.009414655685424805, 0.009672960281372071, 0.009506400108337403, 0.00974944019317627, 0.0094204158782959, 0.009447039604187012, 0.009692959785461426, 0.009591808319091797, 0.009578495979309083, 0.009566207885742188, 0.009598208427429198, 0.009626463890075683, 0.009557920455932617, 0.009529343605041504, 0.009571552276611328, 0.009483072280883789, 0.00963379192352295, 0.009705439567565918, 0.009576607704162597, 0.00969718360900879, 0.009545984268188477, 0.00966419219970703, 0.009530367851257325, 0.009498720169067382, 0.009405664443969727, 0.009412287712097168, 0.009778623580932616, 0.01003923225402832, 0.009587167739868164, 0.00952131175994873, 0.009515168190002442, 0.009629535675048827, 0.009547776222229003, 0.009457663536071777, 0.009444576263427734, 0.009437984466552734, 0.009590239524841308, 0.009545375823974609, 0.009607263565063476, 0.009433888435363769, 0.009363648414611816, 0.009606975555419922, 0.009504768371582031, 0.009483648300170898, 0.009495231628417969, 0.00938316822052002, 0.009616064071655274, 0.00957583999633789, 0.00952956771850586, 0.009593119621276855, 0.009461183547973633, 0.009618240356445313, 0.009674592018127441, 0.009500672340393066, 0.009544960021972656, 0.009470720291137696, 0.009581791877746581, 0.009638848304748536, 0.009512767791748047, 0.0095830078125, 0.00946889591217041, 0.009564831733703613, 0.00967686367034912, 0.009501791954040528, 0.009116671562194823, 0.009747967720031739, 0.009727840423583984, 0.009640607833862305, 0.009537535667419434, 
0.009452960014343263, 0.009589344024658204, 0.009658368110656738, 0.009737407684326172, 0.009579520225524902, 0.009541664123535156, 0.009666336059570313, 0.009621503829956055, 0.009578463554382324, 0.009562272071838379, 0.009648287773132324, 0.009772959709167481, 0.009725088119506837, 0.00978825569152832, 0.00959011173248291, 0.009542112350463866, 0.009637887954711915, 0.009737631797790528, 0.009724512100219726, 0.009511136054992675, 0.009494239807128907, 0.009662528038024903, 0.00961945629119873, 0.009590527534484864, 0.009476223945617675, 0.00952348804473877, 0.009701215744018554, 0.009629695892333985, 0.009557632446289063, 0.009474207878112793, 0.009499872207641601, 0.009642016410827637, 0.009553024291992188, 0.00950284767150879, 0.0094487361907959, 0.00961580753326416, 0.010464256286621093, 0.009767935752868653, 0.009636896133422852, 0.009587679862976074, 0.009777152061462402, 0.009946399688720704, 0.009921248435974122, 0.00970911979675293, 0.009785856246948242, 0.009733471870422363, 0.010002592086791991, 0.009820608139038086, 0.009666560173034668, 0.009624863624572754, 0.009693920135498047, 0.009965567588806153, 0.009689023971557617, 0.009649344444274902, 0.009658592224121094, 0.009709952354431152, 0.009810111999511718, 0.009734016418457031, 0.009324447631835937, 0.009783040046691895, 0.010004096031188965, 0.009716511726379395, 0.009659584045410157, 0.009577280044555663, 0.009647456169128418, 0.009822879791259766, 0.00975481605529785, 0.009663807868957519, 0.009665023803710938, 0.009686176300048827, 0.009759584426879882, 0.01031334400177002, 0.009582976341247558, 0.009553759574890137, 0.009645631790161133, 0.009722463607788086, 0.009999615669250488, 0.009787199974060058, 0.0096943998336792, 0.009804863929748536, 0.009683391571044923, 0.009639776229858399, 0.009537119865417481, 0.009600095748901367, 0.009856800079345703, 0.009711551666259765, 0.009590239524841308, 0.00949084758758545, 0.009553695678710937, 0.009572640419006347, 0.009614463806152343, 0.009545696258544923, 0.009482527732849121, 0.00950556755065918, 0.009602368354797363, 0.00977791976928711, 0.00964083194732666, 0.00946889591217041, 0.009449472427368164, 0.009522527694702148, 0.009611007690429687, 0.009581472396850586, 0.009457663536071777, 0.009452896118164062, 0.009521471977233886, 0.009627872467041016, 0.00960524845123291, 0.009490431785583496, 0.009446656227111817, 0.009554976463317872, 0.009576160430908203, 0.009555968284606933, 0.009685152053833008, 0.00946774387359619, 0.009518879890441895, 0.009708928108215333, 0.009663328170776368, 0.009561599731445313, 0.009482751846313477, 0.009731712341308594, 0.009614815711975097, 0.00924127960205078, 0.009455743789672852, 0.009497759819030761, 0.00968502426147461, 0.009558719635009766, 0.010251903533935547, 0.0095480318069458, 0.009683072090148926, 0.009730048179626465, 0.009525055885314942, 0.00950496006011963, 0.009444831848144531, 0.009530207633972169, 0.009655232429504395, 0.009650943756103515, 0.009615360260009765, 0.009739263534545899, 0.009575327873229981, 0.009672800064086913, 0.009515007972717286, 0.00954361629486084, 0.009714816093444824, 0.009724287986755372, 0.009853023529052735, 0.009703904151916505, 0.009592896461486816, 0.009551520347595216, 0.009916704177856445, 0.009881600379943848, 0.009712736129760742, 0.009630559921264648, 0.009492544174194336, 0.00974028778076172, 0.009627327919006348, 0.00959654426574707, 0.00953228759765625, 0.009513855934143067, 0.009630656242370606, 0.009590335845947266, 0.009908608436584473, 0.00963987159729004, 
0.009566143989562988, 0.009709728240966798, 0.009777215957641602, 0.009641183853149413, 0.009544447898864746, 0.009582592010498046, 0.009688608169555664, 0.009856831550598145, 0.009602016448974609, 0.009486016273498534, 0.009834495544433594, 0.009794719696044922, 0.009779423713684081, 0.00953212833404541, 0.00949619197845459, 0.009578783988952637, 0.009973343849182128, 0.009728416442871094, 0.009663935661315917, 0.01215340805053711, 0.012624064445495605, 0.010614080429077149, 0.009407008171081543, 0.009613439559936524, 0.00963584041595459, 0.009688608169555664, 0.009679231643676758, 0.009584416389465332, 0.009543999671936036, 0.009701631546020507, 0.009623104095458985, 0.00967903995513916, 0.009565792083740234, 0.009644448280334473, 0.009801471710205078, 0.009805215835571288, 0.009601887702941894, 0.009561663627624511, 0.009546175956726074, 0.009517279624938965, 0.009631520271301269, 0.009621376037597657, 0.009474176406860352, 0.009602879524230957, 0.009707584381103515, 0.009701503753662109, 0.009688575744628907, 0.009525823593139648, 0.009469759941101075, 0.00957852840423584, 0.009602687835693359, 0.009564640045166016, 0.009601087570190429, 0.009533375740051269, 0.009703295707702637, 0.009739935874938965, 0.009707807540893554, 0.009778911590576173, 0.009842687606811524, 0.009836480140686036, 0.010082816123962402, 0.00963587188720703, 0.009607168197631836, 0.00974028778076172, 0.009928192138671875, 0.009961919784545898, 0.010547231674194336, 0.009731519699096679, 0.009755231857299805, 0.009866847991943359, 0.00985324764251709, 0.00977888011932373, 0.009743871688842774, 0.009585568428039551, 0.009644031524658203, 0.009643648147583008, 0.010008064270019532, 0.009583488464355468, 0.009560064315795898, 0.009610272407531739, 0.009625887870788575, 0.009479904174804687, 0.009483424186706543, 0.009460895538330079, 0.009819968223571778, 0.009248640060424805, 0.00951923179626465, 0.009585375785827637, 0.009645600318908692, 0.009636320114135743, 0.009555487632751465, 0.009482720375061034, 0.009500415802001953, 0.00966489601135254, 0.009576319694519043, 0.009517056465148926, 0.009487551689147949, 0.009517919540405274, 0.009727968215942382, 0.009593088150024414, 0.0095513916015625, 0.009496479988098145, 0.009504672050476073, 0.009654687881469727, 0.009752575874328612, 0.009576448440551758, 0.009481951713562012, 0.009521344184875487, 0.009695327758789063, 0.009748448371887207, 0.009924639701843261, 0.009498335838317872, 0.009606687545776368, 0.009590847969055176, 0.009505727767944335, 0.009555135726928711, 0.00947052764892578, 0.009518848419189454, 0.00978707218170166, 0.009709504127502442, 0.009619775772094727, 0.009463935852050782, 0.009441311836242677, 0.009765024185180665, 0.009632896423339844, 0.009779647827148437, 0.009548352241516114, 0.00958892822265625, 0.009641504287719727, 0.009560576438903809, 0.009553248405456544, 0.00953990364074707, 0.009449695587158203, 0.009682720184326171, 0.009678239822387696, 0.009623968124389648, 0.009478560447692871, 0.009506591796875, 0.009694432258605957, 0.009567008018493653, 0.009854463577270508, 0.009525983810424805, 0.009578271865844727, 0.009613311767578125, 0.010550911903381349, 0.009539584159851074, 0.00955020809173584, 0.009653887748718261, 0.009373472213745118, 0.009565952301025391, 0.009543807983398437, 0.00952233600616455, 0.009727071762084961, 0.009559552192687988, 0.009567584037780762, 0.009505696296691894, 0.009494048118591308, 0.009638303756713868, 0.009558079719543457, 0.009800992012023925, 0.010285792350769042, 0.009606528282165527, 
0.009648256301879883, 0.00981760025024414, 0.009647104263305664, 0.009461888313293457, 0.009594592094421386, 0.009832703590393066, 0.010064800262451172, 0.009516032218933105, 0.009455103874206543, 0.009784000396728516, 0.01057363224029541, 0.009832448005676269, 0.00985647964477539, 0.009919008255004884, 0.009594047546386719, 0.009748319625854492, 0.0095283842086792, 0.009684896469116211, 0.009512063980102538, 0.009552127838134765, 0.009660384178161622, 0.009534111976623535, 0.009480192184448242, 0.009549823760986328, 0.01001798439025879, 0.009726719856262207, 0.00959494400024414, 0.009444543838500977, 0.009642815589904786, 0.00962384033203125, 0.009669376373291015, 0.009779647827148437, 0.009520768165588379, 0.009525728225708009, 0.009537216186523437, 0.009620223999023438, 0.009498623847961426, 0.009478303909301758, 0.009709183692932128, 0.00953990364074707, 0.009707743644714356, 0.009508543968200684, 0.009436672210693359, 0.009593184471130372, 0.009582176208496093, 0.009648896217346192, 0.009586496353149414, 0.009508864402770996, 0.009566304206848144, 0.009300576210021973, 0.009879039764404298, 0.009508352279663086, 0.00951296043395996, 0.009681920051574706, 0.009637887954711915, 0.009502719879150391, 0.009736415863037109, 0.009456576347351075, 0.010092703819274903, 0.009616064071655274, 0.009629695892333985, 0.00951910400390625, 0.009500672340393066, 0.009643136024475097, 0.009677696228027343, 0.009588031768798829, 0.00945792007446289, 0.009454048156738282, 0.009615167617797852, 0.00952131175994873, 0.009510911941528321, 0.00956339168548584, 0.009485055923461914, 0.009570303916931153, 0.009782655715942383, 0.009577088356018067, 0.009478143692016602, 0.009458880424499511, 0.009608160018920899, 0.009571935653686523, 0.009593088150024414, 0.009510911941528321, 0.009523200035095216, 0.009631744384765625, 0.009588735580444336, 0.00950825595855713, 0.009492480278015136, 0.00945417594909668, 0.009607168197631836, 0.00952627182006836, 0.009630175590515136, 0.009644576072692871, 0.009545727729797364, 0.00955619239807129, 0.009584639549255371, 0.009473919868469238, 0.00948419189453125, 0.009662464141845703, 0.00971776008605957, 0.00973209571838379, 0.009766816139221191, 0.009523296356201172, 0.009446847915649413, 0.0096278076171875, 0.009648351669311524, 0.009525440216064452, 0.009545727729797364, 0.009548864364624023, 0.009778112411499023, 0.010213215827941895, 0.009592160224914551, 0.009587103843688966]",tokens/s,103.60260656541853,,, 4bit-awq-exllama-v2-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,834.297856,666.763264,0.0,281.018368,267.897856,s,1,9.7487314453125,9.7487314453125,0.0,9.7487314453125,9.7487314453125,9.7487314453125,9.7487314453125,[9.7487314453125],,kWh,1.8641986416666373e-05,2.049004257987276e-06,5.974171445999928e-06,2.6665162120653578e-05,,MB,1292.558336,775.815168,0.0,360.710144,344.082944,s,16,0.1921689281463623,0.012010558009147645,0.000512649345502284,0.01184768009185791,0.012276256084442138,0.012852343797683716,0.013650430154800415,"[0.011750080108642579, 0.01183631992340088, 0.012016160011291504, 0.011856224060058594, 0.011839136123657227, 0.01196505641937256, 0.011892383575439452, 0.01169324779510498, 0.01384995174407959, 0.011805824279785156, 0.01192409610748291, 0.012519807815551757, 0.011749119758605956, 0.01177558422088623, 0.01203270435333252, 0.01166323184967041]",tokens/s,21314.580039080764,kWh,3.484283349890613e-07,3.842517544291831e-08,1.7924603308114568e-07,5.660995435131254e-07,tokens/kWh,452217287.46026534,MB,1339.191296,800.980992,0.0,385.875968,344.085504,s,16,9.9976279296875,0.6248517456054687,0.003508880843096318,0.6253663940429688,0.6285144348144531,0.6294359283447266,0.6305980255126953,"[0.6227091674804688, 0.6269884033203125, 0.6208970336914063, 0.6239468383789063, 0.6280771484375, 0.626294189453125, 0.6191254272460938, 0.6308885498046874, 0.6289517211914063, 0.6255499267578125, 0.626571533203125, 0.6273207397460937, 0.625182861328125, 0.6248849487304687, 0.6231543579101563, 0.6170850830078125]",tokens/s,100.823916141827,kWh,1.7954850922823432e-05,1.9801037852585724e-06,6.657547630793859e-06,2.659250233887586e-05,tokens/kWh,2369088.8204943254,,s,1008,9.987487665176385,0.009908221890055944,0.00018679770234734434,0.009897808074951171,0.010067919921875,0.010159094572067262,0.010522610530853271,"[0.009408639907836915, 0.00982364845275879, 0.009908703804016114, 0.009716992378234863, 0.009835007667541504, 0.00976307201385498, 0.00977455997467041, 0.009771712303161621, 0.009793375968933106, 0.009768863677978516, 0.009846624374389648, 0.009815775871276855, 0.00986575984954834, 0.00998969554901123, 0.00981833553314209, 0.009930975914001465, 0.009793120384216309, 0.009922240257263184, 0.009825023651123047, 0.009760736465454101, 0.009781536102294921, 0.009985759735107421, 0.009909248352050782, 0.009934080123901368, 0.009770336151123047, 0.009695648193359375, 0.009754624366760254, 0.010108736038208008, 0.009869024276733399, 0.009812447547912597, 0.00971894359588623, 0.00970019245147705, 0.00981760025024414, 0.009754495620727539, 0.009771648406982423, 0.009752832412719726, 0.009770400047302246, 0.009875807762145996, 0.009869312286376953, 0.009845888137817383, 0.009874303817749024, 0.009906175613403321, 0.010027008056640625, 0.009904128074645996, 0.009799360275268554, 0.009760607719421386, 0.009801888465881347, 0.0109050235748291, 0.011457183837890624, 0.009854687690734863, 0.009908767700195312, 0.009887104034423827, 0.009777759552001953, 0.010061152458190918, 0.00985974407196045, 0.009793472290039063, 0.009762880325317382, 0.009836544036865234, 0.009785344123840332, 0.00984614372253418, 0.00979212760925293, 0.009918463706970216, 0.009915583610534668, 0.009495936393737792, 0.009733152389526368, 0.00985267162322998, 0.009779199600219727, 0.009862560272216797, 0.009943648338317871, 0.009926207542419433, 0.009869600296020508, 0.009940192222595215, 0.00997379207611084, 0.009974687576293946, 0.00992083168029785, 0.01001193618774414, 0.009978272438049317, 0.009844736099243164, 
0.009905695915222168, 0.009854656219482422, 0.009724255561828613, 0.009728447914123535, 0.009934816360473633, 0.009887776374816894, 0.00992460823059082, 0.01010211181640625, 0.009994784355163574, 0.009875519752502442, 0.009924896240234375, 0.010080032348632813, 0.01023084831237793, 0.010026047706604004, 0.010176383972167968, 0.00994268798828125, 0.010060128211975097, 0.009942943572998048, 0.010057151794433594, 0.00989417552947998, 0.010006912231445312, 0.009922528266906738, 0.009891488075256348, 0.00982643222808838, 0.00979792022705078, 0.009890015602111817, 0.009655136108398438, 0.010103487968444824, 0.010458815574645995, 0.010007264137268067, 0.009985856056213379, 0.00993455982208252, 0.00999779224395752, 0.009943008422851562, 0.010771295547485352, 0.009787391662597657, 0.010039263725280762, 0.009803808212280274, 0.009803392410278321, 0.009810303688049317, 0.009883744239807129, 0.009850784301757813, 0.009828351974487304, 0.010088159561157226, 0.009916671752929688, 0.010102815628051758, 0.009957792282104493, 0.009938528060913086, 0.009751392364501952, 0.009954719543457032, 0.009894495964050292, 0.009815199851989746, 0.009876288414001465, 0.009690239906311035, 0.00980678367614746, 0.009893280029296875, 0.009830975532531738, 0.009563424110412598, 0.009593567848205566, 0.009773056030273437, 0.009774975776672364, 0.009751999855041504, 0.009763520240783691, 0.00964147186279297, 0.009827903747558594, 0.009665472030639648, 0.009819552421569825, 0.009681504249572754, 0.009510911941528321, 0.009379839897155762, 0.009422623634338378, 0.009600319862365723, 0.009709983825683595, 0.009738752365112305, 0.010182208061218261, 0.009818559646606445, 0.00990611171722412, 0.009879232406616211, 0.010072640419006348, 0.009826111793518066, 0.009895936012268066, 0.009831647872924804, 0.009981760025024414, 0.010095583915710448, 0.009973759651184083, 0.009783103942871093, 0.010062015533447266, 0.009766912460327149, 0.009682944297790527, 0.009729791641235351, 0.00980627155303955, 0.009807680130004883, 0.009773056030273437, 0.010053631782531738, 0.01015830421447754, 0.009824288368225098, 0.009741375923156739, 0.009870176315307616, 0.00966419219970703, 0.009997920036315918, 0.009967264175415038, 0.010459903717041016, 0.009881888389587402, 0.009927647590637207, 0.009843615531921387, 0.01071718406677246, 0.00982204818725586, 0.010060031890869141, 0.009898943901062012, 0.009898240089416505, 0.009884063720703124, 0.00983180809020996, 0.010090815544128417, 0.010140095710754395, 0.00986508846282959, 0.009879551887512206, 0.009744128227233887, 0.00988105583190918, 0.00963868808746338, 0.009609439849853515, 0.009676544189453126, 0.009703136444091797, 0.009761088371276856, 0.00962559986114502, 0.010383359909057617, 0.009935040473937989, 0.009966943740844727, 0.009941503524780274, 0.009844415664672852, 0.009920127868652344, 0.00994166374206543, 0.009718976020812987, 0.009900095939636231, 0.00996016025543213, 0.010397727966308595, 0.009961376190185547, 0.009990240097045898, 0.010016063690185546, 0.009906880378723145, 0.009809920310974121, 0.009712991714477539, 0.009810591697692871, 0.009828351974487304, 0.00985478401184082, 0.009959615707397462, 0.009827487945556641, 0.00982102394104004, 0.009881855964660645, 0.009796832084655762, 0.01007043170928955, 0.01008448028564453, 0.009801792144775391, 0.009855008125305176, 0.009827967643737792, 0.009724191665649414, 0.009723423957824707, 0.009797760009765625, 0.00978172779083252, 0.009793343544006348, 0.009839679718017578, 0.010075008392333985, 0.009922687530517577, 
0.010106880187988282, 0.009916031837463379, 0.009920479774475098, 0.009925024032592773, 0.009971712112426758, 0.00996275234222412, 0.009931520462036133, 0.009872960090637208, 0.010084799766540527, 0.009902079582214356, 0.010006848335266114, 0.009875136375427246, 0.009541855812072755, 0.009906271934509277, 0.00991750431060791, 0.009915328025817871, 0.009873408317565918, 0.009910079956054687, 0.00990659236907959, 0.009877535820007324, 0.009926719665527344, 0.010077919960021973, 0.00997327995300293, 0.010006976127624512, 0.010059776306152344, 0.010012672424316407, 0.010010623931884765, 0.01002012825012207, 0.009999199867248535, 0.009973152160644531, 0.009920415878295898, 0.010003007888793946, 0.009971712112426758, 0.01021951961517334, 0.009984000205993653, 0.010223103523254394, 0.009972255706787109, 0.010135519981384277, 0.009953280448913575, 0.009856191635131836, 0.009898752212524413, 0.00982636833190918, 0.00983625602722168, 0.009870911598205566, 0.009816800117492676, 0.009969087600708007, 0.009962047576904297, 0.009951552391052247, 0.009907168388366699, 0.01006390380859375, 0.010031807899475097, 0.01024015998840332, 0.009927968025207519, 0.009976384162902831, 0.009977855682373048, 0.009984031677246093, 0.010005855560302734, 0.010142335891723633, 0.009942975997924805, 0.0100065279006958, 0.00999839973449707, 0.009944864273071289, 0.009934047698974609, 0.009861760139465332, 0.009953887939453124, 0.009869088172912597, 0.009869312286376953, 0.009906175613403321, 0.009848511695861817, 0.009893183708190917, 0.010269696235656739, 0.010080256462097169, 0.009899295806884766, 0.009798368453979493, 0.009823712348937988, 0.009508511543273927, 0.00991260814666748, 0.009867136001586913, 0.00970787239074707, 0.009889887809753419, 0.009878975868225098, 0.009928832054138184, 0.009899711608886719, 0.009829216003417969, 0.009903936386108399, 0.00991641616821289, 0.01042636775970459, 0.009890048027038575, 0.010038175582885743, 0.01007049560546875, 0.009895615577697754, 0.009917119979858398, 0.00982204818725586, 0.0098787202835083, 0.009997376441955566, 0.009714783668518067, 0.009798496246337891, 0.009891807556152344, 0.01012940788269043, 0.010039392471313477, 0.009990048408508301, 0.009906175613403321, 0.010002495765686036, 0.010000320434570312, 0.010070015907287597, 0.009902079582214356, 0.009992095947265625, 0.00983414363861084, 0.00999779224395752, 0.01002137565612793, 0.010060256004333495, 0.01011302375793457, 0.009988096237182617, 0.00999014377593994, 0.009996288299560547, 0.009920512199401856, 0.009991328239440917, 0.009908672332763671, 0.009945247650146484, 0.009939071655273437, 0.0098754243850708, 0.009915583610534668, 0.009885696411132813, 0.009901215553283691, 0.009846464157104492, 0.00988326358795166, 0.009850432395935058, 0.010126399993896484, 0.009905471801757812, 0.009954912185668945, 0.009862239837646485, 0.009866559982299804, 0.00987161636352539, 0.009867744445800781, 0.009908191680908203, 0.009861087799072266, 0.00993065643310547, 0.010024959564208985, 0.009539584159851074, 0.00998588752746582, 0.00986742401123047, 0.010096447944641113, 0.009840607643127441, 0.009924448013305665, 0.009857407569885254, 0.009883647918701171, 0.009900032043457031, 0.009991552352905273, 0.010027456283569335, 0.010491711616516113, 0.010121408462524415, 0.010062047958374024, 0.010014687538146973, 0.00988803195953369, 0.0097543363571167, 0.009758720397949219, 0.00978883171081543, 0.00984943962097168, 0.009880864143371582, 0.009793472290039063, 0.009724096298217773, 0.009687647819519044, 0.009787391662597657, 
0.009750528335571289, 0.009846847534179687, 0.00994934368133545, 0.009852352142333985, 0.009797856330871582, 0.00975875186920166, 0.009784799575805664, 0.009908672332763671, 0.009941535949707031, 0.009735103607177734, 0.009729023933410644, 0.009714879989624024, 0.009634336471557618, 0.00976249599456787, 0.009761088371276856, 0.009946880340576171, 0.009789695739746094, 0.009856255531311035, 0.009702272415161133, 0.009777024269104003, 0.009859071731567384, 0.009791487693786622, 0.009768287658691406, 0.009651103973388671, 0.009721023559570312, 0.009787967681884766, 0.009656319618225098, 0.009674240112304687, 0.009603232383728027, 0.00965078353881836, 0.00969600009918213, 0.009684160232543945, 0.00978656005859375, 0.00974499225616455, 0.009772416114807128, 0.009790495872497558, 0.009684063911437989, 0.009675295829772949, 0.009361599922180176, 0.009678079605102538, 0.009673600196838378, 0.009854496002197266, 0.00989014434814453, 0.009829888343811035, 0.009864768028259278, 0.009855648040771485, 0.00991875171661377, 0.00995468807220459, 0.009916383743286133, 0.009938624382019043, 0.010001407623291016, 0.009987615585327148, 0.009965472221374512, 0.01006163215637207, 0.009923295974731445, 0.009952832221984863, 0.010200768470764161, 0.010123167991638184, 0.011272480010986328, 0.009906880378723145, 0.009973888397216796, 0.009981247901916504, 0.010250080108642578, 0.009982208251953124, 0.009883392333984374, 0.009953472137451172, 0.009939359664916993, 0.009994527816772462, 0.010128671646118165, 0.009947456359863282, 0.009913599967956543, 0.009894783973693849, 0.010012831687927246, 0.009899264335632324, 0.010014911651611329, 0.009965984344482422, 0.010036288261413574, 0.01000931167602539, 0.009961471557617188, 0.009904512405395508, 0.010151776313781738, 0.00994918441772461, 0.009862879753112792, 0.009846464157104492, 0.010000991821289062, 0.010061823844909668, 0.010016768455505372, 0.010330368041992187, 0.010233599662780762, 0.009983519554138183, 0.009994751930236816, 0.010002400398254395, 0.010133791923522949, 0.010002143859863281, 0.010009856224060058, 0.010032032012939453, 0.010026368141174316, 0.00998038387298584, 0.01000380802154541, 0.010499903678894043, 0.010343263626098633, 0.010375167846679688, 0.010065664291381836, 0.010233535766601562, 0.010262335777282715, 0.010031871795654297, 0.010280960083007813, 0.010018879890441895, 0.009975008010864258, 0.009863967895507813, 0.009889280319213867, 0.009859616279602051, 0.009901280403137208, 0.009873888015747071, 0.010021087646484374, 0.009918463706970216, 0.009879232406616211, 0.00989020824432373, 0.009811871528625489, 0.009939264297485352, 0.00997548770904541, 0.010172415733337402, 0.009936896324157715, 0.0098854398727417, 0.009916159629821777, 0.009924192428588868, 0.00998259162902832, 0.009899456024169922, 0.0099901123046875, 0.009933856010437012, 0.010012448310852051, 0.009911520004272461, 0.009966431617736816, 0.00991055965423584, 0.009907360076904298, 0.00987718391418457, 0.009892352104187012, 0.009963135719299316, 0.009890560150146485, 0.010151583671569825, 0.010074591636657715, 0.00996070384979248, 0.009864895820617677, 0.00995852756500244, 0.00993667221069336, 0.009930272102355957, 0.00993673610687256, 0.009964192390441894, 0.00993830394744873, 0.009956000328063964, 0.01002847957611084, 0.010001055717468262, 0.009963359832763672, 0.009934880256652833, 0.009893888473510743, 0.009877792358398437, 0.010133279800415038, 0.009985856056213379, 0.01000153636932373, 0.010005727767944336, 0.010061311721801757, 0.009951071739196777, 0.009889792442321778, 
0.009888192176818848, 0.009496383666992188, 0.009869088172912597, 0.009806816101074218, 0.009777088165283204, 0.009814047813415527, 0.009949024200439454, 0.009820063591003418, 0.009826208114624023, 0.009972064018249512, 0.010008352279663086, 0.009959648132324219, 0.009950592041015624, 0.0099170560836792, 0.009895520210266113, 0.009906335830688477, 0.010199295997619628, 0.009971712112426758, 0.010020735740661622, 0.01011520004272461, 0.009937151908874512, 0.010010656356811523, 0.009792256355285644, 0.010167263984680175, 0.010018815994262695, 0.009862208366394043, 0.009921216011047363, 0.009830944061279297, 0.00973209571838379, 0.009690655708312988, 0.009930047988891601, 0.009962080001831054, 0.009826208114624023, 0.00975705623626709, 0.010201087951660156, 0.01031372833251953, 0.011048864364624024, 0.010609087944030761, 0.00980348777770996, 0.009717696189880371, 0.009743776321411133, 0.00978991985321045, 0.00984505558013916, 0.009998208045959472, 0.009869600296020508, 0.009819808006286622, 0.009820032119750976, 0.009757887840270997, 0.009872544288635253, 0.0099202880859375, 0.009876543998718262, 0.009976832389831543, 0.009819519996643067, 0.00977567958831787, 0.009748031616210937, 0.009877792358398437, 0.010022239685058594, 0.009823040008544922, 0.00983788776397705, 0.009889856338500976, 0.009824895858764648, 0.009826144218444825, 0.0098819522857666, 0.00987116813659668, 0.009582592010498046, 0.009877311706542968, 0.009949536323547363, 0.009848671913146973, 0.009986047744750976, 0.009799679756164551, 0.00984825611114502, 0.009914976119995117, 0.009857279777526855, 0.009717472076416015, 0.009863167762756348, 0.010041279792785645, 0.010002495765686036, 0.009915488243103027, 0.009915007591247558, 0.009906463623046875, 0.009873696327209473, 0.00984659194946289, 0.009828191757202148, 0.010061887741088868, 0.009926912307739257, 0.009968992233276366, 0.009845151901245117, 0.00981606388092041, 0.009865216255187988, 0.009785344123840332, 0.009775039672851562, 0.009789823532104492, 0.009897664070129394, 0.00977280044555664, 0.009931008338928222, 0.009930496215820312, 0.009997568130493163, 0.009935647964477538, 0.009789664268493653, 0.009843008041381836, 0.009842368125915527, 0.009768768310546875, 0.010244447708129883, 0.009922240257263184, 0.009891776084899903, 0.009922335624694825, 0.009998208045959472, 0.009809696197509765, 0.009722944259643554, 0.009733951568603516, 0.009871264457702637, 0.00991385555267334, 0.009809503555297852, 0.009851807594299317, 0.00994876766204834, 0.009968031883239746, 0.009897983551025391, 0.009910112380981445, 0.009907936096191406, 0.010084896087646485, 0.009874719619750977, 0.010139328002929687, 0.009927519798278809, 0.01039072036743164, 0.011686816215515136, 0.01042841625213623, 0.009970848083496093, 0.00963584041595459, 0.00992255973815918, 0.009889792442321778, 0.010130784034729005, 0.01004800033569336, 0.010051551818847657, 0.009879967689514161, 0.010059712409973144, 0.010064064025878906, 0.010052319526672363, 0.010090527534484863, 0.010224543571472167, 0.010113183975219726, 0.010159520149230957, 0.010058112144470215, 0.010116191864013671, 0.010094816207885743, 0.00997590446472168, 0.009921119689941407, 0.009969759941101074, 0.00997987174987793, 0.010049535751342773, 0.010048959732055664, 0.009798208236694335, 0.009866368293762208, 0.009794431686401367, 0.009803775787353516, 0.009866944313049316, 0.00975494384765625, 0.009953184127807617, 0.009850079536437989, 0.009851200103759766, 0.009835071563720703, 0.00981328010559082, 0.009816800117492676, 0.009952735900878907, 
0.009825984001159667, 0.009894975662231445, 0.009770175933837891, 0.009829216003417969, 0.009951071739196777, 0.010200575828552246, 0.00991478443145752, 0.009917856216430664, 0.010011232376098633, 0.009981951713562011, 0.009943039894104003, 0.009897600173950196, 0.009927040100097656, 0.009963520050048828, 0.009946911811828614, 0.009844127655029298, 0.009891839981079101, 0.009941823959350586, 0.00993222427368164, 0.009897952079772948, 0.009988127708435058, 0.009909088134765626, 0.009974783897399902, 0.010068703651428222, 0.009954496383666992, 0.009876288414001465, 0.00994099235534668, 0.009512224197387696, 0.00995145606994629, 0.00991385555267334, 0.009958399772644042, 0.009921759605407714, 0.009968416213989258, 0.00984444808959961, 0.010078495979309082, 0.00994825553894043, 0.009949824333190919, 0.009791775703430176, 0.009728351593017577, 0.009850175857543945, 0.009802047729492187, 0.009932831764221191, 0.00981606388092041, 0.010031328201293946, 0.00994217586517334, 0.009912223815917968, 0.009894720077514648, 0.009838591575622559, 0.009787296295166016, 0.00985852813720703, 0.009784992218017579, 0.0098538236618042, 0.00993280029296875, 0.009965567588806153, 0.009846400260925293, 0.009888128280639648, 0.009969663619995118, 0.009817695617675782, 0.010056096076965332, 0.010008928298950195, 0.010524319648742677, 0.01022156810760498, 0.009992192268371582, 0.010045280456542969, 0.009897279739379882, 0.009853216171264648, 0.009787967681884766, 0.009859071731567384, 0.009930944442749024, 0.009933823585510254, 0.010072735786437989, 0.009879712104797364, 0.010154111862182617, 0.009831904411315918, 0.00977347183227539, 0.009750495910644532, 0.009721823692321777, 0.009936960220336914, 0.009836544036865234, 0.009838047981262208, 0.009919008255004884, 0.009854880332946778, 0.009861215591430664, 0.009872447967529297, 0.009915328025817871, 0.010067584037780762, 0.010217599868774414, 0.009916671752929688, 0.009830495834350587, 0.009887424468994141, 0.009615039825439453, 0.009916255950927735, 0.009929375648498535, 0.009891424179077148, 0.009815520286560058, 0.009874176025390625, 0.009940287590026856, 0.009893664360046387, 0.009864095687866212, 0.009928704261779785, 0.009850879669189454, 0.009816224098205567, 0.010280960083007813, 0.009846624374389648, 0.009928192138671875, 0.009953791618347169, 0.009963520050048828, 0.009967616081237793, 0.009915424346923827, 0.009998592376708984, 0.009951807975769044, 0.009993503570556641, 0.009984383583068848, 0.010073760032653808, 0.009900768280029297, 0.009936896324157715, 0.009842080116271973, 0.009938655853271484, 0.009992735862731934, 0.009959903717041016, 0.009904128074645996, 0.009910271644592286, 0.00990719985961914, 0.010003135681152344, 0.009916735649108886, 0.00991436767578125, 0.009926655769348144, 0.00993830394744873, 0.009912960052490235, 0.009846783638000489, 0.009773056030273437, 0.009807007789611816, 0.009724767684936523, 0.009734432220458984, 0.010073184013366699, 0.009834815979003906, 0.009848383903503418, 0.010449760437011719, 0.009981311798095704, 0.00971776008605957, 0.009918272018432617, 0.009889887809753419, 0.009882431983947754, 0.009860063552856445, 0.009804927825927734, 0.009928416252136231, 0.00991436767578125, 0.009803775787353516, 0.009779168128967284, 0.00986524772644043, 0.00982806396484375, 0.009976384162902831, 0.009900992393493652, 0.009580767631530762, 0.010156064033508301, 0.009839679718017578, 0.009720735549926758, 0.009885312080383301, 0.00980019187927246, 0.00978217601776123, 0.010032192230224609, 0.00995686435699463, 
0.009830816268920899, 0.009832639694213868, 0.009916223526000977, 0.009827648162841797, 0.009885791778564454, 0.009931424140930175, 0.009956288337707519, 0.010269696235656739, 0.009892064094543457, 0.00982323169708252, 0.009739232063293458, 0.009829376220703125, 0.010011455535888673, 0.009971391677856446, 0.009748255729675293, 0.010937024116516113, 0.009953184127807617, 0.009930015563964844, 0.009876128196716308, 0.00988969612121582, 0.009873056411743164, 0.009920448303222655, 0.009998847961425781, 0.009946623802185058, 0.009934656143188477, 0.00989459228515625, 0.010010560035705567, 0.009915776252746582, 0.009841343879699708, 0.009742719650268555, 0.009785120010375976, 0.009821248054504395, 0.01003923225402832, 0.009835359573364257, 0.009678751945495605, 0.009660639762878419, 0.00961683177947998, 0.009673151969909668, 0.009683967590332031, 0.009747584342956543, 0.009885567665100098, 0.009783295631408692, 0.009851903915405273, 0.009888768196105957, 0.009825440406799316, 0.009784159660339355, 0.009866944313049316, 0.00980412769317627, 0.009887807846069336, 0.009858976364135743, 0.009926655769348144, 0.010043423652648925, 0.009856991767883301, 0.009834495544433594, 0.009512543678283691, 0.009809760093688965, 0.009812735557556152, 0.009850303649902343, 0.010625408172607423, 0.00986252784729004, 0.00989574432373047, 0.009807680130004883, 0.00981708812713623, 0.009686016082763671, 0.00967091178894043, 0.009658559799194336, 0.00972009563446045, 0.009853471755981445, 0.009882975578308105, 0.009838687896728515, 0.00986460781097412, 0.0099399995803833, 0.00987337589263916, 0.009785247802734374, 0.009760383605957032, 0.009738016128540038, 0.009693023681640624, 0.009786368370056153, 0.009641728401184081, 0.009651359558105468, 0.009555071830749511, 0.0096396484375, 0.009576448440551758, 0.009457663536071777, 0.009498975753784179, 0.009461055755615235, 0.009395968437194825, 0.009629311561584473, 0.009552864074707032, 0.009431136131286622, 0.009695136070251464, 0.00960307216644287, 0.009629119873046875, 0.009941568374633789, 0.009637408256530762, 0.009529824256896973, 0.009657504081726075, 0.009763135910034179, 0.009773728370666503, 0.009993151664733886, 0.009950143814086914, 0.009947135925292968, 0.009814016342163086, 0.009809920310974121, 0.01002291202545166, 0.010018815994262695, 0.00990822410583496, 0.009859135627746582, 0.01022380828857422, 0.009905695915222168, 0.009873056411743164, 0.009887935638427735, 0.009779071807861328, 0.009876128196716308, 0.01000227165222168, 0.010082367897033692, 0.010007519721984864]",tokens/s,100.92628234371867,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,831.893504,666.763264,0.0,281.018368,267.897856,s,1,10.0142451171875,10.0142451171875,0.0,10.0142451171875,10.0142451171875,10.0142451171875,10.0142451171875,[10.0142451171875],,kWh,1.8046305170833912e-05,1.9833146055481723e-06,5.168615245999819e-06,2.5198235022381905e-05,,MB,1251.4304,775.815168,0.0,360.710144,344.082944,s,21,0.19524713706970212,0.009297482717604864,0.0001770074854136784,0.00928108787536621,0.009428192138671875,0.009685600280761719,0.009716268920898437,"[0.009685600280761719, 0.009387392044067383, 0.009384320259094239, 0.009723936080932617, 0.009428192138671875, 0.009395199775695801, 0.009289024353027343, 0.00928108787536621, 0.009147968292236328, 0.009244319915771484, 0.009195584297180176, 0.00915561580657959, 0.009373536109924316, 0.009367775917053223, 0.009072128295898438, 0.009268927574157714, 0.008988832473754883, 0.009237759590148926, 0.0093187837600708, 0.009256416320800782, 0.009044735908508302]",tokens/s,27534.334590938448,kWh,2.7363691433145896e-07,3.0176325469129224e-08,1.431725606485474e-07,4.4698580044913553e-07,tokens/kWh,572725128.5002092,MB,1297.793024,798.88384,0.0,383.778816,344.085504,s,21,10.226491394042966,0.48697578066871267,0.008746852984113134,0.4841940612792969,0.4983719482421875,0.49846926879882814,0.5027245422363281,"[0.5037883605957031, 0.4943537292480469, 0.4961960144042969, 0.4983719482421875, 0.49660528564453127, 0.49846926879882814, 0.4933099975585937, 0.49139312744140623, 0.471669921875, 0.48066192626953125, 0.474512939453125, 0.4807530822753906, 0.4843514709472656, 0.4908294677734375, 0.48114617919921876, 0.48362307739257815, 0.4837484436035156, 0.4841940612792969, 0.4837935791015625, 0.4806653442382812, 0.4740541687011719]",tokens/s,129.36988347446913,kWh,1.4108932130112977e-05,1.5559505434870885e-06,5.579940483446673e-06,2.1244823157046742e-05,tokens/kWh,2965428.3085478824,,s,1323,10.213597888946534,0.007720028638659512,0.0003448043095214563,0.007695263862609864,0.00792732162475586,0.008045948696136474,0.008787071018218993,"[0.00760214376449585, 0.007857279777526855, 0.0078405442237854, 0.007827455997467042, 0.007878367900848388, 0.007872352123260498, 0.007935776233673095, 0.007940639972686767, 0.007827712059020995, 0.007833471775054932, 0.007781888008117676, 0.007907519817352295, 0.007841824054718017, 0.007990848064422608, 0.007852767944335938, 0.007806879997253418, 0.007872767925262451, 0.009319744110107421, 0.010504735946655273, 0.01048697566986084, 0.008555071830749512, 0.008086943626403808, 0.0078439040184021, 0.0078057279586791995, 0.007845536231994629, 0.00790118408203125, 0.00820464038848877, 0.007878560066223145, 0.007894879817962646, 0.007829760074615479, 0.007882207870483398, 0.007893792152404785, 0.007860032081604004, 0.007780288219451904, 0.008220383644104004, 0.007790688037872314, 0.007762239933013916, 0.00788431978225708, 0.0077686400413513185, 0.007735072135925293, 0.007647232055664062, 0.007730688095092773, 0.008059040069580078, 0.007781983852386475, 0.007746623992919922, 0.008462016105651855, 0.007811039924621582, 0.007795775890350342, 0.007692927837371826, 0.007780096054077148, 0.00812876796722412, 0.007975296020507812, 0.007810239791870117, 0.007881760120391846, 0.007981152057647704, 0.007884448051452636, 0.007845888137817383, 0.007770112037658691, 0.007812255859375, 0.007734111785888672, 0.007753727912902832, 0.008034303665161132, 0.00782972812652588, 0.007544928073883057, 0.00786636781692505, 0.00784819221496582, 0.007710048198699951, 
0.00813212776184082, 0.007830399990081787, 0.00789692783355713, 0.007859551906585693, 0.0078080959320068356, 0.0077636799812316895, 0.00778982400894165, 0.007981760025024414, 0.0077926721572875975, 0.007759200096130371, 0.007640960216522216, 0.007574079990386963, 0.0076495361328125, 0.007823359966278077, 0.007649600028991699, 0.007689727783203125, 0.007771391868591309, 0.007737279891967774, 0.007721983909606934, 0.007673183917999268, 0.007764639854431152, 0.00770579195022583, 0.007760159969329834, 0.007731743812561035, 0.007710432052612305, 0.007761792182922363, 0.007715231895446777, 0.0077844481468200685, 0.007732384204864502, 0.007719776153564453, 0.008011039733886718, 0.007744224071502686, 0.007832575798034667, 0.007779583930969238, 0.007788383960723877, 0.007733248233795166, 0.007751584053039551, 0.007675903797149658, 0.007755775928497314, 0.007847040176391602, 0.00782047986984253, 0.00786191987991333, 0.007782144069671631, 0.007833920001983642, 0.007738815784454346, 0.00772764778137207, 0.007999199867248535, 0.00806281566619873, 0.007983232021331787, 0.007739712238311767, 0.007902463912963868, 0.007940127849578857, 0.00794271993637085, 0.007892352104187011, 0.009161503791809082, 0.007948287963867188, 0.007735360145568848, 0.007980991840362548, 0.008793984413146973, 0.007521344184875488, 0.007832352161407471, 0.007923711776733398, 0.007830912113189698, 0.007830048084259033, 0.0078009281158447265, 0.007830687999725342, 0.007826176166534424, 0.007958623886108398, 0.00876255989074707, 0.007856639862060547, 0.007829408168792724, 0.007753471851348877, 0.007745376110076904, 0.007987264156341553, 0.007883520126342773, 0.007920767784118652, 0.007787199974060058, 0.007820543766021728, 0.007830336093902587, 0.007800960063934327, 0.00783785581588745, 0.007718751907348633, 0.007759871959686279, 0.007856383800506591, 0.007856063842773438, 0.00785097599029541, 0.007875360012054444, 0.008046496391296386, 0.007858335971832275, 0.007771455764770508, 0.0077933440208435055, 0.007890944004058837, 0.008003552436828614, 0.007943808078765869, 0.0077398080825805664, 0.007800127983093262, 0.007799039840698243, 0.007776127815246582, 0.007700607776641845, 0.007698847770690918, 0.007895071983337403, 0.0077760319709777835, 0.007783967971801758, 0.007723936080932617, 0.007747360229492187, 0.007795775890350342, 0.007932511806488038, 0.007706528186798096, 0.007721407890319824, 0.007729152202606201, 0.008089599609375, 0.00801587200164795, 0.007816256046295166, 0.008004768371582031, 0.007860159873962403, 0.007857952117919922, 0.007841599941253661, 0.007780608177185059, 0.007820320129394532, 0.00803059196472168, 0.007807136058807373, 0.008829376220703126, 0.007377664089202881, 0.00788646411895752, 0.00788108777999878, 0.0077209601402282715, 0.0077835841178894045, 0.0077625918388366695, 0.007719295978546143, 0.007958335876464844, 0.007897088050842285, 0.007955872058868408, 0.00812502384185791, 0.008134336471557617, 0.007902783870697021, 0.00785484790802002, 0.007891071796417235, 0.007824543952941895, 0.007815807819366456, 0.00809993553161621, 0.007837088108062744, 0.00779529619216919, 0.007837696075439453, 0.007905280113220215, 0.007956543922424316, 0.008120320320129394, 0.007959775924682617, 0.007801343917846679, 0.007841375827789307, 0.00783955192565918, 0.007879487991333008, 0.007849184036254883, 0.007844639778137206, 0.007845888137817383, 0.007970816135406494, 0.008181759834289551, 0.007964511871337891, 0.007929215908050537, 0.007967616081237793, 0.007851647853851318, 0.007829792022705078, 0.007872159957885743, 
0.007913536071777343, 0.007915616035461426, 0.007882175922393798, 0.008043328285217284, 0.008114239692687989, 0.007929183959960938, 0.007983744144439697, 0.007760064125061035, 0.00787824010848999, 0.007829599857330323, 0.007832608222961425, 0.007803616046905518, 0.00776582384109497, 0.00807369613647461, 0.008011232376098633, 0.008063296318054199, 0.007860447883605957, 0.007901055812835693, 0.007909088134765626, 0.007928224086761474, 0.007780352115631104, 0.008015680313110351, 0.008032320022583007, 0.007629471778869629, 0.007837440013885498, 0.0078438401222229, 0.007885119915008544, 0.00784991979598999, 0.007810783863067627, 0.007883391857147216, 0.00786191987991333, 0.007730559825897217, 0.007928415775299073, 0.007898623943328858, 0.007760767936706543, 0.007820543766021728, 0.007815584182739258, 0.007771967887878418, 0.007781760215759277, 0.0078117442131042485, 0.00781328010559082, 0.008007904052734376, 0.007841567993164062, 0.007855455875396728, 0.0077151360511779786, 0.007708384037017822, 0.007889440059661865, 0.007794655799865722, 0.00772108793258667, 0.007757696151733398, 0.007768288135528565, 0.008714143753051757, 0.00798422384262085, 0.007920544147491455, 0.007696447849273681, 0.008281888008117676, 0.007759391784667969, 0.007825503826141358, 0.007836192131042481, 0.008059167861938476, 0.007812640190124513, 0.00781769609451294, 0.0077610878944396975, 0.007719456195831299, 0.007827263832092285, 0.00787443208694458, 0.007826848030090332, 0.007815648078918457, 0.00773363208770752, 0.007993343830108643, 0.0077927041053771975, 0.007714943885803223, 0.007728256225585937, 0.008088352203369141, 0.007868288040161132, 0.008042112350463867, 0.008126943588256837, 0.007895040035247802, 0.007911424160003662, 0.007880703926086426, 0.007919007778167725, 0.007899104118347168, 0.007932479858398438, 0.008267840385437012, 0.008015040397644044, 0.007834144115447998, 0.007445375919342041, 0.00810912036895752, 0.007805568218231201, 0.007964320182800293, 0.0077108159065246585, 0.0077069120407104496, 0.00772870397567749, 0.007682496070861816, 0.007854080200195313, 0.007727168083190918, 0.00782860803604126, 0.007730016231536865, 0.007776256084442139, 0.007788544178009033, 0.007784607887268066, 0.008017727851867676, 0.0077538881301879884, 0.007747424125671387, 0.007817215919494629, 0.007725312232971192, 0.007669119834899902, 0.007723616123199463, 0.007883679866790772, 0.0077402877807617185, 0.007725056171417236, 0.007655360221862793, 0.007692351818084717, 0.007686143875122071, 0.007763391971588135, 0.007651904106140137, 0.007985151767730713, 0.007695551872253418, 0.007662399768829346, 0.007627007961273194, 0.007626783847808838, 0.007631616115570068, 0.007723999977111816, 0.007948224067687987, 0.007753087997436524, 0.00776032018661499, 0.007782656192779541, 0.007844128131866456, 0.007688960075378418, 0.007638112068176269, 0.007949696063995362, 0.007887360095977783, 0.007747583866119385, 0.007662623882293701, 0.007549503803253174, 0.009116064071655274, 0.009774880409240723, 0.01297430419921875, 0.00923635196685791, 0.007665791988372803, 0.007628191947937011, 0.0075976958274841305, 0.007535583972930908, 0.0075855679512023926, 0.007870816230773925, 0.0076696319580078125, 0.007618559837341309, 0.00765337610244751, 0.007796735763549805, 0.007318719863891602, 0.007619391918182373, 0.007648640155792237, 0.007765696048736572, 0.007707104206085205, 0.007731808185577393, 0.007759583950042725, 0.007778463840484619, 0.0077578239440917966, 0.00780079984664917, 0.007801087856292724, 0.007870592117309571, 0.008046239852905273, 
0.007849696159362794, 0.007784128189086914, 0.007916224002838135, 0.007830656051635743, 0.0077770562171936035, 0.007731071949005127, 0.007876959800720215, 0.007825183868408202, 0.007858335971832275, 0.007665472030639648, 0.007639423847198486, 0.007882400035858154, 0.007770016193389893, 0.007723104000091552, 0.007755775928497314, 0.007804927825927735, 0.007671807765960693, 0.007614463806152344, 0.008060928344726562, 0.007696383953094482, 0.007548031806945801, 0.0076780481338500975, 0.01010972785949707, 0.010175647735595704, 0.007887712001800537, 0.007745888233184815, 0.007850751876831055, 0.007783328056335449, 0.007688000202178955, 0.007646944046020508, 0.007585951805114746, 0.007622943878173828, 0.007680160045623779, 0.007769599914550781, 0.00772054386138916, 0.00758681583404541, 0.007673823833465576, 0.008126272201538085, 0.007848127841949463, 0.007650591850280762, 0.0076943678855896, 0.007665728092193604, 0.007616511821746826, 0.007821824073791504, 0.0077263998985290525, 0.007695263862609864, 0.007546016216278076, 0.007776832103729248, 0.0076574721336364745, 0.007789792060852051, 0.007358463764190673, 0.008105216026306152, 0.007660255908966064, 0.007632927894592285, 0.00762172794342041, 0.0075857281684875485, 0.007686719894409179, 0.007741439819335938, 0.007753151893615723, 0.007676544189453125, 0.00767574405670166, 0.007510176181793213, 0.007487840175628662, 0.007479231834411621, 0.007532544136047363, 0.007728479862213135, 0.007853792190551758, 0.007774784088134766, 0.007651040077209473, 0.007973599910736083, 0.0077274560928344725, 0.007691904067993164, 0.007831583976745606, 0.007770048141479492, 0.007573535919189453, 0.007436319828033448, 0.007355711936950684, 0.00793068790435791, 0.007637152194976807, 0.007505504131317139, 0.007497056007385254, 0.007543200016021728, 0.0076455678939819334, 0.007716032028198242, 0.007752511978149414, 0.008292351722717285, 0.01295577621459961, 0.010304384231567383, 0.007874783992767333, 0.007633696079254151, 0.007763199806213379, 0.007686719894409179, 0.007815360069274902, 0.007731328010559082, 0.007776127815246582, 0.007718783855438232, 0.007684224128723145, 0.007743391990661621, 0.007755104064941406, 0.0074934401512145995, 0.007527359962463379, 0.0075345921516418455, 0.007641088008880615, 0.007700255870819092, 0.00759830379486084, 0.007583583831787109, 0.007802591800689697, 0.00756166410446167, 0.007526400089263916, 0.007513376235961914, 0.007518400192260742, 0.007520319938659668, 0.00743881607055664, 0.007124767780303955, 0.007530720233917237, 0.007510015964508057, 0.007524767875671387, 0.007429728031158447, 0.007362239837646485, 0.0074651198387146, 0.007497888088226318, 0.007485792160034179, 0.007580607891082764, 0.007625440120697022, 0.007624703884124756, 0.007682047843933106, 0.007675903797149658, 0.007616479873657226, 0.007552127838134765, 0.0077446079254150394, 0.007521503925323486, 0.0075203518867492675, 0.007549215793609619, 0.007524576187133789, 0.007708831787109375, 0.00761840009689331, 0.007501823902130127, 0.00739737606048584, 0.0073705921173095705, 0.007415775775909424, 0.007393152236938477, 0.007341663837432861, 0.007420608043670654, 0.007464352130889892, 0.007406208038330078, 0.007611519813537597, 0.007428415775299072, 0.0074000639915466305, 0.007323584079742432, 0.007375904083251953, 0.007343071937561035, 0.007393280029296875, 0.00745472002029419, 0.007483104228973389, 0.007378528118133545, 0.007298751831054688, 0.0073482880592346195, 0.007369664192199707, 0.007464128017425537, 0.007467840194702148, 0.00758128023147583, 
0.007440800189971924, 0.007415904045104981, 0.00743836784362793, 0.007407487869262696, 0.007452672004699707, 0.007472256183624268, 0.007565408229827881, 0.00743503999710083, 0.007403456211090088, 0.007460671901702881, 0.007393695831298828, 0.007417695999145508, 0.007522016048431396, 0.007630144119262695, 0.00770470380783081, 0.007451104164123535, 0.0077324481010437015, 0.00816646385192871, 0.0075764799118041995, 0.007521088123321533, 0.0076145920753479006, 0.007578879833221436, 0.007437088012695312, 0.0076260800361633305, 0.00763750410079956, 0.007532767772674561, 0.007571231842041015, 0.0075345921516418455, 0.0075833601951599125, 0.007535232067108154, 0.007615583896636963, 0.0075978879928588865, 0.007701344013214111, 0.0077489280700683595, 0.007750592231750488, 0.007794112205505371, 0.007708831787109375, 0.007947936058044433, 0.008283904075622558, 0.007987008094787598, 0.00785913610458374, 0.00775596809387207, 0.007679647922515869, 0.0077415680885314945, 0.007743423938751221, 0.00787401580810547, 0.007881343841552734, 0.007702784061431885, 0.007659264087677002, 0.007627071857452393, 0.0076481919288635255, 0.007660287857055664, 0.007687839984893799, 0.007604288101196289, 0.007632991790771484, 0.007608223915100098, 0.007567008018493652, 0.007619200229644775, 0.007768223762512207, 0.007661087989807129, 0.007975232124328614, 0.00786636781692505, 0.007753376007080078, 0.007651552200317383, 0.007513472080230713, 0.007601119995117187, 0.007525792121887207, 0.007422431945800782, 0.007339935779571533, 0.007284095764160156, 0.007230144023895263, 0.007182271957397461, 0.007192575931549072, 0.007208159923553467, 0.007313536167144775, 0.00730998420715332, 0.0072679038047790525, 0.00727510404586792, 0.007065599918365479, 0.007546847820281983, 0.007571104049682617, 0.007544256210327148, 0.007541696071624756, 0.007457824230194092, 0.007412928104400635, 0.007438111782073975, 0.0074670081138610836, 0.007712096214294434, 0.007617184162139893, 0.007472959995269776, 0.007426239967346191, 0.0073854079246521, 0.007365632057189942, 0.00728764820098877, 0.007257599830627442, 0.007209311962127685, 0.007263743877410888, 0.0074134721755981445, 0.007553247928619384, 0.007440959930419922, 0.007416959762573242, 0.007414559841156006, 0.007501952171325684, 0.0075467839241027835, 0.007485504150390625, 0.007599423885345459, 0.007516032218933106, 0.0073788480758666996, 0.007496607780456543, 0.007458816051483155, 0.007374847888946533, 0.0073434882164001465, 0.007404160022735595, 0.007450623989105225, 0.007702432155609131, 0.007594079971313477, 0.007660672187805175, 0.007838592052459717, 0.007571455955505371, 0.007385087966918945, 0.007387296199798584, 0.0073810238838195805, 0.0074158720970153805, 0.007321343898773193, 0.007411520004272461, 0.007483583927154541, 0.007632895946502686, 0.007683328151702881, 0.0077331199645996095, 0.0077218561172485356, 0.007796224117279053, 0.007715487957000732, 0.007697247982025147, 0.007658495903015137, 0.007571616172790527, 0.007641151905059814, 0.007878431797027588, 0.007816383838653565, 0.007871359825134277, 0.007831679821014405, 0.007649184226989746, 0.0072371201515197756, 0.007869344234466552, 0.007571455955505371, 0.007513919830322266, 0.00759007978439331, 0.00758784008026123, 0.0075673599243164065, 0.007532767772674561, 0.00758351993560791, 0.007426239967346191, 0.007357952117919922, 0.00733625602722168, 0.007339935779571533, 0.007519807815551758, 0.007529344081878662, 0.0075608320236206054, 0.007783936023712158, 0.007604832172393799, 0.007605855941772461, 0.0076884799003601074, 
0.007614528179168701, 0.007561312198638916, 0.0075259838104248045, 0.007565216064453125, 0.0075504322052001955, 0.007697343826293945, 0.007616384029388428, 0.007524159908294678, 0.007610688209533692, 0.007542784214019775, 0.007478591918945313, 0.007523007869720459, 0.0075690560340881344, 0.007741663932800293, 0.007597311973571778, 0.007517183780670166, 0.007443327903747559, 0.007572415828704834, 0.007559232234954834, 0.007554687976837158, 0.007696767807006836, 0.007729119777679443, 0.007754879951477051, 0.0077341761589050295, 0.007723008155822754, 0.007684095859527588, 0.007808703899383545, 0.00789737606048584, 0.008004863739013672, 0.0078504319190979, 0.007806687831878662, 0.00777180814743042, 0.007805920124053955, 0.0077209601402282715, 0.007706624031066894, 0.007648767948150635, 0.007668223857879639, 0.007710944175720215, 0.0076900157928466795, 0.007737343788146973, 0.0076037440299987795, 0.00756163215637207, 0.0076689600944519045, 0.007369408130645752, 0.007791935920715332, 0.00775875186920166, 0.007800479888916015, 0.007744256019592285, 0.00771449613571167, 0.00776643180847168, 0.0077430720329284665, 0.007678112030029297, 0.007628191947937011, 0.007653215885162354, 0.007551040172576904, 0.007631103992462158, 0.007931263923645019, 0.007732128143310547, 0.007686143875122071, 0.007669888019561768, 0.00757747220993042, 0.0075038719177246095, 0.007483551979064942, 0.007587679862976074, 0.007532544136047363, 0.007583744049072265, 0.007659776210784912, 0.007866015911102295, 0.007872608184814453, 0.007766335964202881, 0.007624703884124756, 0.007548384189605713, 0.007638783931732178, 0.007614943981170654, 0.007585343837738037, 0.0076745920181274414, 0.007665664196014404, 0.007591551780700684, 0.007602272033691407, 0.007585792064666748, 0.00757916784286499, 0.007573823928833007, 0.008021568298339844, 0.007776256084442139, 0.007793248176574707, 0.00783792018890381, 0.007656511783599854, 0.007602911949157715, 0.007640384197235108, 0.007649663925170898, 0.00777235221862793, 0.007607935905456543, 0.007672319889068604, 0.007702527999877929, 0.007771840095520019, 0.007764287948608398, 0.007600192070007324, 0.007638976097106933, 0.007823552131652832, 0.007621471881866455, 0.007578591823577881, 0.007673344135284424, 0.007729119777679443, 0.007759520053863525, 0.007738495826721191, 0.007715648174285889, 0.007298912048339844, 0.007635104179382324, 0.007617887973785401, 0.00768067216873169, 0.007696256160736084, 0.007684224128723145, 0.007596096038818359, 0.007839104175567628, 0.008012351989746093, 0.0077285442352294925, 0.0076724162101745605, 0.007686143875122071, 0.007609983921051025, 0.007827839851379394, 0.007727200031280517, 0.0076973757743835445, 0.007716832160949707, 0.007709023952484131, 0.007654143810272217, 0.007673312187194825, 0.007578015804290772, 0.007848095893859864, 0.007606112003326416, 0.007589888095855713, 0.007681568145751953, 0.007704736232757569, 0.0076854400634765625, 0.008231936454772949, 0.00774348783493042, 0.00777843189239502, 0.007844863891601562, 0.007758975982666016, 0.007792384147644043, 0.007903232097625732, 0.007720863819122315, 0.007702623844146729, 0.007710720062255859, 0.007814976215362548, 0.007852223873138428, 0.0077636799812316895, 0.007737631797790527, 0.008036319732666015, 0.00781932783126831, 0.007788512229919433, 0.0077762241363525395, 0.007770207881927491, 0.007722784042358399, 0.007709983825683594, 0.008739456176757813, 0.007895103931427002, 0.008284352302551269, 0.008699295997619629, 0.007711328029632568, 0.007661888122558594, 0.00767299222946167, 
0.008063520431518555, 0.0077003841400146485, 0.007639391899108887, 0.007761663913726806, 0.007947968006134034, 0.007779679775238037, 0.007709280014038086, 0.00753107213973999, 0.007247871875762939, 0.007706399917602539, 0.007629024028778076, 0.007609920024871826, 0.00755683183670044, 0.007528863906860352, 0.00755344009399414, 0.007517920017242431, 0.00760646390914917, 0.007614463806152344, 0.007708672046661377, 0.007700223922729493, 0.007727712154388428, 0.007855072021484374, 0.0076819839477539065, 0.007778304100036621, 0.00783616018295288, 0.008025504112243653, 0.007790527820587158, 0.0076902399063110355, 0.007678400039672852, 0.007677567958831787, 0.00767471981048584, 0.00770630407333374, 0.007663424015045166, 0.0076349759101867675, 0.007642943859100342, 0.007526495933532715, 0.00759987211227417, 0.007649824142456054, 0.007540736198425293, 0.007500288009643555, 0.00745356798171997, 0.007844768047332763, 0.007548927783966064, 0.007523392200469971, 0.00755398416519165, 0.007571040153503418, 0.007495200157165528, 0.007523200035095215, 0.007684095859527588, 0.007450623989105225, 0.007595935821533203, 0.00754256010055542, 0.007463391780853272, 0.0074709439277648925, 0.007482560157775879, 0.007562047958374023, 0.0076267518997192385, 0.007886943817138671, 0.007772064208984375, 0.0076984319686889645, 0.007675903797149658, 0.007597472190856934, 0.007691199779510498, 0.007648928165435791, 0.00766476821899414, 0.0076251840591430664, 0.007588352203369141, 0.007587744235992432, 0.007712800025939942, 0.007620319843292236, 0.007552288055419922, 0.0075615358352661135, 0.007734975814819336, 0.007669760227203369, 0.007618752002716065, 0.007696191787719727, 0.007676928043365478, 0.007558495998382569, 0.007605728149414063, 0.007703775882720947, 0.007846879959106445, 0.007860223770141601, 0.007772480010986328, 0.007798463821411133, 0.007915775775909423, 0.00790617609024048, 0.00790822410583496, 0.007859839916229247, 0.007854464054107667, 0.007898399829864502, 0.007792384147644043, 0.007852799892425537, 0.007655519962310791, 0.007726304054260254, 0.007655968189239502, 0.007672160148620605, 0.007567584037780762, 0.00785158395767212, 0.007590047836303711, 0.007701727867126465, 0.007691135883331299, 0.00756489610671997, 0.0076455998420715335, 0.007500895977020264, 0.0074388480186462404, 0.0074141759872436526, 0.00738099193572998, 0.007465184211730957, 0.0074217281341552735, 0.007411712169647216, 0.007622623920440674, 0.007540800094604492, 0.007536608219146729, 0.007565311908721924, 0.0075972480773925785, 0.007662591934204102, 0.007771520137786865, 0.0075903358459472655, 0.0076162881851196285, 0.007655648231506348, 0.007669760227203369, 0.008271200180053711, 0.008106687545776366, 0.008050656318664551, 0.008664896011352539, 0.007587168216705322, 0.007531040191650391, 0.007380383968353272, 0.0073532481193542484, 0.0073134078979492185, 0.007358335971832276, 0.007446464061737061, 0.007303071975708008, 0.007465248107910156, 0.007284224033355713, 0.007672160148620605, 0.00763315200805664, 0.007627999782562256, 0.007516799926757812, 0.00748748779296875, 0.007342080116271973, 0.007422207832336426, 0.007448319911956787, 0.0074934720993041996, 0.007401375770568847, 0.007401919841766357, 0.00743609619140625, 0.008453184127807618, 0.007683008193969726, 0.007651552200317383, 0.007519552230834961, 0.0075103998184204105, 0.0073500161170959475, 0.007383232116699219, 0.007477344036102295, 0.007695712089538574, 0.007547711849212647, 0.007421855926513672, 0.00734771203994751, 0.007363071918487549, 0.00743123197555542, 
0.007449535846710205, 0.007618815898895264, 0.007644447803497314, 0.007632800102233887, 0.007686719894409179, 0.008171520233154296, 0.007798079967498779, 0.007740096092224121, 0.007734784126281738, 0.007750400066375732, 0.007749375820159912, 0.007845888137817383, 0.007681151866912842, 0.0078037757873535155, 0.007948287963867188, 0.008177087783813477, 0.007784575939178467, 0.0077881598472595214, 0.007877439975738525, 0.008007871627807616, 0.0077617278099060055, 0.007790592193603516, 0.007704895973205566, 0.0076735677719116215, 0.007648575782775879, 0.007749407768249512, 0.007664127826690674, 0.007713280200958252, 0.007784512042999268, 0.007687200069427491, 0.00767193603515625, 0.0077216320037841795, 0.00783680009841919, 0.00808131217956543, 0.00783846378326416, 0.007874783992767333, 0.007350272178649903, 0.007791935920715332, 0.007800896167755127, 0.007657855987548828, 0.007651584148406982, 0.007671807765960693, 0.0076943359375, 0.007575679779052735, 0.0077331199645996095, 0.007671232223510742, 0.007684832096099854, 0.008099391937255859, 0.00771014404296875, 0.007595039844512939, 0.007654399871826172, 0.007686944007873535, 0.007696288108825684, 0.007780479907989502, 0.007585792064666748, 0.00748748779296875, 0.007475200176239013, 0.007460864067077637, 0.007483391761779785, 0.007408736228942871, 0.0075047359466552735, 0.007479487895965576, 0.007480447769165039, 0.007478015899658203, 0.007550975799560547, 0.0075138239860534665, 0.0076495680809021, 0.00759113597869873, 0.007649663925170898, 0.00761897611618042, 0.007837696075439453, 0.00759603214263916, 0.007470719814300537, 0.007391615867614746, 0.007568704128265381, 0.007862239837646485, 0.008208127975463867, 0.008481568336486816, 0.007809184074401856, 0.008235039710998536, 0.007948480129241943, 0.007604032039642334, 0.007612415790557861, 0.007687647819519043, 0.007718431949615479, 0.007727807998657227, 0.008079680442810058, 0.007893248081207276, 0.007751552104949952, 0.007680031776428222, 0.007676864147186279, 0.0076697921752929684, 0.0076640000343322755, 0.007565567970275879, 0.007536896228790283, 0.007921216011047363, 0.007502175807952881, 0.0074834880828857425, 0.007466464042663574, 0.007388671875, 0.007569983959197998, 0.007708032131195069, 0.007723487854003906, 0.007711071968078614, 0.007712672233581543, 0.007657599925994873, 0.0076813120841979984, 0.007610976219177246, 0.007629951953887939, 0.007687392234802246, 0.007656576156616211, 0.007568064212799072, 0.007544672012329102, 0.007779327869415284, 0.007569536209106446, 0.007555647850036621, 0.007549248218536377, 0.007675615787506104, 0.007801439762115479, 0.007737023830413818, 0.007710271835327148, 0.007794464111328125, 0.007670432090759277, 0.007761407852172852, 0.007772672176361084, 0.007808703899383545, 0.00793836784362793, 0.008080544471740723, 0.007901919841766357, 0.007764287948608398, 0.007715712070465088, 0.007592095851898194, 0.007676191806793213, 0.007550816059112549, 0.0075270719528198245, 0.007483391761779785, 0.0075304961204528805, 0.007556992053985596, 0.00760588788986206, 0.0076087360382080076, 0.007833695888519288, 0.00760422420501709, 0.007657855987548828, 0.007745151996612549, 0.007731200218200684, 0.0075345921516418455, 0.007579648017883301, 0.007674943923950195, 0.0076648640632629396, 0.007675615787506104, 0.007636864185333252, 0.007589087963104248, 0.007521183967590332, 0.007745632171630859, 0.0076789441108703616, 0.007607135772705078, 0.007614528179168701, 0.007643136024475097, 0.007642655849456787, 0.007943967819213867, 0.007651487827301026, 
0.007643712043762207, 0.007362112045288086, 0.007768511772155762, 0.007718912124633789, 0.007778272151947021, 0.007780447959899902, 0.00771827220916748, 0.00767852783203125, 0.007639039993286132, 0.007759232044219971, 0.007688896179199219, 0.007661503791809082, 0.00782972812652588, 0.007692063808441162, 0.007524703979492188, 0.007476895809173584, 0.007403264045715332, 0.007460192203521728, 0.007349184036254883, 0.007370880126953125, 0.007376287937164307, 0.007573376178741455, 0.007566080093383789, 0.007860127925872804, 0.007847424030303956, 0.007811679840087891, 0.007868351936340333, 0.007765888214111328, 0.007783743858337402, 0.007756447792053223, 0.007749663829803467, 0.007755775928497314, 0.007661568164825439, 0.007608575820922852, 0.0077719039916992185, 0.007744991779327392, 0.007535136222839355, 0.0075038719177246095, 0.007640384197235108, 0.00769052791595459, 0.007632512092590332, 0.007596543788909912, 0.007528063774108887, 0.007459487915039063, 0.0076483840942382815, 0.007607391834259033, 0.007573631763458252, 0.007589183807373047, 0.007590240001678467, 0.007542240142822266, 0.007461408138275147, 0.007403295993804931, 0.007446752071380615, 0.007514143943786621, 0.007724095821380615, 0.0076397438049316405, 0.007528287887573242, 0.007599520206451416, 0.00770576000213623, 0.0076811199188232425, 0.007551360130310059, 0.007485792160034179, 0.007476863861083985, 0.007555456161499024, 0.007080512046813965, 0.00742137622833252, 0.007617472171783447, 0.007497407913208008, 0.007446400165557861, 0.007368832111358642, 0.007354015827178955, 0.007315008163452148, 0.007515135765075683, 0.00743507194519043, 0.007377888202667236, 0.007511839866638183, 0.007628223896026611, 0.007578495979309082, 0.007605535984039307, 0.007561791896820069, 0.007607552051544189, 0.007660672187805175, 0.007661312103271484, 0.0076836800575256345, 0.0076637759208679195, 0.007767551898956298, 0.007643743991851807, 0.007783743858337402, 0.007525152206420899, 0.007505919933319092, 0.007437439918518066, 0.007482240200042725, 0.007749631881713868, 0.00750822401046753, 0.007513792037963867, 0.007485824108123779, 0.007538368225097656, 0.0075447998046875, 0.0075502400398254396, 0.007437056064605713, 0.007484416007995606, 0.00745136022567749, 0.007553664207458496, 0.007497312068939209, 0.007622719764709473, 0.007680287837982178, 0.0076039361953735354, 0.0075731201171875, 0.0075838398933410645, 0.007612383842468261, 0.007478784084320069, 0.007545216083526611, 0.007438560009002685, 0.007438560009002685, 0.007438464164733887, 0.007534783840179443, 0.007572480201721191, 0.007533247947692871, 0.007409664154052734, 0.007628384113311767, 0.007449215888977051, 0.007365856170654297, 0.0073151359558105465, 0.007236480236053467, 0.007359968185424805, 0.007543327808380127, 0.007500864028930664]",tokens/s,129.53319823093787,,, 4bit-awq-gemm-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemm,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,829.616128,666.763264,0.0,281.018368,267.897856,s,1,9.8784375,9.8784375,0.0,9.8784375,9.8784375,9.8784375,9.8784375,[9.8784375],,kWh,1.590189947499899e-05,1.7466632093362058e-06,4.294170101999933e-06,2.1942732786335125e-05,,MB,1281.08544,775.815168,0.0,360.710144,344.082944,s,20,0.18771663951873777,0.00938583197593689,0.00015274587235024058,0.00936188793182373,0.009490463638305665,0.009540688276290893,0.00983568968772888,"[0.009909440040588378, 0.009521280288696289, 0.009374336242675781, 0.009246912002563477, 0.009212703704833984, 0.009309568405151368, 0.00946998405456543, 0.009449567794799805, 0.009325728416442872, 0.009487039566040039, 0.009413439750671387, 0.00921555233001709, 0.00934943962097168, 0.009271903991699219, 0.009478079795837402, 0.009443039894104005, 0.009347999572753906, 0.009375967979431153, 0.009215519905090333, 0.0092991361618042]",tokens/s,27275.15266162073,kWh,2.7081202673160223e-07,2.986575457215874e-08,1.4224990765677167e-07,4.4292768896053265e-07,tokens/kWh,577972446.4749167,MB,1327.988736,800.980992,0.0,385.875968,344.085504,s,20,9.926833679199218,0.4963416839599609,0.004407894203706206,0.4968547210693359,0.5014775085449219,0.5034424209594727,0.504056233215332,"[0.4982658996582031, 0.49839389038085935, 0.498930908203125, 0.501263671875, 0.49628701782226564, 0.49851321411132815, 0.5034020385742187, 0.49742242431640626, 0.49560894775390624, 0.5007698974609375, 0.49753250122070314, 0.4907862548828125, 0.4941471252441406, 0.4898678894042969, 0.4952970886230469, 0.5042096862792969, 0.49067034912109375, 0.4947938232421875, 0.48678912353515624, 0.49388192749023435]",tokens/s,126.92869052900684,kWh,1.3951180819101697e-05,1.5385681687541659e-06,5.466180505009894e-06,2.0955929492865754e-05,tokens/kWh,3006309.026829268,,s,1260,9.914113731861116,0.007868344231635805,0.00020790832087812502,0.007851855993270873,0.008027135562896729,0.008127570915222168,0.008633991632461552,"[0.007504223823547363, 0.00797814416885376, 0.00799625587463379, 0.00794547176361084, 0.007926527976989745, 0.007898943901062012, 0.007807328224182129, 0.007831391811370849, 0.008006784439086914, 0.007857024192810058, 0.007978047847747803, 0.00799180793762207, 0.007950560092926025, 0.008166655540466309, 0.008168416023254394, 0.008042495727539062, 0.008023776054382325, 0.007979040145874023, 0.008038528442382812, 0.007934080123901368, 0.007931903839111328, 0.008079680442810058, 0.007949312210083008, 0.008135392189025878, 0.007997407913208007, 0.008535200119018555, 0.007858367919921875, 0.007866399765014648, 0.007795328140258789, 0.007749184131622315, 0.007770688056945801, 0.007837247848510743, 0.007768383979797363, 0.007833439826965332, 0.007901343822479248, 0.007927616119384766, 0.00786246395111084, 0.00782697582244873, 0.007805376052856445, 0.007784480094909668, 0.007708064079284668, 0.0076698241233825685, 0.00767145586013794, 0.007760767936706543, 0.007783679962158203, 0.007940576076507569, 0.007776095867156983, 0.007901631832122802, 0.007733248233795166, 0.007811039924621582, 0.007851808071136475, 0.00784329605102539, 0.007870463848114014, 0.007790431976318359, 0.00790828800201416, 0.00793017578125, 0.008012543678283692, 0.008057600021362304, 0.007845856189727784, 0.008109984397888183, 0.007792960166931153, 0.007790592193603516, 0.007830880165100097, 0.00772051191329956, 0.007920063972473145, 0.00789638376235962, 0.007862048149108887, 0.007831583976745606, 0.007747712135314941, 0.007811935901641845, 0.007810976028442383, 0.007932096004486085, 
0.007771423816680908, 0.007725599765777588, 0.007809023857116699, 0.007890463829040528, 0.007820767879486084, 0.007812096118927002, 0.0078537278175354, 0.007809375762939453, 0.007875807762145996, 0.007776127815246582, 0.008004511833190918, 0.007878367900848388, 0.007878687858581543, 0.007769792079925537, 0.007804480075836182, 0.00795136022567749, 0.0077948799133300785, 0.007804736137390137, 0.007862271785736084, 0.007902431964874267, 0.007869247913360595, 0.007831552028656007, 0.008116288185119629, 0.007884704113006591, 0.007968768119812012, 0.007898975849151611, 0.00789084815979004, 0.007873023986816406, 0.008115967750549317, 0.008458239555358887, 0.007970367908477783, 0.007839360237121583, 0.00790006399154663, 0.007933440208435059, 0.008499872207641602, 0.007988224029541016, 0.007950592041015624, 0.007936511993408203, 0.007904640197753906, 0.007924511909484863, 0.007906816005706786, 0.008008031845092774, 0.007871488094329833, 0.007861248016357422, 0.007821087837219238, 0.00783996820449829, 0.007865439891815186, 0.008027039527893066, 0.007833600044250488, 0.007905375957489014, 0.0079616961479187, 0.007877439975738525, 0.00798476791381836, 0.007916160106658935, 0.007544832229614258, 0.007948448181152343, 0.007918560028076171, 0.0079269437789917, 0.007962143898010254, 0.008021599769592285, 0.00797756814956665, 0.008136704444885253, 0.007996575832366943, 0.008005503654479981, 0.008014816284179688, 0.007964672088623047, 0.007929024219512939, 0.00795740795135498, 0.007898752212524413, 0.007878399848937989, 0.008231455802917481, 0.008157183647155761, 0.008004735946655274, 0.008039360046386718, 0.007954368114471436, 0.007903232097625732, 0.007928095817565917, 0.007992127895355224, 0.007838272094726562, 0.007864672183990479, 0.007880703926086426, 0.007938240051269531, 0.007834911823272705, 0.007864480018615722, 0.007948512077331544, 0.007820703983306886, 0.007992063999176026, 0.007725056171417236, 0.007743391990661621, 0.008275168418884277, 0.00793497610092163, 0.007712255954742431, 0.007696447849273681, 0.007958847999572754, 0.007765088081359863, 0.007777184009552002, 0.007933951854705811, 0.0079300799369812, 0.007826655864715577, 0.007894944190979004, 0.007766079902648926, 0.00799724817276001, 0.007793439865112305, 0.007724031925201416, 0.007830527782440186, 0.00783564805984497, 0.007804927825927735, 0.007759871959686279, 0.007816256046295166, 0.007887807846069336, 0.008112031936645508, 0.007899231910705566, 0.007919616222381591, 0.007864319801330566, 0.007955679893493653, 0.008008735656738281, 0.00787225580215454, 0.00755014419555664, 0.008220576286315917, 0.008227744102478028, 0.007975135803222656, 0.008042271614074708, 0.007852287769317626, 0.008184864044189452, 0.007852384090423584, 0.007849408149719237, 0.007842912197113036, 0.007823296070098877, 0.007857632160186768, 0.007964352130889892, 0.008067520141601562, 0.007890336036682129, 0.007950335979461669, 0.007969056129455566, 0.007850560188293457, 0.007876768112182617, 0.00797049617767334, 0.007882976055145263, 0.007862271785736084, 0.007849984169006348, 0.007781983852386475, 0.007901599884033203, 0.007881984233856202, 0.008139616012573242, 0.007964511871337891, 0.007931968212127686, 0.007927807807922363, 0.007826623916625976, 0.007754271984100342, 0.00789961576461792, 0.007805823802947998, 0.007793600082397461, 0.007725056171417236, 0.007714816093444824, 0.007810272216796875, 0.007922463893890381, 0.007763999938964844, 0.007819231986999511, 0.008067071914672852, 0.00803388786315918, 0.007868512153625488, 0.007913951873779296, 
0.007866464138031005, 0.007878399848937989, 0.007870368003845215, 0.00936355209350586, 0.008167455673217774, 0.007968192100524902, 0.007915808200836181, 0.007913343906402588, 0.007993728160858153, 0.008207615852355956, 0.009161184310913086, 0.007798687934875488, 0.007798175811767578, 0.007810239791870117, 0.007810880184173584, 0.007935232162475585, 0.007768799781799316, 0.007760128021240234, 0.007374783992767334, 0.007812032222747803, 0.007790592193603516, 0.00806713581085205, 0.007947648048400879, 0.007779168128967285, 0.00782102394104004, 0.007857503890991211, 0.007825215816497803, 0.007936575889587402, 0.007858367919921875, 0.007757343769073486, 0.007729728221893311, 0.007774208068847656, 0.007860256195068359, 0.007874527931213378, 0.007931519985198974, 0.007880928039550782, 0.007812479972839356, 0.007907648086547852, 0.007799263954162598, 0.007866144180297852, 0.007870463848114014, 0.007776159763336182, 0.007817056179046631, 0.007822976112365723, 0.00782204818725586, 0.007876736164093017, 0.008126463890075684, 0.008044384002685546, 0.007907487869262696, 0.007872447967529297, 0.007867455959320068, 0.007730239868164062, 0.007808127880096436, 0.007881472110748291, 0.007825471878051759, 0.007919392108917237, 0.007834976196289062, 0.007850880146026612, 0.007829152107238769, 0.00780025577545166, 0.007785376071929932, 0.007862271785736084, 0.007926815986633301, 0.007982048034667968, 0.007861440181732177, 0.007850304126739501, 0.007848447799682617, 0.00784774398803711, 0.007872128009796142, 0.007843776226043701, 0.007895199775695801, 0.008248991966247558, 0.007885568141937256, 0.007866432189941406, 0.00787446403503418, 0.007978847980499268, 0.007910880088806152, 0.007875264167785645, 0.007987264156341553, 0.008038432121276855, 0.007864319801330566, 0.007448575973510742, 0.0077619199752807615, 0.007765279769897461, 0.007817567825317383, 0.007887231826782226, 0.007823296070098877, 0.007831615924835205, 0.00784716796875, 0.007885568141937256, 0.007823359966278077, 0.007860352039337158, 0.007857376098632813, 0.00786729621887207, 0.007964416027069092, 0.00791155195236206, 0.007901055812835693, 0.007900320053100585, 0.008419424057006837, 0.008028927803039551, 0.007966720104217529, 0.00811520004272461, 0.007891392230987549, 0.007802752017974854, 0.007800640106201172, 0.00788159990310669, 0.007784736156463623, 0.007947999954223633, 0.007849855899810791, 0.007836991786956788, 0.007853951930999756, 0.007795904159545898, 0.007843584060668944, 0.00784716796875, 0.007920383930206299, 0.008184032440185547, 0.007927264213562012, 0.007940415859222411, 0.007919616222381591, 0.007940192222595215, 0.008029760360717774, 0.00819200038909912, 0.007978943824768067, 0.007917888164520263, 0.007878687858581543, 0.007863711833953858, 0.007914144039154053, 0.007880095958709716, 0.007848544120788574, 0.007847040176391602, 0.007922560214996337, 0.008040448188781739, 0.008019040107727051, 0.008028127670288087, 0.007972832202911378, 0.007914015769958496, 0.007948607921600341, 0.007896255970001221, 0.007890944004058837, 0.007846208095550537, 0.007827104091644287, 0.007908383846282958, 0.007847807884216308, 0.007812479972839356, 0.007600128173828125, 0.007952383995056152, 0.008337056159973145, 0.008012127876281739, 0.008077312469482421, 0.007974271774291991, 0.007999872207641602, 0.007921919822692872, 0.007794688224792481, 0.007864543914794922, 0.007875743865966798, 0.00790822410583496, 0.007988992214202881, 0.008036383628845215, 0.007994527816772462, 0.007926432132720948, 0.007869696140289307, 0.007862847805023193, 
0.007960927963256837, 0.007860000133514405, 0.007868639945983887, 0.007947936058044433, 0.007941887855529786, 0.007918176174163818, 0.007890944004058837, 0.007919616222381591, 0.007843616008758545, 0.00820035171508789, 0.007948351860046386, 0.008032416343688965, 0.007964447975158691, 0.00809993553161621, 0.0079585599899292, 0.007859903812408447, 0.007837503910064698, 0.007804800033569336, 0.00781766414642334, 0.007831424236297608, 0.00787395191192627, 0.007770944118499756, 0.008007552146911621, 0.007891104221343995, 0.007857855796813964, 0.007854112148284912, 0.007849408149719237, 0.0077933440208435055, 0.00798531198501587, 0.007813119888305664, 0.007824639797210693, 0.007928575992584229, 0.007938015937805175, 0.007917600154876709, 0.008048288345336914, 0.007807328224182129, 0.00802406406402588, 0.00784339189529419, 0.007868192195892333, 0.007958720207214356, 0.008937055587768555, 0.009333632469177245, 0.008792063713073731, 0.008433024406433105, 0.007920447826385499, 0.007503104209899902, 0.007869184017181396, 0.007892864227294922, 0.007925695896148681, 0.00785427188873291, 0.007823616027832032, 0.007860000133514405, 0.007761888027191162, 0.007751808166503906, 0.007772031784057617, 0.007862271785736084, 0.007862271785736084, 0.007742847919464111, 0.007809663772583008, 0.007761631965637207, 0.007819071769714355, 0.008011327743530274, 0.007924863815307617, 0.00776313591003418, 0.007860640048980712, 0.007852384090423584, 0.007933792114257812, 0.0077656002044677735, 0.007748095989227295, 0.007869887828826904, 0.00787446403503418, 0.007874720096588135, 0.007825823783874511, 0.00789299201965332, 0.00783785581588745, 0.007838848114013672, 0.007922399997711181, 0.008095744132995606, 0.007902400016784668, 0.007883008003234864, 0.008151488304138184, 0.008512991905212403, 0.008004256248474121, 0.007854080200195313, 0.007868415832519531, 0.00814303970336914, 0.007992159843444825, 0.007875552177429199, 0.007958528041839599, 0.008129759788513184, 0.007818016052246093, 0.007886655807495117, 0.007909567832946777, 0.007890783786773681, 0.008318880081176757, 0.007853504180908202, 0.007822368144989014, 0.007812096118927002, 0.007911488056182862, 0.007870272159576416, 0.007803455829620361, 0.007686495780944824, 0.007882175922393798, 0.00787660789489746, 0.007844480037689209, 0.007886655807495117, 0.007807072162628174, 0.007863455772399903, 0.00744159984588623, 0.007821695804595947, 0.007798816204071045, 0.007774623870849609, 0.007929152011871337, 0.00787724781036377, 0.007830719947814941, 0.007812032222747803, 0.0079584641456604, 0.008076576232910157, 0.007882719993591308, 0.007783071994781494, 0.007818784236907959, 0.007844704151153565, 0.008799967765808105, 0.008064640045166016, 0.007960031986236573, 0.007811583995819092, 0.008076959609985352, 0.007875328063964843, 0.007845888137817383, 0.007862271785736084, 0.007794688224792481, 0.007837120056152344, 0.007729248046875, 0.007788640022277832, 0.007747968196868896, 0.007814752101898194, 0.0078032960891723634, 0.007872223854064941, 0.007838047981262208, 0.0078000321388244625, 0.007832575798034667, 0.007853792190551758, 0.007862271785736084, 0.008054783821105957, 0.007907328128814697, 0.007753183841705322, 0.007805600166320801, 0.007802175998687744, 0.007732992172241211, 0.007867199897766113, 0.007880703926086426, 0.007862048149108887, 0.007784671783447265, 0.0077430720329284665, 0.007708447933197022, 0.0077926721572875975, 0.007770463943481445, 0.007779583930969238, 0.007830016136169434, 0.008241312026977539, 0.007876800060272218, 0.007745984077453613, 
0.007735199928283691, 0.007819071769714355, 0.00777942419052124, 0.007904096126556397, 0.007788735866546631, 0.007818751811981202, 0.007893375873565674, 0.007862592220306396, 0.007892672061920167, 0.007620607852935791, 0.007927552223205566, 0.008063232421875, 0.00783680009841919, 0.008046527862548829, 0.008082592010498046, 0.007900191783905029, 0.007950975894927978, 0.007983359813690185, 0.007985087871551513, 0.007982912063598633, 0.0078951678276062, 0.008058879852294922, 0.007952383995056152, 0.008150976181030273, 0.008068191528320312, 0.008033568382263184, 0.007900032043457031, 0.007908160209655762, 0.007933311939239502, 0.007963263988494873, 0.007896128177642823, 0.007938432216644286, 0.007848512172698975, 0.007972864151000977, 0.00786636781692505, 0.007925568103790283, 0.007869664192199708, 0.008127455711364745, 0.007946239948272706, 0.007904736042022706, 0.007899775981903076, 0.007847839832305909, 0.007913568019866944, 0.007948383808135987, 0.008013855934143067, 0.008207200050354004, 0.007996352195739746, 0.007908576011657715, 0.00787663984298706, 0.007953152179718018, 0.008146783828735352, 0.007964672088623047, 0.008157343864440917, 0.007845888137817383, 0.007871615886688232, 0.007906208038330078, 0.008016927719116211, 0.007858560085296631, 0.008013631820678711, 0.007891871929168702, 0.0079268798828125, 0.007869184017181396, 0.00780083179473877, 0.008094783782958984, 0.007876992225646973, 0.007826015949249268, 0.007796768188476562, 0.007799967765808106, 0.007815968036651612, 0.007872511863708496, 0.007790592193603516, 0.007844031810760498, 0.007538976192474366, 0.00795417594909668, 0.007990655899047851, 0.00793673610687256, 0.00786732816696167, 0.007866496086120606, 0.007840576171875, 0.007822559833526612, 0.007744480133056641, 0.00781824016571045, 0.007794591903686523, 0.008050944328308106, 0.008392736434936524, 0.007921631813049317, 0.007881375789642334, 0.007913472175598145, 0.007886112213134766, 0.00782204818725586, 0.007899104118347168, 0.007890016078948974, 0.0078058881759643555, 0.007866528034210205, 0.008068960189819336, 0.008017919540405273, 0.007949312210083008, 0.007910399913787843, 0.007823359966278077, 0.007751679897308349, 0.007798399925231934, 0.007852416038513183, 0.007854080200195313, 0.007884992122650146, 0.007919424057006836, 0.008062975883483887, 0.007970208168029786, 0.00787507200241089, 0.007975008010864258, 0.007987199783325195, 0.007870592117309571, 0.007873600006103515, 0.007768864154815674, 0.007764224052429199, 0.007810848236083984, 0.00801734447479248, 0.007869056224822998, 0.007818848133087157, 0.007834112167358399, 0.008027999877929688, 0.007833600044250488, 0.007790431976318359, 0.007812255859375, 0.00775270414352417, 0.007804927825927735, 0.007976191997528076, 0.007866144180297852, 0.007825727939605714, 0.00784607982635498, 0.008272480010986329, 0.007833471775054932, 0.00789292812347412, 0.007853472232818603, 0.007736095905303955, 0.007903103828430175, 0.00734822416305542, 0.007786496162414551, 0.007814464092254638, 0.007880640029907227, 0.007867231845855712, 0.007833439826965332, 0.007784512042999268, 0.007738463878631592, 0.007838624000549316, 0.007823359966278077, 0.007766304016113282, 0.007786208152770996, 0.007677951812744141, 0.007770080089569092, 0.00779475212097168, 0.007856095790863037, 0.00787663984298706, 0.007740384101867676, 0.008037471771240234, 0.007845727920532227, 0.007771679878234863, 0.007772223949432373, 0.007859903812408447, 0.007734047889709473, 0.007743680000305176, 0.00785200023651123, 0.0077116479873657225, 
0.007687104225158691, 0.007719103813171387, 0.007710527896881103, 0.007751679897308349, 0.007788064002990723, 0.00782697582244873, 0.007840703964233399, 0.008103903770446777, 0.00784764814376831, 0.008134528160095215, 0.00792742395401001, 0.007854911804199219, 0.007807168006896972, 0.007883999824523926, 0.007735455989837646, 0.0076845440864562985, 0.007682015895843506, 0.0076638078689575196, 0.00763375997543335, 0.007797760009765625, 0.007624192237854004, 0.0077127361297607425, 0.00772160005569458, 0.007751584053039551, 0.007644544124603272, 0.007637856006622315, 0.007893951892852783, 0.007717599868774414, 0.007669119834899902, 0.007674623966217041, 0.007704671859741211, 0.007815072059631348, 0.007858176231384278, 0.007711904048919678, 0.007750495910644532, 0.007876480102539062, 0.007448192119598389, 0.007740128040313721, 0.007836544036865234, 0.007877600193023682, 0.007897088050842285, 0.007868383884429932, 0.007850016117095947, 0.007814720153808593, 0.007857823848724365, 0.007762656211853027, 0.007685311794281006, 0.007727551937103272, 0.007753151893615723, 0.0077727680206298826, 0.00784015989303589, 0.00815120029449463, 0.008009056091308593, 0.00781763219833374, 0.00784764814376831, 0.007882719993591308, 0.007878880023956298, 0.007932096004486085, 0.00791756820678711, 0.007723231792449951, 0.007703487873077393, 0.007704895973205566, 0.007770431995391846, 0.007866591930389404, 0.00782047986984253, 0.0077543997764587405, 0.007894591808319092, 0.007831168174743653, 0.0077465281486511234, 0.007755775928497314, 0.007817215919494629, 0.007737343788146973, 0.007747935771942139, 0.007797632217407226, 0.007819263935089112, 0.007798687934875488, 0.007908383846282958, 0.007841631889343262, 0.007712768077850342, 0.007745728015899658, 0.008046560287475586, 0.00791100788116455, 0.008129856109619141, 0.007961535930633545, 0.00790937614440918, 0.007935808181762696, 0.007866591930389404, 0.0078123841285705565, 0.007772863864898682, 0.007827455997467042, 0.007839744091033935, 0.007824895858764648, 0.007842304229736329, 0.008016032218933105, 0.007837535858154297, 0.007786496162414551, 0.007956352233886719, 0.00785145616531372, 0.007736095905303955, 0.007405087947845459, 0.007787072181701661, 0.007788544178009033, 0.007798783779144287, 0.007770080089569092, 0.0077292799949646, 0.007798463821411133, 0.007762432098388672, 0.007810783863067627, 0.007710720062255859, 0.00782528018951416, 0.007990623950958252, 0.008059167861938476, 0.00793446397781372, 0.007923327922821044, 0.007878880023956298, 0.007798912048339844, 0.007675168037414551, 0.007778495788574219, 0.007753376007080078, 0.007730016231536865, 0.007671872138977051, 0.007751679897308349, 0.007726687908172608, 0.007960000038146973, 0.007699423789978027, 0.00752566385269165, 0.007525055885314941, 0.007544864177703858, 0.007568480014801026, 0.007590816020965576, 0.007597631931304931, 0.007670207977294922, 0.007741439819335938, 0.007714047908782959, 0.007887712001800537, 0.007761824131011963, 0.008017919540405273, 0.007781568050384521, 0.007834208011627198, 0.00821065616607666, 0.007835968017578125, 0.0077777280807495115, 0.007731455802917481, 0.007716959953308106, 0.007682144165039062, 0.00773305606842041, 0.007868224143981934, 0.007725247859954834, 0.007638463973999024, 0.007845695972442626, 0.007799680233001709, 0.007686016082763672, 0.007949696063995362, 0.007742080211639404, 0.007712768077850342, 0.0077926721572875975, 0.007869696140289307, 0.007682784080505371, 0.007673439979553223, 0.00785868787765503, 0.007772064208984375, 0.007666816234588623, 
0.007342847824096679, 0.007700160026550293, 0.007823359966278077, 0.0077916159629821775, 0.007788832187652588, 0.007819488048553466, 0.007760384082794189, 0.0077578239440917966, 0.007722239971160888, 0.007709440231323242, 0.00780511999130249, 0.007669119834899902, 0.007852735996246338, 0.007764063835144043, 0.007827136039733886, 0.007921504020690919, 0.008564767837524414, 0.007917664051055907, 0.008093024253845215, 0.007893663883209228, 0.007790624141693115, 0.007730751991271973, 0.007737855911254882, 0.007751584053039551, 0.007862271785736084, 0.007898560047149658, 0.007868991851806641, 0.007946400165557862, 0.007847616195678711, 0.007784607887268066, 0.007746912002563476, 0.007770783901214599, 0.007780416011810303, 0.007769375801086426, 0.00784880018234253, 0.007862080097198486, 0.008109472274780273, 0.008100223541259766, 0.007859551906585693, 0.007893887996673584, 0.00788479995727539, 0.007846911907196046, 0.00786297607421875, 0.00784015989303589, 0.007887104034423827, 0.00787830400466919, 0.007853407859802247, 0.00791593599319458, 0.007861631870269776, 0.007895391941070556, 0.007930399894714356, 0.007874559879302979, 0.00786636781692505, 0.008078592300415039, 0.00793062400817871, 0.007788832187652588, 0.008096896171569824, 0.007764607906341553, 0.007735007762908935, 0.007769408226013183, 0.007897312164306641, 0.007851903915405273, 0.007842336177825928, 0.007569439888000488, 0.007991360187530518, 0.007911392211914062, 0.007910560131072998, 0.00787446403503418, 0.007846271991729737, 0.007887360095977783, 0.007818272113800049, 0.007862592220306396, 0.007865024089813233, 0.00784774398803711, 0.007841951847076416, 0.007884479999542237, 0.007881023883819581, 0.007833504199981689, 0.008116095542907716, 0.007901663780212403, 0.007849984169006348, 0.00794598388671875, 0.007913343906402588, 0.007932032108306885, 0.007872543811798095, 0.007839295864105224, 0.007840447902679444, 0.007925471782684327, 0.007899136066436767, 0.00793724822998047, 0.007953279972076416, 0.007908895969390869, 0.007917952060699463, 0.00812217617034912, 0.00800812816619873, 0.008545760154724122, 0.009316767692565918, 0.011201472282409667, 0.008290687561035156, 0.007871039867401123, 0.007915584087371827, 0.008103872299194336, 0.007878655910491944, 0.00787279987335205, 0.007843776226043701, 0.007882527828216553, 0.007819263935089112, 0.00826153564453125, 0.007923232078552247, 0.00785260820388794, 0.007824607849121094, 0.007801439762115479, 0.007796927928924561, 0.007897088050842285, 0.00800767993927002, 0.007968768119812012, 0.007897088050842285, 0.00781654405593872, 0.007906976222991944, 0.007834688186645507, 0.007794112205505371, 0.00815385627746582, 0.007955615997314452, 0.007926527976989745, 0.007848063945770263, 0.007818975925445556, 0.007473152160644531, 0.007839744091033935, 0.007881728172302246, 0.007860799789428712, 0.007919936180114747, 0.00790335988998413, 0.007964288234710694, 0.007951935768127442, 0.007890048027038575, 0.007943647861480713, 0.007864223957061768, 0.007884607791900635, 0.008178175926208496, 0.007842048168182373, 0.00798854398727417, 0.007892960071563721, 0.007848415851593018, 0.007767807960510254, 0.007733759880065918, 0.00787395191192627, 0.007790688037872314, 0.007859551906585693, 0.007754464149475098, 0.007701951980590821, 0.00792784023284912, 0.007753983974456787, 0.007758175849914551, 0.007758111953735352, 0.007714655876159668, 0.0077292160987854006, 0.007706560134887695, 0.007642208099365234, 0.007676928043365478, 0.007629759788513183, 0.007721951961517334, 0.007771423816680908, 
0.007661344051361084, 0.0075887999534606934, 0.0077209601402282715, 0.007708672046661377, 0.007694176197052002, 0.007704671859741211, 0.007833663940429687, 0.007786496162414551, 0.007708384037017822, 0.007772607803344727, 0.007958208084106445, 0.0077890558242797855, 0.0077247037887573245, 0.007597824096679687, 0.007760128021240234, 0.007747583866119385, 0.007710527896881103, 0.0077131838798522945, 0.00769977617263794, 0.007599743843078613, 0.007758687973022461, 0.007651328086853027, 0.0077142720222473146, 0.007727327823638916, 0.007790304183959961, 0.007680607795715332, 0.007813375949859619, 0.007430592060089111, 0.00782044792175293, 0.007815904140472412, 0.007804927825927735, 0.007770112037658691, 0.007780416011810303, 0.007784383773803711, 0.007718463897705078, 0.007696095943450928, 0.007714784145355225, 0.007725376129150391, 0.007716832160949707, 0.007710944175720215, 0.007854080200195313, 0.007840320110321045, 0.007761600017547607, 0.007806655883789063, 0.007708672046661377, 0.007765535831451416, 0.00798799991607666, 0.007826784133911132, 0.007813439846038819, 0.007750048160552979, 0.007665631771087647, 0.0077142400741577145, 0.007702591896057129, 0.007758304119110108, 0.007659840106964111, 0.007684031963348389, 0.007740799903869629, 0.00775548791885376, 0.008061599731445313, 0.00820751953125, 0.009003328323364259, 0.008608223915100098, 0.007802944183349609, 0.007810592174530029, 0.007918047904968261, 0.007972864151000977, 0.007819263935089112, 0.007639039993286132, 0.007673984050750733, 0.007679999828338623, 0.007730400085449219, 0.00766428804397583, 0.007763967990875244, 0.007681471824645996, 0.007733823776245118, 0.008302495956420899, 0.009394271850585938, 0.008951040267944336, 0.007870304107666015, 0.007781280040740967, 0.00769155216217041, 0.007660255908966064, 0.008093152046203613, 0.007631392002105713, 0.007569632053375244, 0.007647007942199707, 0.007663616180419922, 0.007635295867919922, 0.007622303962707519, 0.007632895946502686, 0.007284639835357666, 0.007592031955718994, 0.00755238389968872, 0.007570047855377197, 0.00757696008682251, 0.007605088233947754, 0.007613408088684082, 0.0077090878486633305, 0.0076306557655334475, 0.007600736141204834, 0.007687359809875488, 0.007630720138549805, 0.007612607955932617, 0.007702303886413574, 0.007737792015075683, 0.007897632122039795, 0.007736800193786621, 0.007645472049713135, 0.007703968048095703, 0.00768291187286377, 0.0077495999336242676, 0.007751711845397949, 0.007694047927856445, 0.007730495929718017, 0.007788703918457031, 0.007842175960540772, 0.007809184074401856, 0.007771711826324463, 0.007748320102691651, 0.007772160053253174, 0.007776095867156983, 0.008108192443847657, 0.007876480102539062, 0.007750976085662842, 0.00772156810760498, 0.007737567901611328, 0.0077610559463500975, 0.007737984180450439, 0.007632544040679931, 0.007649727821350098, 0.007669919967651367, 0.00775164794921875, 0.007751679897308349, 0.007714816093444824, 0.007765984058380127, 0.007943552017211914, 0.007723167896270752, 0.007657983779907227, 0.007585792064666748, 0.007600128173828125, 0.0076018238067626955, 0.007719264030456543, 0.007700064182281494, 0.007709440231323242, 0.007675551891326905, 0.007939104080200196, 0.0076500802040100095, 0.0076414718627929685, 0.0076839041709899905, 0.007861279964447021, 0.008108736038208008, 0.007725344181060791, 0.0077901120185852055, 0.007383200168609619, 0.00787436819076538, 0.00772435188293457, 0.0078056640625, 0.0077209601402282715, 0.007747712135314941, 0.00772051191329956, 0.007655007839202881, 
0.007641119956970215, 0.007645887851715088, 0.007747583866119385, 0.007835360050201415, 0.007946752071380615, 0.00781440019607544, 0.007888671875, 0.007845759868621826, 0.00787936019897461, 0.008048831939697266, 0.008804351806640624, 0.007868415832519531, 0.007844863891601562, 0.007783711910247803, 0.008091679573059082, 0.008189472198486328, 0.007819104194641113, 0.008029919624328613, 0.007866015911102295, 0.007785408020019531, 0.007821311950683594, 0.007913472175598145, 0.007884736061096192, 0.007930016040802002, 0.008144800186157226, 0.007868031978607177, 0.007877024173736572, 0.007868383884429932, 0.00780083179473877, 0.008671072006225586, 0.007874720096588135, 0.00788479995727539, 0.007841792106628418, 0.007856128215789794, 0.007802527904510498, 0.007784959793090821, 0.007847616195678711, 0.008024224281311035, 0.007728672027587891, 0.007727231979370117, 0.007890655994415283, 0.007807968139648437, 0.0077816638946533205, 0.007696320056915283, 0.00771888017654419, 0.0075977277755737305, 0.00751855993270874, 0.007645823955535889, 0.007665408134460449, 0.007684192180633545, 0.007614463806152344, 0.007700479984283447, 0.0076574721336364745, 0.007560224056243897, 0.0075747199058532715]",tokens/s,127.09154182392741,,, 4bit-awq-gemv-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,MB,829.468672,666.763264,0.0,281.018368,267.897856,s,1,9.6287314453125,9.6287314453125,0.0,9.6287314453125,9.6287314453125,9.6287314453125,9.6287314453125,[9.6287314453125],,kWh,1.5180897170833417e-05,1.6670602500036167e-06,5.1775041419997204e-06,2.2025461562836755e-05,,MB,1245.851648,775.815168,0.0,360.710144,344.082944,s,20,0.19236044788360598,0.009618022394180298,0.0006343652001617168,0.009499328136444092,0.00973256597518921,0.009938745546340944,0.01181761132240295,"[0.00972339153289795, 0.009383296012878418, 0.009815135955810546, 0.009310144424438477, 0.012287327766418457, 0.009464415550231933, 0.009556639671325683, 0.00952787208557129, 0.00953542423248291, 0.009400128364562988, 0.009319071769714355, 0.00955673599243164, 0.009577312469482423, 0.009176992416381835, 0.009283679962158203, 0.009197855949401855, 0.009470784187316894, 0.00964476776123047, 0.009682784080505371, 0.009446687698364258]",tokens/s,26616.698267920572,kWh,2.7255625332243883e-07,3.00581677804388e-08,1.4423784752007436e-07,4.46852268622952e-07,tokens/kWh,572896274.6209294,MB,1292.484608,800.980992,0.0,385.875968,344.085504,s,20,9.904938964843751,0.49524694824218746,0.005486446484585594,0.4955020294189453,0.5022049835205079,0.5030523483276367,0.5069318319702149,"[0.49549691772460935, 0.4981065368652344, 0.4927342529296875, 0.5006013488769532, 0.5021391906738282, 0.502797119140625, 0.4972896728515625, 0.49019790649414063, 0.5079017028808593, 0.4962594909667969, 0.4973736572265625, 0.49428482055664064, 0.4963424072265625, 0.4940211181640625, 0.4881112365722656, 0.4848570556640625, 0.48676266479492186, 0.4906120300292969, 
0.49354269409179685, 0.49550714111328126]",tokens/s,127.20926443587395,kWh,1.3767582652233182e-05,1.5183167232338346e-06,5.375621065718039e-06,2.0661520441185052e-05,tokens/kWh,3049146.3674871065,,s,1260,9.892112445831302,0.007850882893516903,0.0003436467944562197,0.007813823938369751,0.008005893993377686,0.008149097681045532,0.010008320159912116,"[0.007663072109222412, 0.0079202880859375, 0.007790592193603516, 0.007812767982482911, 0.007841824054718017, 0.007864672183990479, 0.007780096054077148, 0.007780576229095459, 0.00788809585571289, 0.007764768123626709, 0.007952256202697754, 0.007852159976959228, 0.007825407981872558, 0.007861311912536622, 0.007940576076507569, 0.009740768432617187, 0.010504192352294921, 0.010383359909057617, 0.007989408016204834, 0.007757887840270996, 0.007806655883789063, 0.007806464195251465, 0.007788447856903076, 0.007648096084594726, 0.007827295780181884, 0.0077578239440917966, 0.0077471680641174314, 0.007622655868530274, 0.007686560153961182, 0.007620607852935791, 0.007585504055023193, 0.007772448062896728, 0.0077619199752807615, 0.007673855781555176, 0.007617728233337402, 0.007543551921844483, 0.007465023994445801, 0.007481344223022461, 0.007667712211608887, 0.007907328128814697, 0.00749948787689209, 0.007455039978027344, 0.007833407878875733, 0.007561376094818115, 0.007528448104858398, 0.007585279941558838, 0.0077053442001342775, 0.00799887990951538, 0.007666016101837158, 0.007903232097625732, 0.007792064189910889, 0.007567935943603515, 0.007573728084564209, 0.007737120151519775, 0.0078063678741455075, 0.007698272228240967, 0.007719679832458496, 0.007708159923553467, 0.007830143928527832, 0.0077064957618713376, 0.007757760047912597, 0.007789951801300049, 0.0077617278099060055, 0.007423935890197754, 0.007898367881774903, 0.007826144218444825, 0.0078787522315979, 0.007958528041839599, 0.00783564805984497, 0.007885151863098145, 0.007972256183624267, 0.00795468807220459, 0.00789299201965332, 0.007929855823516846, 0.008148672103881836, 0.00783785581588745, 0.00782537603378296, 0.007917952060699463, 0.00791487979888916, 0.007901631832122802, 0.00786243200302124, 0.00789692783355713, 0.007886847972869874, 0.008230239868164062, 0.008174240112304687, 0.007844031810760498, 0.007929855823516846, 0.007909440040588378, 0.007994656085968017, 0.007814879894256593, 0.007798943996429443, 0.00782806396484375, 0.00781660795211792, 0.007763775825500488, 0.007812064170837402, 0.007837503910064698, 0.007903232097625732, 0.007878655910491944, 0.007800672054290771, 0.007765471935272217, 0.007736000061035156, 0.007785920143127441, 0.007766592025756836, 0.008085503578186035, 0.007929599761962891, 0.007786399841308593, 0.007800576210021973, 0.007756447792053223, 0.007779551982879639, 0.007873248100280762, 0.007999167919158936, 0.00794598388671875, 0.007872223854064941, 0.007929728031158448, 0.00796291208267212, 0.007828447818756103, 0.007892704010009766, 0.007806975841522217, 0.007755775928497314, 0.0077844481468200685, 0.0077552638053894046, 0.008049152374267577, 0.007860320091247559, 0.007918784141540527, 0.008383296012878417, 0.008642592430114747, 0.007518239974975586, 0.007935808181762696, 0.007946144104003907, 0.008196415901184083, 0.00794211196899414, 0.007860223770141601, 0.007899136066436767, 0.007996575832366943, 0.007928671836853028, 0.007860223770141601, 0.007860000133514405, 0.007942368030548095, 0.007872511863708496, 0.007831776142120361, 0.00806275177001953, 0.007842944145202637, 0.008012703895568848, 0.007955520153045655, 0.007845056056976319, 0.007820256233215332, 
0.007857920169830323, 0.007838592052459717, 0.007835775852203368, 0.007790592193603516, 0.007915679931640624, 0.00796892786026001, 0.007697247982025147, 0.007734111785888672, 0.007839744091033935, 0.007755136013031006, 0.007733888149261474, 0.007765632152557373, 0.007817759990692138, 0.008005472183227539, 0.007842976093292237, 0.00783839988708496, 0.007812704086303711, 0.007844287872314453, 0.007769919872283935, 0.007788864135742188, 0.007867904186248779, 0.007791103839874268, 0.00769593620300293, 0.007719071865081787, 0.007689792156219483, 0.0075803837776184085, 0.0075632638931274416, 0.007582880020141601, 0.007616576194763184, 0.007942944049835206, 0.007625919818878174, 0.007709504127502442, 0.00769977617263794, 0.007628543853759766, 0.007705535888671875, 0.007745535850524903, 0.0077099518775939945, 0.007631616115570068, 0.007648447990417481, 0.007772448062896728, 0.00798364782333374, 0.007818880081176758, 0.007802624225616455, 0.007413760185241699, 0.008410335540771485, 0.007942143917083741, 0.007934751987457276, 0.007968768119812012, 0.007806431770324707, 0.0077972798347473145, 0.008257535934448243, 0.008079360008239746, 0.007899231910705566, 0.007820864200592042, 0.00781660795211792, 0.007826367855072021, 0.008010848045349121, 0.007842720031738281, 0.007910592079162598, 0.007825727939605714, 0.007789184093475341, 0.007763455867767334, 0.007762303829193115, 0.007804128170013427, 0.007815072059631348, 0.00784496021270752, 0.007808544158935547, 0.007978464126586914, 0.007869376182556152, 0.007986656188964844, 0.007941664218902588, 0.007975776195526123, 0.00783564805984497, 0.007841983795166015, 0.00785584020614624, 0.007751776218414307, 0.007745120048522949, 0.007731616020202637, 0.0082423677444458, 0.008134976387023925, 0.007930335998535156, 0.007939199924468994, 0.008029088020324707, 0.007920832157135009, 0.007988031864166259, 0.007809184074401856, 0.007786335945129394, 0.007806975841522217, 0.00780083179473877, 0.007773439884185791, 0.007856287956237794, 0.007780672073364258, 0.007818560123443603, 0.00783846378326416, 0.008024127960205078, 0.007831711769104004, 0.007913472175598145, 0.007996543884277344, 0.007881472110748291, 0.007770112037658691, 0.00790057611465454, 0.008255776405334472, 0.008163776397705078, 0.007946112155914307, 0.008925408363342284, 0.00921177577972412, 0.00997158432006836, 0.008089216232299804, 0.007853600025177002, 0.007870368003845215, 0.007983200073242188, 0.007811935901641845, 0.00791756820678711, 0.007882559776306152, 0.008065216064453125, 0.00783561611175537, 0.007764224052429199, 0.007816991806030273, 0.00835590362548828, 0.007826879978179931, 0.008279647827148438, 0.007875232219696045, 0.007758304119110108, 0.007812448024749756, 0.007823808193206787, 0.00783571195602417, 0.007890880107879639, 0.008160639762878417, 0.008014464378356933, 0.007860544204711915, 0.008009407997131348, 0.007800608158111572, 0.007995935916900634, 0.007890624046325683, 0.00783519983291626, 0.00786681604385376, 0.007890495777130127, 0.007876895904541015, 0.00796073579788208, 0.007924895763397216, 0.007920767784118652, 0.007917183876037598, 0.007890143871307372, 0.007859263896942139, 0.007935935974121094, 0.007913343906402588, 0.007937151908874512, 0.00811302375793457, 0.007900896072387696, 0.007870751857757569, 0.007952288150787353, 0.007954783916473388, 0.00794707202911377, 0.007975200176239013, 0.007938560009002685, 0.00785529613494873, 0.007881855964660645, 0.007880064010620117, 0.007886559963226318, 0.007862271785736084, 0.007899424076080323, 0.007929855823516846, 
0.007944704055786133, 0.007934207916259766, 0.007866015911102295, 0.007903295993804932, 0.008218751907348634, 0.007968639850616455, 0.008011391639709473, 0.007602176189422607, 0.007878655910491944, 0.007886847972869874, 0.00786246395111084, 0.007948095798492432, 0.007950560092926025, 0.007907104015350342, 0.007854080200195313, 0.0081080322265625, 0.00790937614440918, 0.007913472175598145, 0.007795839786529541, 0.007807199954986572, 0.007864992141723632, 0.007927807807922363, 0.007855135917663575, 0.008332256317138673, 0.007907328128814697, 0.008265727996826172, 0.008025343894958497, 0.007993216037750245, 0.008024191856384278, 0.007910143852233887, 0.008469856262207032, 0.007992159843444825, 0.007886208057403565, 0.008005855560302734, 0.007852255821228027, 0.007831456184387207, 0.007906879901885986, 0.00786624002456665, 0.007806719779968262, 0.007787583827972412, 0.008007519721984864, 0.00790067195892334, 0.008091648101806641, 0.007988800048828125, 0.008003775596618651, 0.008006239891052246, 0.00792294406890869, 0.007890944004058837, 0.007994048118591309, 0.007956416130065917, 0.008447999954223634, 0.008157183647155761, 0.008470815658569336, 0.00777129602432251, 0.008364895820617675, 0.008409024238586426, 0.007983168125152589, 0.007933792114257812, 0.00809785556793213, 0.007944096088409423, 0.007954016208648681, 0.007983391761779784, 0.007889472007751464, 0.007912767887115478, 0.007862720012664795, 0.007867648124694824, 0.007815072059631348, 0.007696352005004883, 0.007658207893371582, 0.008100000381469727, 0.010228511810302734, 0.010290623664855957, 0.007780159950256348, 0.007861023902893066, 0.0078089919090271, 0.007818367958068848, 0.007875807762145996, 0.007853824138641358, 0.007840832233428956, 0.008162240028381347, 0.007821216106414794, 0.007925055980682372, 0.007751423835754395, 0.00770249605178833, 0.007902527809143066, 0.00786620807647705, 0.00788979196548462, 0.007794847965240478, 0.007786687850952149, 0.007733280181884765, 0.007813695907592774, 0.007737664222717286, 0.0078089919090271, 0.00782099199295044, 0.007898591995239257, 0.007742303848266602, 0.00775980806350708, 0.007816991806030273, 0.00786236810684204, 0.007747488021850586, 0.007731200218200684, 0.007769728183746338, 0.007706399917602539, 0.007728896141052246, 0.007742303848266602, 0.007655424118041992, 0.00760972785949707, 0.007815807819366456, 0.007727104187011719, 0.007731296062469482, 0.007779327869415284, 0.007723584175109864, 0.007989535808563233, 0.0078439040184021, 0.00785203218460083, 0.007852287769317626, 0.00777132797241211, 0.007755807876586914, 0.007713312149047851, 0.00774348783493042, 0.007862271785736084, 0.007686143875122071, 0.007755775928497314, 0.007749631881713868, 0.007860223770141601, 0.007967967987060547, 0.007900224208831788, 0.007829216003417969, 0.007794688224792481, 0.007738848209381103, 0.007868095874786377, 0.007785568237304688, 0.007895040035247802, 0.0073324480056762694, 0.007654560089111328, 0.0075929279327392575, 0.007681951999664306, 0.007693535804748535, 0.0077909440994262695, 0.007614304065704345, 0.007678112030029297, 0.007695072174072266, 0.007737120151519775, 0.0076165437698364254, 0.007875648021697998, 0.00795472002029419, 0.0077584958076477055, 0.00781932783126831, 0.007776351928710937, 0.007847775936126709, 0.0078067522048950195, 0.007836959838867187, 0.007862336158752441, 0.007827455997467042, 0.007752927780151367, 0.007960224151611328, 0.007863935947418212, 0.007913856029510499, 0.008103103637695312, 0.007940991878509522, 0.007917535781860352, 0.007831039905548096, 
0.007836128234863281, 0.008034463882446288, 0.007818751811981202, 0.007798175811767578, 0.007815839767456055, 0.00768233585357666, 0.0075504322052001955, 0.007611167907714844, 0.0077760000228881835, 0.007710464000701904, 0.007608575820922852, 0.007571104049682617, 0.007524703979492188, 0.007532095909118653, 0.007715040206909179, 0.007700543880462646, 0.007600287914276123, 0.007634943962097168, 0.007628799915313721, 0.0077578239440917966, 0.007602176189422607, 0.007708864212036133, 0.007855040073394775, 0.0076624641418457035, 0.00795420789718628, 0.007950560092926025, 0.007888895988464355, 0.00794985580444336, 0.007834176063537597, 0.007907104015350342, 0.007880832195281982, 0.007855199813842773, 0.007868415832519531, 0.007994559764862061, 0.007500095844268799, 0.008073408126831055, 0.007999263763427734, 0.007971136093139649, 0.010618559837341309, 0.010504192352294921, 0.00799129581451416, 0.007960576057434082, 0.008517536163330078, 0.007997536182403564, 0.00793782377243042, 0.008644831657409669, 0.008068479537963868, 0.00802086353302002, 0.008007424354553223, 0.0079268798828125, 0.007994272232055665, 0.007929535865783691, 0.008173439979553223, 0.007997536182403564, 0.00795084810256958, 0.007954271793365478, 0.007921311855316162, 0.008087936401367188, 0.0079584641456604, 0.00792784023284912, 0.007964672088623047, 0.007907328128814697, 0.007910880088806152, 0.007928351879119873, 0.007922815799713134, 0.007945087909698487, 0.00788479995727539, 0.00789904022216797, 0.007925856113433837, 0.008017184257507325, 0.00830742359161377, 0.00800153636932373, 0.008023903846740723, 0.007999648094177246, 0.008010784149169921, 0.007994624137878418, 0.007924928188323974, 0.007891488075256348, 0.00786191987991333, 0.007890719890594483, 0.007930431842803955, 0.007898240089416505, 0.00792416000366211, 0.00793235206604004, 0.00783564805984497, 0.00785644817352295, 0.007933536052703857, 0.008082655906677247, 0.007968863964080811, 0.007926559925079346, 0.007907328128814697, 0.007879776000976562, 0.00787779188156128, 0.007883679866790772, 0.00790835189819336, 0.00784284782409668, 0.007813951969146728, 0.007405375957489014, 0.007835999965667724, 0.008142815589904786, 0.007894752025604248, 0.00786198377609253, 0.007805088043212891, 0.007929855823516846, 0.007884384155273438, 0.007882719993591308, 0.00791868782043457, 0.008069024085998536, 0.007810304164886475, 0.008187871932983398, 0.007899807929992677, 0.007882495880126954, 0.008255743980407714, 0.007976960182189942, 0.007835072040557861, 0.007776991844177246, 0.007804927825927735, 0.007778143882751465, 0.007823359966278077, 0.007865856170654297, 0.00783571195602417, 0.007838175773620606, 0.00782755184173584, 0.007880576133728027, 0.007891104221343995, 0.007972832202911378, 0.007822400093078613, 0.008010687828063965, 0.007958399772644042, 0.00787062406539917, 0.007886559963226318, 0.00783903980255127, 0.008100671768188477, 0.007882207870483398, 0.007869311809539795, 0.007897823810577393, 0.007964960098266602, 0.007955103874206544, 0.007885024070739747, 0.00785590410232544, 0.007851520061492919, 0.008036864280700684, 0.0078438401222229, 0.007868192195892333, 0.007864543914794922, 0.007858176231384278, 0.007753568172454834, 0.007726880073547363, 0.0077209601402282715, 0.007747968196868896, 0.007753791809082031, 0.008316191673278808, 0.007766687870025634, 0.0076687679290771485, 0.007762911796569824, 0.007648672103881836, 0.007789279937744141, 0.0077597441673278806, 0.007685664176940918, 0.007657824039459228, 0.007416863918304443, 0.0077424321174621585, 
0.008713631629943848, 0.008604255676269532, 0.007800640106201172, 0.007798655986785889, 0.010604864120483398, 0.010345727920532226, 0.00774015998840332, 0.0077036161422729494, 0.007779263973236084, 0.007768064022064209, 0.007989247798919678, 0.00783516788482666, 0.007768544197082519, 0.0078438401222229, 0.007811071872711181, 0.007849760055541992, 0.00779695987701416, 0.007882751941680909, 0.007749887943267822, 0.007751423835754395, 0.007839295864105224, 0.007727551937103272, 0.007956543922424316, 0.00774345588684082, 0.0076771841049194335, 0.007759871959686279, 0.007639008045196533, 0.0076193280220031735, 0.007620607852935791, 0.0076871681213378906, 0.007641536235809326, 0.007628799915313721, 0.007608160018920899, 0.007713727951049805, 0.007583424091339112, 0.007567455768585205, 0.007679872035980225, 0.007716224193572998, 0.007697152137756348, 0.007740575790405274, 0.007795008182525634, 0.007825503826141358, 0.007876063823699951, 0.007885280132293702, 0.00809443187713623, 0.007911200046539307, 0.007802879810333252, 0.007788544178009033, 0.007872064113616944, 0.007825632095336914, 0.00776358413696289, 0.007761792182922363, 0.007778783798217773, 0.008085760116577148, 0.007895103931427002, 0.007749184131622315, 0.007786240100860596, 0.0077872958183288574, 0.007818751811981202, 0.0077348160743713375, 0.007764351844787598, 0.007409120082855224, 0.007708864212036133, 0.0076457920074462895, 0.007625984191894531, 0.007644095897674561, 0.007625823974609375, 0.00766537618637085, 0.007643648147583008, 0.007678463935852051, 0.007606272220611572, 0.00756499195098877, 0.007643455982208252, 0.007530335903167725, 0.0076464319229125974, 0.007610655784606933, 0.007631328105926514, 0.00762883186340332, 0.007678112030029297, 0.007664991855621338, 0.007930528163909913, 0.007794688224792481, 0.007735167980194092, 0.009314432144165039, 0.007906720161437989, 0.007797344207763672, 0.008218624114990235, 0.008138303756713866, 0.007950784206390381, 0.007968063831329347, 0.007819744110107421, 0.007801055908203125, 0.008112256050109862, 0.00788262414932251, 0.0078063678741455075, 0.00776416015625, 0.00784835195541382, 0.007812960147857666, 0.007747744083404541, 0.008130559921264649, 0.007884448051452636, 0.007827231884002686, 0.007876800060272218, 0.007842175960540772, 0.007809023857116699, 0.007803135871887207, 0.007824543952941895, 0.007791200160980225, 0.007785920143127441, 0.007889599800109863, 0.007892896175384521, 0.00790934419631958, 0.007972864151000977, 0.007940095901489258, 0.007812543869018555, 0.00788643217086792, 0.00785097599029541, 0.008171072006225586, 0.007940896034240722, 0.007941792011260987, 0.008008992195129395, 0.007918303966522216, 0.007847424030303956, 0.007842463970184326, 0.0074973440170288085, 0.007815552234649659, 0.007813119888305664, 0.007840767860412597, 0.007865344047546387, 0.008202239990234375, 0.007886079788208008, 0.00787497615814209, 0.007847551822662353, 0.01065443229675293, 0.010317631721496582, 0.007870272159576416, 0.007819647789001466, 0.007829504013061523, 0.007790815830230713, 0.0077285442352294925, 0.007944575786590576, 0.007763391971588135, 0.007737184047698974, 0.007779039859771728, 0.007811071872711181, 0.007789984226226807, 0.0077707200050354, 0.007651072025299072, 0.00762713623046875, 0.00763862419128418, 0.007587423801422119, 0.007648128032684326, 0.0076817598342895504, 0.0077227201461792, 0.007713247776031494, 0.007580992221832276, 0.007798848152160645, 0.00754307222366333, 0.007614528179168701, 0.007745728015899658, 0.00871014404296875, 0.008159232139587403, 
0.007895040035247802, 0.007792160034179687, 0.00783788776397705, 0.007723296165466308, 0.007825503826141358, 0.007712063789367676, 0.007715424060821533, 0.007818336009979249, 0.007650207996368408, 0.008738816261291504, 0.007712768077850342, 0.007687583923339843, 0.0076204161643981935, 0.007699391841888428, 0.0076696000099182125, 0.007686143875122071, 0.007755775928497314, 0.007741439819335938, 0.007692287921905518, 0.007673439979553223, 0.007600543975830078, 0.007602015972137451, 0.007819424152374268, 0.0077405118942260745, 0.00765225601196289, 0.007348415851593018, 0.007876416206359864, 0.0075998401641845705, 0.007616991996765137, 0.007608320236206055, 0.007501823902130127, 0.007542208194732666, 0.007500351905822754, 0.007467360019683838, 0.008111647605895996, 0.007546144008636475, 0.0077528319358825684, 0.007601215839385987, 0.007781248092651367, 0.0076100797653198245, 0.007540800094604492, 0.007636127948760986, 0.007563488006591797, 0.0076130561828613285, 0.007581056118011475, 0.007504511833190918, 0.007600031852722168, 0.007566592216491699, 0.008112128257751466, 0.007596896171569824, 0.00759603214263916, 0.007834784030914307, 0.007805600166320801, 0.007772255897521973, 0.007661695957183838, 0.007800479888916015, 0.0076776638031005855, 0.0076416640281677245, 0.007536672115325928, 0.007605696201324463, 0.007789120197296142, 0.007571455955505371, 0.0075632638931274416, 0.007705920219421387, 0.007920127868652344, 0.009857215881347657, 0.009547807693481446, 0.008760831832885741, 0.007659999847412109, 0.007700479984283447, 0.007683072090148926, 0.007627359867095947, 0.007669407844543457, 0.007750592231750488, 0.007655231952667236, 0.0076902399063110355, 0.007716864109039307, 0.007966720104217529, 0.007788544178009033, 0.007772384166717529, 0.007705535888671875, 0.00772105598449707, 0.0077803201675415035, 0.007936800003051758, 0.010061183929443359, 0.009454208374023438, 0.007743616104125977, 0.007902080059051514, 0.0073045120239257815, 0.0076154241561889645, 0.007605696201324463, 0.007606847763061524, 0.007945343971252442, 0.007562272071838379, 0.007563104152679443, 0.007507967948913574, 0.007713056087493896, 0.0076953921318054196, 0.007901023864746094, 0.00777507209777832, 0.00789254379272461, 0.01063980770111084, 0.0102456636428833, 0.007802527904510498, 0.007738175868988037, 0.007794112205505371, 0.007740992069244385, 0.0076912641525268555, 0.008126560211181641, 0.008095647811889648, 0.007735648155212402, 0.007740128040313721, 0.00769324779510498, 0.0076267518997192385, 0.007591455936431885, 0.007578271865844727, 0.007643167972564697, 0.007601952075958252, 0.007646975994110107, 0.007608799934387207, 0.007591775894165039, 0.007631968021392823, 0.00763808012008667, 0.007606272220611572, 0.007694111824035644, 0.007491583824157715, 0.007563488006591797, 0.00797379207611084, 0.0076830401420593265, 0.007516064167022705, 0.007573503971099854, 0.007466591835021972, 0.007456480026245117, 0.007459584236145019, 0.0074930558204650876, 0.007628543853759766, 0.007584191799163818, 0.007577919960021972, 0.007597407817840576, 0.007533215999603272, 0.007505248069763184, 0.007559391975402832, 0.007528895854949951, 0.00752019214630127, 0.00747321605682373, 0.007743616104125977, 0.0077064957618713376, 0.007682047843933106, 0.007771520137786865, 0.007625343799591064, 0.007604479789733887, 0.007567488193511963, 0.007731200218200684, 0.007787903785705566, 0.007749472141265869, 0.00774838399887085, 0.007729152202606201, 0.007735104084014893, 0.007766079902648926, 0.00781283187866211, 0.007708479881286621, 
0.007684800148010254, 0.007752895832061768, 0.007885536193847657, 0.007786496162414551, 0.0078106880187988285, 0.007747424125671387, 0.007851967811584473, 0.007634687900543213, 0.007578559875488281, 0.007550784111022949, 0.007682144165039062, 0.007641088008880615, 0.007584959983825684, 0.007639711856842041, 0.007704192161560058, 0.007688064098358155, 0.007707263946533203, 0.007671679973602295, 0.007687839984893799, 0.007721471786499024, 0.007659135818481445, 0.007644896030426025, 0.007887519836425782, 0.0076902079582214355, 0.0075838398933410645, 0.007589216232299805, 0.007544447898864746, 0.007582687854766846, 0.007548927783966064, 0.007623712062835694, 0.0075222721099853515, 0.007488192081451416, 0.007428512096405029, 0.007489439964294434, 0.007620672225952149, 0.007616447925567627, 0.007566463947296142, 0.007757887840270996, 0.007818048000335693, 0.007771967887878418, 0.0076672320365905764, 0.007609151840209961, 0.0075630397796630855, 0.007612480163574219, 0.007646944046020508, 0.0077560958862304685, 0.007720608234405518, 0.007712831974029541, 0.007721216201782227, 0.00799126386642456, 0.007780288219451904, 0.007733344078063965, 0.007941247940063477, 0.007664544105529785, 0.008185055732727051, 0.008608768463134766, 0.008070752143859864, 0.007770207881927491, 0.007992832183837891, 0.00767855978012085, 0.007902207851409913, 0.0078057279586791995, 0.007734687805175782, 0.007709504127502442, 0.00766374397277832, 0.007657567977905274, 0.007714911937713623, 0.00769708776473999, 0.007688511848449707, 0.007832032203674316, 0.007757279872894287, 0.007684512138366699, 0.007723296165466308, 0.007725344181060791, 0.007652703762054443, 0.007717440128326416, 0.00785209608078003, 0.007991327762603759, 0.007967648029327392, 0.007712831974029541, 0.007692543983459472, 0.0077192320823669435, 0.007708928108215332, 0.007538015842437744, 0.007570047855377197, 0.007401440143585205, 0.007450784206390381, 0.007743391990661621, 0.007693759918212891, 0.007668511867523193, 0.0075568962097167965, 0.007614463806152344, 0.0075980801582336424, 0.00759603214263916, 0.007624224185943603, 0.007636544227600098, 0.007654304027557373, 0.007970816135406494, 0.00760211181640625, 0.007915584087371827, 0.007562655925750733, 0.007576511859893799, 0.007482048034667969, 0.007412703990936279, 0.007349728107452392, 0.007346720218658447, 0.007522304058074952, 0.00759603214263916, 0.007723008155822754, 0.007622655868530274, 0.00774348783493042, 0.007780704021453857, 0.007786143779754639, 0.007924992084503174, 0.007771935939788819, 0.007812064170837402, 0.0073786239624023435, 0.007806911945343018, 0.007731488227844238, 0.00768995189666748, 0.007665503978729248, 0.00756547212600708, 0.007640960216522216, 0.007684224128723145, 0.0076186881065368655, 0.007569536209106446, 0.0078023681640625, 0.007702816009521485, 0.007598048210144043, 0.007600128173828125, 0.007771359920501709, 0.007680799961090088, 0.007746848106384277, 0.0077506561279296875, 0.007714144229888916, 0.007778719902038574, 0.00828822422027588, 0.007937632083892822, 0.007917695999145508, 0.0079137601852417, 0.007907135963439942, 0.007839935779571533, 0.007833631992340087, 0.007850272178649902, 0.00786198377609253, 0.007740543842315674, 0.007805791854858399, 0.007933152198791505, 0.007856480121612549, 0.007835135936737061, 0.007730112075805664, 0.007825056076049804, 0.007809375762939453, 0.00774124813079834, 0.00786236810684204, 0.007751584053039551, 0.008145088195800782, 0.007864319801330566, 0.007837503910064698, 0.007917600154876709, 0.007728447914123535, 
0.007756671905517578, 0.007655392169952393, 0.007620607852935791, 0.0077003521919250485, 0.007737343788146973, 0.007839295864105224, 0.007815423965454102, 0.007822944164276122, 0.007749375820159912, 0.007715807914733887, 0.007798399925231934, 0.007692639827728272, 0.007659552097320557, 0.0076771841049194335, 0.008061696052551269, 0.00794758415222168, 0.007779007911682129, 0.007732895851135254, 0.007422080039978027, 0.007862783908843995, 0.007956639766693115, 0.007886591911315919, 0.007833759784698486, 0.007769567966461182, 0.007797056198120117, 0.0077907519340515135, 0.0076876158714294434, 0.007785215854644776, 0.007710527896881103, 0.00773529577255249, 0.007783904075622559, 0.007788735866546631, 0.007893343925476075, 0.007995391845703125, 0.007926976203918457, 0.008134752273559571, 0.00798790407180786, 0.00796777582168579, 0.007875135898590088, 0.007941952228546142, 0.007793280124664306, 0.007921152114868164, 0.00793398380279541, 0.007866847991943359, 0.00782697582244873, 0.007872032165527345, 0.007734208106994629, 0.00785103988647461, 0.007805920124053955, 0.007842912197113036, 0.007822144031524658, 0.007802976131439209, 0.008071071624755859, 0.007884223937988282, 0.00787113618850708, 0.007833695888519288, 0.007802783966064453, 0.0077305278778076175, 0.007688543796539307, 0.0076817917823791505, 0.007692512035369873, 0.007805280208587647, 0.007817215919494629, 0.007747712135314941, 0.007832736015319823, 0.007850719928741454, 0.007802879810333252, 0.007749631881713868, 0.008091039657592773, 0.00788646411895752, 0.007725088119506836, 0.007742656230926514, 0.00772435188293457, 0.007747647762298584, 0.007739327907562256, 0.007716991901397705, 0.007754047870635986, 0.007696383953094482, 0.008067071914672852, 0.007771840095520019, 0.0077760319709777835, 0.007587520122528077, 0.007936607837677002, 0.007925504207611084, 0.007878111839294434, 0.007809023857116699, 0.00786246395111084, 0.007834112167358399, 0.00785584020614624, 0.007844223976135253, 0.007874303817749024, 0.007915296077728272, 0.007866975784301757, 0.0078089280128479, 0.007856095790863037, 0.008140543937683105, 0.008014080047607421, 0.007815167903900147, 0.00785423994064331, 0.007806816101074219, 0.007768256187438965, 0.007781472206115723, 0.007729887962341309, 0.00786959981918335, 0.007815872192382813, 0.007895199775695801, 0.007870592117309571, 0.007870336055755616, 0.00787772798538208, 0.00811680030822754, 0.008073568344116211, 0.007833600044250488, 0.007804927825927735, 0.0077023677825927735, 0.007673823833465576, 0.007675648212432861, 0.0077216320037841795, 0.007764800071716309, 0.007676896095275879, 0.007671999931335449, 0.00774124813079834, 0.007667712211608887, 0.007956480026245117, 0.007888512134552002, 0.007913536071777343, 0.007897215843200684, 0.007887392044067383, 0.007881440162658692, 0.007932064056396484, 0.007930655956268311, 0.007895040035247802, 0.007979008197784423, 0.00796447992324829, 0.007917471885681152, 0.00797648000717163, 0.008012831687927246, 0.007820608139038086, 0.007678368091583252, 0.0077578239440917966, 0.00782863998413086, 0.007826272010803223, 0.007888895988464355, 0.007874783992767333, 0.008054559707641601]",tokens/s,127.37420918936128,,, 
4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 651, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 423, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 664, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Qwen/Qwen-7B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-7B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = 
launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,3143.491584,4639.883264,0.0,4244.635648,4125.520384,s,1,11.9792587890625,11.9792587890625,0.0,11.9792587890625,11.9792587890625,11.9792587890625,11.9792587890625,[11.9792587890625],,kWh,0.00014281466581666488,1.574390353824077e-05,5.3873098654011464e-05,0.00021243166800891713,,MB,3187.0976,4813.94688,0.0,4404.0192,4310.797312,s,10,1.1434629516601562,0.11434629516601562,0.00016764718123132437,0.11430782318115235,0.1145461784362793,0.11459340934753418,0.11463119407653809,"[0.11464064025878906, 0.1145203857421875, 0.11453568267822266, 0.11433599853515625, 0.11418809509277343, 0.11411901092529297, 0.11417081451416015, 0.1144258270263672, 0.11427964782714843, 0.1142468490600586]",tokens/s,2238.8132438250145,kWh,3.4241915051362132e-06,3.7762772297003015e-07,2.2542459894419778e-06,6.056065217548221e-06,tokens/kWh,42271671.58936918,MB,3191.300096,4816.044032,0.0,4406.116352,4310.799872,s,10,22.40705126953125,2.2407051269531246,0.01225969367185595,2.2395146484375,2.2519275390624998,2.259535791015625,2.265622392578125,"[2.243108154296875, 2.26714404296875, 2.25023681640625, 2.238847412109375, 2.239515625, 2.22377685546875, 2.222224609375, 2.235335205078125, 2.247348876953125, 2.239513671875]",tokens/s,28.116149350569124,kWh,6.422840164111306e-05,7.084238282746035e-06,3.3550254876357245e-05,0.00010486289480021634,tokens/kWh,600784.4826335085,,s,630,22.40389617156981,0.03556173995487274,0.0011464844853278735,0.03544276809692383,0.03588814811706543,0.036251442718505854,0.03793366279602052,"[0.03651932907104492, 0.035850849151611325, 0.03918447875976563, 0.035675838470458986, 0.03560620880126953, 0.035442623138427734, 0.03555145645141602, 0.03538927841186523, 0.035374942779541015, 0.03515667343139649, 0.03546316909790039, 0.03536844635009766, 0.0354411506652832, 0.035438591003417966, 0.03575603103637695, 0.03582361602783203, 0.03530697631835938, 0.03554483032226562, 0.035240062713623045, 0.03538332748413086, 0.035312255859375, 0.03546665573120117, 0.036733535766601565, 0.035608417510986326, 0.0355101432800293, 0.03543888092041016, 0.03530255889892578, 0.035668830871582034, 0.035518081665039065, 0.03567177581787109, 0.035582622528076174, 0.035483230590820314, 0.03551478576660156, 0.0357498893737793, 0.035422206878662106, 0.03555737686157227, 0.03540172958374024, 0.035456321716308595, 0.03542086410522461, 0.0353950080871582, 0.035574337005615235, 0.03539046478271484, 0.035714046478271484, 0.035464256286621094, 0.035703136444091794, 0.035412574768066404, 0.03551027297973633, 0.03536281585693359, 0.035434337615966795, 0.0354428482055664, 0.03650547027587891, 0.03544076919555664, 0.03534793472290039, 0.035374847412109375, 0.035526592254638674, 0.0354947509765625, 0.03556556701660156, 0.0358109130859375, 0.03537097549438477, 0.03529561614990234, 0.035538593292236326, 0.035601024627685544, 0.03519382476806641, 
0.036413345336914066, 0.035544734954833984, 0.03553580856323242, 0.03579904174804688, 0.03585152053833008, 0.0370445442199707, 0.03593875122070313, 0.06162636947631836, 0.03573964691162109, 0.03575356674194336, 0.035598751068115234, 0.03659932708740234, 0.03576675033569336, 0.035950592041015625, 0.03573168182373047, 0.03546495819091797, 0.03563932800292969, 0.03566284942626953, 0.03549622344970703, 0.035524703979492187, 0.03518323135375977, 0.0350904312133789, 0.03521094512939453, 0.03521955108642578, 0.035319393157958984, 0.03522732925415039, 0.03551728057861328, 0.03550585556030274, 0.03547177505493164, 0.03544678497314453, 0.03559219360351563, 0.03560009765625, 0.036042400360107425, 0.03571913528442383, 0.035541664123535155, 0.03555123138427734, 0.03566796875, 0.03534364700317383, 0.035438591003417966, 0.035437217712402345, 0.03563248062133789, 0.035477310180664065, 0.03521628952026367, 0.03530752182006836, 0.035244033813476565, 0.03552179336547852, 0.03573763275146485, 0.03537753677368164, 0.03522524642944336, 0.03530207824707031, 0.03523993682861328, 0.03533580780029297, 0.03557756805419922, 0.03546588897705078, 0.035342464447021486, 0.035523681640625, 0.035377857208251956, 0.03525846481323242, 0.03538473510742188, 0.03571363067626953, 0.035823646545410155, 0.03546646499633789, 0.03546092987060547, 0.03595775985717774, 0.03531465530395508, 0.03604006576538086, 0.035307296752929686, 0.03528793716430664, 0.03610009765625, 0.035748992919921875, 0.035326847076416014, 0.03514518356323242, 0.035653537750244144, 0.03534214401245117, 0.03565241622924804, 0.03538489532470703, 0.03544313430786133, 0.035901439666748046, 0.035383201599121096, 0.035170398712158206, 0.0355491828918457, 0.035068958282470704, 0.0352239990234375, 0.0353408317565918, 0.035491199493408204, 0.03538188934326172, 0.03558720016479492, 0.03543081665039063, 0.035546878814697265, 0.03537583923339844, 0.0356495361328125, 0.03543654251098633, 0.03556966400146484, 0.036420894622802735, 0.03589177703857422, 0.0354439697265625, 0.03619318389892578, 0.03560857772827149, 0.03585539245605469, 0.03826937484741211, 0.03554668807983399, 0.03557993698120117, 0.035875232696533206, 0.035797534942626955, 0.03577622222900391, 0.03544707107543945, 0.03635577774047852, 0.03550396728515625, 0.03568483352661133, 0.03552870559692383, 0.035722496032714844, 0.035385982513427734, 0.0357367057800293, 0.03563849639892578, 0.03581219100952148, 0.03577148818969727, 0.03592585754394531, 0.03551846313476562, 0.03577036666870117, 0.03800668716430664, 0.03598140716552734, 0.03585433578491211, 0.03565363311767578, 0.03575603103637695, 0.03612992095947266, 0.0356545295715332, 0.0364101448059082, 0.03558732986450195, 0.03541823959350586, 0.035396095275878905, 0.03551859283447266, 0.03548364639282227, 0.035366912841796876, 0.03626803207397461, 0.0356495361328125, 0.03564271926879883, 0.03602908706665039, 0.03562905502319336, 0.035514110565185546, 0.03597558212280273, 0.03613679885864258, 0.035833854675292966, 0.03529523086547852, 0.03544598388671875, 0.03530575942993164, 0.0354411506652832, 0.03565283203125, 0.03560323333740235, 0.035520511627197264, 0.0356126708984375, 0.03550822448730469, 0.03564892959594727, 0.03701148986816406, 0.03565011215209961, 0.03531161499023437, 0.03548303985595703, 0.035839935302734376, 0.03554780960083008, 0.03520105743408203, 0.03684473419189453, 0.035226432800292966, 0.03539673614501953, 0.035133472442626955, 0.03514860916137695, 0.03509785461425781, 0.03518940734863281, 0.03531897735595703, 0.035146656036376955, 
0.03517440032958984, 0.035166206359863283, 0.03522528076171875, 0.03493510437011719, 0.03486105728149414, 0.03511852645874024, 0.03526076889038086, 0.035425567626953126, 0.035414974212646486, 0.03570278549194336, 0.035381248474121094, 0.03553484725952148, 0.035418113708496096, 0.03539785766601562, 0.03545577621459961, 0.03575081634521484, 0.03560198211669922, 0.03585612869262695, 0.035523006439208984, 0.03547580718994141, 0.03540563201904297, 0.03639014434814453, 0.03567030334472656, 0.03817929458618164, 0.035677375793457033, 0.03548275375366211, 0.03568576049804688, 0.03557414245605469, 0.035403713226318356, 0.03523993682861328, 0.035999744415283204, 0.03643116760253906, 0.03557369613647461, 0.03532262420654297, 0.03544063949584961, 0.03522355270385742, 0.03539052963256836, 0.036979904174804686, 0.035332927703857424, 0.03526067352294922, 0.03523244857788086, 0.03530259323120117, 0.035251007080078126, 0.03525836944580078, 0.03536281585693359, 0.03549184036254883, 0.03527884674072266, 0.035168254852294925, 0.03553484725952148, 0.03559004974365235, 0.03545471954345703, 0.0356130256652832, 0.035438591003417966, 0.035618976593017576, 0.035636577606201175, 0.035291648864746096, 0.035415775299072264, 0.035364768981933595, 0.03535305786132813, 0.0354299201965332, 0.03542240142822266, 0.03553507232666016, 0.035544544219970706, 0.035660289764404295, 0.03555648040771484, 0.03553068923950195, 0.035496673583984374, 0.036357376098632814, 0.03562700653076172, 0.03552355194091797, 0.03553484725952148, 0.03553814315795899, 0.03525302505493164, 0.03562627029418945, 0.035465953826904296, 0.03522355270385742, 0.035198974609375, 0.035190910339355466, 0.035168128967285155, 0.03520307159423828, 0.035135486602783206, 0.03523174285888672, 0.03543040084838867, 0.03538336181640625, 0.03585760116577148, 0.03567919921875, 0.035448833465576174, 0.03537625503540039, 0.035557662963867184, 0.03605155181884766, 0.03603046417236328, 0.03544268798828125, 0.03495935821533203, 0.03518873596191406, 0.03519452667236328, 0.0350456657409668, 0.035186080932617186, 0.03501071929931641, 0.034845184326171875, 0.03485232162475586, 0.03505337524414062, 0.03504969787597656, 0.035068416595458986, 0.03523583984375, 0.035340286254882815, 0.035178497314453126, 0.03494633483886719, 0.035153888702392576, 0.03544579315185547, 0.034985183715820316, 0.03500902557373047, 0.034824127197265624, 0.03495328140258789, 0.03529523086547852, 0.035222751617431644, 0.035017505645751956, 0.0353521614074707, 0.03550864028930664, 0.035411903381347656, 0.035468639373779295, 0.035252063751220704, 0.03545695877075195, 0.03536767959594726, 0.035317951202392575, 0.03521945571899414, 0.035366912841796876, 0.03509862518310547, 0.035370559692382814, 0.03511270523071289, 0.035269313812255856, 0.035244033813476565, 0.03530332946777344, 0.03541763305664063, 0.03543657684326172, 0.03535295867919922, 0.03525833511352539, 0.035493408203125, 0.03556143951416016, 0.03524371337890625, 0.035507198333740234, 0.035552833557128904, 0.03534646224975586, 0.03522396850585938, 0.035462814331054686, 0.03528515243530273, 0.035393726348876955, 0.03529305648803711, 0.03572079849243164, 0.03543049621582031, 0.03497872161865234, 0.03512700653076172, 0.035147136688232425, 0.035300064086914065, 0.03513568115234375, 0.03544063949584961, 0.03524723052978516, 0.03556966400146484, 0.03549008178710938, 0.035342304229736325, 0.03536460876464844, 0.035647361755371094, 0.03544927978515625, 0.037517887115478515, 0.03634175872802734, 0.035581344604492186, 0.035258975982666016, 0.03526860809326172, 
0.034961406707763674, 0.03526041412353516, 0.03557580947875977, 0.03513481521606445, 0.03504399871826172, 0.03511862564086914, 0.03738163375854492, 0.035347423553466796, 0.03481577682495117, 0.03507228851318359, 0.03523276901245117, 0.035019710540771486, 0.034928638458251955, 0.035176448822021485, 0.035020671844482425, 0.03517862319946289, 0.03503081512451172, 0.03519830322265625, 0.03500851058959961, 0.03513740921020508, 0.03491123199462891, 0.03507164764404297, 0.03501398468017578, 0.035195903778076174, 0.03504864120483398, 0.03499091339111328, 0.03484675216674805, 0.03501052856445312, 0.03473923110961914, 0.035424480438232424, 0.034688926696777346, 0.034855777740478516, 0.03498307037353516, 0.03554307174682617, 0.03484960174560547, 0.03492252731323242, 0.03504563140869141, 0.03529414367675781, 0.03532470321655273, 0.03540582275390625, 0.03511283111572266, 0.03545449447631836, 0.03516179275512695, 0.03580108642578125, 0.03530246353149414, 0.03512368011474609, 0.03522608184814453, 0.035362239837646484, 0.0353608627319336, 0.035455455780029295, 0.037754878997802735, 0.03544416046142578, 0.03561119842529297, 0.03556697463989258, 0.03553897476196289, 0.03534908676147461, 0.035692543029785154, 0.035743743896484374, 0.035514366149902346, 0.035211265563964846, 0.03536076736450195, 0.03538534545898438, 0.035437889099121093, 0.03529296112060547, 0.03506883239746094, 0.03508428955078125, 0.035358592987060546, 0.03525830459594727, 0.035415233612060545, 0.03538431930541992, 0.03539558410644531, 0.03529235076904297, 0.03534521484375, 0.035641056060791015, 0.035502368927001954, 0.03652403259277344, 0.03531980895996094, 0.035381248474121094, 0.03541356658935547, 0.03547321701049805, 0.03588774490356445, 0.035678207397460936, 0.035359905242919924, 0.03558281707763672, 0.03562905502319336, 0.03565356826782227, 0.03590969467163086, 0.0354856948852539, 0.03560771179199219, 0.035404640197753905, 0.035315711975097655, 0.03538739013671875, 0.035573760986328126, 0.0353177604675293, 0.03680188751220703, 0.03525904083251953, 0.035282943725585936, 0.03534988784790039, 0.035127777099609375, 0.03495116806030273, 0.03527695846557617, 0.035073631286621096, 0.035111328125, 0.035098079681396485, 0.03529372787475586, 0.03522326278686523, 0.035901439666748046, 0.035448833465576174, 0.035503456115722656, 0.03570959854125977, 0.035487743377685545, 0.03568230438232422, 0.03539756774902344, 0.03556358337402344, 0.03558127975463867, 0.03550598526000977, 0.03544150543212891, 0.03577446365356445, 0.035368896484375, 0.035665409088134765, 0.03552880096435547, 0.036657089233398436, 0.036350494384765626, 0.03583590316772461, 0.03545651245117187, 0.03557126235961914, 0.03525888061523438, 0.035279296875, 0.03560595321655274, 0.03533676910400391, 0.03541788864135742, 0.035408096313476564, 0.03523174285888672, 0.03961446380615234, 0.03623116683959961, 0.035545215606689454, 0.03554441452026367, 0.03561321640014648, 0.03530508804321289, 0.03550812911987305, 0.03502889633178711, 0.035184734344482424, 0.035339839935302736, 0.03524291229248047, 0.03522745513916015, 0.035415809631347654, 0.03528953552246094, 0.03554870223999024, 0.03529571151733398, 0.03540377426147461, 0.03560780715942383, 0.035576576232910155, 0.035514366149902346, 0.03565894317626953, 0.03621356964111328, 0.03949363327026367, 0.03598255920410156, 0.035535358428955076, 0.03554947280883789, 0.03567756652832031, 0.03527888107299805, 0.035431007385253906, 0.03556073760986328, 0.035310016632080075, 0.03554111862182617, 0.03577241516113281, 0.03537526321411133, 
0.035465217590332034, 0.035211265563964846, 0.03600588989257812, 0.03557558441162109, 0.03532207870483398, 0.035581310272216796, 0.03587926483154297, 0.036020511627197264, 0.035659774780273434, 0.035878398895263675, 0.03546572875976563, 0.03548364639282227, 0.03539328002929688, 0.035837535858154294, 0.03554780960083008, 0.035533824920654294, 0.03535769653320313, 0.0367342414855957, 0.03571779251098633, 0.03561648178100586, 0.03525462341308594, 0.03513753509521484, 0.03543222427368164, 0.03532128143310547, 0.03560335922241211, 0.036015998840332034, 0.03541936111450195, 0.03530217742919922, 0.035606529235839846, 0.035850238800048825, 0.03602447891235352, 0.03529507064819336, 0.035534656524658204, 0.03552614212036133, 0.035515071868896485, 0.03549958419799805, 0.03569440078735352, 0.03608633422851563, 0.03528857421875, 0.03567865753173828, 0.036149375915527346, 0.03532099151611328, 0.03523622512817383, 0.03555100631713867, 0.03516384124755859, 0.03520000076293945, 0.035274177551269534, 0.03517279815673828, 0.03505936050415039, 0.03513190460205078, 0.035437759399414064, 0.03598416137695312, 0.035536319732666015, 0.03524358367919922, 0.03592515182495117, 0.03534627151489258, 0.03541740798950195, 0.03546182250976562, 0.03561827087402344, 0.035230239868164065, 0.035443840026855467, 0.03533913421630859, 0.03535478210449219, 0.03536576080322266, 0.03552150344848633]",tokens/s,28.120108894249366,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 651, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 423, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 664, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Deci/DeciLM-7B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Deci/DeciLM-7B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch 
raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch 
report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 
'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7165.108224,11446.124544,0.0,11043.602432,10644.85888,s,1,15.7751533203125,15.7751533203125,0.0,15.7751533203125,15.7751533203125,15.7751533203125,15.7751533203125,[15.7751533203125],,kWh,0.00024206430987082586,2.669443745340421e-05,0.00010852758682200481,0.0003772863341462349,,MB,3060.89984,11464.998912,0.0,11047.796736,10193.631744,s,10,3.767375183105468,0.37673751831054686,0.0007211279943865331,0.37685900878906253,0.3774028839111328,0.3775787612915039,0.37771946319580074,"[0.37504647827148435, 0.37696453857421874, 0.3762663269042969, 0.37660128784179686, 0.3770697021484375, 0.37728143310546874, 0.37675347900390627, 0.37736380004882814, 0.37627349853515624, 0.377754638671875]",tokens/s,679.5181991642727,kWh,1.1012407687036822e-05,1.2142990624317782e-06,7.313740418888799e-06,1.95404471683574e-05,tokens/kWh,13101030.78984552,MB,3064.385536,11464.998912,0.0,11047.796736,10216.945152,s,10,31.02382958984375,3.102382958984375,0.005613867344000379,3.1036630859375,3.1080973876953126,3.1085290405273436,3.1088743627929687,"[3.102741943359375, 3.093324462890625, 3.103872314453125, 3.0974462890625, 3.10800146484375, 3.106240966796875, 3.092713134765625, 3.107074462890625, 3.103453857421875, 3.108960693359375]",tokens/s,20.306970748905954,kWh,7.523066486671181e-05,8.298192851494044e-06,4.991572203131186e-05,0.0001334445797495177,tokens/kWh,472106.0991630699,,s,630,31.021249328613287,0.04924007829938616,0.0008027221667489253,0.04911606407165527,0.04976451644897461,0.05023721599578858,0.052640840911865244,"[0.05024038314819336, 0.04905779266357422, 0.048584705352783204, 0.04890828704833984, 0.04904547119140625, 0.04886937713623047, 0.04877519989013672, 0.04880384063720703, 0.04896768188476563, 0.04900044631958008, 0.04938150405883789, 0.049030208587646486, 0.049476417541503906, 0.050016254425048826, 0.04987289428710937, 0.049554656982421875, 0.04975392150878906, 0.050340225219726566, 0.04948847961425781, 0.0495753288269043, 0.04948233413696289, 0.04878745651245117, 0.048791553497314455, 0.04864419174194336, 0.04878921508789062, 0.04889728164672852, 0.048765888214111326, 0.04887756729125976, 0.05234483337402344, 0.04929740905761719, 0.04937039947509766, 0.049175262451171875, 0.04952473449707031, 0.049246177673339844, 0.05009555053710937, 0.04971990585327148, 0.049512001037597654, 0.049303966522216795, 0.049231105804443356, 0.04979779052734375, 0.04951571273803711, 0.0493076171875, 0.04907926559448242, 0.04895539093017578, 0.04894425582885742, 0.04868185424804688, 0.048791553497314455, 0.04906732940673828, 0.0488897590637207, 0.04874934387207031, 0.04868710327148437, 0.049255809783935546, 0.04968902587890625, 0.04926268768310547, 0.04879801559448242, 0.04938310241699219, 0.04891247940063476, 0.0487014389038086, 0.04957782363891602, 0.048680702209472654, 
0.049118846893310544, 0.04926748657226562, 0.04879564666748047, 0.049883777618408204, 0.049315391540527345, 0.04951408004760742, 0.04905660629272461, 0.04916403198242188, 0.04918422317504883, 0.048971904754638675, 0.04901955032348633, 0.05396275329589844, 0.04940390396118164, 0.05386579132080078, 0.048767646789550784, 0.048672447204589846, 0.04938582229614258, 0.049240062713623044, 0.048901599884033205, 0.048974048614501955, 0.04921990585327148, 0.04903936004638672, 0.048844799041748044, 0.04919094467163086, 0.0492441291809082, 0.04924934387207031, 0.04945948791503906, 0.04889667129516601, 0.04871097564697266, 0.048833217620849606, 0.04887551879882813, 0.049040607452392575, 0.04862179183959961, 0.0484890251159668, 0.048541694641113284, 0.04860243225097656, 0.04854179382324219, 0.04856668853759766, 0.04830227279663086, 0.048464897155761716, 0.04867974472045898, 0.04851116943359375, 0.0487374382019043, 0.04891324615478516, 0.04849868774414062, 0.048613086700439456, 0.049067615509033206, 0.04874924850463867, 0.048575809478759766, 0.04894704055786133, 0.0488702392578125, 0.0490332145690918, 0.04926259231567383, 0.04937318420410156, 0.048965633392333986, 0.048801631927490235, 0.048543712615966794, 0.04864223861694336, 0.04841062545776367, 0.04854988861083984, 0.04860704040527344, 0.048660671234130856, 0.049608062744140625, 0.04972812652587891, 0.049313728332519534, 0.04935440063476562, 0.05031731033325195, 0.04991727828979492, 0.04918339157104492, 0.0488787841796875, 0.0487083854675293, 0.04862515258789062, 0.04865209579467773, 0.04852134323120117, 0.04856659317016602, 0.04872582244873047, 0.0516426887512207, 0.049987743377685544, 0.04913913726806641, 0.049467967987060546, 0.048905632019042966, 0.04859145736694336, 0.04847206497192383, 0.049036479949951174, 0.048501567840576174, 0.05274755096435547, 0.04948054504394531, 0.04923747253417969, 0.04924662399291992, 0.04898739242553711, 0.04867488098144531, 0.04876544189453125, 0.04929145431518555, 0.059176319122314455, 0.04935465621948242, 0.04902166366577149, 0.048904129028320316, 0.049883201599121095, 0.04915135955810547, 0.0490563850402832, 0.04899580764770508, 0.04883305740356445, 0.048852481842041016, 0.049637889862060545, 0.049154048919677736, 0.04901683044433594, 0.04976025772094726, 0.049168384552001954, 0.048721214294433594, 0.04869337463378906, 0.04854022216796875, 0.04859270477294922, 0.048447681427001954, 0.048332607269287106, 0.04883065414428711, 0.04851286315917969, 0.04845792007446289, 0.0487276496887207, 0.04977606582641601, 0.049540031433105466, 0.04920070266723633, 0.04924870300292969, 0.04895948791503906, 0.04876438522338867, 0.048605342864990235, 0.0488513298034668, 0.04877212905883789, 0.04913046264648437, 0.048683006286621096, 0.05006099319458008, 0.04924857711791992, 0.04892671966552734, 0.04877721786499024, 0.049317886352539066, 0.04899020767211914, 0.0493568000793457, 0.04933631896972656, 0.04990156936645508, 0.04955305480957031, 0.049146240234375, 0.04889596939086914, 0.04940521621704102, 0.04927139282226563, 0.049495521545410155, 0.049001087188720704, 0.048686241149902346, 0.04872211074829102, 0.04876972961425781, 0.04882947158813476, 0.04870652770996094, 0.04893036651611328, 0.049393985748291014, 0.04902659225463867, 0.049111038208007815, 0.04917049789428711, 0.04881856155395508, 0.04894940948486328, 0.04897788619995117, 0.04904758453369141, 0.04915971374511719, 0.04911718368530273, 0.049277408599853516, 0.05003868865966797, 0.05005526351928711, 0.04907350540161133, 0.04858492660522461, 0.04870598220825195, 
0.04879087829589844, 0.048758785247802736, 0.04888643264770508, 0.048922622680664066, 0.04985164642333984, 0.04883942413330078, 0.048945056915283204, 0.0497553596496582, 0.04885593414306641, 0.04909056091308594, 0.049476673126220706, 0.04926537704467773, 0.0489813117980957, 0.048933502197265624, 0.0486976318359375, 0.04904719924926758, 0.04938313674926758, 0.04918102264404297, 0.049324321746826175, 0.049186206817626955, 0.04919766235351562, 0.04936908721923828, 0.0496800651550293, 0.04966022491455078, 0.04927840042114258, 0.052379585266113284, 0.04981356811523437, 0.050337791442871094, 0.04917862319946289, 0.049219070434570314, 0.0497628173828125, 0.04880342483520508, 0.048761249542236325, 0.04895318222045898, 0.04866057586669922, 0.048662593841552734, 0.048486400604248046, 0.04905779266357422, 0.04879753494262695, 0.04909616088867187, 0.04935750579833984, 0.04918272018432617, 0.04955692672729492, 0.04900268936157227, 0.050382465362548826, 0.049172286987304685, 0.04869011306762695, 0.0486583366394043, 0.04847420883178711, 0.048793182373046876, 0.05048556900024414, 0.04910704040527344, 0.04887478256225586, 0.04907843017578125, 0.055575103759765626, 0.050028545379638675, 0.051509246826171876, 0.04925439834594727, 0.048914432525634766, 0.04897702407836914, 0.04931878280639648, 0.050496673583984374, 0.04901113510131836, 0.04887798309326172, 0.049161888122558596, 0.05083580780029297, 0.04923577499389648, 0.04952492904663086, 0.04863974380493164, 0.04887356948852539, 0.04893689727783203, 0.04909065628051758, 0.0487213134765625, 0.04879433441162109, 0.04881331253051758, 0.04905558395385742, 0.04884492874145508, 0.04866438293457031, 0.04927791976928711, 0.050233345031738284, 0.04866048049926758, 0.048684894561767576, 0.04861148834228515, 0.04870095825195313, 0.04856217575073242, 0.04921392059326172, 0.04873769760131836, 0.0491209602355957, 0.05044224166870117, 0.05360639953613281, 0.04993212890625, 0.04921155166625977, 0.0489881591796875, 0.04925798416137695, 0.04962128067016602, 0.049255905151367185, 0.04902169418334961, 0.049137664794921876, 0.052910079956054686, 0.04929724884033203, 0.049275039672851566, 0.04902092742919922, 0.04896112060546875, 0.049162113189697265, 0.04915980911254883, 0.04976323318481445, 0.04933631896972656, 0.05172633743286133, 0.0494202880859375, 0.04907417678833008, 0.0490588493347168, 0.04947353744506836, 0.04943743896484375, 0.049350879669189454, 0.04966195297241211, 0.049293312072753906, 0.04930559921264648, 0.04936240005493164, 0.04890671920776367, 0.048793567657470706, 0.04868310546875, 0.04856175994873047, 0.048679328918457034, 0.04864527893066406, 0.04862432098388672, 0.04847737503051758, 0.04878550338745117, 0.04856419372558594, 0.049228702545166016, 0.049288543701171875, 0.04894287872314453, 0.04888051223754883, 0.04926054382324219, 0.0492564468383789, 0.04921654510498047, 0.0495052490234375, 0.04974591827392578, 0.04914694213867187, 0.049197822570800784, 0.049104640960693356, 0.04878790283203125, 0.04927644729614258, 0.04914838409423828, 0.049079807281494144, 0.04885964965820312, 0.04860470581054688, 0.04879206466674805, 0.04880121612548828, 0.04905014419555664, 0.04890419387817383, 0.04867071914672851, 0.05045616149902344, 0.04965961456298828, 0.049380321502685544, 0.049358463287353514, 0.04940428924560547, 0.049014751434326174, 0.04897999954223633, 0.04872806549072266, 0.0487116813659668, 0.048745918273925784, 0.04884307098388672, 0.048629310607910155, 0.048398399353027345, 0.04834921646118164, 0.04848495864868164, 0.0486316146850586, 0.04885023880004883, 
0.049200000762939455, 0.04917452621459961, 0.04902844619750977, 0.04867139053344727, 0.048797695159912106, 0.04849868774414062, 0.048584320068359374, 0.04852159881591797, 0.04910847854614258, 0.04910476684570313, 0.04934873580932617, 0.04916044616699219, 0.04916016006469726, 0.04909904098510742, 0.049307647705078124, 0.0490618896484375, 0.05021897506713867, 0.049084449768066404, 0.048906238555908206, 0.04914790344238281, 0.048996353149414064, 0.04886528015136719, 0.04869254302978516, 0.0489315185546875, 0.04877926254272461, 0.04845977783203125, 0.04836761474609375, 0.04848166275024414, 0.04849676895141602, 0.04876339340209961, 0.04916979217529297, 0.04955609512329102, 0.04923392105102539, 0.04899785614013672, 0.04902556610107422, 0.050114559173583983, 0.04934656143188477, 0.049637374877929685, 0.04954662322998047, 0.04939772796630859, 0.049189537048339844, 0.049108894348144534, 0.04914339065551758, 0.04903923034667969, 0.05156099319458008, 0.04971699142456055, 0.050459743499755856, 0.04937308883666992, 0.049277374267578125, 0.04960678482055664, 0.04911967849731445, 0.04913151931762695, 0.0489117431640625, 0.049095104217529294, 0.049141216278076175, 0.04908224105834961, 0.05016169738769531, 0.04959494400024414, 0.05002880096435547, 0.04968447875976562, 0.04957388687133789, 0.049532928466796876, 0.050051071166992187, 0.04957523345947266, 0.04962783813476562, 0.04937843322753906, 0.04939209747314453, 0.04944527816772461, 0.05181644821166992, 0.04980275344848633, 0.04931020736694336, 0.04918067169189453, 0.049170433044433595, 0.04942006301879883, 0.049492191314697266, 0.04965372848510742, 0.04935641479492187, 0.04910745620727539, 0.04890537643432617, 0.04907855987548828, 0.04888825607299805, 0.04878134536743164, 0.04919500732421875, 0.049142879486083986, 0.048992767333984374, 0.04878976058959961, 0.048553600311279296, 0.04866457748413086, 0.048678462982177734, 0.04879849624633789, 0.04935084915161133, 0.048846847534179685, 0.04866252899169922, 0.04871372985839844, 0.049135616302490234, 0.049249599456787106, 0.04934931182861328, 0.04948976135253906, 0.04950147247314453, 0.04921433639526367, 0.04905331039428711, 0.04888614273071289, 0.04923324966430664, 0.049619617462158205, 0.04933539199829102, 0.04920739364624024, 0.049310527801513675, 0.04939756774902344, 0.049229183197021485, 0.050276065826416014, 0.05043024063110352, 0.05038819122314453, 0.04954329681396484, 0.04952105712890625, 0.04923622512817383, 0.04931484985351563, 0.0493199348449707, 0.0496258544921875, 0.04868438339233398, 0.05087731170654297, 0.049827136993408204, 0.04867961502075195, 0.04856422424316406, 0.04865228652954102, 0.04924399948120117, 0.049473697662353516, 0.04926668930053711, 0.048899326324462894, 0.04926950454711914, 0.04922777557373047, 0.049278209686279294, 0.048783199310302734, 0.04868495941162109, 0.048856128692626954, 0.04860886383056641, 0.048701793670654296, 0.048979839324951174, 0.04928438568115234, 0.04895209503173828, 0.04895126342773438, 0.04901855850219727, 0.04901315307617188, 0.04885830307006836, 0.049223743438720706, 0.04920703887939453, 0.04938444900512695, 0.04923187255859375, 0.04905574417114258, 0.04911494445800781, 0.049197086334228514, 0.04912470245361328, 0.04955424118041992, 0.04965785598754883, 0.0493834228515625, 0.049403873443603516, 0.04920556640625, 0.04933776092529297, 0.049250080108642576, 0.04927337646484375, 0.04934633636474609, 0.04943689727783203, 0.049446910858154294, 0.04906393432617188, 0.049797119140625, 0.04929536056518555, 0.049239967346191404, 0.049040512084960936, 
0.049189281463623044, 0.04902511978149414, 0.048989662170410155, 0.049337345123291014, 0.04907212829589844, 0.05012688064575195, 0.04905923080444336, 0.048947681427001954, 0.04867289733886719, 0.05107712173461914, 0.04885504150390625, 0.04936492919921875, 0.049159934997558594, 0.04962518310546875, 0.04907030487060547, 0.04877238464355469, 0.049232608795166014, 0.04947763061523437, 0.049167808532714845, 0.0488289909362793, 0.04887756729125976, 0.04877449417114258, 0.048707775115966793, 0.049037792205810546, 0.04940185546875, 0.04909465789794922, 0.04915526580810547, 0.049279743194580075, 0.049027137756347657, 0.04953664016723633, 0.04960908889770508, 0.04953011322021485, 0.049566017150878904, 0.04896137619018555, 0.04945161437988281, 0.04911494445800781, 0.049672382354736325, 0.05024563217163086, 0.0494219856262207, 0.049025089263916015, 0.049000736236572265, 0.04926464080810547, 0.05017184066772461, 0.04940982437133789, 0.04916662216186524, 0.04910076904296875, 0.04924419021606445, 0.049459201812744144, 0.049430526733398435, 0.049860607147216796, 0.04900044631958008, 0.049269920349121095, 0.049382240295410156, 0.04905574417114258, 0.04906198501586914, 0.04964956665039062, 0.04956470489501953, 0.05219631958007812, 0.04972544097900391, 0.05018009567260742, 0.04894425582885742, 0.04874911880493164, 0.04874886322021484, 0.04891638565063477, 0.049082176208496094, 0.04928745651245117, 0.04905574417114258, 0.049805313110351565]",tokens/s,20.30865982624699,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 6.12 MiB is free. Process 113462 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 241.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 651, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 423, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 664, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Qwen/Qwen-14B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-14B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 651, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 423, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 664, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Qwen/Qwen-72B contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Qwen/Qwen-72B. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4088, in from_pretrained hf_quantizer.postprocess_model(model) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", line 195, in postprocess_model return self._process_model_after_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 107, in _process_model_after_weight_loading model = post_init_awq_exllama_modules(model, self.quantization_config.exllama_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 460, in post_init_awq_exllama_modules model = exllamav2_post_init( File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 193, in exllamav2_post_init submodule.post_init(scratch_space=model.scratch_spaces[device]) 
File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/exllamav2.py"", line 78, in post_init self.q_handle = exlv2_ext.make_q_matrix( RuntimeError: q_weight and gptq_scales have incompatible shapes " 4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1558.757376,1861.156864,0.0,1465.909248,1358.169088,s,1,8.9897099609375,8.9897099609375,0.0,8.9897099609375,8.9897099609375,8.9897099609375,8.9897099609375,[8.9897099609375],,kWh,5.637756384169279e-05,6.211118228914528e-06,2.0644460960006494e-05,8.323314303061381e-05,,MB,1643.012096,1882.128384,0.0,1472.200704,1356.544512,s,10,0.4452473640441894,0.04452473640441894,0.000230934573435603,0.044446897506713864,0.04470375595092774,0.04491909484863281,0.04509136596679687,"[0.04513443374633789, 0.04442534255981445, 0.044324703216552734, 0.044585472106933595, 0.04443088150024414, 0.04446291351318359, 0.04465590286254883, 0.04453984069824219, 0.04426470565795899, 0.044423168182373046]",tokens/s,5749.612926952506,kWh,1.3550279394869336e-06,1.4942703200066107e-07,8.961992766296999e-07,2.4006542481172944e-06,tokens/kWh,106637596.89708221,MB,1651.46624,1882.128384,0.0,1472.200704,1409.94816,s,10,11.271697265625,1.1271697265624998,0.018579239538430867,1.1324287109375,1.15225517578125,1.153508386230469,1.1545109545898438,"[1.1547615966796876, 1.111167724609375, 1.1320802001953125, 1.1327772216796874, 1.1519766845703125, 1.1377652587890625, 1.1368804931640626, 1.1102855224609376, 1.1004166259765624, 1.1035859375]",tokens/s,55.89220373415232,kWh,3.237725702801701e-05,3.5706078520038297e-06,1.5798369634570826e-05,5.174623451459166e-05,tokens/kWh,1217479.8918409212,,s,630,11.268769048690773,0.017886934997921898,0.0004886429251878175,0.017771887779235843,0.01840783042907715,0.01857043237686157,0.018941482048034673,"[0.018433664321899416, 0.018131328582763673, 0.018302112579345702, 0.01831942367553711, 0.018147775650024414, 0.018092384338378908, 0.018077247619628905, 0.0179238395690918, 0.0179168643951416, 0.017909503936767577, 0.018465791702270508, 0.018035711288452147, 0.01802239990234375, 0.01822719955444336, 0.018233343124389647, 0.018263200759887695, 0.018498271942138673, 0.01828860855102539, 0.01826812744140625, 0.018292928695678713, 0.018290687561035156, 0.018284543991088868, 0.018455808639526367, 0.018359039306640627, 0.01818204879760742, 0.018145376205444336, 0.01825382423400879, 0.018358272552490236, 0.018330848693847657, 0.018278911590576173, 0.01834832000732422, 0.018462656021118164, 0.01850783920288086, 0.018591583251953123, 0.01856492805480957, 0.018301279067993163, 0.018294271469116212, 0.01831065559387207, 0.018435007095336915, 0.01837868881225586, 0.018342016220092773, 0.018309120178222657, 0.01821286392211914, 0.01837808036804199, 0.018365087509155272, 0.018550783157348632, 0.018481151580810547, 0.018245311737060548, 0.018372928619384766, 0.018571296691894532, 0.01852207946777344, 
0.018329599380493163, 0.01840742492675781, 0.018327552795410155, 0.01836841583251953, 0.0184116153717041, 0.020241504669189454, 0.018825759887695314, 0.01843849563598633, 0.017876031875610352, 0.017907743453979493, 0.017949567794799805, 0.018101472854614258, 0.019706239700317384, 0.017653087615966796, 0.01745167922973633, 0.017554719924926757, 0.017468128204345703, 0.017345792770385744, 0.017400575637817384, 0.017555456161499023, 0.01745510482788086, 0.017375232696533204, 0.017441856384277345, 0.017454015731811524, 0.01746124839782715, 0.01737094306945801, 0.017497695922851563, 0.018227807998657225, 0.01897881507873535, 0.01772297668457031, 0.01759859275817871, 0.0174881591796875, 0.01740185546875, 0.017373184204101562, 0.017508352279663086, 0.017590272903442384, 0.017483232498168945, 0.017393760681152344, 0.017585599899291992, 0.017953792572021485, 0.017864479064941406, 0.01753264045715332, 0.01751705551147461, 0.0174400634765625, 0.01744879913330078, 0.01737603187561035, 0.017469375610351563, 0.017360671997070313, 0.017690975189208983, 0.01762099266052246, 0.017502208709716797, 0.017467391967773437, 0.017485824584960938, 0.017831199645996092, 0.01754185676574707, 0.017559551239013673, 0.0175119686126709, 0.017581632614135742, 0.017893312454223632, 0.01783875274658203, 0.017813823699951173, 0.01775152015686035, 0.017729536056518554, 0.01768502426147461, 0.017740896224975586, 0.01778361511230469, 0.017735679626464843, 0.01764156723022461, 0.01778838348388672, 0.01762505531311035, 0.01751206398010254, 0.017428735733032226, 0.017519296646118163, 0.017484832763671875, 0.017649951934814452, 0.017761983871459962, 0.017817375183105468, 0.017662496566772462, 0.01787215995788574, 0.017810144424438477, 0.0175914249420166, 0.017557952880859377, 0.01784876823425293, 0.017641471862792968, 0.017542623519897462, 0.01753273582458496, 0.017749984741210936, 0.0175882568359375, 0.017563648223876953, 0.017638111114501955, 0.017489919662475584, 0.0174202880859375, 0.017505407333374023, 0.017604768753051756, 0.017684415817260744, 0.017590848922729493, 0.017475807189941406, 0.017456703186035157, 0.0175927677154541, 0.017880640029907226, 0.01823583984375, 0.0183474235534668, 0.018221664428710937, 0.018705440521240235, 0.018252351760864257, 0.0183525447845459, 0.018416704177856444, 0.018613183975219726, 0.01829203224182129, 0.018320064544677734, 0.018134912490844726, 0.01815283203125, 0.01830988883972168, 0.018139135360717772, 0.01807535934448242, 0.018112127304077148, 0.018117279052734376, 0.01822105598449707, 0.018089984893798827, 0.01801420783996582, 0.01798921585083008, 0.01802684783935547, 0.018038848876953124, 0.018018207550048827, 0.017981536865234377, 0.018114303588867186, 0.018036991119384765, 0.017897056579589843, 0.017856927871704103, 0.01793769645690918, 0.017805023193359373, 0.01789187240600586, 0.018076128005981445, 0.018232351303100587, 0.018244192123413085, 0.01835865592956543, 0.018327232360839843, 0.018790719985961914, 0.018585887908935547, 0.01828233528137207, 0.018249887466430664, 0.01824358367919922, 0.01825382423400879, 0.018388736724853517, 0.018350175857543945, 0.018569375991821287, 0.01884297561645508, 0.01866819190979004, 0.018275903701782226, 0.01829478454589844, 0.01826041603088379, 0.018229248046875, 0.018318944931030274, 0.01838688087463379, 0.018430431365966796, 0.018650848388671874, 0.018850080490112303, 0.01822105598449707, 0.018206720352172853, 0.018203840255737305, 0.017910591125488283, 0.017886655807495117, 0.017885759353637697, 0.01776416015625, 0.017709247589111327, 
0.017762304306030274, 0.0177458553314209, 0.017657920837402342, 0.01765171241760254, 0.01781350326538086, 0.0181712646484375, 0.018135679244995116, 0.017913856506347657, 0.01782921600341797, 0.017889951705932616, 0.01773695945739746, 0.018146047592163084, 0.017671743392944337, 0.01846259117126465, 0.017652288436889648, 0.018015520095825195, 0.017611200332641602, 0.01765318489074707, 0.017382240295410155, 0.017532928466796875, 0.017483135223388672, 0.01807833671569824, 0.017571647644042968, 0.01748601531982422, 0.017460224151611328, 0.017521663665771483, 0.017452768325805664, 0.017518367767333985, 0.018015743255615235, 0.017642080307006838, 0.017667680740356444, 0.017762144088745116, 0.01751139259338379, 0.017573888778686524, 0.017639039993286133, 0.017713535308837892, 0.01848646354675293, 0.018223743438720703, 0.021685823440551758, 0.018537088394165038, 0.018241535186767577, 0.018126848220825196, 0.0182061767578125, 0.01810259246826172, 0.018261856079101562, 0.01879599952697754, 0.018278656005859376, 0.018562911987304687, 0.01877689552307129, 0.018374176025390626, 0.018432384490966798, 0.018344032287597657, 0.018319360733032225, 0.01840947151184082, 0.018406463623046876, 0.01825267219543457, 0.018075712203979494, 0.018110368728637697, 0.018276447296142577, 0.01839321517944336, 0.01823321533203125, 0.0183306884765625, 0.018526880264282227, 0.018126848220825196, 0.018122495651245116, 0.017789695739746095, 0.017766176223754884, 0.017709056854248048, 0.017751712799072266, 0.017727296829223634, 0.01787673568725586, 0.01772991943359375, 0.017666463851928712, 0.017655487060546874, 0.01764793586730957, 0.017977344512939454, 0.01820979118347168, 0.018195327758789064, 0.01833718490600586, 0.018291423797607422, 0.018280448913574218, 0.01831888008117676, 0.018177663803100586, 0.018223968505859375, 0.018305023193359374, 0.01819647979736328, 0.01840742492675781, 0.01832979202270508, 0.01820038414001465, 0.018339839935302735, 0.018751520156860352, 0.018386911392211915, 0.019165184020996092, 0.01835612869262695, 0.018217056274414063, 0.018187295913696288, 0.018202688217163084, 0.018258848190307618, 0.01803468894958496, 0.018173824310302733, 0.018450559616088866, 0.017936384201049805, 0.01781724739074707, 0.017903968811035155, 0.018753536224365236, 0.017752063751220702, 0.017750015258789064, 0.01763705635070801, 0.01749839973449707, 0.017688608169555663, 0.01768841552734375, 0.017879199981689454, 0.017739776611328126, 0.01779427146911621, 0.017916032791137695, 0.01768649673461914, 0.01768684768676758, 0.017669952392578125, 0.017506879806518556, 0.017498111724853514, 0.017479232788085938, 0.017520511627197265, 0.017436256408691408, 0.017578975677490234, 0.017426431655883787, 0.01763532829284668, 0.017524736404418945, 0.017661951065063478, 0.017672191619873046, 0.0177475528717041, 0.01771356773376465, 0.01756159973144531, 0.01764352035522461, 0.01798963165283203, 0.01803059196472168, 0.01809542465209961, 0.01816032028198242, 0.018155519485473632, 0.018025632858276366, 0.018137727737426758, 0.018221376419067382, 0.018190143585205078, 0.018151519775390625, 0.018310495376586914, 0.018437856674194335, 0.01814214324951172, 0.022026239395141603, 0.01843564796447754, 0.01823711967468262, 0.018156288146972656, 0.018277408599853516, 0.01849350357055664, 0.018315296173095703, 0.018279296875, 0.018765888214111327, 0.018593727111816408, 0.01884364891052246, 0.018498752593994142, 0.018327455520629882, 0.01840764808654785, 0.018301376342773436, 0.018281728744506835, 0.018490560531616212, 0.01826259231567383, 
0.01831484794616699, 0.01823299217224121, 0.018136064529418947, 0.018075647354125975, 0.018150720596313476, 0.018121408462524413, 0.018382848739624022, 0.01849750328063965, 0.018589727401733397, 0.018357696533203124, 0.0184202880859375, 0.018241535186767577, 0.018124799728393554, 0.017897472381591797, 0.01786675262451172, 0.01783955192565918, 0.017818208694458007, 0.017838048934936523, 0.017804351806640625, 0.017725439071655275, 0.017660863876342775, 0.017958560943603517, 0.017626880645751953, 0.017711711883544923, 0.0176312313079834, 0.01772537612915039, 0.017649280548095704, 0.017521087646484374, 0.01800115203857422, 0.01811315155029297, 0.018142623901367186, 0.01861222457885742, 0.018684288024902344, 0.01820483207702637, 0.018243776321411134, 0.018142847061157225, 0.01812905693054199, 0.018156991958618165, 0.018180896759033203, 0.018077823638916017, 0.01808572769165039, 0.01799171257019043, 0.01803004837036133, 0.018307615280151367, 0.018182144165039063, 0.01840742492675781, 0.018261119842529296, 0.018074495315551758, 0.01815705680847168, 0.01811916732788086, 0.017901439666748047, 0.01779520034790039, 0.017795072555541993, 0.017838079452514647, 0.017706207275390625, 0.01803651237487793, 0.017799392700195312, 0.017890079498291016, 0.017682207107543944, 0.017682655334472656, 0.017532352447509766, 0.01745542335510254, 0.017581056594848633, 0.017377439498901366, 0.017377056121826173, 0.017406719207763672, 0.017283071517944337, 0.017319936752319336, 0.01791542434692383, 0.018665952682495116, 0.017520639419555666, 0.0174653434753418, 0.017661951065063478, 0.017550559997558595, 0.0174354248046875, 0.017764352798461915, 0.017429920196533204, 0.017422943115234374, 0.017506303787231444, 0.017496063232421876, 0.017591999053955077, 0.01748204803466797, 0.01744428825378418, 0.017465919494628907, 0.017569215774536132, 0.01744748878479004, 0.01746329689025879, 0.01860745620727539, 0.017494688034057616, 0.017421375274658202, 0.01743929672241211, 0.01742448043823242, 0.017359136581420898, 0.01740563201904297, 0.017487232208251952, 0.017674367904663087, 0.017715423583984376, 0.0176595516204834, 0.01822153663635254, 0.018229408264160155, 0.017934431076049806, 0.017761695861816407, 0.017701696395874024, 0.017780479431152345, 0.017764608383178712, 0.017794111251831054, 0.017777599334716798, 0.01789129638671875, 0.017690656661987304, 0.017732767105102538, 0.018058080673217774, 0.01761894416809082, 0.017561151504516603, 0.017580352783203124, 0.01741632080078125, 0.01755267143249512, 0.017545951843261718, 0.01760870361328125, 0.01764659118652344, 0.017443296432495117, 0.017406496047973632, 0.017514495849609374, 0.017573888778686524, 0.01746086311340332, 0.017567743301391603, 0.017529056549072265, 0.017409824371337892, 0.017345535278320313, 0.01741721534729004, 0.017330175399780275, 0.017441823959350587, 0.017777631759643555, 0.01781488037109375, 0.017603231430053712, 0.017489599227905273, 0.017426624298095703, 0.017418367385864258, 0.01738751983642578, 0.01740390396118164, 0.017518592834472657, 0.01762918472290039, 0.017333343505859376, 0.017375808715820312, 0.01739401626586914, 0.017327360153198242, 0.01739379119873047, 0.017336959838867187, 0.017331743240356447, 0.017359071731567383, 0.01734681510925293, 0.017354751586914064, 0.01737913513183594, 0.017451200485229492, 0.018071296691894532, 0.01765350341796875, 0.017499776840209962, 0.017516576766967773, 0.017371295928955078, 0.01744054412841797, 0.017371583938598632, 0.017609184265136718, 0.01739151954650879, 0.017494112014770507, 0.017469440460205078, 
0.01737932777404785, 0.017372928619384765, 0.0175229434967041, 0.017482847213745118, 0.017582815170288087, 0.017360639572143555, 0.017449407577514647, 0.017494016647338868, 0.01742959976196289, 0.0175645751953125, 0.01741619110107422, 0.017369087219238282, 0.01743212890625, 0.017510015487670897, 0.017482559204101564, 0.017479167938232423, 0.01738598442077637, 0.017451007843017577, 0.017464895248413086, 0.01745052719116211, 0.017403871536254882, 0.017465599060058595, 0.017448671340942384, 0.01761484718322754, 0.017519968032836914, 0.017423007965087892, 0.017364320755004884, 0.017498687744140626, 0.017373311996459962, 0.017350528717041017, 0.017426528930664063, 0.01737932777404785, 0.017340063095092773, 0.017350400924682617, 0.017375839233398437, 0.017391263961791994, 0.017305696487426758, 0.017253984451293947, 0.017402240753173828, 0.017335840225219726, 0.01732275199890137, 0.01739571189880371, 0.01741004753112793, 0.017391616821289063, 0.017393760681152344, 0.01742019271850586, 0.017422143936157226, 0.017413951873779296, 0.017375616073608397, 0.017530879974365234, 0.01754521560668945, 0.02065577507019043, 0.01744316864013672, 0.017487871170043946, 0.017467391967773437, 0.017451007843017577, 0.017356800079345702, 0.017384672164916994, 0.01740060806274414, 0.01741414451599121, 0.017383424758911133, 0.017395679473876952, 0.017338399887084962, 0.017364992141723632, 0.017348159790039064, 0.017337791442871092, 0.01737126350402832, 0.017355648040771485, 0.017284223556518555, 0.017331071853637695, 0.017358400344848632, 0.01739107131958008, 0.017340639114379882, 0.01740444755554199, 0.018694368362426758, 0.018763776779174804, 0.017467391967773437, 0.017411487579345703, 0.017504863739013672, 0.017491455078125, 0.01807756805419922, 0.01774790382385254, 0.017465631484985353, 0.017426271438598633, 0.017433151245117188, 0.017474624633789064]",tokens/s,55.90672745868311,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1572.00384,1861.156864,0.0,1465.909248,1358.169088,s,1,8.7545458984375,8.7545458984375,0.0,8.7545458984375,8.7545458984375,8.7545458984375,8.7545458984375,[8.7545458984375],,kWh,5.511534694168555e-05,6.072308957361274e-06,1.9198904248007853e-05,8.038656014705468e-05,,MB,1672.114176,1882.128384,0.0,1472.200704,1356.544512,s,10,0.4443775062561035,0.04443775062561035,0.0001839494016502686,0.04437872123718262,0.044650616455078125,0.044781340026855465,0.04488591888427734,"[0.04491206359863281, 0.04436921691894531, 0.044349407196044924, 0.044292064666748045, 0.04443267059326172, 0.044427104949951175, 0.044268577575683594, 0.04431660842895508, 0.04462156677246094, 
0.04438822555541992]",tokens/s,5760.8676495984055,kWh,1.3502476793010703e-06,1.4890786084304384e-07,8.933430085805946e-07,2.3924985487247085e-06,tokens/kWh,107001109.83827247,MB,1680.297984,1882.128384,0.0,1472.200704,1409.94816,s,10,11.17914501953125,1.117914501953125,0.003509245620187121,1.1180933227539063,1.122057958984375,1.1229037963867188,1.1235804663085938,"[1.119744873046875, 1.1185635986328124, 1.1198131103515625, 1.1237496337890625, 1.1218699951171875, 1.111178955078125, 1.117623046875, 1.11444482421875, 1.1170386962890626, 1.1151182861328126]",tokens/s,56.354935811219704,kWh,3.26895830231972e-05,3.604478886908341e-06,1.578705922462118e-05,5.2081121134726705e-05,tokens/kWh,1209651.3789906262,,s,630,11.176699878692624,0.017740793458242265,0.00029612535460284,0.017657711982727052,0.0180024995803833,0.01817117118835449,0.019014234676361097,"[0.017864799499511717, 0.017692703247070313, 0.01762735939025879, 0.01767331123352051, 0.01756972885131836, 0.017605600357055665, 0.017727487564086913, 0.017770496368408203, 0.017764352798461915, 0.018087743759155273, 0.017756351470947264, 0.01768387222290039, 0.017666431427001954, 0.017735391616821288, 0.017735551834106446, 0.017759199142456054, 0.01789507293701172, 0.017671648025512694, 0.017693023681640625, 0.017569055557250978, 0.01766022491455078, 0.017584672927856447, 0.017732736587524413, 0.01768134307861328, 0.01761075210571289, 0.01756972885131836, 0.0176265926361084, 0.018002336502075195, 0.01767647933959961, 0.017547264099121093, 0.017704063415527344, 0.017658239364624025, 0.017627647399902344, 0.017588031768798827, 0.017600191116333007, 0.019212799072265627, 0.01799782371520996, 0.018074911117553712, 0.017809343338012696, 0.017707807540893555, 0.017703935623168944, 0.0176363525390625, 0.017624063491821287, 0.01773465538024902, 0.01764556884765625, 0.017625087738037108, 0.017657312393188476, 0.017555999755859374, 0.017524736404418945, 0.017620096206665038, 0.017707647323608397, 0.01756755256652832, 0.017543615341186522, 0.017555360794067384, 0.017578079223632814, 0.017584127426147463, 0.01762099266052246, 0.01760870361328125, 0.017638879776000975, 0.017523231506347655, 0.020685983657836915, 0.018528480529785157, 0.017826112747192382, 0.01776076889038086, 0.017960960388183594, 0.017564960479736328, 0.017708799362182618, 0.01763206481933594, 0.017829919815063478, 0.01798124885559082, 0.018213184356689453, 0.017772544860839845, 0.017496063232421876, 0.01770086479187012, 0.019812255859375, 0.017743967056274415, 0.01754521560668945, 0.017585344314575195, 0.017621824264526367, 0.017580032348632812, 0.017765888214111326, 0.017696704864501953, 0.017656288146972655, 0.01758415985107422, 0.017571903228759764, 0.017542943954467774, 0.017635488510131837, 0.0175534725189209, 0.017497695922851563, 0.017662303924560547, 0.017550432205200195, 0.01765456008911133, 0.017665632247924806, 0.017691232681274413, 0.0175861759185791, 0.01759846305847168, 0.017547264099121093, 0.017766016006469727, 0.017604991912841796, 0.017635072708129883, 0.01759187126159668, 0.017598207473754884, 0.017524991989135742, 0.017702720642089845, 0.017609600067138673, 0.01765376091003418, 0.01780940818786621, 0.017625087738037108, 0.017719039916992186, 0.017689952850341795, 0.017685407638549804, 0.017700544357299806, 0.01779715156555176, 0.01775644874572754, 0.017759424209594726, 0.01777542304992676, 0.017739072799682617, 0.017791296005249025, 0.01792243194580078, 0.01816166305541992, 0.01804204750061035, 0.017916736602783204, 0.01788217544555664, 0.01785331153869629, 
0.01787091255187988, 0.018142784118652344, 0.018132991790771484, 0.01786444854736328, 0.01789673614501953, 0.017529024124145507, 0.017726240158081056, 0.017610240936279296, 0.017617408752441405, 0.017882335662841798, 0.01777129554748535, 0.017719295501708983, 0.017738912582397463, 0.01775292778015137, 0.017672191619873046, 0.01762054443359375, 0.017767936706542968, 0.017610719680786133, 0.017570751190185547, 0.017550975799560546, 0.0176376953125, 0.017705055236816408, 0.017719295501708983, 0.01752182388305664, 0.017559776306152342, 0.017588863372802733, 0.017612800598144532, 0.017969152450561524, 0.017917951583862304, 0.017778688430786133, 0.017795072555541993, 0.01767011260986328, 0.018067487716674803, 0.01765376091003418, 0.01761075210571289, 0.01845248031616211, 0.017936384201049805, 0.017609952926635742, 0.017720096588134764, 0.01785856056213379, 0.01780735969543457, 0.017780736923217775, 0.017870847702026366, 0.01780940818786621, 0.017827840805053712, 0.017803007125854493, 0.017862655639648437, 0.017858816146850587, 0.018030527114868165, 0.01793440055847168, 0.017950368881225587, 0.017889631271362304, 0.01778483200073242, 0.017675968170166017, 0.017754432678222656, 0.017905664443969727, 0.017966943740844725, 0.017690784454345704, 0.017731584548950196, 0.017762304306030274, 0.01778998374938965, 0.01765216064453125, 0.01776652717590332, 0.01762544059753418, 0.017592384338378907, 0.01809984016418457, 0.017715103149414064, 0.0177139835357666, 0.01764556884765625, 0.017630815505981445, 0.017637792587280272, 0.017663999557495116, 0.0175914249420166, 0.017693567276000976, 0.017924095153808595, 0.01796505546569824, 0.017903615951538086, 0.01804083251953125, 0.01802239990234375, 0.017931936264038086, 0.018085407257080077, 0.018436576843261717, 0.017955167770385742, 0.018520063400268554, 0.017944576263427735, 0.017878656387329103, 0.018010112762451173, 0.017951072692871092, 0.01804086494445801, 0.018077375411987305, 0.01790598487854004, 0.01788857650756836, 0.017867456436157225, 0.01764556884765625, 0.0175631046295166, 0.017591903686523438, 0.01755232048034668, 0.017656831741333007, 0.017802240371704102, 0.017817535400390626, 0.01774188804626465, 0.017612800598144532, 0.017679424285888673, 0.017585088729858398, 0.017526784896850587, 0.017725343704223632, 0.01757939147949219, 0.017593055725097655, 0.017563648223876953, 0.01762099266052246, 0.01764339256286621, 0.017725568771362304, 0.01762918472290039, 0.01761075210571289, 0.017582080841064454, 0.0176200008392334, 0.01784726333618164, 0.01790924835205078, 0.01785251235961914, 0.017886848449707032, 0.01790208053588867, 0.018223392486572267, 0.018255872726440428, 0.01807360076904297, 0.018043903350830077, 0.018037567138671873, 0.018145151138305664, 0.017888927459716798, 0.018495487213134765, 0.018255872726440428, 0.01823468780517578, 0.01816374397277832, 0.017996448516845703, 0.017889280319213868, 0.018147327423095702, 0.017835424423217772, 0.01776291275024414, 0.017864255905151366, 0.017883167266845704, 0.01790403175354004, 0.017874496459960938, 0.018106559753417968, 0.017873151779174805, 0.01803264045715332, 0.01763942337036133, 0.01761894416809082, 0.01763942337036133, 0.017710527420043944, 0.017685056686401367, 0.017625087738037108, 0.017655807495117186, 0.017682207107543944, 0.017637407302856446, 0.017701055526733397, 0.017643232345581055, 0.017583776473999023, 0.01772127914428711, 0.017600992202758788, 0.017665599822998045, 0.017828479766845703, 0.01799580764770508, 0.01789254379272461, 0.017879871368408202, 0.01864089584350586, 
0.017898719787597658, 0.017756959915161134, 0.017712480545043947, 0.017676959991455077, 0.0177675838470459, 0.01750921630859375, 0.017657087326049804, 0.017627904891967774, 0.01761484718322754, 0.017669824600219725, 0.01764588737487793, 0.017582080841064454, 0.01802239990234375, 0.017716672897338866, 0.01776083183288574, 0.01840127944946289, 0.017709056854248048, 0.017563648223876953, 0.01769385528564453, 0.017718048095703126, 0.01763539123535156, 0.017718847274780274, 0.017626752853393556, 0.01765193557739258, 0.017635936737060546, 0.01767011260986328, 0.01763248062133789, 0.01763532829284668, 0.0176978874206543, 0.0175664005279541, 0.017688800811767578, 0.017628992080688476, 0.017567935943603515, 0.017504255294799806, 0.01754857635498047, 0.017586624145507813, 0.017481056213378907, 0.017761215209960938, 0.01753900718688965, 0.017598527908325196, 0.017564767837524413, 0.01758812713623047, 0.01756208038330078, 0.017535167694091795, 0.0175097599029541, 0.017572832107543946, 0.01755683135986328, 0.017558176040649413, 0.017508352279663086, 0.017501344680786134, 0.01747235107421875, 0.017522335052490234, 0.017455295562744142, 0.0175699520111084, 0.017571647644042968, 0.017598400115966795, 0.017530975341796876, 0.017590112686157226, 0.01754319953918457, 0.01757375907897949, 0.017521055221557617, 0.01760771179199219, 0.017504287719726563, 0.0175482234954834, 0.01763865661621094, 0.017676319122314453, 0.017803167343139647, 0.017767135620117187, 0.017647712707519532, 0.01760256004333496, 0.017747968673706056, 0.01787059211730957, 0.01755366325378418, 0.017573888778686524, 0.0175797119140625, 0.017548736572265626, 0.01749900817871094, 0.017528543472290038, 0.017613088607788086, 0.0175861759185791, 0.017557407379150392, 0.01759651184082031, 0.017528831481933595, 0.01762643241882324, 0.01763603210449219, 0.020391040802001954, 0.017656448364257813, 0.017705215454101562, 0.017596351623535157, 0.017549375534057617, 0.017657632827758788, 0.017557727813720704, 0.01763532829284668, 0.017498111724853514, 0.017604608535766602, 0.01753628730773926, 0.01769913673400879, 0.017588191986083985, 0.017680831909179687, 0.01800396728515625, 0.01826812744140625, 0.017889312744140625, 0.01802595138549805, 0.01790822410583496, 0.01778819274902344, 0.01787980842590332, 0.017821407318115233, 0.018286880493164064, 0.017895360946655274, 0.01787091255187988, 0.017726783752441407, 0.017746623992919923, 0.01776950454711914, 0.017630176544189455, 0.017645248413085936, 0.01762131118774414, 0.0176363525390625, 0.017622016906738282, 0.017663423538208007, 0.017558080673217773, 0.017715200424194336, 0.018067455291748045, 0.017674240112304687, 0.017625087738037108, 0.01764512062072754, 0.017682367324829102, 0.017526880264282226, 0.017524703979492188, 0.017529279708862304, 0.01785158348083496, 0.017531423568725585, 0.0177476806640625, 0.017543743133544922, 0.017510400772094727, 0.017528831481933595, 0.01762918472290039, 0.017669792175292968, 0.017529184341430665, 0.017542240142822265, 0.017561567306518554, 0.017578943252563477, 0.017524736404418945, 0.017475584030151366, 0.017539039611816406, 0.017755552291870116, 0.017561855316162108, 0.01760908889770508, 0.01925324821472168, 0.019099647521972657, 0.017713151931762695, 0.017677759170532225, 0.017588768005371094, 0.01766099166870117, 0.017606143951416017, 0.017539583206176757, 0.01762918472290039, 0.017520639419555666, 0.017491647720336914, 0.01746361541748047, 0.017483776092529296, 0.01753433609008789, 0.017604991912841796, 0.017488128662109376, 0.017508352279663086, 
0.017589887619018554, 0.01880512046813965, 0.01765135955810547, 0.017561952590942384, 0.018274303436279296, 0.017886367797851563, 0.017492128372192384, 0.017535680770874022, 0.01756159973144531, 0.017537023544311522, 0.017528160095214844, 0.01775027275085449, 0.017830303192138672, 0.017713151931762695, 0.01784182357788086, 0.017694623947143554, 0.018373056411743163, 0.017571807861328125, 0.01756572723388672, 0.01762214469909668, 0.01749286460876465, 0.017952768325805665, 0.017530143737792967, 0.017799840927124024, 0.017604671478271484, 0.017645408630371093, 0.017627296447753907, 0.01757980728149414, 0.01751203155517578, 0.017582176208496093, 0.017473888397216798, 0.01771334457397461, 0.01758361625671387, 0.01754368019104004, 0.018017440795898437, 0.017774784088134765, 0.01753763198852539, 0.017631296157836915, 0.017649663925170898, 0.0175897274017334, 0.01757814407348633, 0.01757369613647461, 0.017582656860351563, 0.01769385528564453, 0.01769558334350586, 0.017661760330200196, 0.017811647415161135, 0.017767999649047853, 0.017793472290039063, 0.017915903091430666, 0.017991296768188475, 0.01801625633239746, 0.018569536209106445, 0.01821014404296875, 0.017996448516845703, 0.01799577522277832, 0.01804697608947754, 0.017958688735961913, 0.01786617660522461, 0.017807647705078124, 0.017731712341308593, 0.017514080047607423, 0.017677087783813477, 0.01761267280578613, 0.01766537666320801, 0.017697471618652344, 0.01823958396911621, 0.017909759521484374, 0.017739391326904295, 0.017615232467651367, 0.01764761543273926, 0.01757798385620117, 0.017534975051879884, 0.01755340766906738, 0.018585599899291993, 0.0178272647857666, 0.01760927963256836, 0.017541120529174805, 0.01761689567565918, 0.01760870361328125, 0.017780351638793945, 0.017667583465576172, 0.01763007926940918, 0.01762291145324707, 0.017684608459472655, 0.017833951950073243, 0.01792617607116699, 0.017756160736083985, 0.01776630401611328, 0.017582176208496093, 0.01817724800109863, 0.01766275215148926, 0.017575935363769533, 0.017534975051879884, 0.01753900718688965, 0.017450624465942383, 0.017588672637939454, 0.0175861759185791, 0.017532928466796875, 0.017548736572265626, 0.01767875289916992, 0.017921375274658202, 0.017603328704833984, 0.017672256469726564, 0.017616575241088867, 0.017688480377197266, 0.0175251522064209, 0.01761193656921387, 0.01765052795410156, 0.01761075210571289, 0.01756060791015625, 0.01750262451171875, 0.017611135482788087, 0.01754070472717285, 0.017611360549926756, 0.017570751190185547, 0.01798739242553711, 0.01769286346435547, 0.01749932861328125, 0.01761568069458008, 0.017588224411010742, 0.01757798385620117, 0.017537023544311522, 0.018276544570922853, 0.018802047729492188, 0.018051519393920898, 0.01785958480834961, 0.017728511810302734, 0.01779097557067871, 0.017657791137695313, 0.017590335845947266, 0.01763942337036133, 0.017506303787231444, 0.017582080841064454, 0.017653408050537108, 0.017612735748291017, 0.01762892723083496, 0.01765648078918457, 0.017530879974365234, 0.017557376861572264, 0.017537151336669922, 0.017551103591918946, 0.01772159957885742, 0.017665151596069337, 0.01749081611633301, 0.017596416473388672, 0.017534975051879884, 0.017711103439331053, 0.01776131248474121, 0.019202207565307616, 0.018428735733032227, 0.017860128402709962, 0.017736160278320312, 0.0176680965423584, 0.017633279800415038, 0.01782374382019043, 0.01761484718322754, 0.017620927810668947, 0.017495840072631837, 0.017481599807739258, 0.017487840652465822, 0.017760704040527344, 0.017616191864013673, 0.017918655395507813, 
0.017556575775146483, 0.017505184173583984, 0.017496063232421876, 0.017530879974365234, 0.01767398452758789, 0.017524255752563476, 0.01749065589904785, 0.017510400772094727, 0.017588224411010742, 0.01756070327758789, 0.017590816497802735, 0.017665855407714842, 0.017537567138671876, 0.017540191650390623]",tokens/s,56.36726465215715,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 29058 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,6641.700864,9387.835392,0.0,8992.587776,8404.320768,s,1,14.4562421875,14.4562421875,0.0,14.4562421875,14.4562421875,14.4562421875,14.4562421875,[14.4562421875],,kWh,0.00021343599917499127,2.3536222536619963e-05,9.489063146797538e-05,0.0003318628531795866,,MB,1564.639232,9402.515456,0.0,8992.587776,7879.473152,s,10,3.2490415954589844,0.32490415954589846,0.0005050043342131917,0.3249153594970703,0.3256789093017578,0.32569710540771485,0.3257116622924805,"[0.3243965454101562, 0.3239740295410156, 0.324656494140625, 0.32492047119140627, 0.32567486572265625, 0.32468362426757813, 0.32495562744140627, 0.3257153015136719, 0.32515438842773436, 0.32491024780273436]",tokens/s,787.9246617150048,kWh,9.496914052687149e-06,1.0473449039228742e-06,6.288436930387383e-06,1.6832695886997404e-05,tokens/kWh,15208496.70894072,MB,1567.612928,9402.515456,0.0,8992.587776,8125.43744,s,10,22.417164306640625,2.2417164306640625,0.0017633449796451378,2.241837158203125,2.243854833984375,2.244349658203125,2.244745517578125,"[2.24002197265625, 2.2391669921875, 2.240408935546875, 2.244844482421875, 2.239778076171875, 2.243744873046875, 2.241950927734375, 2.241723388671875, 2.243001220703125, 2.2425234375]",tokens/s,28.10346533496993,kWh,6.519396790940043e-05,7.190828362416798e-06,4.344726952481049e-05,0.00011583206579662773,tokens/kWh,543890.8437548918,,s,630,22.412986602783178,0.03557616921076699,0.0003406645445687422,0.03560273551940918,0.036014133071899414,0.03606753921508789,0.036162389793396,"[0.03562540817260742, 0.03507193756103515, 0.03492460632324219, 0.0348364143371582, 0.03492435073852539, 0.035019008636474606, 0.035110080718994144, 0.03505596923828125, 0.035095008850097656, 0.03513753509521484, 0.035233440399169924, 0.035176799774169924, 0.035243648529052735, 0.035092609405517575, 
0.03514291381835938, 0.03523673629760742, 0.03522544097900391, 0.03515158462524414, 0.0351965446472168, 0.03526342391967773, 0.03540566253662109, 0.03540598297119141, 0.0354304313659668, 0.03528496170043945, 0.03542985534667969, 0.035356704711914065, 0.03539926528930664, 0.03548787307739258, 0.03551337432861328, 0.035467105865478514, 0.03546511840820313, 0.03556272125244141, 0.035601184844970706, 0.03555327987670898, 0.03572537612915039, 0.0356638069152832, 0.035743743896484374, 0.0357210578918457, 0.03566608047485351, 0.03566294479370117, 0.03573980712890625, 0.03578668975830078, 0.0358326416015625, 0.035778209686279296, 0.035804672241210936, 0.03578524780273438, 0.03587923049926758, 0.03590348815917969, 0.035907230377197265, 0.03583350372314453, 0.03600624084472656, 0.03598332977294922, 0.03585676956176758, 0.03581740951538086, 0.03594655990600586, 0.03599087905883789, 0.03606185531616211, 0.03599283218383789, 0.0360568962097168, 0.0360621452331543, 0.03611008071899414, 0.036063488006591794, 0.03609571075439453, 0.03567795181274414, 0.035084320068359376, 0.035041534423828125, 0.03491635131835937, 0.034917953491210935, 0.03499411010742187, 0.03516604614257812, 0.03508700942993164, 0.035074047088623043, 0.03501039886474609, 0.03528646469116211, 0.035100833892822265, 0.035007232666015624, 0.03511072158813477, 0.03514934539794922, 0.03516985702514648, 0.035138721466064456, 0.03505740737915039, 0.03516019058227539, 0.035168128967285155, 0.035422206878662106, 0.03539952087402344, 0.03530358505249023, 0.0353361930847168, 0.035401569366455075, 0.03543056106567383, 0.035423328399658206, 0.035376033782958984, 0.035479328155517575, 0.03541638565063476, 0.03542825698852539, 0.03543392181396485, 0.03552707290649414, 0.03564300918579102, 0.03570127868652344, 0.03564739227294922, 0.03573360061645508, 0.0356940803527832, 0.03566233444213867, 0.035640705108642576, 0.03569113540649414, 0.035694751739501956, 0.03575190353393555, 0.03575590515136719, 0.03575209426879883, 0.03580912017822266, 0.035827713012695314, 0.035768318176269534, 0.035835166931152344, 0.03587350463867187, 0.036026336669921874, 0.03584841537475586, 0.03591088104248047, 0.035983230590820314, 0.036059711456298826, 0.03602025604248047, 0.036014110565185546, 0.03606537628173828, 0.03611366271972656, 0.035937023162841794, 0.036073471069335936, 0.036183807373046876, 0.036286720275878905, 0.03581542587280274, 0.03510204696655273, 0.03499689483642578, 0.03492470550537109, 0.03498787307739258, 0.03496160125732422, 0.03504048156738281, 0.03502755355834961, 0.035194881439208986, 0.03518217468261719, 0.03520467376708984, 0.03530217742919922, 0.035162078857421876, 0.03527484893798828, 0.03535257720947266, 0.035282943725585936, 0.03530342483520508, 0.03520675277709961, 0.03521769714355469, 0.035244480133056644, 0.035272384643554686, 0.03525228881835937, 0.03534019088745117, 0.035384574890136716, 0.03533903884887695, 0.03523497772216797, 0.0353897590637207, 0.035573951721191405, 0.035536319732666015, 0.035438720703125, 0.035449886322021486, 0.03541801452636719, 0.035546974182128904, 0.03555123138427734, 0.0355978889465332, 0.03563708877563477, 0.03571785736083984, 0.03579276657104492, 0.035784702301025394, 0.035702880859375, 0.03574262237548828, 0.03569561767578125, 0.035741855621337894, 0.0357210578918457, 0.035861534118652345, 0.03585142517089844, 0.03589510345458984, 0.035831649780273436, 0.035783039093017575, 0.0357509765625, 0.0358397445678711, 0.035799808502197265, 0.036026336669921874, 0.0360450553894043, 0.03613491058349609, 
0.035983360290527344, 0.03597312164306641, 0.03606118392944336, 0.036132865905761716, 0.036067329406738284, 0.036050430297851564, 0.03606915283203125, 0.03614998245239258, 0.03597321701049805, 0.0352525749206543, 0.03530758285522461, 0.03508736038208008, 0.035324737548828124, 0.03512543869018555, 0.03517030334472656, 0.0350860481262207, 0.035330337524414064, 0.03527376174926758, 0.03523231887817383, 0.03515228652954101, 0.03523788833618164, 0.035186912536621096, 0.03527459335327148, 0.03516851043701172, 0.03522937774658203, 0.03528297424316406, 0.035315582275390625, 0.035299423217773435, 0.0353175048828125, 0.03534668731689453, 0.03546268844604492, 0.035425888061523435, 0.035496448516845705, 0.03551785659790039, 0.03565667343139649, 0.03578396987915039, 0.03573657608032227, 0.03558572769165039, 0.035593696594238285, 0.035565921783447266, 0.035541217803955076, 0.035560672760009765, 0.03561510467529297, 0.03574758529663086, 0.03587696075439453, 0.035770942687988284, 0.035833854675292966, 0.03573676681518555, 0.03577859115600586, 0.03576502227783203, 0.03580723190307617, 0.0357531852722168, 0.035865345001220704, 0.035823070526123046, 0.03581542587280274, 0.03575235366821289, 0.035829025268554686, 0.03586547088623047, 0.03600598526000977, 0.03596278381347656, 0.0360299186706543, 0.03621532821655273, 0.03617724609375, 0.03606771087646484, 0.03609219360351563, 0.036007102966308595, 0.03611219024658203, 0.03602092742919922, 0.036025726318359375, 0.03596777725219726, 0.03617308807373047, 0.03575833511352539, 0.03521353530883789, 0.035116832733154295, 0.03498147201538086, 0.03622092819213867, 0.034977344512939455, 0.03497865676879883, 0.035135486602783206, 0.03513459014892578, 0.03510156631469727, 0.03513328170776367, 0.03510444641113281, 0.035200576782226566, 0.035144065856933596, 0.035098400115966794, 0.035130111694335935, 0.035198974609375, 0.035209217071533204, 0.03525577545166016, 0.03530124664306641, 0.035485599517822264, 0.03546803283691406, 0.03548124694824219, 0.03543075180053711, 0.03543225479125976, 0.03539068984985352, 0.03549657440185547, 0.03555977630615234, 0.03554508972167969, 0.03544473648071289, 0.03545395278930664, 0.03538547134399414, 0.03543743896484375, 0.03540172958374024, 0.035471359252929685, 0.035671199798583984, 0.035655998229980466, 0.03560428619384766, 0.03569452667236328, 0.03568310546875, 0.03564246368408203, 0.03563020706176758, 0.035718944549560545, 0.035689952850341794, 0.03564352035522461, 0.03574620819091797, 0.035768192291259764, 0.03577775955200195, 0.03576924896240234, 0.03570483016967774, 0.035899391174316404, 0.03594649505615234, 0.03594153594970703, 0.03589120101928711, 0.03610323333740234, 0.03616745758056641, 0.036001792907714845, 0.03590758514404297, 0.03594768142700195, 0.03587916946411133, 0.036014335632324215, 0.036011966705322265, 0.03604931259155274, 0.03595468902587891, 0.035331966400146485, 0.0350885124206543, 0.03501571273803711, 0.035064609527587894, 0.03504352188110352, 0.03502643203735351, 0.03508070373535156, 0.03522099304199219, 0.03517113494873047, 0.03523142242431641, 0.0351723518371582, 0.03522355270385742, 0.035133472442626955, 0.035279838562011716, 0.035293598175048825, 0.03528755187988281, 0.03521987152099609, 0.03529404830932617, 0.035794849395751956, 0.035310462951660154, 0.03536288070678711, 0.03544678497314453, 0.035491584777832035, 0.03544281768798828, 0.03541516876220703, 0.03555228805541992, 0.035565536499023435, 0.03561881637573242, 0.035552928924560544, 0.03558457565307617, 0.03557936096191406, 0.035598751068115234, 
0.03561580657958984, 0.035811328887939455, 0.035695457458496095, 0.035828990936279295, 0.03578291320800781, 0.03578112030029297, 0.0356864013671875, 0.03586457443237305, 0.03592396926879883, 0.0358823356628418, 0.03584579086303711, 0.03592704010009766, 0.03582156753540039, 0.035847518920898436, 0.03580380630493164, 0.035917438507080075, 0.035926273345947266, 0.03602035140991211, 0.03592953491210937, 0.03593606567382813, 0.03596774291992187, 0.03603472137451172, 0.03593603134155274, 0.03591993713378906, 0.03597638320922852, 0.03602924728393555, 0.03604889678955078, 0.03597107315063477, 0.03599155044555664, 0.03613081741333008, 0.036001182556152346, 0.03529584121704102, 0.03508367919921875, 0.0349923210144043, 0.03510857772827149, 0.03506041717529297, 0.035059711456298825, 0.03503104019165039, 0.03506300735473633, 0.035068126678466795, 0.035099006652832034, 0.03511881637573242, 0.03513782501220703, 0.03518624114990234, 0.035197566986083985, 0.03515801620483398, 0.035209217071533204, 0.03522544097900391, 0.03523535919189453, 0.035426334381103514, 0.03543888092041016, 0.03541241455078125, 0.03537395095825195, 0.03543276977539062, 0.035399681091308595, 0.03539558410644531, 0.03550892639160156, 0.03559417724609375, 0.035622974395751954, 0.035556991577148436, 0.0355425910949707, 0.03555411148071289, 0.035598175048828125, 0.03560630416870117, 0.035581695556640626, 0.035625503540039065, 0.035761920928955075, 0.03574390411376953, 0.035696830749511715, 0.03561270523071289, 0.035880672454833985, 0.03592012786865234, 0.03586191940307617, 0.03584588623046875, 0.03583369445800781, 0.03576115036010742, 0.035805118560791015, 0.03587475204467774, 0.035902751922607425, 0.03589616012573242, 0.035878623962402344, 0.03584233474731445, 0.03590758514404297, 0.035917823791503906, 0.03596492767333984, 0.035953792572021484, 0.0360398063659668, 0.03597040176391601, 0.0361453742980957, 0.036133056640625, 0.036139007568359374, 0.03609193420410156, 0.03614716720581055, 0.03589225769042969, 0.0351798095703125, 0.034998943328857425, 0.03501590347290039, 0.03501536178588867, 0.03497129440307617, 0.03505811309814453, 0.035147232055664064, 0.0352562255859375, 0.03523798370361328, 0.035160511016845704, 0.03510691070556641, 0.035297279357910154, 0.035138782501220704, 0.035189537048339846, 0.035192832946777344, 0.03512934494018555, 0.03506380844116211, 0.035149822235107424, 0.03524367904663086, 0.03523004913330078, 0.03520476913452148, 0.0354015998840332, 0.03536943817138672, 0.03537510299682617, 0.035383296966552735, 0.03547545623779297, 0.035544864654541014, 0.03561049652099609, 0.035572063446044924, 0.035495712280273435, 0.035496158599853514, 0.035520511627197264, 0.035555233001708986, 0.03563859176635742, 0.035608993530273435, 0.03579308700561523, 0.03583347320556641, 0.035762752532958984, 0.035773696899414065, 0.03579366302490234, 0.03586374282836914, 0.03590841674804687, 0.035792064666748044, 0.03584236907958984, 0.03573113632202148, 0.03580601501464844, 0.03592192077636719, 0.03595395278930664, 0.035976959228515626, 0.03595734405517578, 0.03588768005371094, 0.036031806945800784, 0.03600230407714844, 0.036128799438476564, 0.03609196853637695, 0.03613180923461914, 0.036035488128662106, 0.036020030975341795, 0.03603478240966797, 0.03609135818481445, 0.03608425521850586, 0.03611856079101562, 0.03573161697387695, 0.035061153411865234, 0.034914752960205075, 0.03488083267211914, 0.03489247894287109, 0.03491430282592774, 0.03507820892333984, 0.03523366546630859, 0.035498046875, 0.0352911376953125, 0.03536627197265625, 
0.03595737457275391, 0.03553279876708984, 0.035272705078125, 0.03527280044555664, 0.0352639045715332, 0.035315841674804685, 0.035314048767089844, 0.03534774398803711, 0.035270977020263675, 0.03541158294677734, 0.03543734359741211, 0.03546316909790039, 0.03539904022216797, 0.03550067138671875, 0.035518657684326174, 0.03560432052612305, 0.03559408187866211, 0.03596915054321289, 0.03554899215698242, 0.035459262847900394, 0.03546656036376953, 0.035599040985107425, 0.035676158905029294, 0.03568374252319336, 0.03562268829345703, 0.03565856170654297, 0.035776512145996094, 0.035781761169433594, 0.035746623992919925, 0.0358031997680664, 0.03574288177490234, 0.03572848129272461, 0.03573859024047851, 0.035727745056152345, 0.03569295883178711, 0.03586777496337891, 0.035865215301513674, 0.035875072479248045, 0.03588083267211914, 0.035932449340820315, 0.03587836837768555, 0.035869247436523435, 0.0359381103515625, 0.03597875213623047, 0.03595929718017578, 0.03590758514404297, 0.03587276840209961, 0.035985408782958986, 0.0359628791809082, 0.03590956878662109, 0.03603462219238281, 0.03607513427734375, 0.03585244750976563, 0.035157089233398435, 0.03495769500732422, 0.03497315216064453, 0.03508303833007813, 0.035135616302490236, 0.03511641693115235, 0.03512911987304688, 0.0353267822265625, 0.035291168212890626, 0.035196769714355466, 0.03520937728881836, 0.035249473571777344, 0.0352749137878418, 0.035314208984375, 0.035272350311279295, 0.035324256896972654, 0.035209217071533204, 0.03520512008666992, 0.03529654312133789, 0.03527753448486328, 0.035362495422363284, 0.0355140495300293, 0.035377792358398434, 0.03546316909790039, 0.03537299346923828, 0.03551852798461914, 0.03550592041015625, 0.035635456085205075, 0.035620864868164064, 0.035640670776367185, 0.03550255966186523, 0.03556726455688477, 0.035566078186035154, 0.035649566650390624, 0.03568435287475586, 0.03580332946777344, 0.0357743034362793, 0.03579804611206055, 0.03579747009277344, 0.03576607894897461, 0.03595481491088867, 0.0359714241027832, 0.03579923248291016, 0.03578675079345703, 0.03578675079345703, 0.03577971267700195, 0.03575900650024414, 0.03589673614501953, 0.03590361785888672, 0.036001598358154294, 0.036006366729736325, 0.0360654411315918, 0.03601408004760742, 0.03605052947998047, 0.03602793502807617, 0.03596992111206055, 0.03590758514404297, 0.03590553665161133, 0.03586867141723633, 0.03600953674316406, 0.03599200057983398, 0.03596646499633789]",tokens/s,28.10870372455261,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, 
worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,8205.9264,12277.710848,0.0,11882.463232,11315.947008,s,1,16.886255859375,16.886255859375,0.0,16.886255859375,16.886255859375,16.886255859375,16.886255859375,[16.886255859375],,kWh,0.00029382946845003057,3.239582406865539e-05,0.00012235482010597432,0.00044858011262466027,,MB,4044.82048,12294.488064,0.0,11884.560384,11070.308352,s,10,3.9773246765136716,0.3977324676513672,0.0019224772243556913,0.3980414886474609,0.39965287475585937,0.40012190856933594,0.4004971356201172,"[0.3930919189453125, 0.39734170532226565, 0.39863742065429686, 0.39619915771484376, 0.39800732421875, 0.3980756530761719, 0.3974848022460937, 0.39954864501953125, 0.39834710693359376, 0.4005909423828125]",tokens/s,643.6487358240943,kWh,1.1554067951282704e-05,1.2737012906724567e-06,7.650198427846875e-06,2.0477967669802032e-05,tokens/kWh,12501240.559018562,MB,4049.084416,12296.585216,0.0,11886.657536,11070.310912,s,10,25.94716357421875,2.594716357421875,0.0037278301135179426,2.5959627685546875,2.598407739257812,2.599555920410156,2.6004744653320313,"[2.589595458984375, 2.590308837890625, 2.589087158203125, 2.5964873046875, 2.597010498046875, 2.595438232421875, 2.6007041015625, 2.593673095703125, 2.596706298828125, 2.598152587890625]",tokens/s,24.28011054842124,kWh,7.587325129079908e-05,8.36829190384015e-06,5.051604254955066e-05,0.00013475758574418988,tokens/kWh,467506.1492982874,,s,630,25.943864662170412,0.04118073755900065,0.00041361422757583017,0.041152158737182615,0.04148256874084473,0.04162520122528076,0.04329115642547607,"[0.04304620742797852, 0.041142974853515625, 0.04068518447875977, 0.04066345596313477, 0.04065884780883789, 0.04080031967163086, 0.04055449676513672, 0.041585983276367186, 0.04063711929321289, 0.04067324829101562, 0.04066275024414062, 0.040726497650146486, 0.04103728103637695, 0.04088918304443359, 0.04075475311279297, 0.04083145523071289, 0.04086761474609375, 0.04097660827636719, 0.04093308639526367, 0.04084793472290039, 0.04087548828125, 0.04087228775024414, 0.041002880096435546, 0.04102352142333984, 0.04114636611938476, 0.04117830276489258, 0.04121401596069336, 0.041073406219482425, 0.04101939010620117, 0.04103577423095703, 0.040928417205810544, 0.04102969741821289, 0.041032352447509766, 0.041025665283203124, 0.040908798217773434, 0.040925472259521485, 0.04104544067382813, 0.04109135818481445, 0.04109635162353516, 0.04102025604248047, 0.0409536018371582, 0.04107468795776367, 0.0412407341003418, 0.041210113525390626, 0.04124787139892578, 0.04132937622070312, 0.04231507110595703, 0.041175743103027344, 0.04117913436889648, 0.041283424377441404, 0.04131564712524414, 0.041243328094482425, 0.04143468856811523, 0.04141638565063477, 0.041294273376464845, 0.0413967056274414, 0.04133267211914062, 0.041187614440917966, 
0.041223617553710935, 0.041191585540771486, 0.04113945770263672, 0.0413111686706543, 0.04125696182250976, 0.043718593597412106, 0.04122236633300781, 0.04070380783081055, 0.04063030242919922, 0.04066918563842774, 0.040574977874755856, 0.04062412643432617, 0.04071177673339844, 0.04063068771362305, 0.040586593627929685, 0.04067190551757813, 0.040793663024902345, 0.0407677116394043, 0.04069807815551758, 0.04061167907714844, 0.040736446380615236, 0.04100451278686523, 0.041032703399658206, 0.04080844879150391, 0.04090876770019531, 0.0409536018371582, 0.04086403274536133, 0.041390079498291016, 0.041390079498291016, 0.041471038818359375, 0.04128659057617187, 0.041203071594238284, 0.04100531387329102, 0.04100723266601562, 0.0408653450012207, 0.04086240005493164, 0.040804351806640625, 0.04080025482177734, 0.04099673461914063, 0.040951934814453125, 0.04104806518554688, 0.04114432144165039, 0.041233695983886716, 0.04114883041381836, 0.04108319854736328, 0.04126924896240235, 0.04119148635864258, 0.041170654296875, 0.04131967926025391, 0.04120038223266602, 0.04140054321289063, 0.041282943725585934, 0.041482559204101564, 0.04266815948486328, 0.04129606246948242, 0.04120899200439453, 0.041169471740722656, 0.04131568145751953, 0.04120207977294922, 0.04121654510498047, 0.041170879364013674, 0.0411786880493164, 0.04113974380493164, 0.04120787048339844, 0.041317279815673826, 0.041373695373535156, 0.04130508804321289, 0.04132761764526367, 0.04329564666748047, 0.041224193572998044, 0.04083091354370117, 0.04070406341552734, 0.04072857666015625, 0.040855552673339846, 0.04079411315917969, 0.04072447967529297, 0.04075020980834961, 0.04069059371948242, 0.040771102905273436, 0.04081913757324219, 0.04077772903442383, 0.04089651107788086, 0.040855552673339846, 0.0409354248046875, 0.04085964965820312, 0.04084326553344726, 0.04086793518066406, 0.04105628967285156, 0.04102336120605469, 0.040925182342529294, 0.04103168106079102, 0.04125286483764649, 0.041260639190673826, 0.041162689208984374, 0.04109257507324219, 0.041067615509033206, 0.0409640007019043, 0.040822784423828126, 0.04078742218017578, 0.0407737922668457, 0.040804737091064455, 0.04098457717895508, 0.04095180892944336, 0.04085964965820312, 0.04087795257568359, 0.04086732864379883, 0.040852096557617186, 0.04096540832519531, 0.04100780868530273, 0.04104307174682617, 0.04131923294067383, 0.04129110336303711, 0.04147071838378906, 0.04155187225341797, 0.04148223876953125, 0.041463294982910154, 0.04153539276123047, 0.041441600799560545, 0.04140060806274414, 0.0412938232421875, 0.04129536056518555, 0.04129824066162109, 0.04126534271240234, 0.04118096160888672, 0.04115420913696289, 0.04121247863769531, 0.041164798736572264, 0.041338878631591795, 0.041381889343261716, 0.04128153610229492, 0.04126310348510742, 0.04327043151855469, 0.04138134384155273, 0.040948032379150394, 0.04075990295410156, 0.04076927947998047, 0.040820545196533206, 0.0407815055847168, 0.044096160888671875, 0.04069580841064453, 0.040697856903076174, 0.04073062515258789, 0.040775360107421874, 0.040890113830566406, 0.04084940719604492, 0.04082912063598633, 0.04078400039672852, 0.040812801361083985, 0.04086950302124023, 0.04100339126586914, 0.04107843017578125, 0.0410906867980957, 0.041151199340820316, 0.04120560073852539, 0.0413812141418457, 0.04129017639160156, 0.041217758178710935, 0.04114684677124023, 0.04101548767089844, 0.04100080108642578, 0.04111990356445312, 0.041100959777832034, 0.04094569778442383, 0.040929023742675784, 0.04098310470581055, 0.04196352005004883, 0.04114176177978516, 
0.04111315155029297, 0.04098700714111328, 0.041084800720214844, 0.04118710327148437, 0.041184158325195314, 0.04115456008911133, 0.04122009658813477, 0.041218143463134765, 0.041387935638427735, 0.04146585464477539, 0.04159814453125, 0.0415404167175293, 0.04151295852661133, 0.041391265869140624, 0.04136758422851562, 0.04136838531494141, 0.04126310348510742, 0.04122009658813477, 0.04129977416992187, 0.04117900848388672, 0.0412163200378418, 0.0413076171875, 0.04124931335449219, 0.041267200469970705, 0.04115660858154297, 0.0413573112487793, 0.04134108734130859, 0.04328243255615234, 0.041371105194091796, 0.04089705657958984, 0.04063235092163086, 0.040818462371826174, 0.04085299301147461, 0.04089516830444336, 0.04086783981323242, 0.04083846282958985, 0.04078662490844726, 0.04085916900634766, 0.04078435134887695, 0.04079801559448242, 0.040747230529785156, 0.04086492919921875, 0.04076367950439453, 0.04098716735839844, 0.041095169067382815, 0.041336830139160154, 0.041306110382080076, 0.041111553192138675, 0.04117094421386719, 0.04140851211547852, 0.041398273468017575, 0.04130099105834961, 0.041261215209960934, 0.041113983154296874, 0.04098710250854492, 0.040900672912597656, 0.040957889556884765, 0.041095169067382815, 0.04106607818603516, 0.04109353637695313, 0.04114236831665039, 0.04129119873046875, 0.04119315338134766, 0.04123747253417969, 0.041146175384521484, 0.04107651138305664, 0.041226463317871095, 0.041164798736572264, 0.04122803115844727, 0.04139033508300781, 0.04143513488769531, 0.04142419052124023, 0.04171004867553711, 0.041737823486328124, 0.04164451217651367, 0.041734432220458986, 0.041463680267333984, 0.04137350463867188, 0.04139145660400391, 0.041395038604736326, 0.041397918701171876, 0.04141910552978516, 0.04124467086791992, 0.04126022338867188, 0.041216831207275394, 0.04131951904296875, 0.04147702407836914, 0.041441280364990236, 0.04135286331176758, 0.04146790313720703, 0.04349507141113281, 0.0413680305480957, 0.04082259368896484, 0.040634334564208986, 0.04075734329223633, 0.040791999816894534, 0.04088147354125977, 0.04284880065917969, 0.04063868713378906, 0.04085968017578125, 0.04095974349975586, 0.04088604736328125, 0.04080643081665039, 0.0408191032409668, 0.040994815826416016, 0.04099484634399414, 0.04095734405517578, 0.04099116897583008, 0.041201793670654296, 0.04112524795532226, 0.04122060775756836, 0.041211040496826175, 0.04135011291503906, 0.04135718536376953, 0.0413779182434082, 0.04137139129638672, 0.04117248153686524, 0.0410775032043457, 0.040970241546630856, 0.041003009796142575, 0.04112179183959961, 0.04103968048095703, 0.040933441162109375, 0.040978561401367186, 0.040892414093017575, 0.040869888305664064, 0.04092067337036133, 0.04087039947509766, 0.04094537734985351, 0.040945854187011715, 0.04097820663452149, 0.04111177444458008, 0.04121395111083984, 0.04127129745483398, 0.04131033706665039, 0.04129324722290039, 0.04162953567504883, 0.04172009658813477, 0.04161990356445312, 0.04149440002441406, 0.04152681732177734, 0.041322975158691405, 0.04127859115600586, 0.041237377166748045, 0.04114950561523437, 0.04126611328125, 0.0412586898803711, 0.0412204475402832, 0.04126051330566406, 0.041259521484375, 0.041381855010986325, 0.04145084762573242, 0.041366207122802735, 0.043733089447021485, 0.04141007995605469, 0.040938270568847655, 0.040857921600341796, 0.040861473083496094, 0.04081039810180664, 0.04081478500366211, 0.04091872024536133, 0.040834880828857424, 0.04089907073974609, 0.04092278289794922, 0.04094521713256836, 0.0409403190612793, 0.04087398529052735, 
0.04086982345581055, 0.04078598403930664, 0.040888160705566404, 0.040925342559814455, 0.0411212158203125, 0.041344799041748044, 0.04131305694580078, 0.04128992080688477, 0.04151059341430664, 0.04151103973388672, 0.04152048110961914, 0.04144604873657227, 0.04143513488769531, 0.04119507217407226, 0.04104227066040039, 0.041154144287109375, 0.04115302276611328, 0.041045761108398436, 0.04104217529296875, 0.04181350326538086, 0.04113049697875976, 0.04114636611938476, 0.04105152130126953, 0.0412410888671875, 0.04119769668579101, 0.0410503044128418, 0.04115372848510742, 0.04126988983154297, 0.04149459075927735, 0.041666496276855466, 0.04167679977416992, 0.04158259201049805, 0.041702560424804684, 0.0415362548828125, 0.04164751815795899, 0.04154812622070313, 0.04144342422485352, 0.04144153594970703, 0.04140614318847656, 0.04138016128540039, 0.04135644912719726, 0.04135110473632812, 0.04128646469116211, 0.041304126739501956, 0.04132799911499024, 0.041396480560302734, 0.041268993377685546, 0.041337505340576175, 0.041790817260742186, 0.04329471969604492, 0.04136345672607422, 0.04087113571166992, 0.040733470916748046, 0.04086697769165039, 0.041032352447509766, 0.04115670394897461, 0.040826465606689455, 0.04081919860839844, 0.040793407440185545, 0.04079654312133789, 0.040845535278320313, 0.0409334716796875, 0.04085145568847656, 0.04085939025878906, 0.04084467315673828, 0.04086179351806641, 0.040839969635009764, 0.04109270477294922, 0.040968448638916015, 0.040933536529541015, 0.04091292953491211, 0.04106441497802735, 0.041307552337646485, 0.04123503875732422, 0.04114361572265625, 0.041156639099121095, 0.04102006530761719, 0.04105420684814453, 0.0410742073059082, 0.04119776153564453, 0.041037696838378906, 0.04109507369995117, 0.04114246368408203, 0.041160190582275394, 0.04110009765625, 0.04105215835571289, 0.04116396713256836, 0.04106032180786133, 0.04102025604248047, 0.04125696182250976, 0.041488384246826174, 0.041404415130615234, 0.04149878311157226, 0.04152268981933594, 0.04137318420410156, 0.041378654479980466, 0.041411582946777346, 0.04146688079833984, 0.04137286376953125, 0.04134147262573242, 0.04121014404296875, 0.04127059173583984, 0.0413067512512207, 0.04140447998046875, 0.04119955062866211, 0.041295585632324217, 0.04116831970214844, 0.041214241027832034, 0.04136342239379883, 0.041290302276611325, 0.041302112579345705, 0.041250816345214845, 0.04327228927612305, 0.04131020736694336, 0.04089440155029297, 0.040779518127441405, 0.04078755187988281, 0.040671966552734376, 0.04069375991821289, 0.04080230331420898, 0.040845439910888674, 0.040929153442382814, 0.04094927978515625, 0.04088675308227539, 0.040894462585449216, 0.04088790512084961, 0.040874401092529294, 0.04093734359741211, 0.04092700958251953, 0.04098287963867187, 0.041070465087890626, 0.04132220840454102, 0.0410442886352539, 0.04105583953857422, 0.041140735626220705, 0.041262081146240234, 0.041253887176513675, 0.042757312774658204, 0.04095375823974609, 0.04106537628173828, 0.04110540771484375, 0.04112371063232422, 0.04095808029174805, 0.040959999084472655, 0.04106854248046875, 0.04117504119873047, 0.04110745620727539, 0.041011199951171876, 0.040945758819580076, 0.040976158142089845, 0.04120528030395508, 0.04108553695678711, 0.0410107536315918, 0.041296321868896486, 0.041443134307861326, 0.041375934600830076, 0.041527294158935545, 0.041678848266601565, 0.041637889862060545, 0.04153926467895508, 0.04154732894897461, 0.04148441696166992, 0.041512737274169924, 0.04143804931640625, 0.041350399017333984, 0.041339839935302734, 0.04138995361328125, 
0.04124665451049805, 0.04138172912597656, 0.04127356719970703, 0.04137567901611328, 0.0413941764831543, 0.04133184051513672, 0.041384830474853515, 0.041404415130615234, 0.04336435317993164, 0.04125696182250976, 0.040890113830566406, 0.04073289489746094, 0.04072246551513672, 0.040818145751953125, 0.04090934371948242, 0.0410082893371582, 0.040991008758544924, 0.04099334335327148, 0.04100505447387695, 0.041064254760742186, 0.04090284729003906, 0.04091289520263672, 0.04095084762573242, 0.040960960388183594, 0.040796161651611325, 0.04091888046264648, 0.04096150588989258, 0.04090950393676758, 0.04102259063720703, 0.041210750579833984, 0.041221569061279294, 0.04129644775390625, 0.04132044982910156, 0.041336830139160154, 0.041148414611816404, 0.04103168106079102, 0.04095590209960937, 0.04106198501586914, 0.0410810546875, 0.04119577789306641, 0.041097152709960935, 0.041215774536132815, 0.041102718353271485, 0.041151294708251955, 0.041148448944091795, 0.041199169158935546, 0.041228736877441406, 0.04130118560791016, 0.04132742309570313, 0.0413675537109375, 0.04135289764404297, 0.0413856315612793, 0.0414826545715332, 0.041480449676513674, 0.04149168014526367, 0.04155196762084961, 0.041529727935791017, 0.04155833435058594, 0.04150179290771484, 0.04149955368041992, 0.04137984085083008, 0.04137779235839844, 0.041248542785644535, 0.04128496170043945, 0.04129267120361328, 0.04134912109375, 0.042031105041503904, 0.041737472534179684, 0.04144607925415039, 0.04140652847290039, 0.04135116958618164]",tokens/s,24.28319790453669,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return 
next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 651, in resolve_trust_remote_code answer = input( EOFError: EOF when reading a line During handling of the above exception, another exception occurred: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 149, in load_transformers_model self.create_no_weights_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 269, in create_no_weights_model meta_model = self.automodel_loader.from_config(self.pretrained_config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 423, in from_config trust_remote_code = resolve_trust_remote_code( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 664, in resolve_trust_remote_code raise ValueError( ValueError: The repository for Deci/DeciCoder-1b contains custom code which must be executed to correctly load the model. You can inspect the repository content at https://hf.co/Deci/DeciCoder-1b. Please pass the argument `trust_remote_code=True` to allow custom code to be run. 
" 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, 
worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' 
" 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4920.836096,8001.61792,0.0,7606.370304,6988.678144,s,1,13.69842578125,13.69842578125,0.0,13.69842578125,13.69842578125,13.69842578125,13.69842578125,[13.69842578125],,kWh,0.0001918337320374121,2.1153670764710996e-05,7.7197283980035e-05,0.00029018468678215813,,MB,2802.900992,8018.395136,0.0,7608.467456,6915.136,s,10,2.203863525390625,0.2203863525390625,0.001473780131591768,0.2205274887084961,0.22190032501220702,0.2221533332824707,0.22235573989868163,"[0.21690550231933595, 0.22057408142089843, 0.22184410095214843, 
0.22048089599609375, 0.22132266235351564, 0.2190339813232422, 0.22027871704101562, 0.21995756530761718, 0.22105967712402344, 0.22240634155273437]",tokens/s,1161.596428502192,kWh,6.414966657065093e-06,7.074569528675595e-07,4.248040838044473e-06,1.1370464447977125e-05,tokens/kWh,22514471.697375912,MB,2807.226368,8020.492288,0.0,7610.564608,6915.13856,s,10,19.13417175292969,1.913417175292969,0.013176174665317692,1.9097753295898436,1.9265622436523437,1.9365471130371092,1.9445350085449218,"[1.9243433837890624, 1.9071304931640625, 1.8996845703125, 1.9014300537109374, 1.917813720703125, 1.946531982421875, 1.906928955078125, 1.904153076171875, 1.9137353515625, 1.912420166015625]",tokens/s,32.9253864831405,kWh,5.557457008085287e-05,6.1297175610456665e-06,3.68125475659516e-05,9.851683520785015e-05,tokens/kWh,639484.6105955701,,s,630,19.131715995788586,0.030367803167918372,0.000670981992929344,0.030225759506225587,0.030812736701965333,0.031045494365692136,0.032240393447875985,"[0.030822303771972655, 0.03043654441833496, 0.03013724708557129, 0.030142463684082032, 0.03018547248840332, 0.03038617515563965, 0.030471328735351563, 0.030905088424682616, 0.030527584075927733, 0.030470144271850585, 0.030658432006835937, 0.03050111961364746, 0.03050864028930664, 0.03044528007507324, 0.030552032470703126, 0.03032678413391113, 0.03029052734375, 0.030351200103759766, 0.03053526306152344, 0.030386751174926757, 0.03100876808166504, 0.03033228874206543, 0.03027008056640625, 0.030711904525756836, 0.030354848861694338, 0.031936704635620115, 0.03046227264404297, 0.030305791854858398, 0.030296064376831053, 0.03053004837036133, 0.030698720932006835, 0.03054217529296875, 0.030609855651855467, 0.03060326385498047, 0.030676992416381835, 0.030664575576782226, 0.030681503295898437, 0.030602079391479492, 0.030694271087646486, 0.030658367156982422, 0.030623935699462892, 0.03056003189086914, 0.030381599426269532, 0.030229183197021486, 0.030265344619750976, 0.03033907127380371, 0.030840831756591795, 0.03030531120300293, 0.030247007369995117, 0.030179519653320313, 0.030460607528686522, 0.0336814079284668, 0.03030611228942871, 0.030324672698974608, 0.030245119094848633, 0.030273536682128906, 0.03040870475769043, 0.03037558364868164, 0.0304051513671875, 0.030496063232421874, 0.030406976699829103, 0.030563520431518554, 0.030473215103149414, 0.03134864044189453, 0.031150655746459963, 0.030840032577514647, 0.030401311874389648, 0.032004096984863284, 0.03170211219787598, 0.03014678382873535, 0.030212799072265626, 0.02991916847229004, 0.030052000045776368, 0.030179264068603516, 0.030453567504882813, 0.03029212760925293, 0.030185983657836913, 0.030187519073486328, 0.02993356704711914, 0.02998271942138672, 0.030054399490356445, 0.02997657585144043, 0.030007295608520508, 0.030111743927001954, 0.03015670394897461, 0.030107744216918947, 0.030089216232299806, 0.030104896545410157, 0.030039039611816407, 0.03012076759338379, 0.03020275115966797, 0.029936927795410156, 0.029952735900878907, 0.030320640563964843, 0.030130176544189452, 0.030267391204833984, 0.03035545539855957, 0.030535104751586915, 0.030408863067626954, 0.030347679138183595, 0.030598239898681642, 0.03032566452026367, 0.03034726333618164, 0.030029504776000977, 0.03002809524536133, 0.03002947235107422, 0.030077280044555663, 0.030615007400512696, 0.030080928802490234, 0.030038272857666016, 0.03013875198364258, 0.030007423400878905, 0.030103424072265624, 0.03045964813232422, 0.0302957763671875, 0.03047478485107422, 0.03042665672302246, 0.03028611183166504, 0.03000339126586914, 
0.03005232048034668, 0.030127168655395508, 0.03006287956237793, 0.02998159980773926, 0.030258975982666015, 0.02994790458679199, 0.029879711151123048, 0.031131391525268556, 0.030425216674804686, 0.030215456008911134, 0.03011452865600586, 0.029929216384887696, 0.029819135665893556, 0.030179168701171873, 0.030058656692504883, 0.02991231918334961, 0.03005516815185547, 0.03016067123413086, 0.030159072875976564, 0.029968095779418946, 0.030052255630493165, 0.030148223876953126, 0.029971200942993163, 0.030078975677490235, 0.03034726333618164, 0.030344287872314454, 0.030411327362060547, 0.030610944747924803, 0.030232831954956053, 0.030345312118530275, 0.030038015365600586, 0.03005695915222168, 0.02993152046203613, 0.029970432281494142, 0.030003103256225586, 0.030068479537963867, 0.03006230354309082, 0.029997695922851564, 0.030048255920410157, 0.02997452735900879, 0.03002732849121094, 0.030208032608032228, 0.030366111755371093, 0.030148319244384766, 0.03025644874572754, 0.030374879837036132, 0.03029750442504883, 0.030392927169799806, 0.03033497619628906, 0.03036774444580078, 0.030229663848876952, 0.030141120910644532, 0.030277088165283204, 0.030218591690063478, 0.030112096786499024, 0.030066816329956055, 0.030035839080810547, 0.030034975051879884, 0.03001852798461914, 0.029945632934570313, 0.0299704647064209, 0.03008492851257324, 0.030103935241699218, 0.030168479919433593, 0.030114400863647462, 0.030121503829956056, 0.030013216018676757, 0.03009164810180664, 0.03007315254211426, 0.03002908706665039, 0.030905344009399413, 0.030178720474243165, 0.03000992012023926, 0.030005279541015624, 0.02995782470703125, 0.03002566337585449, 0.03006287956237793, 0.03003606414794922, 0.03003392028808594, 0.02990460777282715, 0.029985055923461915, 0.029917184829711913, 0.029888511657714844, 0.030041791915893554, 0.030042400360107423, 0.03010166358947754, 0.029908287048339845, 0.030048095703125, 0.029975263595581055, 0.030302207946777345, 0.02983526420593262, 0.029906944274902345, 0.02993302345275879, 0.03006719970703125, 0.02988649559020996, 0.030017440795898437, 0.03023676872253418, 0.029913087844848633, 0.03010908889770508, 0.029872896194458008, 0.02985763168334961, 0.03015475273132324, 0.030021631240844726, 0.030013439178466796, 0.03003392028808594, 0.029954048156738283, 0.029925376892089843, 0.03030201530456543, 0.029892799377441406, 0.029924896240234374, 0.029841888427734376, 0.030607072830200196, 0.02990675163269043, 0.03006483268737793, 0.03008950424194336, 0.03010576057434082, 0.03010304069519043, 0.030086944580078125, 0.030032447814941406, 0.02999203109741211, 0.03013657569885254, 0.030196384429931642, 0.030459903717041017, 0.031066112518310547, 0.03077280044555664, 0.030685632705688477, 0.030717023849487303, 0.030695903778076173, 0.030781568527221678, 0.03080633544921875, 0.031340543746948245, 0.03077939224243164, 0.030692352294921874, 0.031217439651489258, 0.03060563278198242, 0.03073843193054199, 0.030754911422729493, 0.030605215072631836, 0.030822399139404297, 0.031264192581176756, 0.030827104568481447, 0.030702880859375, 0.03062396812438965, 0.030826335906982423, 0.0307553596496582, 0.03164374351501465, 0.03384108734130859, 0.03127833557128906, 0.030849311828613283, 0.0309703369140625, 0.03058460807800293, 0.030541183471679688, 0.030448543548583985, 0.030138368606567382, 0.030086847305297853, 0.02999737548828125, 0.0300677433013916, 0.029959135055541992, 0.02997216033935547, 0.031531328201293944, 0.030031200408935546, 0.030106271743774414, 0.029908992767333983, 0.03020307159423828, 0.029993087768554687, 
0.030141120910644532, 0.03056025505065918, 0.030062623977661133, 0.030039583206176758, 0.030310848236083984, 0.03010748863220215, 0.029990463256835936, 0.030257568359375, 0.03007516860961914, 0.03029596710205078, 0.02995199966430664, 0.030106912612915038, 0.029938400268554686, 0.03011075210571289, 0.030098304748535157, 0.030136415481567383, 0.030031328201293946, 0.030336736679077148, 0.03030918312072754, 0.030480384826660156, 0.03008892822265625, 0.03002191925048828, 0.030115808486938477, 0.030998592376708985, 0.030174816131591797, 0.03008473587036133, 0.030044544219970704, 0.030113920211791993, 0.030208255767822264, 0.030230783462524415, 0.03027244758605957, 0.031049728393554688, 0.030468095779418947, 0.030724096298217773, 0.030672672271728516, 0.030265567779541015, 0.03898777770996094, 0.030620800018310548, 0.030339391708374023, 0.030519872665405273, 0.030664831161499023, 0.030812032699584962, 0.03079987144470215, 0.031133792877197267, 0.03067686462402344, 0.03070476722717285, 0.030978687286376955, 0.030775583267211915, 0.030732479095458985, 0.030830400466918945, 0.03099033546447754, 0.030760959625244142, 0.030834911346435546, 0.030897439956665038, 0.03071027183532715, 0.03096575927734375, 0.030633983612060548, 0.03054182434082031, 0.030430335998535157, 0.030472415924072266, 0.039051937103271483, 0.030514335632324217, 0.030573183059692383, 0.030064863204956056, 0.03122585678100586, 0.03178700828552246, 0.030265344619750976, 0.029904895782470704, 0.029997055053710937, 0.03006035232543945, 0.030074527740478516, 0.030060096740722655, 0.03003081512451172, 0.030263296127319338, 0.032933887481689454, 0.0308570556640625, 0.03024502372741699, 0.030313983917236328, 0.030605472564697266, 0.03052169609069824, 0.03062291145324707, 0.03104031944274902, 0.030724096298217773, 0.03078144073486328, 0.030484384536743164, 0.03071753692626953, 0.03047065544128418, 0.03038412857055664, 0.030275936126708983, 0.03058243179321289, 0.030418943405151368, 0.03049990463256836, 0.03060736083984375, 0.030327743530273437, 0.031317440032958985, 0.030605472564697266, 0.030624319076538085, 0.030680767059326174, 0.030482879638671877, 0.030552032470703126, 0.03058691215515137, 0.031021055221557618, 0.03071731185913086, 0.030406879425048827, 0.030382495880126953, 0.03054515266418457, 0.030262144088745117, 0.03045568084716797, 0.03050841522216797, 0.030313087463378907, 0.030183040618896484, 0.030456192016601564, 0.03022233581542969, 0.030117887496948242, 0.029992895126342775, 0.030023712158203125, 0.030392192840576173, 0.030036127090454102, 0.030547744750976564, 0.030446048736572265, 0.030472192764282226, 0.029979711532592775, 0.029931711196899413, 0.03026812744140625, 0.03006447982788086, 0.030164703369140625, 0.02995974349975586, 0.029950464248657226, 0.0299521598815918, 0.030066272735595704, 0.02984796714782715, 0.030086463928222656, 0.02994246482849121, 0.030011392593383788, 0.030269439697265626, 0.03002572822570801, 0.02998678398132324, 0.02994588851928711, 0.030238624572753905, 0.0303985595703125, 0.030498815536499024, 0.029962240219116212, 0.030074464797973634, 0.029877792358398436, 0.03020684814453125, 0.030439424514770507, 0.030021215438842775, 0.030107200622558592, 0.029924064636230468, 0.030002399444580077, 0.030008224487304686, 0.030084352493286132, 0.030132991790771484, 0.03115007972717285, 0.03039254379272461, 0.030240192413330078, 0.03013657569885254, 0.030203392028808593, 0.030568864822387694, 0.02998067283630371, 0.030299232482910155, 0.02989148712158203, 0.030036224365234374, 0.030199552536010744, 
0.030048128128051757, 0.03044950485229492, 0.03027382469177246, 0.030097408294677733, 0.029989887237548828, 0.0299550724029541, 0.030138208389282228, 0.03009347152709961, 0.02996633529663086, 0.029925376892089843, 0.029937664031982423, 0.03033225631713867, 0.030371807098388673, 0.029986751556396483, 0.02996944046020508, 0.03009507179260254, 0.030046207427978516, 0.02993356704711914, 0.030090528488159178, 0.030098047256469727, 0.03000534439086914, 0.02999235153198242, 0.03020057678222656, 0.030054239273071288, 0.02995814323425293, 0.03001532745361328, 0.029922496795654296, 0.03015603256225586, 0.03029782485961914, 0.030357503890991212, 0.03023052787780762, 0.030244319915771485, 0.030073375701904298, 0.03003392028808594, 0.030293888092041015, 0.03086297607421875, 0.030444032669067384, 0.030516544342041017, 0.030388927459716795, 0.03057459259033203, 0.0306824951171875, 0.030376575469970704, 0.030449344635009767, 0.030359872817993162, 0.030466047286987305, 0.030430591583251954, 0.030405248641967773, 0.030662656784057617, 0.030585920333862305, 0.030593984603881835, 0.030357503890991212, 0.03046713638305664, 0.03030521583557129, 0.030051328659057616, 0.03011417579650879, 0.03002022361755371, 0.03142451286315918, 0.030699520111083983, 0.030697471618652345, 0.03062518310546875, 0.030601823806762695, 0.03227443313598633, 0.03215705490112305, 0.03084351921081543, 0.03073993682861328, 0.03083500862121582, 0.03080828857421875, 0.0307957763671875, 0.030875648498535156, 0.030674943923950194, 0.03087936019897461, 0.030652799606323243, 0.030731712341308594, 0.030605888366699217, 0.030717952728271485, 0.030631935119628906, 0.03055356788635254, 0.030577152252197266, 0.030381439208984375, 0.03015951919555664, 0.030130176544189452, 0.02994175910949707, 0.02993561553955078, 0.03000912094116211, 0.029873632431030275, 0.029934335708618164, 0.030117887496948242, 0.030361600875854492, 0.03014860725402832, 0.03001468849182129, 0.02995894432067871, 0.029894847869873047, 0.029949760437011717, 0.03061555290222168, 0.030058496475219725, 0.029998752593994142, 0.029953567504882813, 0.029986879348754884, 0.030024288177490234, 0.030163103103637696, 0.02983526420593262, 0.029970111846923827, 0.029947296142578125, 0.030020511627197266, 0.02993756866455078, 0.030053535461425782, 0.030012351989746094, 0.030062591552734375, 0.030040063858032227, 0.029995008468627928, 0.030033599853515624, 0.030001472473144532, 0.030094751358032225, 0.029936223983764648, 0.030046207427978516, 0.02998476791381836, 0.031067840576171873, 0.03129558372497559, 0.03016726493835449, 0.030892160415649413, 0.030498687744140624, 0.03034726333618164, 0.03032294464111328, 0.030519039154052734, 0.030016576766967774, 0.030159807205200194, 0.03016089630126953, 0.030198047637939453, 0.030164703369140625, 0.030070783615112305, 0.0301844482421875, 0.02997865676879883, 0.03004863929748535, 0.03009596824645996, 0.030192863464355468, 0.03019651222229004, 0.03014790344238281, 0.030474943161010744, 0.030104703903198242, 0.02998566436767578, 0.030091424942016602, 0.03006447982788086, 0.02994598388671875, 0.030065696716308595, 0.029937631607055665, 0.0299815673828125, 0.03002070426940918, 0.030276512145996092, 0.030304256439208983, 0.030395551681518553, 0.030331743240356444, 0.030165023803710937, 0.030512351989746094, 0.03002649688720703, 0.030087104797363283, 0.030068031311035158, 0.030231359481811524, 0.030062528610229493, 0.030103424072265624, 0.02997260856628418, 0.029949888229370118, 0.029931295394897462, 0.029911327362060546, 0.029959775924682616, 
0.02983760070800781, 0.029991039276123045, 0.031324159622192385, 0.030003200531005858, 0.030707296371459962, 0.03062006378173828, 0.0304005126953125, 0.03034316825866699, 0.030212160110473632, 0.03453459167480469, 0.03081907272338867, 0.030760959625244142, 0.03075836753845215, 0.03082604789733887, 0.030753471374511718, 0.030969728469848634, 0.031072639465332032, 0.031031328201293944]",tokens/s,32.92961280308994,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,11035.037696,14897.053696,0.0,14501.80608,13634.065408,s,1,18.76700390625,18.76700390625,0.0,18.76700390625,18.76700390625,18.76700390625,18.76700390625,[18.76700390625],,kWh,0.0003418410694708503,3.769941840282371e-05,0.00015415567887998305,0.0005336961667536571,,MB,2132.942848,14911.73376,0.0,14501.80608,12898.830848,s,10,6.400340270996093,0.6400340270996093,0.000510936811757298,0.6400107116699219,0.6404652282714843,0.6408448883056641,0.6411486163330078,"[0.640380859375, 0.640076904296875, 0.6392645874023437, 0.6397262573242187, 0.6396455078125, 0.640072998046875, 0.6412245483398438, 0.6403334350585937, 0.639666748046875, 0.6399484252929688]",tokens/s,399.9787341933906,kWh,1.8703781793228794e-05,2.0617305319390656e-06,1.2342978624375368e-05,3.310849094954323e-05,tokens/kWh,7732155.488153767,MB,2149.777408,14911.73376,0.0,14501.80608,13243.63264,s,10,36.24666357421875,3.624666357421875,0.0019243924507422602,3.625056518554688,3.626326318359375,3.6270015380859375,3.6275417138671875,"[3.62180712890625, 3.6256484375, 3.624166748046875, 3.6247294921875, 3.62556982421875, 3.624913818359375, 3.62617626953125, 3.62519921875, 3.62077587890625, 
3.6276767578125]",tokens/s,17.380910072178384,kWh,0.0001057513950405222,1.1664895981548868e-05,7.05961988380235e-05,0.00018801248986009461,tokens/kWh,335084.12152235245,,s,630,36.24219458389281,0.05752729299030606,0.0005091918130798269,0.05749966430664062,0.05819214363098144,0.0583145601272583,0.05850669719696045,"[0.057396446228027344, 0.056642143249511716, 0.05666396713256836, 0.056575904846191405, 0.0567353286743164, 0.05672604751586914, 0.05664588928222656, 0.05652252960205078, 0.05687078475952148, 0.05687945556640625, 0.05689548873901367, 0.05690761566162109, 0.05681987380981445, 0.056798912048339846, 0.05684659194946289, 0.05698976135253906, 0.05698713684082031, 0.05707417678833008, 0.057112575531005856, 0.057024513244628906, 0.05711052703857422, 0.05706547164916992, 0.056973312377929686, 0.057124671936035154, 0.057254016876220705, 0.057314910888671876, 0.057395294189453126, 0.05736486434936523, 0.05748876953125, 0.05740771102905273, 0.05740585708618164, 0.057296897888183596, 0.05749350357055664, 0.05741292953491211, 0.05740409469604492, 0.05775779342651367, 0.05767113494873047, 0.057668033599853515, 0.057659393310546876, 0.057640960693359375, 0.05781708908081055, 0.05778841781616211, 0.05784783935546875, 0.057866207122802736, 0.057945537567138675, 0.05799168014526367, 0.05812752151489258, 0.05810179138183594, 0.05816153717041016, 0.05792409515380859, 0.05802793502807617, 0.05830460739135742, 0.05822259140014648, 0.05820620727539062, 0.05825273513793945, 0.05805318450927734, 0.05823283386230469, 0.05813862228393555, 0.05809971237182617, 0.05818527984619141, 0.058220993041992186, 0.058355712890625, 0.058365951538085936, 0.05756124877929687, 0.05680704116821289, 0.05668476867675781, 0.05650838470458985, 0.05690140914916992, 0.056957054138183597, 0.05675203323364258, 0.056703201293945314, 0.05694464111328125, 0.05726163101196289, 0.057270015716552734, 0.05722127914428711, 0.05710492706298828, 0.057215999603271485, 0.0573897590637207, 0.05743648147583008, 0.05748121643066406, 0.05725183868408203, 0.05706137466430664, 0.05708390426635742, 0.05700198364257812, 0.056997886657714845, 0.05716377639770508, 0.05706489562988281, 0.05735391998291016, 0.05735513687133789, 0.05737881469726563, 0.05749964904785156, 0.05773305511474609, 0.05764432144165039, 0.05763151931762695, 0.05765929412841797, 0.05775779342651367, 0.05783667373657227, 0.057736064910888674, 0.05797868728637695, 0.05782956695556641, 0.05758156967163086, 0.05753606414794922, 0.057600448608398434, 0.05777203369140625, 0.057679039001464844, 0.05781177520751953, 0.05767168045043945, 0.05774131011962891, 0.05779865646362305, 0.057773086547851564, 0.05781132888793945, 0.05774358367919922, 0.057903488159179686, 0.0578785285949707, 0.05784175872802735, 0.05791254425048828, 0.05816403198242188, 0.058138111114501956, 0.05809347152709961, 0.05815491104125976, 0.05809177780151367, 0.058232864379882815, 0.05839094543457031, 0.05826067352294922, 0.05821305465698242, 0.05826355361938477, 0.05762038421630859, 0.05673177719116211, 0.056608543395996094, 0.05649001693725586, 0.056704513549804686, 0.056711006164550784, 0.05681375885009766, 0.056939136505126955, 0.057100223541259765, 0.057173152923583985, 0.05691187286376953, 0.05700201416015625, 0.057008575439453125, 0.056984001159667966, 0.05693030548095703, 0.05702656173706055, 0.05718220901489258, 0.05736198425292969, 0.0571847038269043, 0.05716377639770508, 0.057060798645019534, 0.05707219314575195, 0.057006080627441405, 0.05703475189208984, 0.05706467056274414, 0.05721987152099609, 
0.05737472152709961, 0.05730416107177734, 0.057265056610107425, 0.05740544128417969, 0.05737062454223633, 0.05732966232299805, 0.05743404769897461, 0.05737180709838867, 0.05744323348999023, 0.057591808319091796, 0.05758566284179688, 0.057501697540283205, 0.057575424194335936, 0.05769833755493164, 0.05792559814453125, 0.05795993423461914, 0.05817804718017578, 0.05816115188598633, 0.05801369476318359, 0.058071041107177736, 0.05809151840209961, 0.05811609649658203, 0.058141792297363284, 0.05813702392578125, 0.058055137634277346, 0.05820415878295898, 0.05808137512207031, 0.05802403259277344, 0.058005214691162106, 0.05790729522705078, 0.058103809356689455, 0.05804646301269531, 0.05827139282226562, 0.05847894287109375, 0.05849468612670899, 0.05839503860473633, 0.05848614501953125, 0.05733583831787109, 0.056799198150634764, 0.05689548873901367, 0.0566640625, 0.05678079986572265, 0.05696428680419922, 0.056767295837402344, 0.05671321487426758, 0.0567562255859375, 0.05692006301879883, 0.05684633636474609, 0.05705740737915039, 0.057076961517333984, 0.05726883316040039, 0.05714540863037109, 0.0571671371459961, 0.057197025299072266, 0.05715135955810547, 0.05709862518310547, 0.05740339279174805, 0.05719190216064453, 0.05713359832763672, 0.05717187118530274, 0.05739324951171875, 0.05731107330322266, 0.05744655990600586, 0.05743001556396484, 0.057425918579101565, 0.05747420883178711, 0.05751692962646485, 0.05749084854125976, 0.05748336029052734, 0.05752870559692383, 0.057493598937988284, 0.05747097778320313, 0.05764278411865234, 0.05760227203369141, 0.05749964904785156, 0.05785411071777344, 0.05766128158569336, 0.057643009185791017, 0.05767686462402344, 0.057753662109375, 0.057758590698242185, 0.05778636932373047, 0.05773926544189453, 0.057908927917480466, 0.05805702209472656, 0.05809766387939453, 0.05808892822265625, 0.058040863037109376, 0.05802361679077148, 0.057962814331054685, 0.058068992614746094, 0.05822387313842774, 0.05813529586791992, 0.058120159149169924, 0.05803830337524414, 0.05817958450317383, 0.05834515380859375, 0.05834988784790039, 0.05848608016967773, 0.058563262939453124, 0.057452545166015626, 0.05680105590820313, 0.056602272033691406, 0.056603199005126954, 0.05655276870727539, 0.05661721420288086, 0.05667011260986328, 0.05662511825561523, 0.056887550354003905, 0.056926528930664064, 0.05678694534301758, 0.05678672027587891, 0.05676873779296875, 0.05694668960571289, 0.05701827239990234, 0.05696112060546875, 0.057106433868408205, 0.05728015899658203, 0.057289249420166014, 0.05738476943969727, 0.05736038589477539, 0.05732352066040039, 0.057181953430175785, 0.05719065475463867, 0.05737859344482422, 0.05732694244384766, 0.057471263885498045, 0.05737123107910156, 0.057427967071533206, 0.057450496673583984, 0.057484928131103515, 0.0575200309753418, 0.05791587066650391, 0.05785599899291992, 0.05768396759033203, 0.057745407104492184, 0.057692161560058595, 0.057665534973144535, 0.057819168090820314, 0.0577371826171875, 0.05769772720336914, 0.05788934326171875, 0.0580055046081543, 0.05797478485107422, 0.05792144012451172, 0.05808547210693359, 0.05810790252685547, 0.05806284713745117, 0.05804032135009766, 0.058087425231933595, 0.05816320037841797, 0.058028030395507815, 0.057974945068359374, 0.058007392883300785, 0.05806204986572266, 0.05812918472290039, 0.058267520904541015, 0.05824313735961914, 0.058187488555908204, 0.058388126373291015, 0.05837894439697266, 0.05845372772216797, 0.05835776138305664, 0.057401344299316405, 0.0566558723449707, 0.056570945739746095, 0.05650527954101563, 
0.05667839813232422, 0.05671094512939453, 0.05666838455200195, 0.05667795181274414, 0.05695052719116211, 0.05702067184448242, 0.057135425567626956, 0.05714956665039062, 0.05704294586181641, 0.05697052764892578, 0.05707440185546875, 0.05714739227294922, 0.057079105377197265, 0.05712966537475586, 0.057071872711181644, 0.05701919937133789, 0.057293758392333985, 0.05734604644775391, 0.05730012893676758, 0.05737673568725586, 0.05740019226074219, 0.057308513641357424, 0.05752284622192383, 0.05749308776855469, 0.057444766998291014, 0.057507839202880856, 0.057353374481201175, 0.057485279083251954, 0.05755311965942383, 0.057520576477050785, 0.057718528747558596, 0.057702880859375, 0.057621726989746096, 0.05771548843383789, 0.05779865646362305, 0.05773516845703125, 0.05784985733032227, 0.05773311996459961, 0.05777622222900391, 0.05778979110717773, 0.057939903259277344, 0.057813377380371095, 0.05779891204833985, 0.057896831512451175, 0.058048576354980466, 0.05817049789428711, 0.0579898567199707, 0.0580425910949707, 0.05790841674804687, 0.057996097564697265, 0.0579317741394043, 0.058054656982421876, 0.05833692932128906, 0.058208606719970704, 0.058191871643066405, 0.058431488037109375, 0.05856025695800781, 0.05850547027587891, 0.05854003143310547, 0.05740614318847656, 0.05675151824951172, 0.056648414611816404, 0.05658828735351563, 0.056704864501953126, 0.05674403381347656, 0.05707574462890625, 0.056938785552978516, 0.05699760055541992, 0.057086078643798825, 0.05690153503417969, 0.05709852981567383, 0.0573226547241211, 0.05755551910400391, 0.056952640533447264, 0.056926399230957034, 0.05701033782958984, 0.05732115173339844, 0.057333919525146486, 0.057236671447753906, 0.057215808868408206, 0.05728665542602539, 0.0572083511352539, 0.0572523193359375, 0.057606273651123044, 0.05738489532470703, 0.057391040802001955, 0.05728460693359375, 0.05736243057250977, 0.057565185546875, 0.05768806457519531, 0.0574213752746582, 0.057395648956298825, 0.05740544128417969, 0.05751603317260742, 0.057513568878173826, 0.05753283309936524, 0.05756694412231445, 0.05751631927490235, 0.057474239349365235, 0.05759673690795898, 0.057765151977539064, 0.05773731231689453, 0.05770207977294922, 0.05804048156738281, 0.05796726226806641, 0.057892990112304685, 0.0579317741394043, 0.05815500640869141, 0.05810940933227539, 0.05834121704101562, 0.058337535858154294, 0.058116222381591795, 0.0581286735534668, 0.058009056091308596, 0.058104385375976564, 0.058264896392822264, 0.05815372848510742, 0.058124225616455076, 0.05845590209960937, 0.05834915161132812, 0.05853241729736328, 0.058705921173095706, 0.05755644989013672, 0.05683705520629883, 0.05670902252197266, 0.05684838485717773, 0.056888896942138674, 0.05684415817260742, 0.056785472869873045, 0.056923423767089844, 0.05698015975952148, 0.05695695877075195, 0.05692195129394531, 0.05686492919921875, 0.05694367980957031, 0.056916927337646486, 0.057054561614990236, 0.05703132629394531, 0.057006080627441405, 0.057133056640625, 0.05706752014160156, 0.05701836776733398, 0.057194496154785154, 0.05718220901489258, 0.057134208679199217, 0.05710470581054688, 0.05725830459594727, 0.0572578239440918, 0.05745043182373047, 0.05751753616333008, 0.05732668685913086, 0.05730652618408203, 0.05723392105102539, 0.05736816024780273, 0.05739916610717773, 0.05757606506347656, 0.05773455810546875, 0.05786995315551758, 0.05770556640625, 0.0577042236328125, 0.057788063049316406, 0.05792124938964844, 0.057847713470458986, 0.05785468673706055, 0.05781462478637695, 0.057876895904541016, 0.057987071990966796, 
0.057896961212158204, 0.05791129684448242, 0.0579788818359375, 0.05793920135498047, 0.05805110549926758, 0.058275360107421875, 0.05815740966796875, 0.05817808151245117, 0.05822009658813477, 0.05822079849243164, 0.05813407897949219, 0.05832134246826172, 0.05815849685668945, 0.05814707183837891, 0.05816953659057617, 0.05830627059936523, 0.05849542236328125, 0.058535743713378906, 0.05749967956542969, 0.05670406341552734, 0.056548255920410156, 0.05652275085449219, 0.056758014678955075, 0.05678515243530274, 0.05669411087036133, 0.05672412872314453, 0.05810966491699219, 0.05670694351196289, 0.05678531265258789, 0.057012222290039063, 0.05699174499511719, 0.05695078277587891, 0.05701017761230469, 0.05700771331787109, 0.057158046722412106, 0.05723244857788086, 0.05701670455932617, 0.05691587066650391, 0.05711529541015625, 0.05710831832885742, 0.05713116836547852, 0.05729718399047851, 0.057242942810058595, 0.057262561798095704, 0.05736441421508789, 0.0573493766784668, 0.057406208038330075, 0.057390079498291016, 0.057322494506835936, 0.05737267303466797, 0.05766105651855469, 0.05752617645263672, 0.057600479125976566, 0.057616382598876956, 0.05756073760986328, 0.05742364883422851, 0.05758627319335938, 0.05777814483642578, 0.05784928131103516, 0.05781356811523437, 0.05772252655029297, 0.05772457504272461, 0.05775843048095703, 0.0577658576965332, 0.05781897735595703, 0.057845664978027345, 0.057897216796875, 0.05800252914428711, 0.058022815704345705, 0.057987071990966796, 0.057984928131103515, 0.057962017059326174, 0.05796124649047851, 0.05788425445556641, 0.05813267135620117, 0.0581341438293457, 0.05810214233398438, 0.05815606307983399, 0.05814575958251953, 0.058210304260253906, 0.058218494415283206, 0.05731497573852539, 0.056812065124511715, 0.05687289428710938, 0.056721214294433595, 0.056735488891601564, 0.056901695251464844, 0.05692681503295898, 0.057079841613769534, 0.057003902435302733, 0.05693423843383789, 0.05721116638183594, 0.05718425750732422, 0.05704499053955078, 0.05699971389770508, 0.05698892974853516, 0.05711356735229492, 0.05715558242797852, 0.05722735977172851, 0.05758761596679687, 0.05724700927734375, 0.05725462341308594, 0.05715126419067383, 0.05720905685424805, 0.05728979110717773, 0.0573513298034668, 0.05756086349487305, 0.05767955017089844, 0.05744262313842773, 0.05732681655883789, 0.057477249145507815, 0.05749593734741211, 0.05738528060913086, 0.057460575103759765, 0.05745577621459961, 0.057598945617675784, 0.05774131011962891, 0.05764710235595703, 0.05757132720947265, 0.057708545684814455, 0.05779455947875976, 0.05781913757324219, 0.057915393829345706, 0.05792153549194336, 0.05781094360351562, 0.05809356689453125, 0.05798912048339844, 0.05793564987182617, 0.05783564758300781, 0.057853374481201175, 0.05788467025756836, 0.05827651214599609, 0.05814470291137695, 0.05820745468139649, 0.058261791229248044, 0.05822118377685547, 0.058253246307373045, 0.05817913436889648, 0.058194591522216794, 0.05822000122070312, 0.05834511947631836, 0.058284000396728514, 0.05842160034179687, 0.05850719833374023]",tokens/s,17.38305329556373,,, 
4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.747328,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,3551.899648,5264.83456,0.0,4869.586944,4520.068608,s,1,11.1682099609375,11.1682099609375,0.0,11.1682099609375,11.1682099609375,11.1682099609375,11.1682099609375,[11.1682099609375],,kWh,0.00012159290330833225,1.3405123182575374e-05,5.266587546599866e-05,0.00018766390195690627,,MB,1475.104768,5298.388992,0.0,4888.461312,4194.016256,s,10,1.7663264923095703,0.17663264923095703,0.00017453915418906213,0.17667317962646484,0.17684921112060545,0.17685737533569335,0.17686390670776367,"[0.17672930908203124, 0.1763408966064453, 
0.17639692687988281, 0.17686553955078124, 0.1767637481689453, 0.17661705017089843, 0.17673196411132813, 0.17650860595703124, 0.1765250549316406, 0.17684739685058593]",tokens/s,1449.3356755650861,kWh,5.215853408333378e-06,5.752153263915551e-07,3.445940256750049e-06,9.237008991474982e-06,tokens/kWh,27714598.98288152,MB,1483.50976,5306.7776,0.0,4896.84992,4194.018816,s,10,16.84819104003906,1.6848191040039062,0.0057024234270836435,1.6827443847656252,1.6934502075195312,1.6938809265136718,1.6942255017089842,"[1.68153759765625, 1.68524853515625, 1.6789527587890625, 1.6933544921875, 1.679438720703125, 1.680484130859375, 1.691267333984375, 1.683951171875, 1.6943116455078124, 1.6796446533203124]",tokens/s,37.39273839564318,kWh,4.8736988856666766e-05,5.3754525794993705e-06,3.1029559545849735e-05,8.514200098201587e-05,tokens/kWh,739940.3264354473,,s,630,16.84579932975769,0.0267393640154884,0.0005493166513169489,0.02661803150177002,0.0270858341217041,0.027548237800598143,0.028608866176605227,"[0.027618944168090822, 0.02668582344055176, 0.026681312561035155, 0.026505247116088867, 0.026524991989135743, 0.02651411247253418, 0.02670182418823242, 0.026804224014282226, 0.026466304779052735, 0.02630985641479492, 0.02633123207092285, 0.02650592041015625, 0.026574560165405273, 0.026399007797241213, 0.02642736053466797, 0.026434816360473633, 0.026428192138671876, 0.02641663932800293, 0.02638489532470703, 0.026550271987915038, 0.026398719787597655, 0.026558464050292968, 0.026494911193847656, 0.026512895584106445, 0.02656928062438965, 0.026658016204833983, 0.026519615173339842, 0.026512096405029297, 0.026436800003051757, 0.026411840438842774, 0.02695782470703125, 0.026563808441162108, 0.026440383911132813, 0.0285548152923584, 0.02910611152648926, 0.027166847229003907, 0.026685087203979493, 0.02676924705505371, 0.027085792541503905, 0.026681343078613282, 0.026654111862182618, 0.026775392532348632, 0.02760576057434082, 0.026798080444335938, 0.027072511672973632, 0.026619903564453123, 0.026462207794189452, 0.0265031681060791, 0.026358783721923826, 0.026612735748291014, 0.026752031326293946, 0.026889184951782226, 0.02674406433105469, 0.026700351715087892, 0.02662419128417969, 0.02652774429321289, 0.02647859191894531, 0.02650435256958008, 0.02643849563598633, 0.026458112716674805, 0.02653539276123047, 0.02637177658081055, 0.026467168807983398, 0.028073951721191405, 0.027975839614868166, 0.028180032730102538, 0.026595775604248046, 0.02666700744628906, 0.02650726318359375, 0.02709065628051758, 0.02666249656677246, 0.02671891212463379, 0.026431488037109374, 0.026650623321533205, 0.0265765438079834, 0.02691312026977539, 0.026400768280029296, 0.02664816093444824, 0.02673196792602539, 0.0266331844329834, 0.026818208694458008, 0.026674976348876955, 0.026847679138183592, 0.02668351936340332, 0.026674911499023436, 0.026835231781005858, 0.02697417640686035, 0.026966047286987305, 0.02691276741027832, 0.026652671813964843, 0.026705919265747072, 0.026695327758789064, 0.026757471084594725, 0.026658815383911134, 0.026472448348999023, 0.026390527725219725, 0.026451967239379884, 0.026607616424560547, 0.02714022445678711, 0.026566335678100586, 0.026749088287353517, 0.02718921661376953, 0.02644780731201172, 0.026386367797851563, 0.02637433624267578, 0.026605567932128905, 0.027000831604003905, 0.026671104431152344, 0.026627904891967775, 0.026484928131103515, 0.02654412841796875, 0.026746591567993163, 0.026990304946899413, 0.026886528015136718, 0.026663103103637696, 0.026899871826171876, 0.026919519424438477, 0.026822240829467773, 
0.026513824462890623, 0.026560480117797852, 0.026402847290039062, 0.02639664077758789, 0.02649295997619629, 0.026434688568115233, 0.02650815963745117, 0.02672230339050293, 0.027522880554199217, 0.026837024688720703, 0.02672630310058594, 0.026400447845458985, 0.026398464202880858, 0.026433536529541016, 0.027437952041625975, 0.027551679611206054, 0.02641312026977539, 0.026455232620239258, 0.026515520095825196, 0.02683545684814453, 0.026384288787841798, 0.026501472473144532, 0.02647596740722656, 0.026552831649780274, 0.026429088592529296, 0.026941471099853516, 0.0263822078704834, 0.02645622444152832, 0.026435935974121094, 0.026556415557861326, 0.02660963249206543, 0.026454048156738283, 0.026318431854248047, 0.026456192016601564, 0.02640105628967285, 0.026427040100097655, 0.026241376876831056, 0.026372095108032227, 0.026216064453125, 0.026351295471191406, 0.026219072341918944, 0.02667532730102539, 0.026888191223144533, 0.026927104949951174, 0.026844800949096678, 0.02664486312866211, 0.026613407135009766, 0.026415456771850587, 0.026543935775756835, 0.026538143157958983, 0.02645199966430664, 0.02636390495300293, 0.026597375869750976, 0.026480640411376953, 0.0263372802734375, 0.026259456634521484, 0.02650307273864746, 0.027050079345703124, 0.026802047729492188, 0.028313728332519533, 0.028058847427368163, 0.026680095672607422, 0.02671001625061035, 0.026975456237792968, 0.026639135360717773, 0.0266092472076416, 0.026618207931518555, 0.026697792053222657, 0.026681343078613282, 0.026609567642211913, 0.026488319396972656, 0.027512575149536134, 0.02698854446411133, 0.02675884819030762, 0.026687807083129882, 0.026871135711669922, 0.028630943298339845, 0.027058528900146483, 0.026816991806030272, 0.0270533447265625, 0.026924800872802735, 0.02686649513244629, 0.02757232093811035, 0.026943168640136718, 0.02701958465576172, 0.026796031951904296, 0.026613088607788087, 0.02691337585449219, 0.02695721626281738, 0.027054048538208007, 0.02694828796386719, 0.026884096145629883, 0.02678771209716797, 0.027073663711547853, 0.02673971176147461, 0.026671104431152344, 0.02674892807006836, 0.026674623489379882, 0.026554943084716797, 0.026609376907348634, 0.02981475257873535, 0.027050304412841796, 0.026682783126831054, 0.026774112701416015, 0.0265031681060791, 0.02685087966918945, 0.02697056007385254, 0.02706800079345703, 0.02715484809875488, 0.026658815383911134, 0.02675916862487793, 0.026756704330444334, 0.026420736312866212, 0.02660153579711914, 0.026643295288085938, 0.026473888397216795, 0.026993247985839845, 0.02656870460510254, 0.02633932876586914, 0.027229728698730468, 0.026581472396850585, 0.026401792526245117, 0.026266624450683593, 0.026343423843383788, 0.02691833686828613, 0.02664041519165039, 0.0269333438873291, 0.027288000106811525, 0.026611711502075194, 0.02670182418823242, 0.0265482234954834, 0.026957504272460936, 0.026413375854492188, 0.026482688903808595, 0.026839456558227538, 0.026427328109741213, 0.026478208541870118, 0.02632761573791504, 0.02639030456542969, 0.02634979248046875, 0.02633318328857422, 0.026294271469116212, 0.026425344467163086, 0.026564607620239256, 0.02672230339050293, 0.026470111846923827, 0.026431072235107423, 0.027030208587646484, 0.02672230339050293, 0.03003948783874512, 0.027544031143188475, 0.026929248809814454, 0.02643494415283203, 0.02641164779663086, 0.026443775177001954, 0.02691481590270996, 0.026503103256225586, 0.02642336082458496, 0.02651955223083496, 0.026451967239379884, 0.026558464050292968, 0.02676736068725586, 0.02721177673339844, 0.026492927551269533, 
0.026462207794189452, 0.02637740707397461, 0.026434015274047852, 0.026445600509643556, 0.02691539192199707, 0.02677497673034668, 0.02652627182006836, 0.02656051254272461, 0.026526784896850585, 0.026617855072021485, 0.02647750473022461, 0.026467391967773438, 0.02655740737915039, 0.026511327743530273, 0.02647769546508789, 0.02658188819885254, 0.0263701114654541, 0.026478143692016603, 0.02645644760131836, 0.02641001510620117, 0.026917055130004884, 0.026850080490112303, 0.026516639709472656, 0.026781984329223633, 0.027480640411376954, 0.026533119201660155, 0.0264749755859375, 0.026517791748046873, 0.026832895278930666, 0.02692095947265625, 0.026556415557861326, 0.02655436706542969, 0.026396543502807616, 0.027586271286010742, 0.026786144256591798, 0.026727584838867186, 0.02662633514404297, 0.026575584411621094, 0.026558464050292968, 0.026512863159179688, 0.02676380729675293, 0.026884096145629883, 0.026845184326171875, 0.026684768676757814, 0.026755744934082032, 0.026839040756225587, 0.026742624282836913, 0.026590911865234376, 0.026544607162475586, 0.026445823669433592, 0.026445632934570314, 0.02662188720703125, 0.026444032669067384, 0.026388032913208008, 0.026765535354614258, 0.026811744689941405, 0.02648931121826172, 0.026495391845703126, 0.026341087341308595, 0.02642099189758301, 0.026761760711669923, 0.02654742431640625, 0.026823455810546876, 0.026556415557861326, 0.02672790336608887, 0.02657539176940918, 0.02722105598449707, 0.026657312393188477, 0.02674492835998535, 0.026851200103759767, 0.026806175231933595, 0.02674127960205078, 0.026626047134399415, 0.026502592086791992, 0.026362432479858398, 0.026430944442749023, 0.026636768341064453, 0.02649075126647949, 0.026501312255859374, 0.026410144805908205, 0.026325855255126953, 0.02659724807739258, 0.026634368896484375, 0.026570751190185548, 0.026630144119262695, 0.02615910339355469, 0.02642323112487793, 0.026558528900146483, 0.026372095108032227, 0.02712335968017578, 0.026552671432495116, 0.026791263580322265, 0.02778531265258789, 0.027348608016967774, 0.026999359130859376, 0.026700031280517577, 0.027456096649169922, 0.027567167282104493, 0.02659347152709961, 0.026501855850219726, 0.026595327377319337, 0.026605567932128905, 0.026646272659301758, 0.026418912887573243, 0.02632143974304199, 0.026408960342407226, 0.026462207794189452, 0.026471839904785157, 0.02647920036315918, 0.026755071640014647, 0.026541471481323242, 0.026857280731201173, 0.026780448913574218, 0.026673152923583986, 0.02672198486328125, 0.026827072143554686, 0.026563615798950196, 0.026637279510498046, 0.02973654365539551, 0.02759516716003418, 0.026998207092285155, 0.02673721694946289, 0.02669158363342285, 0.027451391220092772, 0.028436479568481447, 0.02668339157104492, 0.02671615982055664, 0.026805376052856444, 0.02758902359008789, 0.026834495544433595, 0.02701510429382324, 0.027048927307128906, 0.02697420883178711, 0.026842239379882813, 0.027706239700317385, 0.02671615982055664, 0.02675712013244629, 0.02656870460510254, 0.026648576736450196, 0.02674483108520508, 0.0265645751953125, 0.026530975341796874, 0.026552448272705077, 0.026599903106689454, 0.02671843147277832, 0.026880064010620118, 0.026779647827148437, 0.027191200256347657, 0.027273311614990234, 0.026463775634765624, 0.026431936264038086, 0.02650691223144531, 0.026345888137817384, 0.026445472717285156, 0.02653139114379883, 0.0267640323638916, 0.026836992263793946, 0.026848447799682616, 0.026573600769042968, 0.027165792465209962, 0.02655462455749512, 0.026552736282348634, 0.026478784561157227, 0.02647859191894531, 
0.026436992645263672, 0.027634016036987306, 0.026972448348999024, 0.026496192932128907, 0.026720863342285156, 0.026715808868408204, 0.026863552093505858, 0.026827360153198244, 0.026744863510131837, 0.026629791259765626, 0.026614112854003905, 0.02660313606262207, 0.02644416046142578, 0.026642431259155275, 0.02654991912841797, 0.026433887481689452, 0.026445823669433592, 0.026531679153442383, 0.026646688461303712, 0.026729631423950195, 0.026667871475219727, 0.026711360931396484, 0.026576799392700197, 0.026794591903686524, 0.02685152053833008, 0.02708620834350586, 0.026921600341796876, 0.027862815856933593, 0.02673072052001953, 0.026985824584960936, 0.026821279525756837, 0.026685440063476562, 0.027033599853515625, 0.02675916862487793, 0.026847232818603517, 0.0269835205078125, 0.026997663497924804, 0.026816511154174806, 0.02752921676635742, 0.02649087905883789, 0.02668329620361328, 0.026505311965942382, 0.026597312927246094, 0.026695903778076173, 0.026613344192504884, 0.02643382453918457, 0.02639587211608887, 0.026342144012451173, 0.026445823669433592, 0.026552255630493165, 0.026740127563476563, 0.026878623962402343, 0.026666656494140625, 0.026504543304443358, 0.02652592086791992, 0.027060319900512695, 0.026546880722045897, 0.026447872161865234, 0.02873129653930664, 0.026760639190673827, 0.026585439682006835, 0.026693952560424804, 0.02676531219482422, 0.027047935485839843, 0.026944927215576172, 0.027755104064941406, 0.02793788719177246, 0.02672643280029297, 0.026495872497558595, 0.027031328201293944, 0.026505151748657228, 0.026535968780517578, 0.026520959854125975, 0.026854272842407226, 0.026816511154174806, 0.02671001625061035, 0.02653113555908203, 0.02652025604248047, 0.026644479751586913, 0.026501279830932617, 0.026500959396362305, 0.026703744888305663, 0.026816064834594727, 0.026812736511230468, 0.026761472702026366, 0.026570335388183593, 0.026583456039428712, 0.02652899169921875, 0.02650601577758789, 0.027711488723754882, 0.035465217590332034, 0.027033599853515625, 0.02670796775817871, 0.02679193687438965, 0.02654617691040039, 0.02716057586669922, 0.026706016540527344, 0.02667510414123535, 0.02672591972351074, 0.026575328826904297, 0.027076223373413085, 0.026585216522216796, 0.026429151535034178, 0.02661020851135254, 0.026410623550415038, 0.026507648468017578, 0.027133951187133788, 0.026431488037109374, 0.02644528007507324, 0.02673308753967285, 0.026557855606079102, 0.026558496475219726, 0.026655168533325196, 0.02658915138244629, 0.026494367599487305, 0.026479360580444335, 0.026466304779052735, 0.026533472061157228, 0.026476160049438476, 0.026669376373291014, 0.02670025634765625, 0.027389951705932617, 0.02717923164367676, 0.026647808074951172, 0.026702272415161134, 0.02737571144104004, 0.02672230339050293, 0.026599359512329102, 0.02662816047668457, 0.026468351364135743, 0.026527103424072267, 0.026718015670776366, 0.026626752853393554, 0.027408512115478515, 0.026408960342407226, 0.02647859191894531, 0.026663103103637696, 0.02720915222167969, 0.02659881591796875, 0.02652003288269043, 0.026465824127197266, 0.02658198356628418, 0.026617855072021485, 0.02741596794128418, 0.0266713924407959, 0.02644950485229492, 0.026249824523925783, 0.026406784057617188, 0.02625702476501465, 0.026264192581176758, 0.026349504470825194, 0.02643881607055664, 0.02645084762573242, 0.026513408660888672, 0.02627299118041992, 0.026460960388183595, 0.02652569580078125, 0.02629631996154785, 0.026320831298828125, 0.026277408599853516, 0.02659996795654297, 0.027694719314575195, 0.027961023330688478, 0.027642175674438475, 
0.026812736511230468, 0.02643155288696289, 0.026425344467163086, 0.026477951049804688, 0.027073408126831056, 0.026683135986328124, 0.026410688400268556, 0.026536191940307617, 0.026431295394897462, 0.026566911697387695, 0.02701702308654785, 0.02671836853027344, 0.026308416366577148, 0.02651772880554199, 0.02654204750061035, 0.026488672256469725, 0.02658527946472168, 0.026605567932128905, 0.02634480094909668, 0.026388736724853517]",tokens/s,37.39804729165451,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,5178.445824,6356.33664,0.0,5953.814528,5766.738432,s,1,12.8077490234375,12.8077490234375,0.0,12.8077490234375,12.8077490234375,12.8077490234375,12.8077490234375,[12.8077490234375],,kWh,0.00016412928043745673,1.8097546312977244e-05,7.355894773600213e-05,0.0002557857744864361,,MB,1779.716096,6412.959744,0.0,5995.757568,5259.958272,s,10,2.6030535888671875,0.26030535888671874,0.00010952417724945265,0.2603069610595703,0.2604299224853516,0.2604627029418945,0.2604889273071289,"[0.2603931884765625, 0.26035992431640625, 0.260203857421875, 0.26019973754882814, 0.26042263793945314, 0.26034475708007815, 0.2602691650390625, 0.26013336181640623, 0.2602314758300781, 0.2604954833984375]",tokens/s,983.46035246784,kWh,7.675493994658539e-06,8.46455288006945e-07,5.104662203384797e-06,1.362661148605028e-05,tokens/kWh,18786768.835529666,MB,1782.624256,6429.73696,0.0,6012.534784,5259.960832,s,10,32.79974145507813,3.2799741455078126,0.3833146451211057,3.51670947265625,3.5693166259765623,3.5771255981445313,3.5833727758789062,"[3.523601806640625, 3.5849345703125, 3.561357666015625, 3.546097412109375, 3.509817138671875, 3.567581298828125, 3.410418212890625, 2.709788330078125, 2.706625, 2.67952001953125]",tokens/s,19.207468475409645,kWh,7.85058492895107e-05,8.659243729595577e-06,4.653399021861465e-05,0.0001336990832377209,tokens/kWh,471207.419485324,,s,630,32.796046398162844,0.05205721650502039,0.0063037057454885855,0.05588668823242188,0.05689449348449707,0.057269836616516114,0.05867891613006592,"[0.05704115295410156, 0.05584076690673828, 0.05591628646850586, 0.05609088134765625, 0.055856830596923826, 0.05573196792602539, 0.05563654327392578, 0.05534003067016602, 0.055532543182373044, 0.055365631103515625, 0.05554995346069336, 0.05571337509155273, 0.055922622680664065, 0.056003040313720706, 0.05585715103149414, 0.055863487243652345, 0.055707393646240236, 0.05585465621948242, 0.056019039154052735, 0.05611939239501953, 0.056154014587402344, 0.05589238357543945, 0.0559697265625, 0.056038974761962894, 0.05612777709960937, 0.05597206497192383, 0.05603123092651367, 0.05627222442626953, 0.056375934600830076, 0.05601897430419922, 0.056014270782470704, 0.0559351692199707, 0.05566502380371094, 0.055754718780517576, 0.055488544464111327, 0.05575475311279297, 0.05551542282104492, 0.055666400909423826, 0.05669875335693359, 0.05601497650146484, 
0.05580799865722656, 0.055467521667480466, 0.05572473526000977, 0.05536134338378906, 0.05555199813842773, 0.05591654586791992, 0.05593859100341797, 0.055906558990478514, 0.05633433532714844, 0.056960830688476564, 0.05616038513183594, 0.05629123306274414, 0.05631619262695312, 0.05621529769897461, 0.05643503952026367, 0.056414207458496096, 0.056080192565917966, 0.055503040313720706, 0.05580799865722656, 0.055529376983642575, 0.0558900146484375, 0.05571932983398437, 0.05560380935668945, 0.0568996467590332, 0.05608883285522461, 0.05604518508911133, 0.056525184631347654, 0.05644902420043945, 0.05597743988037109, 0.0598612174987793, 0.05696092987060547, 0.057013919830322266, 0.05712358474731445, 0.056777793884277346, 0.05721903991699219, 0.05663638305664063, 0.056659969329833984, 0.05740339279174805, 0.05666611099243164, 0.05686272048950195, 0.05709775924682617, 0.057119041442871096, 0.056715423583984376, 0.05660028839111328, 0.056920352935791015, 0.05704073715209961, 0.05722035217285156, 0.05674076843261719, 0.056448513031005856, 0.056361473083496094, 0.057839614868164066, 0.05869136047363281, 0.056301376342773435, 0.05624873733520508, 0.056659969329833984, 0.05772224044799805, 0.057807296752929685, 0.05984479904174805, 0.05643468856811523, 0.05682995223999023, 0.056403934478759764, 0.056460384368896485, 0.056759071350097653, 0.05657174301147461, 0.058353984832763675, 0.05696307373046875, 0.05697257614135742, 0.05643132781982422, 0.05667225646972656, 0.05636505508422852, 0.056459072113037106, 0.05614815902709961, 0.05679513549804688, 0.05758156967163086, 0.056442302703857423, 0.05767852783203125, 0.0564161262512207, 0.05671321487426758, 0.05653718566894531, 0.05645097732543945, 0.056600574493408204, 0.057294849395751954, 0.056407329559326175, 0.05613596725463867, 0.05657199859619141, 0.05639817428588867, 0.05723926544189453, 0.05679087829589844, 0.05681343841552734, 0.05702076721191406, 0.05699107360839844, 0.057301822662353515, 0.05678672027587891, 0.0568507194519043, 0.056516799926757816, 0.05656924819946289, 0.05648630523681641, 0.05978012847900391, 0.05672204971313476, 0.0565722541809082, 0.0569354248046875, 0.05731414413452148, 0.05680144119262695, 0.05761555099487305, 0.05666284942626953, 0.05670089721679687, 0.05653916931152344, 0.056422401428222656, 0.05670297622680664, 0.05668659210205078, 0.056637439727783206, 0.056847679138183595, 0.05654188919067383, 0.05626265716552734, 0.05618233489990234, 0.056070209503173825, 0.055742431640625, 0.05577305603027344, 0.056199745178222654, 0.05626057434082031, 0.056127422332763674, 0.05621059036254883, 0.0560813102722168, 0.05588336181640625, 0.056226207733154294, 0.05684854507446289, 0.056997726440429684, 0.05627494430541992, 0.05650431823730469, 0.0571412467956543, 0.056442817687988284, 0.056434719085693356, 0.056263744354248045, 0.05607843017578125, 0.056286079406738285, 0.05578521728515625, 0.05573564910888672, 0.05586832046508789, 0.05610684967041016, 0.05613372802734375, 0.056222782135009766, 0.05637222290039062, 0.05575884628295898, 0.05609183883666992, 0.05616419219970703, 0.05595574569702148, 0.05643155288696289, 0.0569280014038086, 0.056194496154785153, 0.057914974212646485, 0.055834270477294924, 0.056064769744873046, 0.05621926498413086, 0.056070526123046874, 0.05585100936889648, 0.05601046371459961, 0.055834880828857424, 0.05608371353149414, 0.0559071044921875, 0.05616230392456055, 0.057328895568847654, 0.05736524963378906, 0.05598822402954102, 0.05650172805786133, 0.05599020767211914, 0.056096832275390626, 0.055940673828125, 
0.05594380950927735, 0.055653823852539065, 0.0559516487121582, 0.05580044937133789, 0.05607014465332031, 0.057083263397216796, 0.056780479431152345, 0.05623494338989258, 0.056298622131347655, 0.0562344970703125, 0.05612515258789062, 0.05672617721557617, 0.0564081916809082, 0.056124481201171875, 0.05582521438598633, 0.055975456237792966, 0.0568939208984375, 0.057132896423339845, 0.05708832168579102, 0.05748899078369141, 0.056841983795166015, 0.05669331359863281, 0.05656675338745117, 0.056114143371582034, 0.056041473388671874, 0.056198654174804685, 0.0558331184387207, 0.05597561645507813, 0.055931167602539064, 0.058064830780029296, 0.05610502243041992, 0.056624446868896484, 0.056885311126708984, 0.055960193634033206, 0.05548345565795899, 0.05569779205322266, 0.055575103759765626, 0.05571993637084961, 0.05608620834350586, 0.05598035049438477, 0.05687705612182617, 0.05584880065917969, 0.0557938232421875, 0.05599548721313476, 0.05575324630737305, 0.056635391235351565, 0.05617113494873047, 0.05688348770141602, 0.05650403213500976, 0.05546425628662109, 0.055043777465820315, 0.055498367309570314, 0.05523417663574219, 0.056847103118896486, 0.055922496795654295, 0.055583038330078126, 0.055097217559814456, 0.055131263732910156, 0.05513302230834961, 0.05505436706542969, 0.05551923370361328, 0.0586484489440918, 0.055748001098632816, 0.05550128173828125, 0.05531856155395508, 0.05548662567138672, 0.055476287841796874, 0.05509868621826172, 0.056281791687011716, 0.055841854095458984, 0.05562860870361328, 0.055471328735351565, 0.05582735824584961, 0.05556623840332031, 0.055294048309326174, 0.055949249267578126, 0.05510665512084961, 0.05513315200805664, 0.05575065612792969, 0.05544940948486328, 0.05571753692626953, 0.055556896209716794, 0.05514828872680664, 0.05504409790039062, 0.055275520324707034, 0.05517107009887695, 0.0555335693359375, 0.05545779037475586, 0.055707584381103514, 0.056481857299804684, 0.05527987289428711, 0.05546368026733398, 0.05522771072387695, 0.05554451370239258, 0.055391777038574216, 0.05528406524658203, 0.055285888671875, 0.05562345504760742, 0.0552388801574707, 0.05897216033935547, 0.05610211181640625, 0.05569615936279297, 0.05586943817138672, 0.05551497650146484, 0.055838878631591794, 0.05608857727050781, 0.05598003387451172, 0.05574041748046875, 0.05657072067260742, 0.056239967346191404, 0.05789507293701172, 0.05615411376953125, 0.05597100830078125, 0.05795510482788086, 0.056909534454345705, 0.05607027053833008, 0.055865535736083986, 0.055955169677734375, 0.05645699310302734, 0.05637276840209961, 0.05632329559326172, 0.05623782348632812, 0.05596979141235352, 0.056627201080322265, 0.05649407958984375, 0.05642144012451172, 0.05638019180297851, 0.056318111419677734, 0.05614591979980469, 0.05634204864501953, 0.05651827239990234, 0.05973027038574219, 0.05631820678710937, 0.05612073516845703, 0.05703286361694336, 0.05616086578369141, 0.056753440856933596, 0.05586207962036133, 0.0564571533203125, 0.05693241500854492, 0.05624934387207031, 0.05660704040527344, 0.05615232086181641, 0.05600505447387695, 0.05605580902099609, 0.056033279418945314, 0.05595750427246094, 0.0558900146484375, 0.0559021110534668, 0.056404064178466794, 0.05614976119995117, 0.05595561599731445, 0.056043521881103515, 0.05682566452026367, 0.05709433746337891, 0.05684598541259766, 0.05663779067993164, 0.05670912170410156, 0.05676851272583008, 0.057100353240966795, 0.05705088043212891, 0.05658547210693359, 0.05757228851318359, 0.06011836624145508, 0.05809775924682617, 0.05667865753173828, 0.05649382400512695, 
0.05699769592285156, 0.057544769287109374, 0.05669075012207031, 0.05628384017944336, 0.05851513671875, 0.05676268768310547, 0.05671321487426758, 0.05629679870605469, 0.05632428741455078, 0.05585763168334961, 0.056110816955566405, 0.056064289093017576, 0.056024417877197266, 0.056218273162841795, 0.056287006378173826, 0.056115104675292966, 0.056074016571044924, 0.056113536834716794, 0.05594524765014648, 0.05593715286254883, 0.05601484680175781, 0.05583164978027344, 0.056046497344970705, 0.05586937713623047, 0.05568723297119141, 0.055540897369384765, 0.056875137329101565, 0.056100608825683594, 0.05591094589233398, 0.05552896118164063, 0.05558163070678711, 0.05577523040771484, 0.055834304809570315, 0.05571763229370117, 0.05587615966796875, 0.05633180618286133, 0.05621094512939453, 0.056525344848632815, 0.0563040657043457, 0.05629312133789063, 0.0563059196472168, 0.05613568115234375, 0.056360160827636716, 0.05600131225585937, 0.05669820785522461, 0.0561814079284668, 0.05602284622192383, 0.05564963150024414, 0.055530593872070315, 0.05537926483154297, 0.055734432220458985, 0.05643088150024414, 0.056184192657470704, 0.056506240844726566, 0.05732223892211914, 0.05794611358642578, 0.05625360107421875, 0.04395708847045898, 0.04308377456665039, 0.04283548736572266, 0.04292988967895508, 0.04276095962524414, 0.04280934524536133, 0.04256358337402344, 0.045163711547851565, 0.0430948486328125, 0.04305891036987305, 0.043880992889404294, 0.04317184066772461, 0.043433982849121096, 0.042692607879638675, 0.042907615661621094, 0.04317788696289063, 0.042939647674560544, 0.04307238388061523, 0.04294438552856445, 0.042832000732421875, 0.043226913452148436, 0.043062686920166016, 0.04315001678466797, 0.04317401504516601, 0.043112449645996094, 0.042864639282226565, 0.043078815460205075, 0.042945377349853514, 0.04292403030395508, 0.042907230377197264, 0.04292240142822266, 0.04293593597412109, 0.043159934997558595, 0.04297929763793945, 0.04389795303344726, 0.043170238494873045, 0.04293478393554687, 0.042856449127197264, 0.04284822463989258, 0.042674209594726564, 0.042797054290771484, 0.04279011154174805, 0.042861183166503905, 0.042653854370117185, 0.042780895233154294, 0.043003681182861325, 0.042925952911376956, 0.04290377426147461, 0.04296883010864258, 0.04280876922607422, 0.04281961441040039, 0.042889919281005856, 0.04608761596679688, 0.042965568542480466, 0.04270284652709961, 0.04272742462158203, 0.042665313720703125, 0.04272380828857422, 0.042751583099365234, 0.04310470581054687, 0.04281875228881836, 0.042877918243408204, 0.042884574890136716, 0.04289516830444336, 0.042877662658691404, 0.042987262725830075, 0.042705150604248045, 0.04278988647460937, 0.042810367584228515, 0.04310195159912109, 0.04270105743408203, 0.042938495635986326, 0.04330108642578125, 0.043388736724853515, 0.04351337432861328, 0.042902175903320315, 0.04333977508544922, 0.043448318481445314, 0.04275404739379883, 0.04279024124145508, 0.04274448013305664, 0.043069438934326174, 0.04287036895751953, 0.04276819229125976, 0.0427850227355957, 0.043057376861572266, 0.0427275505065918, 0.04283391952514649, 0.04274790573120117, 0.04302617645263672, 0.042780929565429685, 0.04273356628417969, 0.04278585433959961, 0.04286329650878906, 0.042807552337646486, 0.0428644790649414, 0.0429873275756836, 0.042854305267333984, 0.042809791564941406, 0.043407264709472655, 0.043138687133789065, 0.043426273345947265, 0.043103870391845704, 0.04296537780761719, 0.04291584014892578, 0.04290969467163086, 0.04320175933837891, 0.04270579147338867, 0.042780574798583985, 
0.043259902954101564, 0.04263683319091797, 0.04225686264038086, 0.04210483169555664, 0.042726398468017575, 0.04253212738037109, 0.04275795364379883, 0.04241398239135742, 0.04297289657592773, 0.04262736129760742, 0.04256489562988281, 0.04253974533081055, 0.042941825866699215, 0.042639999389648436, 0.042568958282470704, 0.04357401657104492, 0.04265369415283203, 0.042454399108886716, 0.0424837760925293, 0.042480224609375, 0.042459041595458984, 0.042313247680664065, 0.0422628173828125, 0.04243276977539062, 0.04287043380737305, 0.05109148788452148, 0.04294393539428711, 0.04292153549194336, 0.042093120574951175, 0.04220927810668945, 0.042149761199951175, 0.04246745681762695, 0.04224198532104492, 0.042438720703125, 0.04236288070678711, 0.042340030670166014, 0.04203696060180664, 0.04219728088378906, 0.04227616119384765, 0.042269054412841796, 0.042234752655029295, 0.04241955184936524, 0.0425513916015625, 0.042414398193359376, 0.0426228141784668, 0.04253507232666016, 0.04229280090332031, 0.04229983901977539, 0.04247951889038086, 0.04250960159301758, 0.042449665069580075, 0.04256966400146484, 0.04235686492919922, 0.04430387115478516, 0.04289996719360351, 0.04241788864135742, 0.04228476715087891, 0.04240236663818359, 0.04239769744873047, 0.042264575958251956, 0.041949054718017575, 0.04186124801635742, 0.04168499374389648, 0.041793182373046876, 0.041850399017333985, 0.04181478500366211, 0.04364704132080078, 0.04162355041503906, 0.04223929595947266, 0.043216766357421874, 0.04273231887817383, 0.04253494262695313, 0.04254515075683594, 0.0424161262512207, 0.042246143341064454, 0.04205977630615235, 0.04206787109375, 0.04227062225341797, 0.04210611343383789, 0.04194537734985351, 0.04190390396118164, 0.042253185272216796, 0.04780646514892578, 0.04361558532714844, 0.043280353546142576, 0.04350636672973633, 0.04339875030517578, 0.042458721160888675, 0.042619361877441406, 0.04309030532836914]",tokens/s,19.209632537758914,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,892.899328,697.237504,0.0,301.989888,282.769408,s,1,7.8845537109375,7.8845537109375,0.0,7.8845537109375,7.8845537109375,7.8845537109375,7.8845537109375,[7.8845537109375],,kWh,2.6668313554152216e-05,2.9345323346169523e-06,8.527506822009201e-06,3.8130352710778365e-05,,MB,1210.179584,751.763456,0.0,341.835776,318.94528,s,17,0.18908361434936521,0.01112256554996266,0.00021222526015458952,0.011087583541870118,0.011266982078552245,0.011395865440368651,0.011730078697204589,"[0.011250687599182128, 0.011087583541870118, 0.010914239883422851, 0.011123167991638183, 0.010944448471069335, 0.010919199943542481, 0.01109347152709961, 0.011029439926147461, 0.010915871620178222, 0.01092950439453125, 0.011813632011413574, 0.011291423797607421, 0.011043744087219238, 0.011073887825012207, 0.01122047996520996, 0.011225503921508789, 
0.011207327842712403]",tokens/s,23016.272536227887,kWh,3.2520368245172314e-07,3.5864289637114663e-08,1.7807704174389274e-07,5.391450138327304e-07,tokens/kWh,474825869.537623,MB,1223.41376,776.92928,0.0,367.0016,318.94784,s,17,10.032159484863282,0.5901270285213696,0.009695058014600227,0.5856761474609375,0.603881689453125,0.605954833984375,0.6079085449218751,"[0.5876029052734375, 0.5784276733398438, 0.58346044921875, 0.5853865966796875, 0.5813885498046875, 0.5830097045898438, 0.5856761474609375, 0.5824195556640624, 0.5794144287109375, 0.5796782836914063, 0.5980902099609375, 0.5937981567382813, 0.60071484375, 0.6029066162109376, 0.60839697265625, 0.596444091796875, 0.6053442993164062]",tokens/s,106.75667602931809,kWh,1.6719968791390176e-05,1.8439400282366233e-06,6.465554816367315e-06,2.5029463635994106e-05,tokens/kWh,2517033.5615742733,,s,1071,10.023562960624696,0.009359069057539398,0.00023659001780922944,0.009291839599609376,0.009652000427246094,0.009727535724639892,0.00992654056549072,"[0.009441120147705078, 0.009519743919372558, 0.009531871795654297, 0.009578495979309083, 0.009511712074279786, 0.009505760192871094, 0.009513248443603515, 0.009453280448913574, 0.009475647926330566, 0.009494976043701172, 0.009564160346984863, 0.009590784072875976, 0.00951315212249756, 0.009540448188781738, 0.009609248161315918, 0.009548416137695312, 0.00953171157836914, 0.009487839698791503, 0.0093721923828125, 0.009383935928344727, 0.009310208320617675, 0.009261055946350098, 0.009338879585266113, 0.009224191665649414, 0.009157759666442871, 0.009227168083190919, 0.009193216323852539, 0.009130208015441895, 0.009162464141845704, 0.009165087699890136, 0.009129983901977539, 0.009117024421691895, 0.009492480278015136, 0.009187999725341798, 0.009238431930541992, 0.009207136154174804, 0.0095731201171875, 0.009299967765808105, 0.009393600463867188, 0.009531807899475098, 0.009349120140075684, 0.009314463615417481, 0.009275391578674316, 0.009349311828613281, 0.00923142433166504, 0.009291839599609376, 0.009191488265991211, 0.009220447540283203, 0.009228416442871094, 0.009142335891723632, 0.00932259178161621, 0.009174912452697755, 0.009164511680603027, 0.00916051197052002, 0.009193344116210938, 0.009110239982604981, 0.009107456207275391, 0.009095040321350098, 0.009164095878601074, 0.009139360427856446, 0.009116640090942383, 0.009173695564270019, 0.009113216400146484, 0.009472000122070312, 0.009307200431823731, 0.009222304344177246, 0.009228927612304687, 0.009197088241577148, 0.009124608039855956, 0.009143327713012695, 0.009237248420715332, 0.009236576080322266, 0.009213983535766601, 0.009174880027770995, 0.00921788787841797, 0.00922163200378418, 0.009152607917785644, 0.009104063987731933, 0.00913584041595459, 0.00915017604827881, 0.009198080062866211, 0.009080896377563477, 0.009639776229858399, 0.009250176429748536, 0.009261759757995605, 0.009193568229675294, 0.009160351753234864, 0.009167200088500976, 0.009193471908569336, 0.009162752151489258, 0.009150464057922364, 0.009131872177124023, 0.009138079643249511, 0.009123647689819336, 0.009144767761230469, 0.009113280296325684, 0.009163071632385254, 0.00911695957183838, 0.009173055648803712, 0.009161375999450683, 0.009175040245056153, 0.009119744300842286, 0.009123583793640137, 0.009156864166259765, 0.009070591926574707, 0.009137248039245606, 0.009118592262268067, 0.009130016326904297, 0.009101311683654785, 0.009119744300842286, 0.009127455711364746, 0.009140704154968262, 0.009193471908569336, 0.009154272079467774, 0.009089311599731446, 0.009142271995544434, 
0.009215999603271484, 0.009219103813171387, 0.009141216278076172, 0.009190624237060546, 0.009183839797973633, 0.009130175590515138, 0.009156607627868652, 0.009119168281555175, 0.00911622428894043, 0.00909721565246582, 0.0089584321975708, 0.009136256217956542, 0.009289600372314454, 0.009232383728027344, 0.00920576000213623, 0.009230463981628419, 0.009176959991455078, 0.009164128303527832, 0.009181856155395508, 0.009113439559936523, 0.009137439727783202, 0.00915135955810547, 0.0092674560546875, 0.009299712181091309, 0.00920201587677002, 0.009281184196472168, 0.009172863960266113, 0.009250304222106934, 0.009171584129333495, 0.009154175758361817, 0.009185728073120117, 0.009195648193359374, 0.009178943634033203, 0.009252863883972168, 0.009307647705078125, 0.009179648399353027, 0.00922812843322754, 0.009154815673828125, 0.009154208183288574, 0.009160608291625976, 0.009134431838989258, 0.009149696350097656, 0.009157312393188477, 0.009158720016479492, 0.009410367965698243, 0.009133343696594238, 0.009216383934020996, 0.009216544151306152, 0.009256863594055175, 0.00909119987487793, 0.009084383964538574, 0.009173503875732422, 0.00915071964263916, 0.009236224174499512, 0.009211903572082519, 0.009090271949768067, 0.00939628791809082, 0.00919212818145752, 0.011000032424926759, 0.010997568130493164, 0.00922374439239502, 0.009212351799011231, 0.00919961643218994, 0.009168448448181153, 0.009226688385009766, 0.009172927856445313, 0.009267264366149903, 0.009136128425598144, 0.009203712463378906, 0.009197376251220702, 0.009169343948364259, 0.00921945571899414, 0.00931503963470459, 0.009111807823181153, 0.009297056198120117, 0.009331551551818848, 0.009430784225463867, 0.00953983974456787, 0.009352224349975586, 0.009341919898986816, 0.00933071994781494, 0.009326560020446777, 0.009295999526977539, 0.00931827163696289, 0.009269248008728028, 0.009244671821594238, 0.009310272216796875, 0.009352479934692383, 0.00927359962463379, 0.009341279983520507, 0.00930992031097412, 0.009233792304992676, 0.009287872314453126, 0.009278271675109863, 0.009256928443908692, 0.00919961643218994, 0.009244832038879394, 0.009240544319152832, 0.00944480037689209, 0.009550527572631836, 0.009289471626281739, 0.009166144371032714, 0.009183648109436036, 0.00915833568572998, 0.009120256423950195, 0.009262271881103516, 0.009144960403442383, 0.009152671813964845, 0.009195008277893067, 0.009236191749572754, 0.009185728073120117, 0.009183103561401368, 0.009213791847229005, 0.00927023983001709, 0.009197343826293945, 0.009279071807861328, 0.009218560218811036, 0.009170944213867188, 0.00917081642150879, 0.009271167755126953, 0.009287455558776855, 0.009189279556274414, 0.009228544235229492, 0.00921724796295166, 0.009245696067810059, 0.00925932788848877, 0.009230015754699707, 0.009249823570251465, 0.009335776329040528, 0.00957852840423584, 0.009371616363525391, 0.009451519966125489, 0.009517056465148926, 0.009476096153259277, 0.009395936012268066, 0.009296064376831054, 0.008969023704528808, 0.00919753646850586, 0.009218015670776368, 0.009156864166259765, 0.009148415565490722, 0.009193599700927734, 0.009129695892333985, 0.00913590431213379, 0.009257504463195801, 0.009359487533569336, 0.009340160369873047, 0.009383935928344727, 0.009416319847106934, 0.009413536071777345, 0.009477760314941406, 0.00935315227508545, 0.009296256065368652, 0.009298048019409179, 0.009176416397094727, 0.009160991668701171, 0.00918553638458252, 0.009226240158081055, 0.009295040130615235, 0.01005241584777832, 0.009474047660827637, 0.00918943977355957, 0.009191007614135742, 
0.009154560089111329, 0.009119744300842286, 0.00917689609527588, 0.00906704044342041, 0.009104607582092285, 0.009098015785217285, 0.009115648269653321, 0.00910905647277832, 0.009908703804016114, 0.009187295913696289, 0.009149920463562012, 0.009110048294067382, 0.009121888160705567, 0.009105312347412109, 0.009132032394409179, 0.009138239860534667, 0.009201408386230469, 0.00915187168121338, 0.009040703773498535, 0.009132032394409179, 0.00912713623046875, 0.009188128471374511, 0.00909216022491455, 0.009177472114562988, 0.00910598373413086, 0.009121472358703614, 0.009127584457397461, 0.009144288063049316, 0.009533535957336426, 0.00917360019683838, 0.009146240234375, 0.009265279769897461, 0.009261055946350098, 0.00918297576904297, 0.00914457607269287, 0.009107456207275391, 0.008892512321472168, 0.009139264106750488, 0.009157855987548829, 0.00912559986114502, 0.009142111778259278, 0.009184479713439941, 0.009091232299804687, 0.009070528030395509, 0.009123744010925293, 0.009094079971313477, 0.009149696350097656, 0.009157024383544921, 0.00927939224243164, 0.009169343948364259, 0.009141759872436523, 0.009187264442443848, 0.009230463981628419, 0.00912396812438965, 0.00912822437286377, 0.009181216239929199, 0.009166560173034667, 0.009178432464599609, 0.009217056274414062, 0.009236224174499512, 0.009200896263122559, 0.009191935539245605, 0.009275391578674316, 0.009185728073120117, 0.009209856033325196, 0.009264575958251952, 0.009246848106384277, 0.009198016166687011, 0.009334783554077148, 0.009237888336181641, 0.0092674560546875, 0.009309823989868165, 0.009890239715576172, 0.010084671974182129, 0.010168383598327637, 0.009383744239807128, 0.009398719787597657, 0.009307423591613769, 0.009333151817321778, 0.00923852825164795, 0.009226240158081055, 0.009205056190490722, 0.009169440269470214, 0.009164735794067383, 0.009150272369384765, 0.009203167915344238, 0.009221183776855468, 0.009201055526733398, 0.00919382381439209, 0.009132448196411133, 0.009146080017089844, 0.00915839958190918, 0.009189760208129883, 0.009180416107177734, 0.009239168167114257, 0.009403840065002441, 0.009362272262573242, 0.009399968147277832, 0.009279040336608887, 0.009047840118408203, 0.009216480255126953, 0.009375743865966797, 0.009324543952941895, 0.009206784248352052, 0.009227392196655273, 0.009168767929077148, 0.00915875244140625, 0.009140128135681153, 0.009166848182678223, 0.009137696266174316, 0.009150783538818359, 0.009129695892333985, 0.009158944129943848, 0.00919372844696045, 0.009271200180053712, 0.009185279846191406, 0.009147775650024415, 0.009124480247497558, 0.009209759712219238, 0.00914675235748291, 0.009174816131591797, 0.009169055938720702, 0.00918505573272705, 0.009165120124816895, 0.009268671989440917, 0.00937600040435791, 0.00932249641418457, 0.009250816345214843, 0.009292832374572754, 0.009232895851135254, 0.009218527793884277, 0.009254912376403808, 0.009197407722473145, 0.009291935920715332, 0.009309184074401856, 0.00935206413269043, 0.00929321575164795, 0.009313088417053222, 0.009231743812561035, 0.009341055870056153, 0.009282079696655274, 0.009303936004638672, 0.009277440071105958, 0.009297087669372558, 0.009251263618469239, 0.009366080284118653, 0.00929964828491211, 0.009326399803161621, 0.010393792152404786, 0.009527296066284179, 0.009426079750061035, 0.009467071533203125, 0.009370752334594726, 0.00938646411895752, 0.009621696472167969, 0.009910623550415039, 0.009276736259460449, 0.00930851173400879, 0.009285056114196777, 0.009228608131408692, 0.009221535682678222, 0.009239583969116212, 0.009046112060546875, 
0.009224191665649414, 0.009247584342956543, 0.009262399673461914, 0.00924947166442871, 0.00935097599029541, 0.009183232307434081, 0.009279935836791993, 0.009236224174499512, 0.009267200469970703, 0.009215935707092286, 0.009261119842529296, 0.009237664222717285, 0.009216192245483399, 0.009148223876953125, 0.009400608062744141, 0.00920633602142334, 0.009137760162353516, 0.009152928352355956, 0.009200703620910645, 0.009178048133850097, 0.009332736015319825, 0.009250847816467286, 0.009303104400634766, 0.009274271965026856, 0.009221343994140626, 0.009218079566955566, 0.009151231765747071, 0.009196672439575195, 0.009169792175292969, 0.009158656120300293, 0.009136063575744629, 0.009193535804748534, 0.009385248184204102, 0.00951363182067871, 0.009457728385925294, 0.009479968070983887, 0.00954595184326172, 0.009484255790710449, 0.009280832290649414, 0.009274080276489257, 0.009213536262512208, 0.009152095794677734, 0.009136608123779297, 0.009132512092590332, 0.009119135856628418, 0.009287520408630371, 0.009249664306640626, 0.009448800086975098, 0.009152895927429199, 0.009211935997009278, 0.009108991622924804, 0.00911616039276123, 0.009119744300842286, 0.009113183975219727, 0.009143903732299804, 0.00915129566192627, 0.009217120170593262, 0.009249695777893066, 0.009315872192382812, 0.009196000099182129, 0.00922812843322754, 0.009152223587036133, 0.008919232368469239, 0.009159775733947753, 0.00913481616973877, 0.009115648269653321, 0.009250240325927734, 0.009122367858886718, 0.009110976219177246, 0.009165056228637695, 0.009088543891906739, 0.009095968246459961, 0.009112607955932618, 0.00916988754272461, 0.00909273624420166, 0.00912934398651123, 0.00909769630432129, 0.009400863647460938, 0.009184415817260743, 0.009105728149414063, 0.009296159744262695, 0.009146400451660156, 0.009177311897277832, 0.009129983901977539, 0.009178367614746094, 0.009159616470336914, 0.00910051155090332, 0.009183103561401368, 0.009143263816833495, 0.009117440223693847, 0.009103487968444825, 0.009227231979370117, 0.009210783958435059, 0.009136128425598144, 0.009119744300842286, 0.009152223587036133, 0.009170975685119629, 0.009191679954528808, 0.009258848190307617, 0.009234592437744141, 0.00927948760986328, 0.009319519996643067, 0.009251168251037598, 0.009235360145568848, 0.00919654369354248, 0.00915113639831543, 0.009154560089111329, 0.00912508773803711, 0.009200415611267089, 0.009203935623168945, 0.009259807586669921, 0.009311455726623535, 0.009267135620117188, 0.009205599784851075, 0.009283583641052246, 0.009236319541931153, 0.009248607635498047, 0.009263008117675782, 0.009229951858520508, 0.009175935745239257, 0.00928876781463623, 0.009333184242248535, 0.009421216011047364, 0.009220095634460449, 0.009181440353393555, 0.008955936431884765, 0.009155167579650878, 0.009172863960266113, 0.00912831974029541, 0.009140000343322754, 0.009149760246276855, 0.009177760124206543, 0.009137151718139648, 0.009208255767822266, 0.009228863716125489, 0.009193120002746582, 0.00913868808746338, 0.009205632209777832, 0.009265055656433105, 0.00925487995147705, 0.009240672111511231, 0.009259008407592773, 0.009157792091369629, 0.009124640464782716, 0.009233823776245117, 0.009202431678771973, 0.0091810884475708, 0.009173024177551269, 0.009170911788940429, 0.009119744300842286, 0.009156319618225097, 0.009223648071289062, 0.009132863998413086, 0.009095040321350098, 0.009462240219116212, 0.009227423667907715, 0.009239040374755859, 0.009233504295349122, 0.009200544357299804, 0.009188447952270508, 0.009177087783813476, 0.009198495864868164, 
0.00920150375366211, 0.009160320281982421, 0.009202143669128419, 0.009254976272583008, 0.009154560089111329, 0.009215871810913085, 0.00919155216217041, 0.009166848182678223, 0.009150464057922364, 0.009185279846191406, 0.009189375877380371, 0.009183232307434081, 0.00921945571899414, 0.009251456260681152, 0.00912179183959961, 0.00920691204071045, 0.009200511932373047, 0.009246591567993165, 0.00919155216217041, 0.009216064453125, 0.009196864128112794, 0.009310879707336427, 0.009218208312988281, 0.009227392196655273, 0.009222623825073242, 0.009207008361816406, 0.009712063789367675, 0.009254207611083984, 0.009795743942260743, 0.009865792274475097, 0.009285632133483887, 0.009357312202453612, 0.009312416076660156, 0.009256447792053223, 0.009240703582763672, 0.00918342399597168, 0.009482272148132324, 0.009247872352600097, 0.009163935661315918, 0.009164608001708985, 0.009186623573303222, 0.009340607643127441, 0.009631839752197266, 0.009532223701477051, 0.009664511680603028, 0.009518783569335937, 0.009568832397460937, 0.00960700798034668, 0.009658271789550782, 0.009527104377746582, 0.009420991897583008, 0.009352736473083496, 0.009273471832275391, 0.009299936294555665, 0.009254752159118652, 0.009210399627685547, 0.009238656044006349, 0.009230208396911622, 0.00927235221862793, 0.009171839714050292, 0.009291711807250977, 0.009253215789794923, 0.009258079528808593, 0.009278176307678222, 0.009311519622802734, 0.00940124797821045, 0.009690208435058594, 0.00943769645690918, 0.009469504356384278, 0.009482784271240234, 0.009363231658935546, 0.009333087921142577, 0.00971564769744873, 0.00959494400024414, 0.01222646427154541, 0.009582752227783204, 0.009506752014160156, 0.009435296058654785, 0.009484352111816407, 0.009466848373413085, 0.009514911651611328, 0.009677536010742187, 0.009695424079895019, 0.009576736450195312, 0.009573472023010255, 0.009767552375793458, 0.009623295783996582, 0.009593024253845214, 0.00963539218902588, 0.009289919853210449, 0.009488351821899414, 0.00953980827331543, 0.009556480407714844, 0.009444607734680175, 0.009626111984252929, 0.009687392234802247, 0.009553824424743652, 0.009488479614257812, 0.009549823760986328, 0.009623552322387695, 0.009576607704162597, 0.009549568176269532, 0.00957027244567871, 0.009565919876098632, 0.009599455833435059, 0.009574175834655762, 0.009549983978271484, 0.009531392097473144, 0.009623552322387695, 0.00956441593170166, 0.00953116798400879, 0.009396224021911622, 0.009409631729125977, 0.009356063842773437, 0.009285728454589843, 0.009285599708557128, 0.009252256393432617, 0.009228320121765136, 0.009218688011169434, 0.009301983833312988, 0.00933407974243164, 0.009355968475341796, 0.009344863891601562, 0.009458047866821289, 0.009485759735107423, 0.009516672134399414, 0.00945257568359375, 0.009411935806274413, 0.009445792198181152, 0.009445183753967286, 0.009373087882995606, 0.009387807846069336, 0.009292736053466796, 0.009275391578674316, 0.009250720024108887, 0.009255007743835449, 0.009229887962341308, 0.009290176391601563, 0.00930611228942871, 0.0093306884765625, 0.009314111709594727, 0.009348544120788574, 0.009270015716552735, 0.009357503890991211, 0.009324352264404296, 0.009371456146240234, 0.009397760391235351, 0.009423551559448242, 0.00937388801574707, 0.009406271934509278, 0.00933676815032959, 0.009309632301330566, 0.009107232093811035, 0.009341152191162109, 0.009281536102294922, 0.009289407730102539, 0.009388192176818848, 0.009586688041687011, 0.009562335968017577, 0.009539520263671875, 0.009508864402770996, 0.009760736465454101, 
0.009424927711486817, 0.009527296066284179, 0.00951296043395996, 0.009496383666992188, 0.009507007598876953, 0.009539584159851074, 0.009474047660827637, 0.009704607963562012, 0.00956015968322754, 0.00957027244567871, 0.009527296066284179, 0.009457823753356933, 0.009474687576293946, 0.009389216423034669, 0.009355968475341796, 0.009482527732849121, 0.009487903594970704, 0.009455967903137207, 0.009480192184448242, 0.009633055686950684, 0.009757504463195801, 0.009775008201599121, 0.009813471794128418, 0.009796128273010254, 0.009836544036865234, 0.009747743606567383, 0.009751263618469239, 0.009764479637145997, 0.009726335525512695, 0.00966761589050293, 0.009671648025512696, 0.009674912452697753, 0.009564127922058106, 0.009558912277221679, 0.009673407554626465, 0.009572575569152832, 0.00963798427581787, 0.009613056182861329, 0.009506303787231446, 0.009655296325683594, 0.009534367561340332, 0.00937782382965088, 0.009411135673522949, 0.009514847755432128, 0.009459903717041016, 0.009423199653625489, 0.009369407653808594, 0.009361472129821777, 0.009306240081787109, 0.00931827163696289, 0.009323776245117188, 0.00933743953704834, 0.009283743858337402, 0.009046751976013184, 0.00929043197631836, 0.009300992012023926, 0.009338815689086915, 0.009357119560241699, 0.009307552337646484, 0.009325407981872558, 0.009338687896728516, 0.009430303573608398, 0.009556672096252442, 0.009533663749694825, 0.00949830436706543, 0.009485695838928223, 0.009497535705566407, 0.009424384117126466, 0.00950704002380371, 0.009471296310424804, 0.00937059211730957, 0.009501728057861328, 0.009376735687255859, 0.00931430435180664, 0.009466976165771484, 0.009438112258911132, 0.009631584167480469, 0.009609375953674316, 0.00970137596130371, 0.0097259521484375, 0.009760767936706542, 0.009626655578613282, 0.0096243839263916, 0.009652383804321289, 0.009664447784423828, 0.009612895965576173, 0.009629759788513183, 0.009867615699768066, 0.009678912162780762, 0.009560064315795898, 0.009570303916931153, 0.009640095710754394, 0.00957151985168457, 0.009639967918395996, 0.009605536460876465, 0.009709312438964843, 0.009531488418579101, 0.009592448234558106, 0.009632320404052734, 0.009783488273620605, 0.009629216194152832, 0.009619584083557129, 0.009637951850891113, 0.009672063827514649, 0.00953983974456787, 0.00974505615234375, 0.009654272079467773, 0.009631839752197266, 0.009631775856018066, 0.0096561918258667, 0.009891776084899903, 0.009705151557922363, 0.009695615768432617, 0.009646080017089843, 0.009619296073913574, 0.009580320358276367, 0.009387552261352539, 0.009571871757507323, 0.00954259204864502, 0.009570303916931153, 0.009616864204406739, 0.009633376121520995, 0.009563072204589844, 0.009637663841247559, 0.00980399990081787, 0.00971996784210205, 0.009673824310302734, 0.00973420810699463, 0.009714176177978515, 0.00968505573272705, 0.009832736015319823, 0.009769887924194335, 0.00974124813079834, 0.00963817596435547, 0.009620927810668945, 0.009652000427246094, 0.009595392227172851, 0.009666048049926757, 0.00983296012878418, 0.009906175613403321, 0.009645312309265137, 0.009647071838378906, 0.009608991622924805, 0.00956931209564209, 0.009542079925537109, 0.009580191612243652, 0.009565119743347167, 0.009533056259155273, 0.009523103713989258, 0.009553631782531739, 0.00959558391571045, 0.009508864402770996, 0.009627872467041016, 0.00958176040649414, 0.009605440139770507, 0.009827648162841797, 0.009531392097473144, 0.009602016448974609, 0.009775103569030762, 0.009743712425231933, 0.00975648021697998, 0.009703519821166993, 0.00971615982055664, 
0.009611552238464355, 0.009633824348449707, 0.009553919792175293, 0.009684991836547852, 0.00959881591796875, 0.009648096084594727, 0.009967071533203125, 0.00972873592376709, 0.009686783790588379, 0.009695039749145509, 0.00966089630126953, 0.009671680450439453, 0.009570560455322265, 0.009536224365234375, 0.00957148838043213, 0.009573216438293458, 0.009396224021911622, 0.009569664001464844, 0.009794015884399414, 0.009613375663757324, 0.009635071754455566, 0.009579360008239746, 0.009517056465148926, 0.009524415969848633, 0.009589056015014648, 0.009756768226623535, 0.009558431625366211, 0.00950607967376709, 0.00952393627166748, 0.009533375740051269, 0.009460800170898438, 0.009475071907043458, 0.009396224021911622, 0.009354816436767579, 0.00934342384338379, 0.009373696327209472, 0.009359616279602051, 0.009339712142944335, 0.009299936294555665, 0.009350079536437989, 0.00934921646118164, 0.009348671913146972, 0.00936793613433838, 0.00936518383026123, 0.009327199935913086, 0.00945907211303711, 0.009437824249267578, 0.009371359825134278, 0.009442591667175293, 0.009554847717285157, 0.009412416458129883, 0.009418527603149414, 0.009394559860229492, 0.009335935592651367, 0.009360095977783204, 0.009463616371154785, 0.009453023910522461, 0.00933347225189209, 0.00935321617126465, 0.009331839561462402, 0.009311103820800781, 0.00925705623626709, 0.009309439659118653, 0.009275296211242675, 0.009280159950256347, 0.00926636791229248, 0.009337599754333497, 0.009734272003173828, 0.010217503547668457, 0.009963680267333984, 0.009442496299743652, 0.00940492820739746, 0.009531552314758301, 0.009367168426513672, 0.009397791862487794, 0.009421664237976074, 0.009486207962036133, 0.009568384170532226, 0.009587807655334473, 0.009715840339660645, 0.009635231971740722, 0.010001055717468262, 0.009494688034057617, 0.009433088302612304, 0.009413727760314941, 0.009316864013671875, 0.009295616149902344, 0.00933135986328125, 0.009349087715148925, 0.009504799842834472, 0.00960848045349121, 0.009599007606506348, 0.009550080299377442, 0.009557600021362305, 0.009503583908081054, 0.00946940803527832, 0.009418815612792968, 0.00945359992980957, 0.009472255706787109, 0.00946604824066162, 0.009444352149963378, 0.009419520378112793, 0.009489824295043945, 0.009501536369323731, 0.009584639549255371, 0.00963913631439209, 0.009653183937072754, 0.009776224136352539, 0.009736479759216309, 0.00965011215209961, 0.009746047973632812, 0.009716352462768555, 0.009689375877380372, 0.00977468776702881, 0.009836128234863281, 0.009655263900756836, 0.009587807655334473, 0.009663104057312012, 0.009560192108154297, 0.009584735870361329, 0.009666463851928712, 0.009636896133422852, 0.009648799896240234, 0.009621343612670898, 0.009597599983215332, 0.009662272453308106, 0.00953705596923828, 0.009572959899902344, 0.009577919960021972, 0.009724448204040528, 0.009619359970092773, 0.009658528327941894, 0.009721887588500977, 0.00977302360534668, 0.009667776107788085, 0.009624223709106445, 0.009670656204223632, 0.009730048179626465, 0.009702752113342285, 0.009754719734191895, 0.009625472068786622, 0.009676704406738281]",tokens/s,106.84823392711571,,, 
4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4169.474048,5925.43744,0.0,5530.189824,5138.859008,s,1,11.6259453125,11.6259453125,0.0,11.6259453125,11.6259453125,11.6259453125,11.6259453125,[11.6259453125],,kWh,0.000136070056616677,1.5002031505387146e-05,5.769143504200103e-05,0.00020876352316406518,,MB,1376.415744,5944.311808,0.0,5534.384128,4844.878336,s,10,1.9729530181884767,0.19729530181884766,0.00043113796195293495,0.19714612579345703,0.1979267852783203,0.19802846374511718,0.19810980651855467,"[0.19710159301757812, 0.1971556091308594, 0.19666685485839844, 0.19703488159179688, 0.1971697235107422, 0.1971366424560547, 0.19698876953125, 0.19766461181640624, 0.19813014221191405, 0.19790419006347656]",tokens/s,1297.547370058785,kWh,5.785537226960829e-06,6.376602793393575e-07,3.82815121066692e-06,1.0251348716967105e-05,tokens/kWh,24972323.84420715,MB,1382.596608,5944.311808,0.0,5534.384128,5015.800832,s,10,15.389005493164063,1.5389005493164063,0.014977846303433613,1.5344813842773437,1.5553174926757813,1.5602898498535156,1.564267735595703,"[1.5148800048828126, 1.5278021240234374, 1.5240762939453125, 1.5535860595703126, 1.5323665771484376, 1.5480943603515624, 1.5542125244140625, 1.532129150390625, 1.56526220703125, 1.53659619140625]",tokens/s,40.93831796212249,kWh,4.5213650301372886e-05,4.987208540213296e-06,3.003357032313415e-05,8.023442916472033e-05,tokens/kWh,785199.0804429075,,s,630,15.385902879714974,0.024422068063039627,0.0005485261684029732,0.02434662437438965,0.024897196960449217,0.025076761531829833,0.026705567264556904,"[0.025111167907714844, 0.024440832138061523, 0.024295167922973634, 0.024030975341796875, 0.02390297508239746, 0.02444585609436035, 0.023923456192016603, 0.023861087799072266, 0.02385446357727051, 0.023780223846435546, 0.023910400390625, 0.023661983489990233, 0.02385161590576172, 0.023874879837036133, 0.023857856750488283, 0.023767040252685546, 0.023934656143188477, 0.023779647827148438, 0.023981983184814454, 0.023758304595947265, 0.023777919769287108, 0.024086528778076172, 0.023758111953735353, 0.023870080947875977, 0.023722272872924804, 0.02379961585998535, 0.024037376403808593, 0.023783424377441405, 0.023846912384033202, 0.02392064094543457, 0.023909439086914064, 0.02384172821044922, 0.023774591445922852, 0.023753343582153322, 0.02388172721862793, 0.023807584762573244, 0.02391823959350586, 0.023821056365966795, 0.02411520004272461, 0.024133216857910155, 0.023875648498535157, 0.023951711654663085, 0.023814144134521483, 0.02370172882080078, 0.023791391372680663, 0.023942304611206056, 0.023835487365722656, 0.023785472869873047, 0.02390425682067871, 0.027461631774902344, 0.024371200561523438, 0.02449807929992676, 0.02407334327697754, 0.0239400634765625, 0.02408448028564453, 0.02421116828918457, 0.024219263076782228, 0.024171167373657227, 0.024379392623901368, 0.024303327560424803, 0.024272224426269532, 
0.02417132759094238, 0.02423616027832031, 0.025683359146118166, 0.024443487167358398, 0.024008544921875, 0.024058015823364257, 0.023973888397216796, 0.02384614372253418, 0.02376572799682617, 0.023784992218017578, 0.02598963165283203, 0.02432204818725586, 0.02408857536315918, 0.024202720642089844, 0.024287776947021486, 0.024003807067871093, 0.02390505599975586, 0.02390220832824707, 0.02393907165527344, 0.023813343048095702, 0.02380793571472168, 0.02385408020019531, 0.02385603141784668, 0.023897024154663087, 0.024554975509643556, 0.024046112060546875, 0.023834623336791993, 0.023848064422607423, 0.023889087677001954, 0.023824064254760743, 0.023711584091186524, 0.023675039291381837, 0.023734272003173826, 0.023758848190307616, 0.023764991760253908, 0.023789567947387694, 0.024696832656860353, 0.023789152145385743, 0.024035039901733397, 0.024072895050048827, 0.024796640396118164, 0.02447551918029785, 0.02452137565612793, 0.024633344650268556, 0.02632294464111328, 0.024600576400756836, 0.024415359497070313, 0.02455401611328125, 0.02428553581237793, 0.025557119369506835, 0.024440095901489257, 0.024383424758911133, 0.024187551498413087, 0.02412928009033203, 0.025329280853271484, 0.024369792938232424, 0.02423334312438965, 0.02397987174987793, 0.02393168067932129, 0.024206687927246093, 0.023955615997314453, 0.0239498233795166, 0.024008703231811524, 0.02507366371154785, 0.0247193603515625, 0.0248558406829834, 0.02416499137878418, 0.02388991928100586, 0.023840768814086914, 0.02386944007873535, 0.023920480728149413, 0.023832223892211915, 0.023924383163452148, 0.024167264938354492, 0.024306880950927735, 0.024126272201538086, 0.02396918487548828, 0.024164735794067384, 0.023867231369018554, 0.023853439331054688, 0.023792896270751953, 0.02388636779785156, 0.023637983322143556, 0.023778783798217774, 0.023712032318115233, 0.023959232330322267, 0.02391142463684082, 0.023955263137817383, 0.02385206413269043, 0.024101856231689454, 0.024067712783813477, 0.024436927795410155, 0.024412351608276366, 0.024426496505737305, 0.024331392288208006, 0.024423295974731446, 0.024352256774902343, 0.02426838493347168, 0.024408992767333985, 0.02429267120361328, 0.024105663299560546, 0.023964704513549803, 0.023952352523803712, 0.023963008880615234, 0.02397657585144043, 0.02411929512023926, 0.023969503402709962, 0.023945184707641603, 0.023869760513305666, 0.023867071151733397, 0.02401055908203125, 0.024053855895996092, 0.02501059150695801, 0.024049663543701173, 0.023883775711059572, 0.023967744827270508, 0.023734272003173826, 0.024625152587890626, 0.025053184509277345, 0.024426496505737305, 0.024180736541748047, 0.024258560180664062, 0.02422096061706543, 0.02445158386230469, 0.02430384063720703, 0.024247615814208985, 0.024326847076416015, 0.02845929527282715, 0.02529097557067871, 0.024987775802612303, 0.025051488876342773, 0.02453913688659668, 0.024392799377441408, 0.024314783096313478, 0.0243507194519043, 0.024401920318603516, 0.024565759658813476, 0.024416128158569337, 0.024593759536743164, 0.024584127426147462, 0.024513120651245116, 0.024671775817871094, 0.02459926414489746, 0.02452236747741699, 0.02453913688659668, 0.024458816528320312, 0.024646528244018556, 0.02515545654296875, 0.024550975799560545, 0.024611167907714844, 0.024716800689697265, 0.024658592224121093, 0.02454732894897461, 0.02450758361816406, 0.024740671157836912, 0.024633344650268556, 0.024454944610595702, 0.024330495834350586, 0.02450649642944336, 0.02459017562866211, 0.024885120391845702, 0.024487327575683594, 0.024705120086669922, 0.02473958396911621, 
0.024854528427124024, 0.024853376388549803, 0.024845600128173828, 0.024826591491699218, 0.024737535476684572, 0.024729759216308593, 0.02480342483520508, 0.024962400436401366, 0.02496518325805664, 0.024984159469604493, 0.024809471130371095, 0.024965343475341798, 0.024864543914794923, 0.024898752212524414, 0.024875680923461915, 0.024770719528198242, 0.02469647979736328, 0.024807775497436523, 0.024647680282592774, 0.024558975219726564, 0.02449884796142578, 0.02442032051086426, 0.024448959350585938, 0.024260671615600585, 0.02421766471862793, 0.02427039909362793, 0.02444121551513672, 0.025168575286865235, 0.02469478416442871, 0.024625152587890626, 0.02455891227722168, 0.02750035285949707, 0.02478483200073242, 0.024761568069458006, 0.024567520141601563, 0.024350656509399413, 0.024254528045654297, 0.024145824432373047, 0.02460476875305176, 0.024682048797607423, 0.024950624465942383, 0.024232608795166016, 0.024393152236938477, 0.024172096252441405, 0.024120256423950194, 0.02432204818725586, 0.02408038330078125, 0.024174591064453126, 0.024162303924560546, 0.024070144653320313, 0.024043712615966797, 0.024001792907714845, 0.024107583999633787, 0.024070144653320313, 0.023961599349975587, 0.024149343490600585, 0.023972223281860352, 0.02402332878112793, 0.02402675247192383, 0.02421798324584961, 0.024058975219726563, 0.024130464553833008, 0.023977983474731446, 0.024180736541748047, 0.023914239883422853, 0.024143743515014648, 0.02409219169616699, 0.024101728439331054, 0.02398134422302246, 0.024304351806640624, 0.024337791442871095, 0.02414963150024414, 0.024022016525268555, 0.02407219123840332, 0.024102912902832032, 0.024106464385986327, 0.024102624893188478, 0.02406483268737793, 0.024214656829833984, 0.02427369689941406, 0.024341728210449217, 0.024275840759277342, 0.024186208724975587, 0.024244895935058595, 0.02429542350769043, 0.02434867286682129, 0.024376640319824217, 0.024564416885375976, 0.024481792449951172, 0.024623104095458984, 0.025324575424194334, 0.024835039138793945, 0.0246824951171875, 0.02472550392150879, 0.024639488220214844, 0.02454528045654297, 0.024565759658813476, 0.024690624237060546, 0.02447337532043457, 0.024314144134521484, 0.024393087387084962, 0.024412799835205078, 0.024349695205688478, 0.025010208129882812, 0.02425267219543457, 0.024326879501342772, 0.024483903884887696, 0.02447990417480469, 0.0245184326171875, 0.024625152587890626, 0.024571903228759767, 0.024840351104736327, 0.024897024154663085, 0.02489081573486328, 0.02508188819885254, 0.024562559127807616, 0.02462886428833008, 0.024550912857055664, 0.024556064605712892, 0.02463279914855957, 0.024961503982543945, 0.024508031845092773, 0.02459519958496094, 0.024500255584716798, 0.02446313667297363, 0.024355039596557618, 0.024290847778320312, 0.02446588706970215, 0.024459264755249024, 0.0243158073425293, 0.024305759429931642, 0.024327232360839845, 0.02442540740966797, 0.024278976440429687, 0.024270751953125, 0.024182111740112304, 0.02423263931274414, 0.02410918426513672, 0.024213504791259766, 0.024233375549316406, 0.024928287506103517, 0.024709695816040038, 0.02461676788330078, 0.024649919509887694, 0.024639488220214844, 0.024588287353515623, 0.024666112899780275, 0.024625152587890626, 0.024657920837402345, 0.02480512046813965, 0.024631616592407226, 0.025079296112060546, 0.02477004814147949, 0.025399616241455078, 0.024852479934692383, 0.02483404731750488, 0.024688640594482423, 0.024477344512939453, 0.024643936157226563, 0.02439740753173828, 0.02428316879272461, 0.024324480056762694, 0.024422399520874022, 0.024292640686035157, 
0.024453184127807618, 0.02470992088317871, 0.024512384414672853, 0.025017824172973633, 0.024373855590820313, 0.024330175399780274, 0.024358816146850586, 0.02414396858215332, 0.024238079071044923, 0.024377023696899414, 0.024461088180541993, 0.024558111190795897, 0.02484454345703125, 0.024383232116699217, 0.0242872314453125, 0.024337408065795898, 0.024382463455200197, 0.02470297622680664, 0.024564735412597655, 0.024914464950561523, 0.024822240829467775, 0.024925504684448242, 0.025652223587036133, 0.025369983673095703, 0.024824127197265625, 0.024951007843017577, 0.025038623809814455, 0.025018367767333984, 0.025011327743530272, 0.02473651123046875, 0.02473353576660156, 0.02480156707763672, 0.024759679794311523, 0.024937088012695313, 0.025005952835083008, 0.024803071975708007, 0.024656255722045897, 0.02468467140197754, 0.024481664657592772, 0.024344575881958007, 0.02434272003173828, 0.024280000686645507, 0.02471331214904785, 0.024382240295410158, 0.024426368713378905, 0.024469215393066405, 0.0244105281829834, 0.02458624076843262, 0.02464102363586426, 0.02439014434814453, 0.024770559310913084, 0.02636390495300293, 0.024819711685180663, 0.024491392135620117, 0.02450432014465332, 0.024332767486572267, 0.024262815475463866, 0.02433827209472656, 0.024184255599975585, 0.02431001663208008, 0.024215103149414063, 0.02425129508972168, 0.02431337547302246, 0.024191455841064455, 0.02433433532714844, 0.024238079071044923, 0.024248064041137694, 0.02443084716796875, 0.024156160354614258, 0.024203264236450195, 0.02415590476989746, 0.024067359924316405, 0.02402403259277344, 0.02429078483581543, 0.024013343811035155, 0.024470687866210938, 0.024066240310668945, 0.025408063888549805, 0.024436832427978516, 0.024168447494506837, 0.023993440628051758, 0.02398611259460449, 0.024182783126831055, 0.023972831726074218, 0.024043392181396485, 0.02396758460998535, 0.02512268829345703, 0.025455007553100584, 0.024328191757202147, 0.024259904861450195, 0.024203968048095704, 0.024303615570068358, 0.024190208435058595, 0.0243143367767334, 0.024903968811035158, 0.02469478416442871, 0.024631296157836914, 0.024766719818115235, 0.024366111755371095, 0.024195936203002928, 0.024098432540893555, 0.02420966339111328, 0.024045312881469726, 0.024238336563110353, 0.024131584167480468, 0.024354591369628906, 0.024180959701538086, 0.024219648361206055, 0.02427289581298828, 0.024164352416992187, 0.024360960006713867, 0.024193023681640623, 0.0242523193359375, 0.02409401512145996, 0.024208160400390626, 0.02534534454345703, 0.02462175941467285, 0.024604671478271483, 0.0245449275970459, 0.02477916717529297, 0.025284095764160155, 0.024773056030273438, 0.024832000732421877, 0.02470911979675293, 0.024791040420532227, 0.02478489685058594, 0.02462067222595215, 0.024926591873168945, 0.024772607803344726, 0.024567808151245117, 0.02457088088989258, 0.02451968002319336, 0.024386560440063477, 0.02433945655822754, 0.024368928909301757, 0.024407615661621095, 0.024705631256103516, 0.02444211196899414, 0.024560447692871093, 0.024557567596435546, 0.024528480529785155, 0.024377407073974608, 0.02868396759033203, 0.029337919235229493, 0.025039199829101563, 0.024823808670043947, 0.02452992057800293, 0.024509439468383788, 0.02492620849609375, 0.024713216781616212, 0.025691743850708007, 0.024959360122680664, 0.024917024612426758, 0.024609792709350587, 0.02450841522216797, 0.02485862350463867, 0.024823040008544923, 0.0245296630859375, 0.024657695770263673, 0.025628896713256837, 0.02684511947631836, 0.02492742347717285, 0.024699743270874024, 0.024518688201904296, 
0.024393888473510743, 0.024291168212890624, 0.0245166072845459, 0.024410015106201173, 0.024933567047119142, 0.024715999603271484, 0.025858240127563478, 0.02427494430541992, 0.024227840423583984, 0.024151935577392578, 0.02411065673828125, 0.024169023513793946, 0.02414364814758301, 0.02430793571472168, 0.025218399047851562, 0.024681055068969726, 0.02480975914001465, 0.024666112899780275, 0.02465977668762207, 0.02478086471557617, 0.02450444793701172, 0.02454047966003418, 0.02435158348083496, 0.02421743965148926, 0.024457216262817383, 0.024395776748657227, 0.024459264755249024, 0.024371200561523438, 0.024694976806640626, 0.024371007919311523, 0.024538496017456054, 0.02440665626525879, 0.024470880508422853, 0.02428995132446289, 0.02431385612487793, 0.024338655471801758, 0.024233919143676758, 0.024209184646606444, 0.02439331245422363, 0.02457638359069824, 0.024430496215820312, 0.02436934471130371, 0.024258272171020508, 0.024273183822631834, 0.024163328170776367, 0.024208383560180666, 0.024178688049316405, 0.02426825523376465, 0.02439017677307129, 0.024287391662597656, 0.02418671989440918, 0.024218719482421876, 0.02410179138183594, 0.02409881591796875, 0.024170495986938476, 0.02413907241821289, 0.024053695678710938, 0.024342432022094726, 0.024265567779541017, 0.024102432250976562, 0.02437513542175293, 0.02416703987121582, 0.024030879974365236, 0.02417033576965332, 0.024147903442382813, 0.024119871139526367, 0.02408399963378906, 0.024234464645385742, 0.02413273620605469, 0.024427391052246093, 0.024104383468627928, 0.024437311172485352, 0.027150335311889647, 0.024636800765991212, 0.024267295837402343, 0.024229984283447265, 0.024143871307373048]",tokens/s,40.946573296689834,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = 
_cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,2166.366208,2964.258816,0.0,2569.0112,2295.745536,s,1,9.6473115234375,9.6473115234375,0.0,9.6473115234375,9.6473115234375,9.6473115234375,9.6473115234375,[9.6473115234375],,kWh,7.833504790416252e-05,8.63349490721445e-06,3.1614747514036035e-05,0.000118583290325413,,MB,2210.922496,2981.036032,0.0,2571.108352,2282.381824,s,10,0.8473688659667968,0.08473688659667969,0.00044418413667075065,0.08474364852905274,0.08524638366699219,0.08527711105346679,0.08530169296264647,"[0.08514192199707031, 0.08523955535888672, 0.08446864318847656, 0.08440092468261719, 0.08451583862304687, 0.08378530883789062, 0.0853078384399414, 0.08481427001953125, 0.08502153778076171, 0.08467302703857423]",tokens/s,3021.1164261731446,kWh,2.5314591589078797e-06,2.791743459482126e-07,1.6692015269310386e-06,4.47983503178713e-06,tokens/kWh,57144961.40673164,MB,2214.264832,2981.036032,0.0,2571.108352,2391.673344,s,10,14.580236450195313,1.4580236450195314,0.009167518722685945,1.455982666015625,1.47125048828125,1.47271826171875,1.4738924804687499,"[1.466255859375, 1.4489053955078126, 1.4572235107421876, 1.4547418212890626, 1.47418603515625, 1.4539447021484375, 1.449278076171875, 1.4592376708984376, 1.47092431640625, 1.4455390625]",tokens/s,43.20917580123062,kWh,4.241340228567793e-05,4.677850070407303e-06,2.331506846046541e-05,7.040632081655064e-05,tokens/kWh,894806.0240805878,,s,630,14.57759053802491,0.023139032600039525,0.00042756544902658537,0.023002351760864258,0.023539679908752442,0.023715660858154297,0.024710056915283204,"[0.023114336013793944, 0.02290483283996582, 0.02556844711303711, 0.023786304473876953, 0.02325503921508789, 0.023262624740600587, 0.023386720657348634, 0.023386112213134767, 0.023293664932250976, 0.023371135711669922, 0.02322934341430664, 0.02330009651184082, 0.023211967468261718, 0.02341484832763672, 0.02328780746459961, 0.023185407638549805, 0.02294169616699219, 0.022982656478881838, 0.02278201675415039, 0.022904352188110353, 0.02293494415283203, 0.023133184432983397, 0.023000799179077148, 0.0229215030670166, 0.022944927215576172, 0.022940607070922853, 0.022919071197509765, 0.022996992111206056, 0.02286591911315918, 0.022931455612182617, 0.02308095932006836, 0.02309334373474121, 0.022935455322265624, 0.023345151901245118, 0.023439231872558593, 0.023256671905517577, 0.023480863571166993, 0.0231461124420166, 0.02427097511291504, 0.022927616119384767, 0.02285977554321289, 0.022859424591064454, 
0.022917375564575196, 0.023052383422851562, 0.022861824035644532, 0.022882272720336914, 0.022825151443481444, 0.023070016860961915, 0.022923103332519533, 0.02299769592285156, 0.023136255264282226, 0.023613439559936524, 0.023781375885009767, 0.02371174430847168, 0.023596319198608398, 0.02359779167175293, 0.024195072174072265, 0.023811712265014648, 0.02398041534423828, 0.02362771224975586, 0.02358278465270996, 0.023563360214233397, 0.023378847122192382, 0.023551616668701172, 0.02343584060668945, 0.023367488861083984, 0.023177024841308593, 0.022986879348754884, 0.023045631408691408, 0.023251520156860352, 0.02319068717956543, 0.022917024612426756, 0.023071680068969726, 0.02279952049255371, 0.02299171257019043, 0.0230951042175293, 0.023062623977661133, 0.023203008651733397, 0.023395231246948242, 0.023323999404907227, 0.023212064743041994, 0.022873952865600587, 0.02292780876159668, 0.02279772758483887, 0.02281977653503418, 0.022726015090942384, 0.02292390441894531, 0.02287615966796875, 0.02286319923400879, 0.02273961639404297, 0.022802112579345703, 0.022814495086669922, 0.023266944885253906, 0.022852512359619142, 0.022921247482299803, 0.022796255111694336, 0.023119871139526366, 0.02274492835998535, 0.022853792190551756, 0.022789791107177736, 0.022964351654052733, 0.022740224838256835, 0.02275222396850586, 0.02272480010986328, 0.022841215133666992, 0.022874048233032226, 0.022794240951538085, 0.022749183654785156, 0.022816160202026366, 0.022820959091186522, 0.02280012893676758, 0.023089920043945313, 0.023856672286987304, 0.024348735809326172, 0.02326095962524414, 0.02292390441894531, 0.022908384323120118, 0.022835039138793947, 0.022926015853881834, 0.022841215133666992, 0.022922847747802736, 0.022841312408447265, 0.02295452880859375, 0.02276902389526367, 0.02285430335998535, 0.02285510444641113, 0.022922880172729494, 0.024281471252441407, 0.02438118362426758, 0.02303206443786621, 0.02290675163269043, 0.022838815689086914, 0.022979040145874024, 0.02292473602294922, 0.022819520950317383, 0.022861824035644532, 0.02287820816040039, 0.02308064079284668, 0.022878528594970703, 0.022855680465698244, 0.02288342475891113, 0.022721439361572265, 0.02277987289428711, 0.022784095764160156, 0.022783935546875, 0.022808576583862306, 0.022807584762573243, 0.022840288162231444, 0.022900384902954103, 0.023068735122680664, 0.023037824630737305, 0.023196063995361327, 0.023363584518432616, 0.023734272003173826, 0.023351295471191406, 0.02339571189880371, 0.02322412872314453, 0.023339839935302736, 0.02337123107910156, 0.023289472579956054, 0.023661088943481446, 0.026549888610839845, 0.023444223403930663, 0.023347103118896484, 0.023065887451171874, 0.023212352752685548, 0.0232043514251709, 0.023000640869140623, 0.022921855926513673, 0.022909759521484375, 0.022928384780883788, 0.02298396873474121, 0.022725343704223633, 0.022935552597045897, 0.02298044776916504, 0.022868127822875978, 0.02285772705078125, 0.023157983779907226, 0.02327628707885742, 0.023375648498535156, 0.02304025650024414, 0.022939647674560547, 0.022960128784179686, 0.02303340721130371, 0.022794687271118164, 0.022830432891845703, 0.022807199478149413, 0.022914144515991212, 0.02291315269470215, 0.022943744659423827, 0.022921215057373046, 0.022829055786132812, 0.02285539245605469, 0.023139904022216797, 0.022893280029296876, 0.02275107192993164, 0.02280790328979492, 0.02384160041809082, 0.022985824584960936, 0.02282534408569336, 0.022895135879516602, 0.022982656478881838, 0.022898687362670898, 0.022767616271972657, 0.022959583282470702, 0.022954208374023437, 
0.023011648178100585, 0.022738208770751955, 0.02285968017578125, 0.022846271514892578, 0.02294166374206543, 0.02277174377441406, 0.02287513542175293, 0.022823936462402345, 0.022940704345703125, 0.022805471420288086, 0.022861087799072265, 0.022901472091674806, 0.02291097640991211, 0.02292278480529785, 0.02375641632080078, 0.02299737548828125, 0.02300156784057617, 0.023219520568847657, 0.02325299263000488, 0.023299968719482422, 0.023330720901489257, 0.02365532875061035, 0.02352742385864258, 0.023459487915039063, 0.02350668716430664, 0.023562816619873046, 0.023750688552856444, 0.02334422492980957, 0.023429088592529297, 0.023538623809814453, 0.023451391220092772, 0.022969888687133788, 0.023030399322509765, 0.023037696838378908, 0.023189184188842773, 0.023011199951171873, 0.02311587142944336, 0.022903327941894532, 0.023023775100708008, 0.023048255920410158, 0.023018976211547852, 0.023220703125, 0.023147647857666015, 0.02316716766357422, 0.02308780860900879, 0.022945024490356444, 0.02395136070251465, 0.023590911865234376, 0.02326937675476074, 0.023394304275512694, 0.023011327743530274, 0.023522592544555663, 0.02359164810180664, 0.023631872177124022, 0.023556095123291015, 0.023572383880615236, 0.02349679946899414, 0.023428543090820313, 0.023392223358154298, 0.02349484825134277, 0.02395084762573242, 0.023610240936279298, 0.023371807098388674, 0.02348236846923828, 0.023713407516479493, 0.023433792114257813, 0.02340553665161133, 0.02311827278137207, 0.023306655883789062, 0.023549184799194336, 0.02307753562927246, 0.023412223815917968, 0.023333663940429687, 0.023568191528320313, 0.023199743270874023, 0.023180383682250977, 0.02327235221862793, 0.02309119987487793, 0.022966272354125978, 0.02308064079284668, 0.02293552017211914, 0.02294963264465332, 0.022984928131103515, 0.022996768951416016, 0.022949600219726564, 0.02332966423034668, 0.023406591415405274, 0.023396352767944335, 0.023236608505249022, 0.02409267234802246, 0.023592863082885742, 0.024053855895996092, 0.023586111068725588, 0.023669439315795897, 0.02347542381286621, 0.023654176712036134, 0.024738815307617186, 0.02373017692565918, 0.02353561592102051, 0.02342857551574707, 0.02310812759399414, 0.0230830078125, 0.023044095993041993, 0.023138303756713868, 0.02327552032470703, 0.023258943557739258, 0.023124223709106446, 0.023070655822753906, 0.023052288055419923, 0.023145248413085937, 0.02327756881713867, 0.022943744659423827, 0.02329395294189453, 0.023113727569580078, 0.02327743911743164, 0.02326950454711914, 0.02319580841064453, 0.02293948745727539, 0.022889951705932617, 0.022841888427734373, 0.022929407119750975, 0.02285772705078125, 0.02287615966796875, 0.022828704833984376, 0.023011264801025392, 0.022923679351806642, 0.0230067195892334, 0.022915584564208984, 0.02290640068054199, 0.02284796714782715, 0.022978656768798827, 0.02304604721069336, 0.022996959686279298, 0.022947423934936522, 0.022995391845703126, 0.023172319412231444, 0.02296063995361328, 0.02289459228515625, 0.022921056747436525, 0.02290108871459961, 0.02298646354675293, 0.022801919937133788, 0.023032863616943358, 0.02299785614013672, 0.022967296600341795, 0.022971744537353515, 0.023016096115112305, 0.022914047241210937, 0.02299295997619629, 0.023124671936035155, 0.022892127990722655, 0.022934175491333007, 0.023165151596069335, 0.02287593650817871, 0.023717504501342773, 0.02285318374633789, 0.023328704833984373, 0.022962400436401367, 0.0229769287109375, 0.02303923225402832, 0.02313113594055176, 0.022968320846557616, 0.023023008346557617, 0.022955968856811525, 0.023059104919433592, 
0.023023008346557617, 0.022968511581420898, 0.022931072235107423, 0.023370527267456056, 0.023044095993041993, 0.022983903884887694, 0.026568639755249025, 0.022988319396972656, 0.02299958419799805, 0.023079999923706053, 0.023392288208007813, 0.022922079086303712, 0.02289254379272461, 0.022945728302001953, 0.023099327087402345, 0.022841472625732422, 0.02300636863708496, 0.022811296463012696, 0.023220415115356444, 0.022771711349487304, 0.022976512908935546, 0.022888128280639648, 0.02291539192199707, 0.022798336029052735, 0.02285919952392578, 0.022814752578735352, 0.02283478355407715, 0.022952896118164062, 0.022965375900268554, 0.022840192794799805, 0.023134208679199218, 0.023158784866333007, 0.02293289566040039, 0.022798816680908204, 0.022912864685058595, 0.023049728393554687, 0.0228503360748291, 0.022788095474243163, 0.022906879425048828, 0.02281881523132324, 0.02291097640991211, 0.026089471817016603, 0.023821439743041992, 0.023293888092041016, 0.023110591888427734, 0.022896543502807617, 0.022888063430786133, 0.02281875228881836, 0.022856224060058595, 0.022829055786132812, 0.022926687240600586, 0.022815391540527342, 0.02289039993286133, 0.022892032623291016, 0.022901344299316406, 0.022824575424194336, 0.022859392166137697, 0.022823295593261718, 0.022897024154663086, 0.022826751708984374, 0.023224319458007812, 0.022916704177856444, 0.023007904052734375, 0.022943359375, 0.022936992645263672, 0.02294063949584961, 0.02300668716430664, 0.022859392166137697, 0.022909919738769532, 0.022935487747192382, 0.02346598434448242, 0.023219263076782227, 0.022892768859863282, 0.023433952331542968, 0.02298784065246582, 0.022905792236328125, 0.02280793571472168, 0.022815359115600585, 0.0228351993560791, 0.02288844871520996, 0.022814176559448243, 0.02286169624328613, 0.022851648330688475, 0.022878976821899415, 0.022876096725463868, 0.0229039363861084, 0.02291494369506836, 0.022864255905151367, 0.02293609619140625, 0.023054176330566407, 0.023242912292480468, 0.023400447845458985, 0.02332467269897461, 0.023658496856689453, 0.023674047470092774, 0.023537696838378905, 0.023523647308349608, 0.023535423278808594, 0.023412992477416992, 0.023525535583496095, 0.023463743209838867, 0.0234003849029541, 0.023058944702148438, 0.023007232666015624, 0.023011327743530274, 0.023418880462646483, 0.023377920150756838, 0.023396095275878905, 0.023201055526733398, 0.023237632751464843, 0.023060447692871095, 0.02307481575012207, 0.022931455612182617, 0.023418624877929686, 0.023279584884643555, 0.02318569564819336, 0.023214080810546874, 0.023185407638549805, 0.023125856399536134, 0.02307609558105469, 0.02327030372619629, 0.023551136016845702, 0.023488895416259765, 0.023220703125, 0.022932960510253907, 0.02292176055908203, 0.023045631408691408, 0.023044160842895508, 0.023189952850341797, 0.022932640075683595, 0.022862688064575195, 0.02333830451965332, 0.02300547218322754, 0.023264768600463868, 0.023192384719848632, 0.02319468879699707, 0.023486848831176757, 0.023400480270385743, 0.023588640213012695, 0.023461984634399413, 0.023441247940063477, 0.023339775085449217, 0.02342255973815918, 0.023362016677856444, 0.02346188735961914, 0.023459711074829102, 0.02347430419921875, 0.024040672302246095, 0.0246396484375, 0.023597696304321288, 0.023119871139526366, 0.022999040603637694, 0.02310745620727539, 0.023452896118164063, 0.023239583969116212, 0.023109216690063477, 0.023009567260742186, 0.022970495223999025, 0.023078527450561524, 0.023681407928466798, 0.022992895126342772, 0.022939647674560547, 0.022998783111572267, 0.02365670394897461, 
0.0230699520111084, 0.022907232284545897, 0.024906143188476563, 0.024585887908935546, 0.023769439697265624, 0.023363487243652344, 0.023427167892456056, 0.023173120498657225, 0.02300851249694824, 0.02295680046081543, 0.023078432083129884, 0.023366111755371094, 0.023473247528076172, 0.026989376068115235, 0.023277280807495117, 0.0229967041015625, 0.022915744781494142, 0.022898687362670898, 0.023001087188720702, 0.02288643264770508, 0.02298240089416504, 0.02295625686645508, 0.023109632492065428, 0.022994016647338866, 0.02305731201171875, 0.022986751556396484, 0.023049983978271484, 0.02296575927734375, 0.023165695190429686, 0.02291916847229004, 0.02315807914733887, 0.023081567764282225, 0.023060480117797853, 0.023009279251098632, 0.023119871139526366, 0.02305843162536621, 0.022824960708618162, 0.023003135681152344, 0.02288332748413086, 0.02308310317993164, 0.022897567749023438, 0.02291097640991211, 0.022835071563720704, 0.022980735778808593, 0.02295225524902344, 0.022984384536743164, 0.02280243110656738, 0.022951936721801756, 0.023001087188720702, 0.02290483283996582, 0.022771615982055664, 0.02298019218444824, 0.022829599380493164, 0.02293552017211914, 0.022749183654785156, 0.02285158348083496, 0.022794048309326173, 0.02288435173034668, 0.02282624053955078, 0.022867935180664063, 0.022764511108398437, 0.022865888595581054, 0.02287151908874512, 0.022939647674560547, 0.02285420799255371, 0.022896352767944335, 0.02277507209777832, 0.022846464157104493, 0.02327452850341797, 0.023354335784912108, 0.023005184173583985, 0.022975872039794922, 0.022884992599487303, 0.02292300796508789, 0.022775104522705078, 0.022848447799682616, 0.023326719284057617, 0.022890495300292968, 0.022761472702026365, 0.023013376235961915, 0.02287615966796875, 0.022994464874267578, 0.02289302444458008, 0.023127071380615233, 0.02287926483154297, 0.023035839080810548, 0.02288787269592285, 0.02297657585144043, 0.02297088050842285, 0.022974431991577147, 0.022887935638427736, 0.02290060806274414, 0.02293827247619629, 0.02301139259338379, 0.023322559356689452]",tokens/s,43.217018502246795,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 
4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1339.94496,1148.125184,0.0,752.877568,710.554112,s,1,8.7731044921875,8.7731044921875,0.0,8.7731044921875,8.7731044921875,8.7731044921875,8.7731044921875,[8.7731044921875],,kWh,4.9749024966695284e-05,5.477843875071808e-06,1.6886957954015402e-05,7.211382679578249e-05,,MB,1337.827328,1429.143552,0.0,1019.215872,949.09696,s,10,0.26381101036071775,0.02638110103607178,0.0003819674675211477,0.026364831924438475,0.02667032299041748,0.026933353710174558,0.027143778285980222,"[0.026392799377441406, 0.026202783584594727, 0.02627484893798828, 0.02629033660888672, 0.02647478485107422, 0.02719638442993164, 0.02633686447143555, 0.02661187171936035, 0.026468511581420898, 0.025561824798583984]",tokens/s,9703.916438133589,kWh,7.683246981357689e-07,8.469786453777631e-08,5.001531779000312e-07,1.3531757405735765e-06,tokens/kWh,189184591.71569848,MB,1358.237696,1437.53216,0.0,1027.60448,949.09952,s,10,13.170380249023436,1.317038024902344,0.008806143524571609,1.3187987060546875,1.325497912597656,1.3257829528808593,1.3260109851074218,"[1.3121505126953126, 1.3254345703125, 1.3126083984375, 1.3182025146484375, 1.3260679931640624, 1.322536376953125, 1.3193948974609375, 1.323876953125, 1.315271240234375, 1.2948367919921875]",tokens/s,47.83460979015495,kWh,3.8256545591859534e-05,4.219283233828629e-06,1.577088761669926e-05,5.824671644238742e-05,tokens/kWh,1081606.0346047853,,s,630,13.167925653457653,0.020901469291202606,0.0003524974265339799,0.020911567687988283,0.021156941413879395,0.021330596446990965,0.022246307201385505,"[0.020983808517456053, 0.02114518356323242, 0.021186784744262697, 0.020934047698974608, 0.02098361587524414, 0.021183456420898437, 0.02122889518737793, 0.02095961570739746, 0.0210351676940918, 0.021003936767578123, 0.021058015823364258, 0.021014528274536134, 0.021112831115722656, 0.021120704650878907, 0.02130361557006836, 0.021121023178100586, 0.021405696868896484, 0.02085478401184082, 0.020712959289550782, 0.02057676887512207, 0.02070688056945801, 0.02066067123413086, 0.020688383102416993, 0.020515327453613282, 0.02048371124267578, 0.02095961570739746, 0.02078460884094238, 0.021479232788085938, 0.020513151168823244, 0.02039401626586914, 0.02045574378967285, 0.020321311950683593, 0.02055471992492676, 0.02044435119628906, 0.020327392578125, 0.02049007987976074, 0.02049843215942383, 0.020526559829711914, 0.02040425682067871, 0.020342784881591795, 0.020323999404907228, 0.020302688598632813, 0.020479999542236327, 0.020528799057006837, 0.02067433547973633, 0.02061574363708496, 0.02102681541442871, 0.02085478401184082, 0.02078220748901367, 0.02089459228515625, 0.02079539108276367, 0.020746240615844725, 0.020612224578857422, 0.02069183921813965, 0.020983808517456053, 0.020938304901123046, 0.020931039810180664, 0.020813791275024415, 0.020987680435180664, 0.02077939224243164, 0.022951776504516602, 0.020856800079345702, 
0.020869152069091797, 0.020920320510864256, 0.02097113609313965, 0.020863359451293945, 0.020993247985839843, 0.02083510398864746, 0.020774911880493165, 0.02083430480957031, 0.020918272018432618, 0.02097171211242676, 0.020843711853027344, 0.021009023666381837, 0.02106572723388672, 0.020965087890625, 0.020864320755004884, 0.020851200103759765, 0.020797920227050782, 0.02092755126953125, 0.020761568069458006, 0.020750303268432618, 0.02099404716491699, 0.02092748832702637, 0.02082099151611328, 0.02085273551940918, 0.02074959945678711, 0.020941535949707032, 0.02143020820617676, 0.02170172882080078, 0.021799903869628906, 0.021288415908813477, 0.0210478401184082, 0.020815872192382814, 0.020882720947265624, 0.02095996856689453, 0.022290016174316408, 0.02213929557800293, 0.020899839401245117, 0.021001247406005858, 0.021110944747924805, 0.020933055877685548, 0.02075663948059082, 0.0210229434967041, 0.02076643180847168, 0.020859167098999022, 0.02070528030395508, 0.020834079742431642, 0.020816064834594725, 0.02309328079223633, 0.02149171257019043, 0.02098761558532715, 0.020857120513916017, 0.020809728622436522, 0.020872575759887695, 0.021139551162719726, 0.020830751419067383, 0.02071334457397461, 0.020781280517578125, 0.02120841598510742, 0.021853824615478516, 0.021191743850708007, 0.02083724784851074, 0.0207258243560791, 0.020802495956420898, 0.020707263946533203, 0.020719615936279297, 0.020711423873901368, 0.02072777557373047, 0.02055990409851074, 0.020584447860717774, 0.02087116813659668, 0.0208855037689209, 0.020811456680297852, 0.020750816345214845, 0.020701023101806642, 0.02087068748474121, 0.020759008407592774, 0.02071900749206543, 0.020683359146118165, 0.020527103424072265, 0.020692991256713866, 0.02099404716491699, 0.02065203285217285, 0.02048409652709961, 0.020625408172607423, 0.020707328796386718, 0.020985855102539062, 0.02086502456665039, 0.020748287200927733, 0.02065203285217285, 0.020768224716186525, 0.02059516716003418, 0.020594240188598633, 0.020602752685546875, 0.02069772720336914, 0.020999488830566407, 0.020807903289794923, 0.020718048095703125, 0.020800928115844726, 0.020946815490722658, 0.020775295257568358, 0.020807359695434572, 0.020774560928344725, 0.020810752868652343, 0.02090166473388672, 0.020907936096191407, 0.02096928024291992, 0.02111516761779785, 0.020985279083251953, 0.020929311752319334, 0.020960607528686524, 0.020997983932495117, 0.020834592819213866, 0.020965919494628907, 0.021387264251708983, 0.021127168655395507, 0.020930559158325195, 0.020993919372558595, 0.020916351318359373, 0.020948192596435548, 0.021006591796875, 0.021100511550903322, 0.02100918388366699, 0.02083203125, 0.020920160293579102, 0.020740095138549804, 0.020879520416259765, 0.02101795196533203, 0.02084966468811035, 0.020927776336669923, 0.020625791549682616, 0.020765024185180662, 0.020702816009521483, 0.02068931198120117, 0.02061311912536621, 0.020706367492675782, 0.02087376022338867, 0.02196291160583496, 0.020739456176757813, 0.020933504104614257, 0.02087731170654297, 0.020891136169433593, 0.020984319686889647, 0.02090729522705078, 0.02093129539489746, 0.02091007995605469, 0.020760000228881834, 0.02101715278625488, 0.02100160026550293, 0.020896255493164064, 0.021176448822021486, 0.022616064071655274, 0.021098367691040038, 0.02111296081542969, 0.02081996726989746, 0.020778112411499024, 0.02086591911315918, 0.020872928619384765, 0.02097107124328613, 0.021269216537475585, 0.021118783950805665, 0.02096771240234375, 0.020837343215942383, 0.02079372787475586, 0.020953664779663084, 0.020884992599487305, 
0.02077337646484375, 0.020840448379516603, 0.020676607131958007, 0.020772863388061523, 0.020918272018432618, 0.02089779281616211, 0.020741952896118163, 0.02084230422973633, 0.020756864547729494, 0.020930559158325195, 0.020758527755737305, 0.020875263214111327, 0.020793344497680662, 0.021741567611694337, 0.02109008026123047, 0.021114784240722655, 0.020726079940795897, 0.020641408920288085, 0.020783487319946288, 0.020825279235839843, 0.02080441665649414, 0.02066441535949707, 0.020864927291870117, 0.020674560546875, 0.02074006462097168, 0.021083168029785156, 0.021128416061401367, 0.021044511795043946, 0.021080095291137694, 0.02108051109313965, 0.02103843116760254, 0.02103107261657715, 0.021107200622558595, 0.021018047332763672, 0.02100896072387695, 0.020965375900268556, 0.02101055908203125, 0.02097760009765625, 0.02082809638977051, 0.021090303421020508, 0.021086208343505858, 0.020940256118774415, 0.02109040069580078, 0.020935104370117186, 0.020976640701293944, 0.02093881607055664, 0.021033920288085938, 0.021340160369873046, 0.021028863906860353, 0.021325824737548828, 0.02095913505554199, 0.02127052879333496, 0.021026784896850587, 0.02124608039855957, 0.02113052749633789, 0.021054176330566405, 0.021178367614746094, 0.021104448318481444, 0.021176511764526368, 0.021045248031616212, 0.021198848724365234, 0.02121126365661621, 0.021402816772460937, 0.021037759780883788, 0.020961280822753905, 0.020996095657348633, 0.02102272033691406, 0.020856767654418944, 0.0209552001953125, 0.020919807434082033, 0.02091430473327637, 0.0209716796875, 0.021069183349609374, 0.020886367797851562, 0.021070016860961913, 0.02101638412475586, 0.02103500747680664, 0.02092380714416504, 0.02097158432006836, 0.021055967330932616, 0.021421728134155275, 0.02113577651977539, 0.02099404716491699, 0.020911136627197267, 0.020982976913452148, 0.020791072845458985, 0.02084659194946289, 0.02086092758178711, 0.02105180740356445, 0.021178207397460937, 0.021104639053344726, 0.021045248031616212, 0.021044288635253906, 0.021183391571044922, 0.02101046371459961, 0.020894975662231446, 0.021076255798339844, 0.021058015823364258, 0.021159936904907226, 0.021067743301391603, 0.021063711166381834, 0.020964351654052735, 0.021097471237182617, 0.0209420166015625, 0.02094163131713867, 0.020891616821289063, 0.020908063888549804, 0.021327871322631836, 0.021014528274536134, 0.020930080413818358, 0.020924448013305664, 0.02098419189453125, 0.02102672004699707, 0.02081398391723633, 0.02086092758178711, 0.02086092758178711, 0.021143552780151367, 0.02097724723815918, 0.020956607818603517, 0.020849632263183593, 0.021104639053344726, 0.021331647872924804, 0.021174591064453126, 0.02106572723388672, 0.021325855255126952, 0.021073888778686524, 0.020981760025024415, 0.021083711624145508, 0.02099407958984375, 0.022002080917358398, 0.02333695983886719, 0.021235488891601564, 0.020903615951538085, 0.02076214408874512, 0.02085785675048828, 0.020785152435302736, 0.020746240615844725, 0.02073353576660156, 0.020699167251586915, 0.020578624725341797, 0.02057632064819336, 0.020621440887451173, 0.02048192024230957, 0.020676607131958007, 0.020719615936279297, 0.020665664672851563, 0.021561151504516603, 0.020537567138671876, 0.02042947196960449, 0.020335647583007814, 0.020564960479736327, 0.020920320510864256, 0.020961408615112306, 0.021110784530639647, 0.020938623428344728, 0.020965343475341798, 0.02077654457092285, 0.020811840057373045, 0.020791807174682618, 0.020944896697998046, 0.02079871940612793, 0.02077568054199219, 0.02081996726989746, 0.02079539108276367, 
0.020911231994628906, 0.02084668731689453, 0.02115660858154297, 0.020905088424682618, 0.020927391052246093, 0.020803455352783204, 0.020915584564208986, 0.020875616073608397, 0.020957599639892577, 0.021373088836669923, 0.020966527938842773, 0.020628032684326173, 0.020542816162109376, 0.0208023681640625, 0.020883455276489257, 0.020936704635620116, 0.020867071151733398, 0.020856351852416993, 0.02081622314453125, 0.020759967803955077, 0.020871904373168944, 0.0208154239654541, 0.02071340751647949, 0.02076416015625, 0.020789344787597655, 0.020917312622070312, 0.0207642879486084, 0.020652095794677736, 0.020812992095947266, 0.020882400512695312, 0.020918272018432618, 0.02081996726989746, 0.02088960075378418, 0.020955135345458984, 0.021192384719848634, 0.020871488571166993, 0.02099945640563965, 0.02117491149902344, 0.021014623641967774, 0.020971519470214844, 0.020979103088378907, 0.020830432891845704, 0.021531007766723634, 0.022556671142578123, 0.021022048950195313, 0.02111350440979004, 0.021007680892944337, 0.021153888702392577, 0.02103766441345215, 0.02097350311279297, 0.02103500747680664, 0.020987295150756837, 0.021027423858642577, 0.020905824661254884, 0.020930976867675782, 0.020975360870361327, 0.02087321662902832, 0.020866336822509764, 0.020957696914672853, 0.020951263427734373, 0.020783103942871094, 0.02076198387145996, 0.020793983459472656, 0.020911903381347657, 0.020887775421142576, 0.02091007995605469, 0.020950368881225586, 0.021004928588867187, 0.02102070426940918, 0.020973567962646485, 0.021006336212158205, 0.020891679763793945, 0.02090185546875, 0.020933759689331054, 0.020797439575195312, 0.02088025665283203, 0.020883455276489257, 0.020960575103759767, 0.021018720626831053, 0.02107209587097168, 0.021031295776367188, 0.020972639083862304, 0.0210251522064209, 0.021017120361328124, 0.021041183471679686, 0.02089967918395996, 0.02095871925354004, 0.021019264221191405, 0.02086195182800293, 0.020871456146240235, 0.0209968318939209, 0.02098099136352539, 0.020992767333984374, 0.02112291145324707, 0.02095939254760742, 0.020912128448486327, 0.020908031463623047, 0.02105881690979004, 0.02105625534057617, 0.021255327224731446, 0.02100227165222168, 0.020984640121459962, 0.022052608489990234, 0.021329311370849608, 0.02184012794494629, 0.02163539123535156, 0.021, 0.0210817928314209, 0.021015327453613283, 0.02113929557800293, 0.0209849910736084, 0.02084524726867676, 0.02090220832824707, 0.021114879608154297, 0.021102592468261717, 0.021159999847412108, 0.021017663955688475, 0.021363584518432618, 0.021111967086791993, 0.021070688247680665, 0.020999711990356447, 0.021272960662841796, 0.021136608123779297, 0.020941247940063478, 0.021034591674804686, 0.02111369514465332, 0.020926464080810548, 0.020961280822753905, 0.02107961654663086, 0.02107027244567871, 0.021307392120361326, 0.021057504653930664, 0.02102684783935547, 0.020940031051635742, 0.020861696243286133, 0.020733951568603515, 0.02063929557800293, 0.021856704711914064, 0.02120275115966797, 0.02068511962890625, 0.020909952163696288, 0.021112831115722656, 0.0209017276763916, 0.020934816360473632, 0.021102399826049806, 0.02099628829956055, 0.02104729652404785, 0.020880544662475586, 0.021015392303466798, 0.02093417549133301, 0.02095155143737793, 0.020891616821289063, 0.020951040267944337, 0.020953088760375976, 0.020817760467529298, 0.020910240173339843, 0.02089916801452637, 0.02093942451477051, 0.020874719619750976, 0.020734495162963867, 0.02067203140258789, 0.020564191818237303, 0.02057241630554199, 0.020539264678955078, 0.02063488006591797, 
0.020979648590087892, 0.02110089683532715, 0.02053353691101074, 0.020582304000854493, 0.020351200103759765, 0.02025494384765625, 0.020174816131591798, 0.020162559509277343, 0.020121599197387697, 0.020090303421020507, 0.02018502426147461, 0.02026134490966797, 0.02069286346435547, 0.020222335815429687, 0.020354175567626955, 0.020198272705078124, 0.020281375885009764, 0.020148191452026367, 0.02012265586853027, 0.020236608505249023, 0.022420127868652343, 0.02115283203125, 0.020245439529418947, 0.020325952529907227, 0.02034432029724121, 0.02017990493774414, 0.02037478446960449, 0.020312480926513672, 0.02023664093017578, 0.020143455505371093, 0.020148895263671876, 0.02026108741760254, 0.02023811149597168, 0.020176895141601564, 0.020271104812622072, 0.020173856735229492, 0.020150400161743163, 0.020204416275024416, 0.020336095809936523, 0.020204191207885743, 0.020241376876831054, 0.020298847198486326, 0.02010044860839844, 0.020226112365722658, 0.020296064376831055, 0.020183263778686525, 0.0201889591217041, 0.02030182456970215, 0.02030723190307617, 0.02017353630065918, 0.020578304290771485, 0.020823328018188477, 0.02078998374938965, 0.020774335861206056, 0.020916799545288085, 0.021016576766967773, 0.02084864044189453, 0.02101862335205078, 0.020934560775756835, 0.020959327697753907, 0.02087321662902832, 0.020996095657348633, 0.02096143913269043, 0.020848703384399415, 0.020905759811401366, 0.020903488159179688, 0.0208218879699707, 0.020886079788208008, 0.020932607650756836, 0.020748287200927733, 0.02115100860595703, 0.02062758445739746, 0.020537952423095703, 0.022007551193237305]",tokens/s,47.84352650370366,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1948.069888,2886.664192,0.0,2491.416576,2425.650176,s,1,9.972828125,9.972828125,0.0,9.972828125,9.972828125,9.972828125,9.972828125,[9.972828125],,kWh,8.597890382082672e-05,9.476869520918352e-06,3.169669202399139e-05,0.00012715246536573646,,MB,1733.013504,3115.25376,0.0,2705.32608,2606.127616,s,10,0.4999646072387696,0.04999646072387696,0.0003091115439414927,0.04991495895385742,0.05013563652038574,0.050508970832824705,0.05080763828277588,"[0.05088230514526367, 0.049979934692382814, 0.04994582366943359, 0.04974195098876953, 0.049794143676757815, 0.05005267333984375, 0.04999516677856446, 0.049836673736572266, 0.049851840972900394, 0.04988409423828125]",tokens/s,5120.362447531037,kWh,1.5538485170211853e-06,1.713596663676243e-07,1.0287892391062423e-06,2.7539974224950518e-06,tokens/kWh,92955787.79738672,MB,1737.27744,3115.25376,0.0,2705.32608,2606.130176,s,10,13.563270996093749,1.3563270996093748,0.010747341118720936,1.3583674926757812,1.3651344482421874,1.3715413452148437,1.3766668627929688,"[1.3593658447265624, 1.3383299560546875, 1.34439794921875, 1.3485174560546875, 1.3503255615234375, 1.357369140625, 1.363710693359375, 1.360469970703125, 1.362836181640625, 
1.3779482421875]",tokens/s,46.44897238884642,kWh,4.0362754617560954e-05,4.451614435356935e-06,1.988122748889506e-05,6.469559654181294e-05,tokens/kWh,973791.1599483116,,s,630,13.561056861877454,0.021525487082345147,0.0003614071250362197,0.021439663887023924,0.021907794761657717,0.022065814304351807,0.022571212730407717,"[0.021614528656005858, 0.021540992736816405, 0.022388736724853517, 0.022577152252197266, 0.02147430419921875, 0.021477760314941405, 0.021293312072753905, 0.02134422492980957, 0.02127065658569336, 0.02156732749938965, 0.02171539115905762, 0.021896383285522462, 0.021689151763916014, 0.02219161605834961, 0.0218176326751709, 0.021835199356079103, 0.021738271713256836, 0.021774335861206053, 0.021761056900024413, 0.021926271438598634, 0.021784639358520506, 0.021797407150268556, 0.021772287368774415, 0.021751808166503905, 0.021770240783691407, 0.021727231979370116, 0.021792671203613282, 0.021799007415771485, 0.02179836845397949, 0.021647296905517577, 0.02166796875, 0.021680608749389648, 0.021583423614501954, 0.021592512130737304, 0.021531936645507812, 0.021443199157714844, 0.021420127868652345, 0.021278751373291015, 0.02127449607849121, 0.02119219207763672, 0.02136284828186035, 0.021162080764770507, 0.02121558380126953, 0.021136735916137694, 0.02431862449645996, 0.021315584182739256, 0.02123084831237793, 0.02120569610595703, 0.021321792602539063, 0.021304927825927734, 0.021481151580810546, 0.021306079864501955, 0.021157535552978515, 0.021276735305786134, 0.021289247512817383, 0.021202943801879884, 0.021208127975463866, 0.02117932891845703, 0.021263551712036134, 0.021310272216796874, 0.021231103897094726, 0.021238271713256835, 0.02122956848144531, 0.02152038383483887, 0.02135603141784668, 0.02136729621887207, 0.021376352310180664, 0.02155708885192871, 0.022068031311035158, 0.021384511947631836, 0.021309247970581056, 0.021463359832763672, 0.021213760375976564, 0.021170175552368165, 0.021169919967651368, 0.021838048934936523, 0.021268512725830076, 0.021214847564697267, 0.021125503540039062, 0.02114963150024414, 0.021045215606689455, 0.02126652717590332, 0.02111267280578613, 0.021182144165039062, 0.021053920745849608, 0.021082111358642578, 0.021129215240478515, 0.021368831634521485, 0.021981184005737304, 0.021161600112915038, 0.02107139205932617, 0.021042015075683592, 0.021064960479736328, 0.021193471908569336, 0.02119606399536133, 0.021287647247314453, 0.02105887985229492, 0.02107375907897949, 0.02118332862854004, 0.021096128463745117, 0.021132736206054686, 0.021213920593261718, 0.021242015838623046, 0.021198848724365234, 0.02115692710876465, 0.021161184310913086, 0.021113824844360352, 0.021118719100952147, 0.02110073661804199, 0.021154624938964844, 0.02140719985961914, 0.02112156867980957, 0.021090303421020508, 0.02126028823852539, 0.021114879608154297, 0.021157888412475585, 0.02109644889831543, 0.021135072708129882, 0.021217344284057617, 0.02123798370361328, 0.021082111358642578, 0.021194751739501954, 0.021403648376464843, 0.02125619125366211, 0.021295103073120117, 0.0212674560546875, 0.021819456100463867, 0.021786687850952148, 0.021790719985961913, 0.02168627166748047, 0.021440511703491212, 0.02124799919128418, 0.02129715156555176, 0.021172224044799806, 0.02126438331604004, 0.02130534362792969, 0.021196800231933592, 0.021295103073120117, 0.021346303939819337, 0.02123366355895996, 0.021182464599609374, 0.02125212860107422, 0.021178335189819337, 0.021219327926635743, 0.02124300765991211, 0.02125449562072754, 0.021188512802124023, 0.021203584671020508, 0.02143404769897461, 
0.021291328430175782, 0.021336063385009766, 0.02143436813354492, 0.021773887634277345, 0.021363136291503906, 0.02144486427307129, 0.02147030448913574, 0.021340351104736328, 0.02131715202331543, 0.02143123245239258, 0.022231039047241212, 0.021415872573852537, 0.02123097610473633, 0.02121798324584961, 0.021224832534790038, 0.021157503128051758, 0.021117952346801756, 0.02111884880065918, 0.0212542724609375, 0.02114684867858887, 0.021174463272094726, 0.021117536544799805, 0.02127408027648926, 0.02120944023132324, 0.021215423583984375, 0.021243904113769533, 0.02167788887023926, 0.02152876853942871, 0.021302848815917968, 0.021234111785888674, 0.021202016830444335, 0.02123664093017578, 0.02131155204772949, 0.021135200500488283, 0.021245216369628905, 0.021371519088745117, 0.02127686309814453, 0.021388959884643555, 0.021364543914794924, 0.021317216873168947, 0.021408544540405274, 0.02122547149658203, 0.0212807674407959, 0.021393247604370117, 0.021352575302124022, 0.021356576919555663, 0.021322879791259765, 0.021377119064331054, 0.021177120208740234, 0.02116511917114258, 0.02119366455078125, 0.021114879608154297, 0.021174272537231444, 0.021131263732910157, 0.021075456619262696, 0.021117439270019533, 0.021053440093994142, 0.021319679260253906, 0.021180416107177736, 0.021132352828979493, 0.021164800643920897, 0.021192895889282228, 0.021456895828247072, 0.021325824737548828, 0.021395040512084962, 0.021253696441650392, 0.021228384017944336, 0.021303295135498047, 0.02123526382446289, 0.021324127197265626, 0.021194847106933593, 0.02129715156555176, 0.021303295135498047, 0.02132524871826172, 0.021369407653808594, 0.021256032943725585, 0.021297183990478516, 0.021311616897583006, 0.02127401542663574, 0.021385791778564454, 0.021338144302368165, 0.02145414352416992, 0.02184671974182129, 0.021460512161254882, 0.021307519912719727, 0.021428575515747072, 0.02135443115234375, 0.021319103240966798, 0.021396095275878907, 0.021379072189331053, 0.021618431091308593, 0.021836095809936524, 0.021812320709228516, 0.021871456146240233, 0.021820480346679688, 0.021846975326538086, 0.02168012809753418, 0.021800960540771484, 0.021813119888305664, 0.021796287536621092, 0.021772703170776366, 0.021694751739501954, 0.022171648025512695, 0.022556671142578123, 0.02221014404296875, 0.02210652732849121, 0.021972768783569335, 0.02185772705078125, 0.021830432891845702, 0.021750911712646485, 0.021623680114746094, 0.021753856658935547, 0.02148703956604004, 0.0213753604888916, 0.021215423583984375, 0.021157888412475585, 0.021331071853637695, 0.021202911376953126, 0.021169055938720704, 0.02122534370422363, 0.021091840744018556, 0.021190656661987304, 0.021189247131347656, 0.021069631576538086, 0.02112441635131836, 0.021148544311523437, 0.021065088272094728, 0.021146240234375, 0.021263711929321288, 0.021209344863891602, 0.021224992752075195, 0.021220224380493164, 0.022190080642700196, 0.022796287536621093, 0.02145280075073242, 0.02123936080932617, 0.021246400833129883, 0.021233024597167968, 0.02114419174194336, 0.02112499237060547, 0.02121945571899414, 0.021168127059936523, 0.021120576858520507, 0.021100128173828125, 0.02114143943786621, 0.021126047134399414, 0.021281951904296874, 0.021146463394165038, 0.02127052879333496, 0.02125347137451172, 0.02116035270690918, 0.02113727951049805, 0.021346111297607422, 0.02123958396911621, 0.021390111923217773, 0.02147532844543457, 0.021444608688354492, 0.021551103591918946, 0.021749568939208985, 0.021727392196655274, 0.02165558433532715, 0.02162073516845703, 0.021575679779052736, 0.0216494083404541, 
0.021685407638549804, 0.021668256759643553, 0.02177222442626953, 0.021766143798828123, 0.02188083267211914, 0.021894336700439453, 0.021768896102905274, 0.021727359771728516, 0.021567487716674806, 0.021436416625976562, 0.021698816299438477, 0.021447999954223633, 0.021309120178222656, 0.021416160583496095, 0.021252639770507814, 0.02132080078125, 0.021240255355834962, 0.02120342445373535, 0.021347711563110352, 0.021299840927124024, 0.022634496688842775, 0.02186614418029785, 0.02173529624938965, 0.021683807373046874, 0.021563488006591795, 0.021449440002441405, 0.022058271408081056, 0.021977664947509766, 0.021433696746826172, 0.021557600021362304, 0.021426719665527345, 0.021342016220092772, 0.021618879318237305, 0.022124544143676757, 0.021352447509765626, 0.021294368743896484, 0.021433055877685545, 0.021340160369873046, 0.021581823348999024, 0.02143846321105957, 0.021356544494628905, 0.021389312744140625, 0.02126950454711914, 0.021857280731201172, 0.021792768478393554, 0.02189107131958008, 0.02156915283203125, 0.021430656433105467, 0.021548128128051756, 0.021414815902709963, 0.021403648376464843, 0.021360095977783204, 0.021219263076782225, 0.02120150375366211, 0.02127872085571289, 0.02124595260620117, 0.021215391159057618, 0.0213703670501709, 0.02143881607055664, 0.021614591598510743, 0.021563295364379884, 0.021590112686157226, 0.021568544387817384, 0.021582048416137697, 0.021701055526733397, 0.02228223991394043, 0.021885215759277345, 0.021937183380126953, 0.021858207702636717, 0.021904159545898437, 0.02169241523742676, 0.021776384353637695, 0.021816831588745117, 0.021541376113891602, 0.021525951385498048, 0.02146771240234375, 0.021495744705200194, 0.021441696166992187, 0.021754783630371095, 0.021652767181396484, 0.02156390380859375, 0.021403295516967773, 0.021350080490112305, 0.02134105682373047, 0.0212359676361084, 0.02200873565673828, 0.02217622375488281, 0.02245465660095215, 0.02165350341796875, 0.02149504089355469, 0.021742015838623046, 0.021493152618408205, 0.021566368103027343, 0.02164121627807617, 0.02167158317565918, 0.02160470390319824, 0.021462303161621094, 0.021352767944335938, 0.021293472290039063, 0.021399551391601563, 0.021391359329223633, 0.021448511123657227, 0.021310783386230468, 0.021241952896118164, 0.021412448883056642, 0.021446847915649415, 0.0225218563079834, 0.022253568649291993, 0.02166988754272461, 0.021649023056030274, 0.0215350399017334, 0.021579679489135743, 0.021483295440673827, 0.02140937614440918, 0.021327840805053712, 0.021265216827392578, 0.02232646369934082, 0.02142223930358887, 0.021602527618408203, 0.02128121566772461, 0.02140323257446289, 0.021135295867919922, 0.021246431350708007, 0.02457779121398926, 0.021712831497192383, 0.021319999694824218, 0.021298559188842773, 0.021289600372314452, 0.02165727996826172, 0.021391807556152345, 0.021428096771240236, 0.021446399688720703, 0.021307647705078123, 0.021364736557006835, 0.021186559677124024, 0.021198432922363283, 0.021387807846069334, 0.021534591674804687, 0.021391359329223633, 0.021272512435913087, 0.021285087585449218, 0.021284704208374024, 0.021374975204467773, 0.021301151275634766, 0.02129520034790039, 0.02131328010559082, 0.02129852867126465, 0.021381792068481446, 0.021346208572387695, 0.021415647506713868, 0.021497919082641603, 0.021879360198974608, 0.021769216537475586, 0.021852800369262695, 0.021827583312988282, 0.021688703536987305, 0.021755903244018555, 0.02170822334289551, 0.021596416473388672, 0.02169683265686035, 0.021665632247924806, 0.021700544357299803, 0.021724767684936523, 
0.021641536712646483, 0.021533088684082033, 0.021551008224487304, 0.021612543106079102, 0.02156447982788086, 0.021730239868164063, 0.021696672439575196, 0.021716064453125, 0.021832096099853517, 0.02163337516784668, 0.02152448081970215, 0.02195039939880371, 0.02130335998535156, 0.02135180854797363, 0.021492351531982423, 0.021356544494628905, 0.021380607604980468, 0.021422592163085938, 0.02136412811279297, 0.02248966407775879, 0.023604896545410155, 0.02178700828552246, 0.021743616104125976, 0.02186444854736328, 0.021777856826782228, 0.021709375381469727, 0.021693599700927733, 0.021680992126464845, 0.022190176010131835, 0.021877792358398436, 0.021856000900268555, 0.021824928283691408, 0.02176272010803223, 0.021639167785644533, 0.021506048202514647, 0.021557247161865235, 0.02149990463256836, 0.021420032501220702, 0.02151219177246094, 0.02164454460144043, 0.021850879669189454, 0.0218723201751709, 0.02152467155456543, 0.02127401542663574, 0.02135932731628418, 0.02128486442565918, 0.02122256088256836, 0.02121404838562012, 0.021241472244262694, 0.021686943054199218, 0.02139423942565918, 0.02118320083618164, 0.02130143928527832, 0.021794975280761717, 0.02172297668457031, 0.021738719940185548, 0.02168707275390625, 0.021855424880981446, 0.021734207153320313, 0.021792160034179688, 0.021720672607421877, 0.021808128356933593, 0.021938175201416017, 0.021677503585815428, 0.021975616455078124, 0.02191980743408203, 0.022157247543334962, 0.02176188850402832, 0.02196905517578125, 0.021548927307128905, 0.02140096092224121, 0.021684991836547853, 0.02149580764770508, 0.02152038383483887, 0.021573408126831055, 0.022048608779907226, 0.021480831146240234, 0.021572288513183595, 0.021485696792602538, 0.021294687271118166, 0.02148384094238281, 0.021352256774902344, 0.021579616546630858, 0.021392000198364257, 0.021372127532958984, 0.02145359992980957, 0.02191084861755371, 0.0215765438079834, 0.02173731231689453, 0.02186240005493164, 0.021817344665527344, 0.021977855682373048, 0.02186419105529785, 0.02224358367919922, 0.02185420799255371, 0.021716991424560548, 0.021549055099487305, 0.021716064453125, 0.021767072677612305, 0.02170412826538086, 0.021631551742553712, 0.021732479095458983, 0.021588863372802733, 0.021769760131835937, 0.021447040557861327, 0.021463136672973632, 0.021544960021972655, 0.02185215950012207, 0.02200739288330078, 0.021901311874389647, 0.02184547233581543, 0.021840831756591798, 0.021743616104125976, 0.02182931137084961, 0.021810911178588868, 0.02179542350769043, 0.02190745544433594, 0.021810943603515626, 0.021985536575317384, 0.02192300796508789, 0.021944320678710938, 0.021974880218505858, 0.021902111053466795, 0.021991615295410157, 0.02202828788757324, 0.02204876708984375, 0.02209587287902832, 0.02208153533935547, 0.02211840057373047, 0.021937471389770508, 0.021940128326416015, 0.022806848526000977, 0.02202467155456543, 0.022091775894165038, 0.022063104629516602, 0.022018047332763673, 0.02199734306335449, 0.021797088623046874, 0.02188595199584961, 0.022258399963378906, 0.022010143280029298, 0.021906848907470702, 0.021981792449951174, 0.02187990379333496, 0.021883039474487304, 0.021747711181640626, 0.021752159118652345, 0.02187868881225586, 0.021740032196044923, 0.021776384353637695, 0.0216494083404541, 0.02165760040283203, 0.021542911529541017, 0.021503679275512694]",tokens/s,46.456556182655845,,, 
4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,843.771904,558.825472,0.0,163.577856,154.631168,s,1,8.00877294921875,8.00877294921875,0.0,8.00877294921875,8.00877294921875,8.00877294921875,8.00877294921875,[8.00877294921875],,kWh,2.1754401229198566e-05,2.3922935190957997e-06,7.536950474007487e-06,3.168364522230185e-05,,MB,1196.183552,611.254272,0.0,201.326592,187.147776,s,30,0.19616284799575806,0.006538761599858603,0.00013730042177287423,0.006516160011291504,0.0066924351692199706,0.006713878369331359,0.0069213583135604865,"[0.007003903865814209, 0.00655238389968872, 0.006690783977508545, 0.006612927913665772, 0.006543263912200927, 0.00663702392578125, 0.006719264030456543, 0.006657216072082519, 0.006386528015136719, 0.006424960136413574, 0.0064652800559997555, 0.006520063877105713, 0.00667683219909668, 0.006707295894622802, 0.00646127986907959, 0.006395328044891358, 0.006508287906646729, 0.0066433920860290525, 0.006512256145477295, 0.006609920024871826, 0.006563680171966553, 0.006437983989715576, 0.006429791927337647, 0.006380735874176025, 0.006371295928955078, 0.006402527809143067, 0.0065049281120300294, 0.006523776054382324, 0.006439680099487304, 0.006380256175994873]",tokens/s,39151.14446220762,kWh,1.9014748516559677e-07,2.0969349838361798e-08,8.76385065928472e-08,2.9875534159680575e-07,tokens/kWh,856888444.6775599,MB,1209.9584,613.351424,0.0,203.423744,187.150336,s,30,10.121157592773438,0.33737191975911446,0.006944105613889463,0.33639990234375,0.34557210083007817,0.35074585571289063,0.35332661407470706,"[0.35142596435546875, 0.34991461181640626, 0.3447034606933594, 0.34299172973632813, 0.345089599609375, 0.34135153198242185, 0.3415032653808594, 0.33556829833984375, 0.330926025390625, 0.3306352844238281, 0.3398701477050781, 0.35410293579101565, 0.3389402160644531, 0.3327669677734375, 0.3322997741699219, 0.3302099914550781, 0.3411739501953125, 0.34000830078125, 0.3374844665527344, 0.3426356201171875, 0.3336680908203125, 0.3295469360351562, 0.331063232421875, 0.32873614501953125, 0.3288599853515625, 0.33218084716796875, 0.3349767761230469, 0.33723150634765625, 0.33184170532226565, 0.3294502258300781]",tokens/s,186.73753300210157,kWh,9.395116657307233e-06,1.0361209795953927e-06,3.490285502697509e-06,1.3921523139600134e-05,tokens/kWh,4525366.899028086,,s,1890,10.1066182656288,0.005347417071761278,0.00020955570937754628,0.005310992002487183,0.005513996982574463,0.005591457629203796,0.006038199048042297,"[0.005385119915008545, 0.005482079982757568, 0.006101247787475586, 0.0056341438293457035, 0.005629824161529541, 0.00672976016998291, 0.005650432109832764, 0.0061914238929748535, 0.005571616172790527, 0.005527455806732178, 0.005444352149963379, 0.005488063812255859, 0.005523200035095215, 0.005579360008239746, 0.0054943361282348635, 0.005511231899261474, 0.005515872001647949, 0.0054988799095153805, 0.005742591857910156, 0.0056212158203125, 
0.0055874881744384765, 0.0055474238395690914, 0.005477024078369141, 0.005554111957550048, 0.0054776320457458495, 0.005543839931488037, 0.005601823806762695, 0.005607295989990235, 0.005623392105102539, 0.005591904163360596, 0.005577919960021972, 0.005604320049285889, 0.005590911865234375, 0.005745855808258057, 0.005571648120880127, 0.005567647933959961, 0.005589151859283447, 0.005579296112060547, 0.005615488052368164, 0.005543680191040039, 0.005529344081878662, 0.005546495914459228, 0.005638144016265869, 0.005679103851318359, 0.005566592216491699, 0.0055454721450805666, 0.005543647766113281, 0.005491072177886963, 0.005427487850189209, 0.005480000019073486, 0.005421504020690918, 0.005431295871734619, 0.005484799861907959, 0.005347008228302002, 0.005407872200012207, 0.005411776065826416, 0.005403872013092041, 0.0054362878799438475, 0.005437312126159668, 0.00544598388671875, 0.0054924159049987795, 0.0054160962104797365, 0.005360479831695557, 0.0053153600692749025, 0.005367712020874023, 0.0053853759765625, 0.00542416000366211, 0.005396383762359619, 0.005386240005493164, 0.00541923189163208, 0.005410592079162598, 0.005407040119171143, 0.005350240230560303, 0.005348256111145019, 0.005326752185821533, 0.005409023761749268, 0.005452672004699707, 0.005359615802764893, 0.005382719993591308, 0.005347519874572754, 0.0054431681632995605, 0.005519648075103759, 0.005527391910552979, 0.005406847953796387, 0.005470240116119384, 0.005400864124298096, 0.005375743865966797, 0.0053844480514526364, 0.0054906878471374515, 0.0053678078651428224, 0.005392384052276611, 0.005388095855712891, 0.005702943801879883, 0.005540575981140137, 0.005495999813079834, 0.005473279953002929, 0.005480447769165039, 0.0064899840354919434, 0.005605504035949707, 0.005601280212402344, 0.005525087833404541, 0.005640672206878662, 0.005521344184875488, 0.005522624015808105, 0.005513984203338623, 0.0054795842170715335, 0.00549571180343628, 0.006146048069000244, 0.0055658559799194336, 0.0065414719581604, 0.005906879901885986, 0.005848671913146973, 0.00554640007019043, 0.005784863948822021, 0.005606112003326416, 0.005646368026733398, 0.0055149121284484865, 0.005633503913879395, 0.005524352073669434, 0.005512159824371338, 0.005489727973937988, 0.005597119808197022, 0.005505023956298828, 0.006170783996582031, 0.005551360130310059, 0.005558879852294922, 0.005474720001220703, 0.005470304012298584, 0.005451072216033936, 0.005517600059509277, 0.005478816032409668, 0.005485983848571777, 0.005441952228546143, 0.005445280075073242, 0.005480735778808594, 0.005499135971069336, 0.005438560009002685, 0.005521791934967041, 0.005521952152252197, 0.005595136165618897, 0.0054988799095153805, 0.005502975940704345, 0.005529151916503906, 0.005487040042877197, 0.0054778242111206054, 0.00556828784942627, 0.005493535995483399, 0.005537600040435791, 0.005465792179107666, 0.005473087787628174, 0.005422944068908691, 0.005394559860229492, 0.005363423824310303, 0.0054579200744628905, 0.005443583965301513, 0.005449183940887451, 0.0054635839462280275, 0.005427648067474365, 0.0054494719505310055, 0.005501760005950928, 0.005416959762573242, 0.005444608211517334, 0.005440127849578857, 0.005429215908050537, 0.005460608005523682, 0.005424384117126465, 0.005442080020904541, 0.005469759941101074, 0.005509568214416504, 0.00546611213684082, 0.005488639831542969, 0.005396480083465576, 0.005404767990112305, 0.005439008235931396, 0.005396927833557129, 0.00540451192855835, 0.00544982385635376, 0.005392159938812256, 0.005439167976379395, 0.005423647880554199, 0.005486591815948487, 
0.0055008320808410645, 0.005425248146057129, 0.005458975791931152, 0.005448448181152344, 0.005437664031982422, 0.0054720320701599125, 0.005490911960601807, 0.00547983980178833, 0.005371871948242187, 0.005417280197143555, 0.005408768177032471, 0.005438752174377441, 0.005416672229766845, 0.0054568958282470706, 0.005394591808319092, 0.005460063934326172, 0.005371647834777832, 0.005393472194671631, 0.005468671798706055, 0.0053632321357727055, 0.005403615951538086, 0.005375455856323242, 0.005413216114044189, 0.005373472213745117, 0.005374559879302979, 0.005378047943115235, 0.005383264064788818, 0.005393311977386474, 0.005502079963684082, 0.005413760185241699, 0.005424672126770019, 0.005460608005523682, 0.0055294399261474605, 0.0053944320678710935, 0.005398528099060058, 0.005414912223815918, 0.0053505277633667, 0.005347199916839599, 0.005383488178253174, 0.005421823978424072, 0.005342144012451172, 0.0053640642166137694, 0.005406688213348389, 0.0053654398918151856, 0.005386240005493164, 0.0053944320678710935, 0.005392096042633057, 0.005456319808959961, 0.005578783988952637, 0.005510335922241211, 0.005585536003112793, 0.005524608135223388, 0.005514111995697022, 0.005542912006378174, 0.005510015964508057, 0.005507199764251709, 0.005601280212402344, 0.005481887817382813, 0.0055426878929138185, 0.005481311798095703, 0.005440447807312012, 0.005486623764038086, 0.005530975818634033, 0.005425343990325928, 0.005460447788238525, 0.005443583965301513, 0.005418496131896973, 0.0054297599792480465, 0.0054694080352783205, 0.00546281623840332, 0.005350751876831055, 0.005356544017791748, 0.0053376641273498535, 0.005407167911529541, 0.005407872200012207, 0.005370751857757568, 0.005438560009002685, 0.005452191829681397, 0.005377535820007324, 0.005402944087982178, 0.005407423973083496, 0.005402175903320312, 0.005478943824768066, 0.005449120044708252, 0.005464288234710693, 0.005475647926330566, 0.005608672142028808, 0.005574143886566162, 0.005468416213989258, 0.00543123197555542, 0.005490752220153808, 0.005480447769165039, 0.005736735820770264, 0.005467936038970948, 0.005459904193878174, 0.0054991040229797365, 0.0055229439735412595, 0.005580448150634766, 0.005503615856170654, 0.005465248107910156, 0.005505087852478027, 0.0055071358680725095, 0.005550816059112549, 0.005564479827880859, 0.005586880207061768, 0.005504320144653321, 0.005472703933715821, 0.005478623867034912, 0.005494815826416016, 0.005451776027679443, 0.005399680137634278, 0.005429535865783691, 0.00540118408203125, 0.005423168182373047, 0.005481887817382813, 0.005480319976806641, 0.005481279850006103, 0.005490528106689453, 0.005432352066040039, 0.005464928150177002, 0.00547430419921875, 0.005417344093322754, 0.005477312088012695, 0.0054915199279785155, 0.005392384052276611, 0.005447679996490478, 0.00541596794128418, 0.005485536098480225, 0.0054336638450622554, 0.0054403839111328125, 0.005472447872161865, 0.005464799880981445, 0.005475296020507813, 0.005547071933746338, 0.0055305280685424806, 0.005451680183410645, 0.0055251197814941404, 0.0054198079109191895, 0.005342591762542725, 0.005366079807281494, 0.005405824184417724, 0.005332064151763916, 0.0053303041458129885, 0.005310880184173584, 0.00530841588973999, 0.005310336112976074, 0.005399680137634278, 0.005273663997650146, 0.005286367893218994, 0.005309919834136963, 0.0052861437797546386, 0.00532089614868164, 0.005371551990509033, 0.005295008182525635, 0.005314559936523438, 0.0053012480735778805, 0.005345600128173828, 0.005364255905151367, 0.0053003840446472165, 0.005357439994812012, 
0.005298399925231933, 0.005302271842956543, 0.005287839889526367, 0.005326272010803223, 0.005319104194641113, 0.005298304080963135, 0.005308767795562744, 0.005342559814453125, 0.00532041597366333, 0.0053827519416809085, 0.005382336139678955, 0.005397408008575439, 0.005489376068115235, 0.005458111763000488, 0.005451776027679443, 0.005449728012084961, 0.005439487934112549, 0.005593440055847168, 0.005424287796020508, 0.005450047969818115, 0.005902048110961914, 0.005491168022155762, 0.005484543800354004, 0.005573919773101807, 0.005421855926513672, 0.0057404799461364744, 0.005465983867645264, 0.005545695781707763, 0.005481056213378906, 0.005449535846710205, 0.0054860801696777345, 0.005499648094177246, 0.00546995210647583, 0.005496831893920898, 0.005488160133361816, 0.005460095882415771, 0.005479936122894287, 0.005456352233886718, 0.005437632083892822, 0.005457695960998535, 0.005406752109527588, 0.0053678078651428224, 0.005408736228942871, 0.0053578557968139645, 0.005351168155670166, 0.005478400230407715, 0.005406720161437988, 0.005355519771575928, 0.005388288021087646, 0.005457791805267334, 0.005400352001190186, 0.005365695953369141, 0.005707903861999512, 0.005410367965698242, 0.005402815818786621, 0.005351967811584472, 0.005445631980895996, 0.005402815818786621, 0.005368927955627442, 0.005345344066619873, 0.005397151947021484, 0.005345280170440674, 0.005400576114654541, 0.005399936199188232, 0.00537667179107666, 0.005392352104187012, 0.005436863899230957, 0.005400479793548584, 0.00538486385345459, 0.005400191783905029, 0.005392191886901855, 0.005410943984985352, 0.0053805441856384275, 0.005370975971221924, 0.005393248081207275, 0.005345344066619873, 0.00540883207321167, 0.005353312015533448, 0.00532806396484375, 0.005325727939605713, 0.005306367874145508, 0.0053567042350769044, 0.005408927917480468, 0.005432000160217285, 0.005418303966522217, 0.005528255939483643, 0.005572319984436035, 0.005502495765686035, 0.0054906558990478515, 0.005441343784332276, 0.005464032173156739, 0.005448703765869141, 0.005412896156311035, 0.005449215888977051, 0.005425631999969482, 0.005400800228118897, 0.005486368179321289, 0.0054271998405456545, 0.005448031902313233, 0.00540227222442627, 0.005396607875823974, 0.005433152198791504, 0.005421023845672607, 0.005451871871948242, 0.005431295871734619, 0.005489920139312744, 0.005437983989715576, 0.005459839820861816, 0.005384543895721436, 0.005368896007537842, 0.005370687961578369, 0.0053290238380432125, 0.0054559998512268065, 0.005342400074005127, 0.005329279899597168, 0.005379424095153808, 0.00534000015258789, 0.005369344234466553, 0.0053582720756530765, 0.005349088191986084, 0.005348800182342529, 0.005296576023101807, 0.005337279796600342, 0.005322368144989014, 0.005427743911743164, 0.005406720161437988, 0.005327968120574951, 0.00531657600402832, 0.00534009599685669, 0.005349376201629639, 0.005320703983306885, 0.0053350400924682614, 0.005333055973052978, 0.005283775806427002, 0.005322751998901368, 0.005296351909637451, 0.005328671932220459, 0.005322751998901368, 0.005328896045684814, 0.005344575881958008, 0.0053582720756530765, 0.005292031764984131, 0.005289152145385743, 0.005255424022674561, 0.0052540478706359865, 0.005237855911254883, 0.005326528072357178, 0.005237631797790528, 0.0052379522323608395, 0.005208447933197021, 0.005235455989837647, 0.005205440044403076, 0.005191135883331299, 0.005225535869598389, 0.005219423770904541, 0.005212096214294434, 0.005229279994964599, 0.005285759925842285, 0.005189727783203125, 0.005191679954528809, 0.005201920032501221, 
0.005238912105560303, 0.005179423809051514, 0.0050869441032409665, 0.005190336227416993, 0.005175295829772949, 0.005246079921722412, 0.005188543796539307, 0.005201920032501221, 0.005181439876556396, 0.005242496013641357, 0.005175136089324951, 0.005197535991668701, 0.005159743785858154, 0.005189407825469971, 0.005187104225158692, 0.0052368960380554195, 0.005198368072509766, 0.005215328216552734, 0.005167327880859375, 0.00518336009979248, 0.005192512035369873, 0.005198847770690918, 0.005159967899322509, 0.005213600158691406, 0.005179967880249024, 0.005187007904052734, 0.005247104167938233, 0.00519212818145752, 0.005195136070251465, 0.005212160110473632, 0.005246623992919922, 0.005219295978546142, 0.005201920032501221, 0.005265344142913819, 0.005263584136962891, 0.0052304320335388186, 0.005269504070281982, 0.005318655967712403, 0.005371903896331787, 0.005322751998901368, 0.005295135974884033, 0.0052912960052490235, 0.005373983860015869, 0.0053814401626586916, 0.005269824028015136, 0.00525110387802124, 0.005289567947387695, 0.005276063919067383, 0.005242879867553711, 0.005251071929931641, 0.005225920200347901, 0.005657375812530518, 0.005308191776275634, 0.0052791681289672855, 0.005291935920715332, 0.00524560022354126, 0.005223807811737061, 0.005311103820800781, 0.0052420802116394044, 0.005220799922943115, 0.005219808101654053, 0.0052674880027771, 0.005321599960327148, 0.005298304080963135, 0.005306208133697509, 0.005187583923339844, 0.005277696132659912, 0.005203551769256591, 0.005247392177581787, 0.005232736110687256, 0.005188672065734863, 0.005249824047088623, 0.005236320018768311, 0.005212160110473632, 0.005236447811126709, 0.005195839881896973, 0.005210815906524658, 0.005258880138397217, 0.005237215995788574, 0.00521779203414917, 0.005234272003173828, 0.005239615917205811, 0.005251264095306397, 0.005216063976287842, 0.0052163200378417965, 0.005206240177154541, 0.005223936080932618, 0.0053554558753967285, 0.00527184009552002, 0.005275648117065429, 0.005320352077484131, 0.005227968215942382, 0.005237664222717285, 0.005224448204040527, 0.005234176158905029, 0.005202079772949219, 0.0051654081344604495, 0.005230016231536865, 0.00524073600769043, 0.0051998400688171385, 0.005226880073547363, 0.005232223987579346, 0.005288576126098633, 0.005252255916595459, 0.005219552040100097, 0.00519481611251831, 0.005190303802490234, 0.0052137279510498045, 0.005269599914550781, 0.005208447933197021, 0.005238527774810791, 0.005237215995788574, 0.005231584072113037, 0.0054767999649047855, 0.005292416095733643, 0.005261312007904053, 0.0052425599098205565, 0.005225855827331543, 0.005227456092834473, 0.005270847797393799, 0.005227039813995362, 0.005228096008300781, 0.005200479984283447, 0.005257376194000244, 0.005224192142486572, 0.0053105602264404295, 0.00528380823135376, 0.005275680065155029, 0.005239488124847412, 0.005244448184967041, 0.0053415679931640625, 0.005380127906799316, 0.005375904083251953, 0.005415103912353516, 0.005440512180328369, 0.005368703842163086, 0.005445631980895996, 0.005692768096923828, 0.005517312049865723, 0.005444159984588623, 0.005382239818572998, 0.005439680099487304, 0.005379903793334961, 0.005441535949707031, 0.00535756778717041, 0.0053043198585510255, 0.005249343872070313, 0.005324480056762696, 0.005326848030090332, 0.005260831832885742, 0.005285855770111084, 0.0053005762100219725, 0.005301792144775391, 0.0053233919143676756, 0.005316256046295166, 0.005325151920318604, 0.005361120223999024, 0.005343776226043701, 0.00531660795211792, 0.005390336036682129, 0.0053303041458129885, 
0.005312416076660156, 0.005349952220916748, 0.005314720153808594, 0.0052882561683654785, 0.005323455810546875, 0.005397791862487793, 0.005401951789855957, 0.005472640037536621, 0.0054906878471374515, 0.0054026880264282226, 0.005470143795013428, 0.005480447769165039, 0.005502912044525146, 0.005486656188964844, 0.005431295871734619, 0.005498591899871826, 0.005445919990539551, 0.005455872058868408, 0.00550707197189331, 0.005435391902923584, 0.005406720161437988, 0.00547430419921875, 0.005406720161437988, 0.005402624130249023, 0.005361663818359375, 0.005341055870056152, 0.005386367797851562, 0.005427135944366455, 0.005393727779388428, 0.005350272178649903, 0.005356927871704101, 0.005390719890594482, 0.005415487766265869, 0.0054002561569213865, 0.005360960006713867, 0.005375967979431152, 0.0053769278526306155, 0.005432479858398438, 0.005380000114440918, 0.005372255802154541, 0.005395199775695801, 0.005422880172729492, 0.005473440170288086, 0.005455904006958008, 0.005620704174041748, 0.005562079906463623, 0.005489920139312744, 0.005598239898681641, 0.005474016189575195, 0.0055025601387023925, 0.00544323205947876, 0.005473023891448975, 0.005435455799102784, 0.0053860158920288085, 0.005367968082427978, 0.005439487934112549, 0.005414912223815918, 0.00548367977142334, 0.005475168228149414, 0.005445248126983643, 0.005450111865997314, 0.005427103996276855, 0.005404767990112305, 0.005389632225036621, 0.005415616035461426, 0.005609471797943116, 0.005434368133544922, 0.0056514558792114256, 0.005383776187896728, 0.005429024219512939, 0.005763711929321289, 0.0054778242111206054, 0.005464191913604736, 0.005461984157562256, 0.005491168022155762, 0.005408192157745361, 0.00541270399093628, 0.005464320182800293, 0.005370336055755615, 0.005406720161437988, 0.005335103988647461, 0.005354656219482422, 0.005753632068634033, 0.005752831935882568, 0.006037312030792236, 0.007058879852294922, 0.009331456184387207, 0.008516736030578613, 0.005598176002502442, 0.005582752227783203, 0.005525504112243652, 0.005551616191864014, 0.005626495838165283, 0.005443327903747559, 0.005564576148986817, 0.0053983678817749025, 0.005375455856323242, 0.0057489280700683595, 0.005420608043670654, 0.005320576190948487, 0.005303040027618409, 0.005312672138214111, 0.00530841588973999, 0.005240960121154785, 0.005344927787780762, 0.005287231922149659, 0.005266335964202881, 0.005260287761688232, 0.005274335861206055, 0.005275936126708985, 0.005293087959289551, 0.005303264141082764, 0.005271455764770508, 0.005290080070495606, 0.005285183906555176, 0.005333951950073242, 0.005318048000335693, 0.0053005762100219725, 0.0053043198585510255, 0.005414912223815918, 0.005436768054962158, 0.005417920112609863, 0.005480095863342285, 0.005465568065643311, 0.005448224067687989, 0.005516416072845459, 0.005434304237365723, 0.005416319847106933, 0.005422912120819092, 0.0054815678596496584, 0.005578464031219483, 0.005566271781921386, 0.005486783981323243, 0.0055101442337036136, 0.005442240238189697, 0.0055248641967773435, 0.005387199878692627, 0.005279583930969238, 0.005275807857513428, 0.005268703937530517, 0.005286687850952148, 0.005302271842956543, 0.005285888195037842, 0.00529363203048706, 0.005280191898345947, 0.005373888015747071, 0.005297311782836914, 0.005303199768066406, 0.0053463678359985355, 0.005319007873535156, 0.005305056095123291, 0.005348991870880127, 0.005327104091644287, 0.005444799900054932, 0.005409599781036377, 0.005434688091278076, 0.005315072059631347, 0.005383935928344727, 0.005375999927520752, 0.005379839897155762, 0.005396736145019531, 
0.0054102401733398435, 0.005357600212097168, 0.005339680194854736, 0.0053554878234863285, 0.005281599998474121, 0.005289247989654541, 0.005313055992126465, 0.005234623908996582, 0.005269343852996826, 0.005289663791656494, 0.005270624160766601, 0.005385280132293701, 0.005322559833526612, 0.005308608055114746, 0.005356224060058594, 0.0052962880134582516, 0.00525929594039917, 0.005311520099639893, 0.0053043198585510255, 0.0052822079658508304, 0.005320703983306885, 0.00532528018951416, 0.00530841588973999, 0.005351424217224121, 0.00544371223449707, 0.005355391979217529, 0.005345280170440674, 0.005320703983306885, 0.005267327785491944, 0.005281919956207275, 0.005271423816680909, 0.005242112159729004, 0.005261695861816406, 0.005200160026550293, 0.005310400009155274, 0.005183104038238525, 0.0052202239036560055, 0.005187967777252197, 0.005177760124206543, 0.005228544235229492, 0.005216288089752197, 0.005204224109649659, 0.005211872100830078, 0.005210112094879151, 0.0051998720169067385, 0.005251071929931641, 0.005175295829772949, 0.005189824104309082, 0.005171264171600342, 0.0051855998039245605, 0.005168223857879638, 0.005196352005004883, 0.005218592166900635, 0.005216000080108643, 0.005180448055267334, 0.005185535907745361, 0.0052557439804077144, 0.005198239803314209, 0.005111584186553955, 0.005177760124206543, 0.005200352191925049, 0.005244383811950684, 0.005208608150482178, 0.005232639789581299, 0.005227712154388428, 0.005281983852386474, 0.005276288032531738, 0.005185440063476563, 0.005172607898712159, 0.005184127807617188, 0.0052399678230285645, 0.005243840217590332, 0.005162112236022949, 0.005188479900360108, 0.005187583923339844, 0.005361663818359375, 0.005203968048095703, 0.005197824001312256, 0.005179391860961914, 0.005164608001708985, 0.005189087867736817, 0.005172192096710205, 0.005151872158050537, 0.00520851182937622, 0.005185823917388916, 0.005183231830596924, 0.005290112018585205, 0.005191967964172363, 0.005201632022857666, 0.005161119937896729, 0.005203904151916504, 0.005228640079498291, 0.005177440166473389, 0.005197887897491455, 0.005173183917999267, 0.005799935817718506, 0.005982207775115967, 0.006150335788726807, 0.006502367973327637, 0.005484384059906006, 0.005242879867553711, 0.005216576099395752, 0.005207744121551514, 0.005251071929931641, 0.005230591773986816, 0.005195775985717774, 0.00532419204711914, 0.005204544067382812, 0.005199903964996338, 0.005191296100616455, 0.005175680160522461, 0.0051662721633911135, 0.005181344032287598, 0.005210400104522705, 0.005204288005828858, 0.0051708159446716305, 0.005189663887023925, 0.00524560022354126, 0.005178880214691162, 0.005175583839416504, 0.00520851182937622, 0.005102880001068115, 0.005405792236328125, 0.005179168224334717, 0.005234848022460938, 0.005216095924377441, 0.00530841588973999, 0.005314720153808594, 0.005256192207336426, 0.005194591999053955, 0.005201920032501221, 0.005208064079284668, 0.005172863960266113, 0.005157248020172119, 0.0052013759613037105, 0.005180255889892578, 0.005170048236846924, 0.005271647930145264, 0.005184224128723144, 0.005164480209350586, 0.005177919864654541, 0.005305984020233154, 0.005155456066131592, 0.005177408218383789, 0.005189151763916016, 0.005183648109436035, 0.005332992076873779, 0.005547679901123047, 0.005200128078460694, 0.005174528121948243, 0.0052007360458374025, 0.005183487892150879, 0.005171199798583984, 0.005236800193786621, 0.005186592102050781, 0.005211040019989014, 0.005199967861175537, 0.005204063892364502, 0.005228352069854736, 0.005172416210174561, 0.005194623947143554, 
0.0051916160583496095, 0.0051643519401550295, 0.005229248046875, 0.005248672008514405, 0.00522876787185669, 0.005238912105560303, 0.005233920097351074, 0.005288703918457031, 0.005236639976501465, 0.005275743961334229, 0.00525216007232666, 0.005259647846221924, 0.0052740478515625, 0.005265503883361816, 0.005292031764984131, 0.005298111915588379, 0.005255263805389404, 0.005237055778503418, 0.005325535774230957, 0.00527023983001709, 0.005263040065765381, 0.005399104118347168, 0.00526470422744751, 0.005214943885803223, 0.005255712032318115, 0.00525267219543457, 0.005304351806640625, 0.005279104232788086, 0.005278463840484619, 0.005297696113586426, 0.005301760196685791, 0.005337984085083008, 0.005298175811767578, 0.005242335796356201, 0.005259935855865479, 0.005253087997436523, 0.005346528053283691, 0.005372704029083252, 0.005326848030090332, 0.0053877439498901366, 0.005470367908477783, 0.005425631999969482, 0.00540012788772583, 0.005384543895721436, 0.005385695934295654, 0.005372447967529297, 0.005398528099060058, 0.005433343887329102, 0.005334752082824707, 0.005318943977355957, 0.005276703834533691, 0.005253952026367187, 0.005355679988861084, 0.005294047832489014, 0.005270976066589356, 0.005404672145843506, 0.0054514241218566895, 0.005579103946685791, 0.005494912147521972, 0.005501279830932617, 0.005471648216247559, 0.005396927833557129, 0.00547215986251831, 0.00549721622467041, 0.005449728012084961, 0.005486591815948487, 0.0055459198951721194, 0.005613632202148437, 0.00555017614364624, 0.005619616031646729, 0.005561952114105225, 0.005601920127868652, 0.005574175834655762, 0.005646592140197754, 0.005615615844726562, 0.005530784130096436, 0.005511936187744141, 0.005531904220581055, 0.005412992000579834, 0.005432864189147949, 0.005348544120788574, 0.005346303939819336, 0.005339231967926026, 0.0053779520988464355, 0.005401631832122803, 0.005489727973937988, 0.005679232120513916, 0.005488895893096924, 0.005836415767669678, 0.005541888236999512, 0.005681151866912842, 0.005550079822540284, 0.0055763840675354, 0.005570879936218262, 0.005522463798522949, 0.00553059196472168, 0.005590240001678467, 0.005464863777160644, 0.005476352214813233, 0.005457376003265381, 0.005461599826812744, 0.005333951950073242, 0.005334752082824707, 0.005365280151367188, 0.0053294401168823245, 0.005314527988433838, 0.005378111839294434, 0.00532089614868164, 0.005395967960357666, 0.005788127899169922, 0.005662144184112549, 0.005411424160003662, 0.005453311920166016, 0.005820223808288574, 0.005472576141357422, 0.005449408054351806, 0.005446335792541504, 0.005392384052276611, 0.005342336177825928, 0.005299071788787842, 0.005344480037689209, 0.005309216022491455, 0.005297952175140381, 0.005324575901031494, 0.005277599811553955, 0.005244639873504639, 0.005325215816497803, 0.00525324821472168, 0.005212448120117188, 0.005265408039093018, 0.005206016063690186, 0.0052276158332824705, 0.005229472160339355, 0.005252352237701416, 0.005249792098999023, 0.0052512001991271975, 0.005242239952087402, 0.005273920059204101, 0.005204127788543701, 0.005238560199737549, 0.005204351902008056, 0.005219232082366943, 0.005233632087707519, 0.005257376194000244, 0.005254687786102295, 0.005319007873535156, 0.005352640151977539, 0.005378528118133545, 0.005343552112579346, 0.005316544055938721, 0.0052954239845275876, 0.005274144172668457, 0.005319968223571777, 0.005356480121612548, 0.005315936088562011, 0.0052919998168945315, 0.005328800201416016, 0.005267231941223144, 0.00528495979309082, 0.005221471786499023, 0.0052358078956604, 0.005218016147613525, 
0.005232736110687256, 0.005218272209167481, 0.005388224124908447, 0.005281792163848877, 0.005322656154632568, 0.005331039905548096, 0.005299200057983398, 0.005264383792877197, 0.005273600101470947, 0.005597184181213379, 0.005337088108062744, 0.005330560207366944, 0.005330624103546143, 0.005333695888519287, 0.005338912010192871, 0.005394815921783447, 0.005356863975524902, 0.005313055992126465, 0.005338560104370118, 0.005316671848297119, 0.005267744064331055, 0.005486976146697998, 0.005377888202667236, 0.005251071929931641, 0.005242976188659668, 0.00526643180847168, 0.005208992004394532, 0.0052733120918273925, 0.005224575996398926, 0.005506752014160156, 0.005480447769165039, 0.005286464214324951, 0.00527350378036499, 0.0052674560546875, 0.005281375885009766, 0.005347199916839599, 0.005392159938812256, 0.005399295806884766, 0.005440832138061524, 0.005452479839324951, 0.005523551940917968, 0.005597087860107422, 0.00569488000869751, 0.005451680183410645, 0.005475135803222656, 0.005433311939239502, 0.005459167957305908, 0.005495488166809082, 0.005416607856750489, 0.00539302396774292, 0.00540067195892334, 0.005404064178466797, 0.005378367900848388, 0.005433631896972656, 0.005492735862731934, 0.005302271842956543, 0.005337088108062744, 0.0055328960418701174, 0.005310624122619629, 0.005372384071350098, 0.0053446397781372074, 0.005325535774230957, 0.0053209600448608394, 0.0053348479270935055, 0.005264736175537109, 0.0053985600471496585, 0.005286848068237305, 0.0052936959266662595, 0.005296192169189453, 0.0052633600234985355, 0.005255231857299805, 0.005315743923187256, 0.00534607982635498, 0.005312511920928955, 0.0053249278068542485, 0.005447423934936523, 0.005414463996887207, 0.005356095790863037, 0.005536863803863526, 0.005359839916229248, 0.005411520004272461, 0.007044544219970703, 0.006045375823974609, 0.006243552207946777, 0.005506720066070557, 0.005565760135650635, 0.005483200073242187, 0.005687295913696289, 0.005615327835083008, 0.005410783767700196, 0.005356895923614502, 0.005379263877868652, 0.005340320110321045, 0.005326848030090332, 0.005400991916656494, 0.0053786239624023435, 0.005340479850769043, 0.005340544223785401, 0.005298592090606689, 0.005321311950683594, 0.005316703796386719, 0.005385568141937256, 0.005330719947814941, 0.005363935947418213, 0.005343776226043701, 0.005421055793762207, 0.005386240005493164, 0.005398431777954102, 0.005390272140502929, 0.005398687839508057, 0.005383456230163574, 0.00537395191192627, 0.005353631973266601, 0.005222591876983642, 0.005300127983093262, 0.005294239997863769, 0.005326496124267578, 0.005294496059417724, 0.00532480001449585, 0.0052856640815734865, 0.005420639991760254, 0.005282432079315186, 0.005279744148254394, 0.005258399963378907, 0.005382719993591308, 0.005261023998260498, 0.005284416198730469, 0.0052871999740600585, 0.005261504173278809, 0.005290527820587158, 0.005971968173980713, 0.005605375766754151, 0.00531609582901001, 0.005312160015106201, 0.005351871967315674, 0.005310751914978027, 0.005350815773010254, 0.005343071937561035, 0.005304255962371826, 0.005302303791046142, 0.005241759777069092, 0.005227935791015625, 0.005231200218200683, 0.0052674560546875, 0.005249023914337158, 0.005339136123657226, 0.0052667841911315915, 0.005233312129974365, 0.005236959934234619, 0.005393856048583985, 0.005333439826965332, 0.005273375988006592, 0.005238272190093994, 0.005238687992095948, 0.005198560237884522, 0.005306367874145508, 0.005260608196258545, 0.005194719791412354, 0.005246975898742676, 0.005543168067932129, 0.005202112197875977, 
0.00521779203414917, 0.005196576118469238, 0.005232639789581299, 0.005222400188446045, 0.0052165122032165525, 0.005226240158081054, 0.005193535804748535, 0.005253151893615722, 0.005271743774414062, 0.0052193598747253414, 0.005213024139404297, 0.0051818561553955075, 0.005182591915130616, 0.0051851201057434085, 0.005266335964202881, 0.00510211181640625, 0.005234367847442627, 0.005218560218811035, 0.005222464084625244, 0.005232639789581299, 0.005163008213043213, 0.005207295894622803, 0.005207071781158448, 0.005199903964996338, 0.005275328159332276, 0.005241087913513184, 0.005211904048919678, 0.005255167961120606, 0.005220352172851562, 0.00518943977355957, 0.005215519905090332, 0.0051957440376281736, 0.005208255767822266, 0.005218688011169433, 0.005216447830200195, 0.005265215873718262, 0.00520252799987793, 0.005221568107604981, 0.005191904067993164, 0.005185919761657715, 0.0052197761535644534, 0.005140543937683106, 0.005210368156433105, 0.005170623779296875, 0.005240960121154785, 0.005239168167114258, 0.005197696208953858, 0.005196320056915283, 0.005221759796142578, 0.0051617598533630375, 0.0053431358337402346, 0.00518943977355957, 0.005226304054260254, 0.00522057580947876, 0.0052460160255432126, 0.005237696170806885, 0.005246528148651123, 0.00520851182937622, 0.005189375877380371, 0.005236192226409912, 0.005230751991271973, 0.005210752010345459, 0.005202015876770019, 0.005210015773773193, 0.0053678078651428224, 0.0052750401496887206, 0.005323359966278076, 0.00520524787902832, 0.005224256038665771, 0.005187744140625, 0.005528416156768799, 0.00524012804031372, 0.005212160110473632, 0.005208352088928223, 0.005176832199096679, 0.0051801280975341795, 0.005195903778076172, 0.005269375801086426, 0.005171135902404785, 0.005211967945098877, 0.005205728054046631, 0.005184000015258789, 0.005214240074157715, 0.005258431911468506, 0.005251904010772705, 0.005274623870849609, 0.005333504199981689, 0.0053318080902099605, 0.005237760066986084, 0.005211071968078613, 0.005295519828796387, 0.005238751888275147, 0.005222239971160889, 0.005251455783843994, 0.005211455821990967, 0.005174079895019531, 0.005193727970123291, 0.005221439838409424, 0.005209023952484131, 0.005176959991455078, 0.005216224193572998, 0.0051940159797668455, 0.005147840023040771, 0.005219264030456543, 0.005212096214294434, 0.0051918082237243654, 0.005433279991149903, 0.00543552017211914, 0.005270815849304199, 0.005206111907958984, 0.005184000015258789, 0.005173247814178467, 0.005151840209960937, 0.0053441600799560545, 0.005754432201385498, 0.005509568214416504, 0.005195199966430664, 0.005232831954956055, 0.005200352191925049, 0.005138400077819824, 0.0051916160583496095, 0.005144864082336426, 0.005201663970947266, 0.005145567893981934, 0.0051404800415039064, 0.005188896179199219, 0.0051463360786437986, 0.005142528057098389, 0.00517958402633667, 0.005148384094238281, 0.005155104160308838, 0.005152575969696045, 0.00524294376373291, 0.006780159950256348, 0.005196127891540527, 0.005148191928863525, 0.005163839817047119, 0.005189504146575928, 0.005199327945709229, 0.005145120143890381, 0.005130368232727051, 0.005043488025665283, 0.005290016174316407, 0.005169888019561767, 0.0051773438453674315, 0.005138432025909424, 0.005167520046234131, 0.005174880027770996, 0.0051948800086975095, 0.005177599906921387, 0.005151711940765381, 0.0051580162048339845, 0.005216800212860108, 0.005157983779907226, 0.005172128200531006, 0.005127808094024658, 0.0054254398345947264, 0.005259359836578369, 0.005240447998046875, 0.005615615844726562, 0.005218688011169433, 
0.005197824001312256, 0.005211872100830078, 0.005353759765625, 0.005177440166473389, 0.005203360080718994, 0.005200255870819092, 0.00519385576248169, 0.005234687805175781, 0.00525494384765625, 0.005201759815216064, 0.005192160129547119, 0.005618815898895264, 0.0052091522216796875, 0.005164608001708985, 0.00520579195022583, 0.005298431873321533, 0.005203167915344239, 0.0052130880355834965, 0.005243135929107666, 0.00521120023727417, 0.005179168224334717, 0.005161439895629883, 0.005165503978729248, 0.005158815860748291, 0.005208159923553467, 0.0051847038269042965, 0.005151552200317383, 0.00522815990447998, 0.005171584129333496, 0.0052204480171203615, 0.005190688133239746, 0.00514246416091919, 0.0051550078392028804, 0.005165823936462402, 0.005249023914337158, 0.005250048160552978, 0.005157792091369629, 0.005171296119689942, 0.005172512054443359, 0.005199999809265137, 0.00517526388168335, 0.005175648212432861, 0.005191008090972901, 0.005106175899505615, 0.005195424079895019, 0.0051938238143920895, 0.00518006420135498, 0.005141952037811279, 0.005183872222900391, 0.005205567836761475, 0.00516761589050293, 0.005149856090545655, 0.005179520130157471, 0.005235424041748047, 0.005197824001312256, 0.0051979517936706545, 0.005170464038848877, 0.00517958402633667, 0.005202335834503174, 0.005219552040100097, 0.005173471927642823, 0.00516764783859253, 0.0052135357856750485, 0.005170080184936523, 0.00521295976638794, 0.00521014404296875, 0.005196512222290039, 0.0051814718246459964, 0.0052081279754638676, 0.005202112197875977, 0.00524835205078125, 0.005216063976287842, 0.005294911861419678, 0.005214303970336914, 0.0051794562339782715, 0.005226143836975098, 0.0052308797836303714, 0.005240543842315674, 0.005239295959472656, 0.005232639789581299, 0.005240511894226074, 0.005210112094879151, 0.005187583923339844, 0.005187583923339844, 0.005153823852539063, 0.005229631900787354, 0.005210015773773193, 0.00523414421081543, 0.00521017599105835, 0.005220831871032715, 0.005273695945739746, 0.005363615989685058, 0.005218560218811035, 0.0051996798515319825, 0.005197760105133057, 0.005255167961120606, 0.005197824001312256, 0.0055214080810546875, 0.0052408318519592285, 0.005230175971984863, 0.005196191787719726, 0.005283840179443359, 0.005203199863433838, 0.005190624237060547, 0.005195424079895019, 0.0051981439590454105, 0.005091360092163086, 0.005216576099395752, 0.005195775985717774, 0.005285312175750732, 0.005245503902435303, 0.005159967899322509, 0.005219071865081787, 0.00522876787185669, 0.005188896179199219, 0.005245664119720459, 0.00522979211807251, 0.005210847854614258, 0.005276991844177246, 0.0052492480278015135, 0.005240447998046875, 0.00538108777999878, 0.005277632236480713, 0.005268703937530517, 0.005260064125061035, 0.0052063679695129395, 0.005218175888061524, 0.005187136173248291, 0.005334303855895996, 0.005234943866729736, 0.0052005119323730465, 0.0052501120567321775, 0.005204991817474365, 0.005201536178588867, 0.00523635196685791, 0.00522873592376709, 0.005233215808868408, 0.005191679954528809, 0.005234464168548584, 0.005343008041381836, 0.005243328094482422, 0.005244927883148193, 0.005283199787139893, 0.005221216201782226, 0.005254079818725586, 0.005230463981628418, 0.005199935913085938, 0.005425119876861572, 0.005292992115020752, 0.0052715520858764645, 0.005326848030090332, 0.005277696132659912, 0.005341055870056152, 0.005300543785095215, 0.005302303791046142, 0.005317503929138183, 0.005348256111145019, 0.0053002238273620605, 0.005274687767028809, 0.005297088146209717, 0.0053053760528564455, 
0.0053207998275756835, 0.005373087882995606, 0.005365664005279541, 0.005278624057769775, 0.0053105602264404295, 0.005423647880554199, 0.005273888111114502, 0.005319808006286621, 0.005444767951965332, 0.005309279918670654, 0.005326848030090332, 0.005414112091064453, 0.005270304203033448, 0.005295711994171143, 0.005301663875579834, 0.005284543991088867, 0.005261760234832763, 0.005265279769897461, 0.005320703983306885, 0.005340735912322998, 0.0053294081687927245, 0.005729407787322998, 0.005329343795776367, 0.005333504199981689, 0.0052386560440063476, 0.005269504070281982, 0.0052911038398742675, 0.005346240043640137, 0.005351391792297364, 0.005326752185821533, 0.0052466878890991215, 0.0052739839553833006, 0.005261312007904053, 0.005281087875366211, 0.005339263916015625, 0.005326848030090332, 0.0053417601585388184, 0.005304543972015381, 0.005320159912109375, 0.005333312034606933, 0.005270624160766601, 0.005374879837036133, 0.005339488029479981, 0.005266304016113281, 0.005294879913330078, 0.005305856227874756, 0.005300735950469971, 0.0053309440612792965, 0.005299680233001709, 0.005291903972625732, 0.005307360172271729, 0.005308256149291992, 0.005352352142333984, 0.005292223930358887, 0.005286816120147705, 0.005287903785705567, 0.005280928134918213, 0.005304895877838135, 0.005308671951293946, 0.005257120132446289, 0.005283840179443359, 0.005256608009338379, 0.005291872024536133, 0.005247680187225342, 0.0052583680152893065, 0.005225215911865234, 0.005281023979187012, 0.005272160053253174, 0.0052778878211975095, 0.005303840160369873, 0.005327104091644287, 0.0053143038749694825, 0.005296383857727051, 0.005285888195037842, 0.005312511920928955, 0.0052432317733764645, 0.005255904197692871, 0.0052848000526428225, 0.005246975898742676, 0.005255167961120606, 0.005273759841918945, 0.0052449598312377926, 0.005322559833526612, 0.005467807769775391, 0.005499231815338135, 0.005306367874145508, 0.005530816078186035, 0.005313344001770019, 0.0053506879806518555, 0.005361728191375733, 0.006898367881774902, 0.006851967811584473, 0.00540118408203125, 0.005359615802764893, 0.005310463905334473, 0.005351424217224121, 0.005328896045684814, 0.005296127796173096, 0.005311488151550293, 0.005299200057983398, 0.0053034558296203614, 0.005331424236297608, 0.005622399806976318, 0.005400576114654541, 0.005324831962585449, 0.005316319942474365, 0.00524505615234375, 0.005263232231140137, 0.005199391841888428, 0.0052219839096069335, 0.005224703788757324, 0.005206655979156494, 0.0052367358207702636, 0.005255167961120606, 0.005193024158477783, 0.005298143863677979, 0.005238880157470703, 0.0051943678855896, 0.005190720081329345, 0.005190080165863037, 0.005247488021850586, 0.005484799861907959, 0.005261280059814453, 0.005257279872894287, 0.005224160194396973, 0.0052080960273742675, 0.005247039794921875, 0.00523414421081543, 0.005248703956604004, 0.005264287948608399, 0.005252480030059814, 0.005241312026977539, 0.005298175811767578, 0.005275296211242676, 0.00514083194732666, 0.005173376083374023, 0.005189023971557617, 0.0052213120460510255, 0.0052015681266784665, 0.005188960075378418, 0.005208735942840576, 0.005203680038452148, 0.00526364803314209, 0.005210112094879151, 0.005203968048095703, 0.005184607982635498, 0.005200640201568603, 0.005175487995147705, 0.005189087867736817, 0.005181280136108399, 0.005222527980804443, 0.005191679954528809, 0.005186079978942871, 0.005242976188659668, 0.0053628478050231935, 0.005181375980377197, 0.005267936229705811, 0.0052288641929626465, 0.005191711902618408, 0.005214208126068115, 
0.005220352172851562, 0.005222367763519287, 0.005206048011779785, 0.00531660795211792, 0.005226047992706298, 0.005226880073547363, 0.00570579195022583, 0.005610496044158936, 0.005876832008361817, 0.005631904125213623, 0.005756927967071533, 0.0053229122161865235, 0.005256063938140869, 0.005311200141906738, 0.0052492799758911135, 0.005229983806610107, 0.005252960205078125, 0.0052839360237121585, 0.0052432317733764645, 0.005194399833679199, 0.005217023849487305, 0.005196063995361328, 0.005169792175292969, 0.0052367358207702636, 0.0052977919578552244, 0.005202303886413575, 0.00518723201751709, 0.005310207843780518, 0.005212768077850342, 0.005187583923339844, 0.0053002238273620605, 0.005209343910217285, 0.005167871952056885, 0.005172800064086914, 0.005230591773986816, 0.005150847911834717, 0.0051511359214782716, 0.005136832237243653, 0.005199776172637939, 0.005164735794067383, 0.005210527896881104, 0.005156864166259765, 0.0051673917770385745, 0.00517091178894043, 0.005242879867553711, 0.005219552040100097, 0.005354400157928467, 0.005214208126068115, 0.005267327785491944, 0.005525599956512451, 0.005214240074157715, 0.005222271919250488, 0.005189631938934326, 0.005213791847229004, 0.005327263832092285, 0.005230336189270019, 0.005252927780151367, 0.005222879886627198, 0.005193120002746582, 0.005203936100006103, 0.005196159839630127, 0.005222527980804443, 0.005187679767608642, 0.005173247814178467, 0.005259263992309571, 0.005186783790588379, 0.005180448055267334, 0.005189087867736817, 0.005257567882537842, 0.005162943840026855, 0.0051651840209960935, 0.005185408115386963, 0.005195775985717774, 0.005154911994934082, 0.005197728157043457, 0.005221536159515381, 0.005177760124206543, 0.00519596815109253, 0.005189888000488281, 0.005179327964782715, 0.005205344200134277, 0.005179967880249024, 0.006047904014587402, 0.005228544235229492, 0.005185535907745361, 0.005238783836364746, 0.005244927883148193, 0.0052120318412780765, 0.005171167850494385, 0.0051914558410644535, 0.005202400207519531, 0.005183328151702881, 0.005173312187194824, 0.0052074241638183595, 0.005161600112915039, 0.005163167953491211, 0.005228384017944336, 0.005174399852752686, 0.005237631797790528, 0.005159135818481445]",tokens/s,187.00617262132317,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1846.718464,3181.248512,0.0,2778.7264,2485.458944,s,1,9.571412109375,9.571412109375,0.0,9.571412109375,9.571412109375,9.571412109375,9.571412109375,[9.571412109375],,kWh,5.981635140836564e-05,6.5907576965785035e-06,2.3407518726009657e-05,8.98146278309538e-05,,MB,1884.91776,3208.511488,0.0,2791.309312,2192.262656,s,10,0.6890395431518554,0.06890395431518555,0.00012344336781756215,0.06890537643432618,0.06906681060791015,0.0690948600769043,0.0691172996520996,"[0.068976318359375, 0.06906057739257812, 0.06874940490722656, 0.0689697265625, 0.06875251007080078, 0.06880332946777344, 0.06893097686767578, 
0.06887977600097656, 0.06879401397705077, 0.06912290954589843]",tokens/s,3715.316523475357,kWh,2.051766153263279e-06,2.2627303397152105e-07,1.3635636394126781e-06,3.6416028266474786e-06,tokens/kWh,70298715.20494121,MB,1884.91776,3210.60864,0.0,2793.406464,2192.265216,s,10,14.676991943359376,1.4676991943359377,0.006223674092212024,1.4691343994140624,1.4739992797851562,1.4744118713378906,1.4747419445800782,"[1.472859375, 1.4739075927734375, 1.4610623779296874, 1.4685751953125, 1.4573760986328126, 1.473489990234375, 1.469693603515625, 1.458694091796875, 1.4665091552734375, 1.474824462890625]",tokens/s,42.92432689417972,kWh,4.182680401924377e-05,4.61314164888694e-06,2.2528622140984304e-05,6.896856780911503e-05,tokens/kWh,913459.5947296703,,s,630,14.675075885772705,0.02329377124725826,0.00032567108914041427,0.023210736274719236,0.023599504280090333,0.02376534080505371,0.02466829425811768,"[0.02330937576293945, 0.023095584869384764, 0.0231977596282959, 0.023256895065307617, 0.023182144165039064, 0.023125823974609376, 0.02316208076477051, 0.023399391174316407, 0.023377920150756838, 0.023433216094970705, 0.024306720733642578, 0.025262432098388674, 0.023618175506591798, 0.02352511978149414, 0.023753984451293945, 0.023957984924316406, 0.023577024459838867, 0.023268800735473633, 0.023253664016723633, 0.02321116828918457, 0.02316783905029297, 0.02331648063659668, 0.02326016044616699, 0.023196672439575194, 0.023146495819091797, 0.02326665687561035, 0.023173471450805665, 0.023291391372680666, 0.02322092819213867, 0.02321321678161621, 0.023204832077026366, 0.023234560012817384, 0.023199615478515626, 0.023125728607177733, 0.023209503173828125, 0.023167871475219728, 0.0232959041595459, 0.0232260799407959, 0.023217855453491212, 0.02319808006286621, 0.023210304260253906, 0.02317228889465332, 0.0231693115234375, 0.02315932846069336, 0.023218175888061524, 0.023136255264282226, 0.023162879943847657, 0.023556095123291015, 0.023295135498046876, 0.02335215950012207, 0.023214176177978517, 0.02340617561340332, 0.02351136016845703, 0.023767040252685546, 0.02367011260986328, 0.023643871307373047, 0.02362828826904297, 0.023560224533081056, 0.023529279708862306, 0.023502592086791993, 0.023429920196533203, 0.023404607772827147, 0.023347200393676756, 0.02397123146057129, 0.023480928421020508, 0.02351273536682129, 0.023423328399658203, 0.023341056823730468, 0.023258623123168946, 0.023214591979980468, 0.023205663681030272, 0.023224544525146485, 0.02346598434448242, 0.023242752075195314, 0.023359487533569336, 0.023774688720703124, 0.023398944854736328, 0.0233057918548584, 0.023276992797851562, 0.02327654457092285, 0.023201248168945313, 0.023198240280151366, 0.02323276710510254, 0.023256191253662108, 0.023282207489013673, 0.023240543365478514, 0.02335286331176758, 0.023617279052734374, 0.023398624420166016, 0.02336867141723633, 0.023385887145996095, 0.02349875259399414, 0.023484256744384764, 0.02355011177062988, 0.023578367233276366, 0.025766143798828123, 0.02388719940185547, 0.023486783981323242, 0.023547840118408204, 0.023579231262207033, 0.02348806381225586, 0.02359321594238281, 0.023776384353637697, 0.024582111358642578, 0.023589183807373047, 0.023374719619750975, 0.02342627143859863, 0.0234071044921875, 0.023377920150756838, 0.023182880401611327, 0.023185279846191405, 0.02320035171508789, 0.023138303756713868, 0.02304819107055664, 0.023039968490600585, 0.02301139259338379, 0.02302284812927246, 0.02300592041015625, 0.023040000915527343, 0.023058176040649414, 0.023155168533325197, 0.02310691261291504, 0.02304582405090332, 
0.02315507125854492, 0.023036287307739257, 0.023027711868286133, 0.023340831756591796, 0.023181407928466798, 0.023226463317871093, 0.023224319458007812, 0.023233600616455078, 0.02322761535644531, 0.023260736465454103, 0.02327280044555664, 0.023166912078857422, 0.023239551544189455, 0.02311087989807129, 0.023140640258789064, 0.0232142391204834, 0.023351648330688476, 0.023489952087402344, 0.02336796760559082, 0.023246143341064455, 0.023880704879760743, 0.02345369529724121, 0.023350784301757813, 0.02342963218688965, 0.02345180892944336, 0.023391775131225586, 0.023271455764770507, 0.023144384384155274, 0.02309974479675293, 0.023004959106445313, 0.023072032928466796, 0.023345760345458984, 0.023341407775878908, 0.02315807914733887, 0.023050783157348632, 0.02305830383300781, 0.023094911575317383, 0.02301139259338379, 0.023100000381469726, 0.023017471313476562, 0.02307491111755371, 0.022979711532592772, 0.023022655487060548, 0.02304732894897461, 0.02310211181640625, 0.02305558395385742, 0.023071039199829103, 0.02297225570678711, 0.02309584045410156, 0.02315673637390137, 0.02329395294189453, 0.02317465591430664, 0.023345663070678712, 0.0232871036529541, 0.023394943237304688, 0.023069759368896485, 0.02303094482421875, 0.023032928466796877, 0.023099519729614257, 0.023030399322509765, 0.0230830078125, 0.023171072006225587, 0.023195648193359376, 0.023029535293579102, 0.0230545597076416, 0.02298067283630371, 0.023614112854003905, 0.023435264587402343, 0.023402240753173827, 0.023500255584716797, 0.023257888793945313, 0.023199743270874023, 0.023269344329833984, 0.02505731201171875, 0.023721343994140626, 0.02317375946044922, 0.023128063201904296, 0.0230645751953125, 0.023076864242553712, 0.023073055267333983, 0.02330723190307617, 0.02332339286804199, 0.023252447128295897, 0.02316547203063965, 0.02309734344482422, 0.0230863037109375, 0.02310153579711914, 0.023179967880249022, 0.02304819107055664, 0.023037088394165038, 0.0230798397064209, 0.023183231353759767, 0.02314041519165039, 0.02345779228210449, 0.023775232315063476, 0.023604511260986328, 0.023716575622558595, 0.026101760864257813, 0.023402496337890624, 0.02323865509033203, 0.02364825630187988, 0.02330396842956543, 0.023314655303955076, 0.023207935333251953, 0.023148511886596678, 0.02341481590270996, 0.023177215576171875, 0.02315644836425781, 0.023247135162353515, 0.023146495819091797, 0.023102624893188477, 0.02315555191040039, 0.02310348892211914, 0.023093248367309572, 0.023358688354492188, 0.02321414375305176, 0.023122848510742186, 0.02309209632873535, 0.02304240036010742, 0.023120479583740236, 0.023082912445068358, 0.023023136138916017, 0.02311840057373047, 0.023191616058349608, 0.0231496639251709, 0.023073183059692384, 0.02305273628234863, 0.02303958320617676, 0.023213727951049805, 0.023348703384399414, 0.023187936782836913, 0.023117183685302734, 0.0231409912109375, 0.023029760360717775, 0.023573535919189453, 0.023665632247924805, 0.023353023529052733, 0.023589183807373047, 0.023457183837890624, 0.023166976928710937, 0.023063039779663085, 0.022998847961425782, 0.02302761650085449, 0.023069087982177734, 0.023037919998168944, 0.023130111694335938, 0.023091039657592773, 0.023097503662109376, 0.023162784576416014, 0.023048288345336915, 0.022978559494018554, 0.023023616790771483, 0.02302332878112793, 0.02299932861328125, 0.022988800048828126, 0.022991039276123046, 0.02375235176086426, 0.02308252716064453, 0.023253631591796876, 0.023027040481567382, 0.022974720001220705, 0.023037664413452147, 0.023078943252563478, 0.022995616912841795, 
0.02308095932006836, 0.02304204750061035, 0.023104576110839842, 0.02300761604309082, 0.023001535415649414, 0.023009344100952147, 0.02303308868408203, 0.02311187171936035, 0.02300992012023926, 0.022931455612182617, 0.02304150390625, 0.02296681594848633, 0.02307276725769043, 0.022976192474365234, 0.02334160041809082, 0.02316652870178223, 0.02311599922180176, 0.023037952423095705, 0.02308915138244629, 0.02326323127746582, 0.02309334373474121, 0.023013280868530273, 0.023268735885620118, 0.023156639099121093, 0.02316707229614258, 0.023124223709106446, 0.023191936492919923, 0.023205535888671875, 0.023493568420410157, 0.023333887100219726, 0.02330841636657715, 0.024617855072021484, 0.02330009651184082, 0.023173120498657225, 0.023301504135131837, 0.023126304626464842, 0.023126623153686524, 0.023185152053833008, 0.023078399658203123, 0.022984384536743164, 0.0230633602142334, 0.02314854431152344, 0.023109151840209962, 0.02304777526855469, 0.02299782371520996, 0.02299295997619629, 0.023023136138916017, 0.02341935920715332, 0.023094655990600586, 0.023071359634399415, 0.022986528396606445, 0.023091424942016603, 0.023005184173583985, 0.02307276725769043, 0.023351295471191406, 0.023113727569580078, 0.023123775482177734, 0.023950687408447267, 0.025282463073730468, 0.023990943908691408, 0.02354118347167969, 0.023247968673706054, 0.023228160858154295, 0.023212032318115236, 0.02342911911010742, 0.023176223754882812, 0.02334934425354004, 0.023848031997680662, 0.024001888275146484, 0.02369580841064453, 0.023744512557983398, 0.02507161521911621, 0.023549951553344727, 0.023468032836914062, 0.023475711822509765, 0.02345155143737793, 0.0235894718170166, 0.02411520004272461, 0.02333286476135254, 0.0232608642578125, 0.02334457588195801, 0.023198591232299805, 0.023234560012817384, 0.023210079193115234, 0.02326927947998047, 0.023219200134277345, 0.023150720596313477, 0.02311257553100586, 0.0231628475189209, 0.023342815399169922, 0.02328607940673828, 0.02369740867614746, 0.02327667236328125, 0.02358358383178711, 0.023244543075561522, 0.023170879364013672, 0.023159168243408204, 0.023226751327514648, 0.024517824172973633, 0.023498847961425783, 0.023235008239746092, 0.02312390327453613, 0.023041824340820312, 0.02317136001586914, 0.02310553550720215, 0.023134208679199218, 0.023128063201904296, 0.023240575790405272, 0.023147808074951173, 0.02324345588684082, 0.023979616165161134, 0.02326118469238281, 0.02312454414367676, 0.023235904693603517, 0.02347897529602051, 0.02373135948181152, 0.023201887130737304, 0.023106304168701172, 0.023744064331054686, 0.02334671974182129, 0.023300191879272462, 0.023155519485473633, 0.023271360397338868, 0.02318547248840332, 0.023198720932006835, 0.02309350395202637, 0.023151359558105468, 0.023133567810058594, 0.023277280807495117, 0.023312959671020508, 0.023150943756103517, 0.023073888778686522, 0.0230532169342041, 0.02308095932006836, 0.022992895126342772, 0.023046016693115234, 0.023285888671875, 0.023044095993041993, 0.023056255340576173, 0.02300326347351074, 0.023223840713500976, 0.02319843292236328, 0.023662336349487306, 0.02360281562805176, 0.023744895935058595, 0.024193023681640623, 0.02369126319885254, 0.023692991256713865, 0.023763263702392578, 0.023725215911865234, 0.023528287887573242, 0.02327142333984375, 0.02311529541015625, 0.02306710433959961, 0.023439359664916993, 0.023236000061035156, 0.02323628807067871, 0.023116704940795898, 0.02311078453063965, 0.023087167739868165, 0.023134111404418945, 0.023278079986572265, 0.02316124725341797, 0.023041536331176758, 0.02306643295288086, 
0.023052991867065428, 0.023066751480102538, 0.023114944458007814, 0.023093952178955077, 0.02305023956298828, 0.023087072372436523, 0.023197727203369142, 0.023002656936645507, 0.023046016693115234, 0.023077375411987306, 0.023169120788574218, 0.02323651123046875, 0.02308105659484863, 0.023027711868286133, 0.023154687881469727, 0.023479999542236327, 0.023470111846923828, 0.023492000579833985, 0.02337171173095703, 0.023440319061279295, 0.02367692756652832, 0.023179264068603517, 0.023259136199951173, 0.023244800567626952, 0.02323206329345703, 0.023295743942260742, 0.023163583755493163, 0.023072704315185547, 0.023127616882324217, 0.023138303756713868, 0.02309542465209961, 0.02306255912780762, 0.023182880401611327, 0.023050336837768554, 0.02312816047668457, 0.023050880432128905, 0.02307481575012207, 0.022994943618774414, 0.023173120498657225, 0.022999040603637694, 0.02305638313293457, 0.02300102424621582, 0.023013439178466797, 0.022961408615112304, 0.022995168685913087, 0.02308355140686035, 0.02305638313293457, 0.023041248321533203, 0.023252864837646485, 0.02304252815246582, 0.023075263977050783, 0.02310531234741211, 0.023442399978637694, 0.023240703582763672, 0.023347200393676756, 0.023301536560058594, 0.023143007278442384, 0.023126016616821288, 0.0231362247467041, 0.023217567443847655, 0.023507583618164064, 0.023148096084594727, 0.02302726364135742, 0.023016576766967774, 0.022998783111572267, 0.02311577606201172, 0.02307072067260742, 0.023015424728393553, 0.023059776306152344, 0.02301971244812012, 0.02300716781616211, 0.022943552017211915, 0.022987520217895508, 0.02339795112609863, 0.023361984252929686, 0.02325459289550781, 0.02300998306274414, 0.023008031845092772, 0.023253984451293945, 0.023322368621826173, 0.02468889617919922, 0.02352035140991211, 0.023382144927978514, 0.02336422348022461, 0.023875743865966796, 0.02349056053161621, 0.02327552032470703, 0.02323004722595215, 0.02339062309265137, 0.023232511520385742, 0.02312396812438965, 0.023117824554443358, 0.023128063201904296, 0.023174560546875, 0.02320185661315918, 0.023237024307250977, 0.023169151306152345, 0.023146495819091797, 0.023191455841064454, 0.02319696044921875, 0.023432191848754884, 0.023188991546630858, 0.023195711135864258, 0.023187711715698243, 0.02315411186218262, 0.023292224884033205, 0.023517440795898438, 0.023346464157104493, 0.023937376022338867, 0.023402271270751954, 0.023478879928588867, 0.023382015228271484, 0.023480512619018554, 0.023350400924682616, 0.023345983505249024, 0.02367417526245117, 0.023568416595458986, 0.023405216217041017, 0.02338387107849121, 0.023187648773193358, 0.023619583129882812, 0.023694559097290038, 0.02327836799621582, 0.023221439361572265, 0.02319206428527832, 0.023232704162597657, 0.023351423263549803, 0.023177215576171875, 0.023631263732910156, 0.02346044731140137, 0.023416288375854494, 0.023659008026123047, 0.023539743423461913, 0.023422208786010743, 0.023487232208251954, 0.02353152084350586, 0.02350432014465332, 0.023417407989501954, 0.023470048904418946, 0.023599136352539064, 0.023830528259277343, 0.02349875259399414, 0.0234803524017334, 0.023945184707641603, 0.023588735580444335, 0.02342451286315918, 0.02343177604675293, 0.023490591049194334, 0.023559616088867186, 0.023437215805053712, 0.023294624328613282, 0.02329190444946289, 0.02343731117248535, 0.02317296028137207, 0.023273632049560546, 0.02379350471496582, 0.02359721565246582, 0.023439359664916993, 0.02336467170715332, 0.023335872650146486, 0.023408639907836915, 0.024135679244995118, 0.023232511520385742, 0.023340192794799805, 
0.023207935333251953, 0.023231327056884767, 0.023162879943847657, 0.023138303756713868, 0.02338787269592285, 0.023127840042114257, 0.02323664093017578, 0.02324937629699707, 0.023201536178588868, 0.023171327590942384, 0.023195648193359376, 0.02311529541015625, 0.02313257598876953, 0.02314188766479492]",tokens/s,42.92993132735871,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4758.474752,7635.59936,0.0,7233.077248,6543.2832,s,1,12.588533203125,12.588533203125,0.0,12.588533203125,12.588533203125,12.588533203125,12.588533203125,[12.588533203125],,kWh,0.0001523027402458865,1.6792606305230585e-05,6.739616502798862e-05,0.0002364915115791057,,MB,1780.8384,7654.473728,0.0,7237.271552,5960.351744,s,10,2.420859176635742,0.2420859176635742,0.0005138109555105713,0.2422885971069336,0.24253287811279298,0.24261312637329102,0.24267732498168945,"[0.2415701446533203, 0.24221466064453126, 0.24196745300292968, 0.24236253356933593, 0.24119386291503905, 0.24251094055175781, 0.24269337463378907, 0.24251504516601563, 0.2424983673095703, 0.24133279418945314]",tokens/s,1057.4758022718288,kWh,7.104466456942443e-06,7.834885164529351e-07,4.729355635334287e-06,1.2617310608729663e-05,tokens/kWh,20289585.31169699,MB,1783.23456,7654.473728,0.0,7237.271552,5960.354304,s,10,22.723371826171874,2.2723371826171874,0.11559462167702798,2.239414794921875,2.28726015625,2.4524809082031247,2.584657509765625,"[2.2179306640625, 2.22373388671875, 2.22501318359375, 2.245681884765625, 2.238302978515625, 2.240526611328125, 2.25054443359375, 2.241124267578125, 2.61770166015625, 2.222812255859375]",tokens/s,27.724758667830763,kWh,6.479772236889453e-05,7.146176328605413e-06,4.2874126891863306e-05,0.00011481802558936327,tokens/kWh,548694.3332862564,,s,630,22.72100329971315,0.03606508460271927,0.00231975967447566,0.035426975250244144,0.03642537155151367,0.044493206596374506,0.04493531101226807,"[0.035918014526367184, 0.03527862548828125, 0.03500284957885742, 0.03499795150756836, 0.03502083206176758, 0.03517443084716797, 0.03523027038574219, 0.035208831787109374, 0.035348705291748043, 0.03525987243652344, 0.03499401473999023, 0.03484758377075195, 0.03508428955078125, 0.035166206359863283, 0.03518899154663086, 0.03507174301147461, 0.035196735382080076, 0.03528518295288086, 0.0352911376953125, 0.03515830230712891, 0.034934497833251955, 0.03504883193969727, 0.035181182861328125, 0.03526860809326172, 0.035143680572509765, 0.03516524887084961, 0.03543318557739258, 0.03535279846191406, 0.03512700653076172, 0.03501833724975586, 0.03515052795410156, 0.035020801544189455, 0.035178497314453126, 0.035118881225585936, 0.03515619277954102, 0.03536383819580078, 0.03503731155395508, 0.03506470489501953, 0.03500870513916016, 0.0351530876159668, 0.034965953826904296, 0.034965694427490236, 0.035192703247070314, 0.03528511810302734, 0.035160064697265625, 0.03513663864135742, 0.03573030471801758, 0.03667744064331055, 0.03543264007568359, 0.035454975128173825, 0.035264511108398434, 0.03500236892700195, 0.03502652740478516, 0.0351473274230957, 0.035093345642089845, 0.035114208221435544, 0.035351329803466794, 0.03514777755737305, 0.03503308868408203, 0.03496905517578125, 0.03533071899414063, 0.035334014892578126, 0.03523497772216797, 0.03605913543701172, 0.03512351989746094, 0.03547273635864258, 0.035353248596191406, 0.035757537841796874, 0.03527971267700195, 0.03512799835205078, 0.03502204895019531, 0.034977054595947264, 0.03494758224487305, 0.03508224105834961, 0.0349901123046875, 0.034922462463378906, 0.03489107131958008, 0.034947776794433595, 0.03490755081176758, 0.034861534118652344, 0.034754016876220706, 0.036424190521240234, 0.03511721420288086, 0.035026943206787106, 0.035141632080078124, 0.03602431869506836, 0.03509187316894531, 0.03523987197875977, 0.035141441345214845, 0.03516819381713867, 0.035238304138183595, 0.03554083251953125, 0.03535734558105469, 
0.03528428649902344, 0.03522835159301758, 0.0353177604675293, 0.03526409530639649, 0.03580969619750977, 0.03523379135131836, 0.03509183883666992, 0.03535244750976563, 0.03517516708374024, 0.03520307159423828, 0.035332096099853515, 0.03541196823120117, 0.03534460830688477, 0.03522438430786133, 0.03524297714233399, 0.03534358215332031, 0.03536566543579102, 0.03510236740112305, 0.03723440170288086, 0.03609868621826172, 0.035176193237304684, 0.036090145111083986, 0.03564847946166992, 0.035165184020996096, 0.035020641326904293, 0.035034912109375, 0.035039295196533204, 0.03500400161743164, 0.035095264434814456, 0.035058784484863284, 0.03498870468139648, 0.03565315246582031, 0.034886367797851564, 0.036192222595214846, 0.03529318237304688, 0.03507628631591797, 0.035079902648925784, 0.03511920166015625, 0.03498393630981445, 0.034872737884521485, 0.03493334579467773, 0.035964256286621095, 0.03690972900390625, 0.035295040130615234, 0.034968929290771486, 0.03632828903198242, 0.035221088409423826, 0.035074207305908205, 0.03498780822753906, 0.03503152084350586, 0.034925567626953126, 0.03507686233520508, 0.035137153625488284, 0.0352608642578125, 0.03742268753051758, 0.03522364807128906, 0.034965824127197266, 0.03497593688964844, 0.03516211318969727, 0.03491430282592774, 0.034970817565917967, 0.03506195068359375, 0.03510745620727539, 0.03520307159423828, 0.03491430282592774, 0.03499139022827148, 0.03484678268432617, 0.03470134353637695, 0.035016990661621096, 0.035114688873291014, 0.03491900634765625, 0.035200191497802735, 0.03553779220581055, 0.035569377899169925, 0.0356541748046875, 0.03532694244384765, 0.0353328971862793, 0.037272830963134766, 0.035389568328857424, 0.03546086502075195, 0.035083137512207034, 0.0355423355102539, 0.035689151763916016, 0.035560638427734374, 0.03524003219604492, 0.0353021125793457, 0.034953216552734374, 0.035166206359863283, 0.0350863037109375, 0.035019935607910155, 0.035285438537597656, 0.03524857711791992, 0.0351124153137207, 0.03534198379516602, 0.035744129180908205, 0.035410335540771484, 0.03624512100219727, 0.035463550567626956, 0.03569606399536133, 0.03544736099243164, 0.03559328079223633, 0.03567087936401367, 0.03561036682128906, 0.03571065521240235, 0.03565836715698242, 0.035743999481201175, 0.03585612869262695, 0.03579702377319336, 0.03575807952880859, 0.03545907211303711, 0.03544633483886719, 0.035334590911865235, 0.035563518524169925, 0.035178497314453126, 0.035345470428466794, 0.03537750244140625, 0.035327617645263674, 0.035412960052490235, 0.03530752182006836, 0.03568435287475586, 0.03552870559692383, 0.03602182388305664, 0.03567164611816406, 0.03543497467041016, 0.03562329483032227, 0.03535196685791016, 0.03549248123168945, 0.035200096130371096, 0.0353504638671875, 0.03544931030273438, 0.03568892669677735, 0.03547340774536133, 0.03561875152587891, 0.03524979019165039, 0.035393985748291015, 0.035643390655517575, 0.03588460922241211, 0.035318206787109375, 0.0355362548828125, 0.035379840850830076, 0.035606529235839846, 0.035350528717041016, 0.03548476791381836, 0.035434814453125, 0.035998302459716795, 0.035846145629882815, 0.03672883224487305, 0.03592396926879883, 0.036367969512939455, 0.03648963165283203, 0.036446208953857424, 0.036503551483154296, 0.036732929229736325, 0.035538944244384765, 0.03555254364013672, 0.03519375991821289, 0.03530937576293945, 0.035339839935302736, 0.03557625579833985, 0.037101726531982425, 0.03536080169677734, 0.03555942535400391, 0.03545087814331055, 0.03543475341796875, 0.03532540893554687, 0.035205406188964845, 0.0353361930847168, 
0.035332096099853515, 0.03537919998168945, 0.035299327850341795, 0.035332096099853515, 0.035618015289306644, 0.03542505645751953, 0.03535036849975586, 0.035651744842529295, 0.035307422637939456, 0.035213153839111326, 0.03512313461303711, 0.03524844741821289, 0.03538739013671875, 0.03543417739868164, 0.035302879333496094, 0.0354409294128418, 0.035670814514160154, 0.03553823852539063, 0.035674655914306644, 0.0357639045715332, 0.035853633880615236, 0.03565049743652344, 0.035520511627197264, 0.03553494262695313, 0.035522335052490236, 0.03536284637451172, 0.035593662261962894, 0.03563587188720703, 0.035397441864013675, 0.03534819030761719, 0.03544112014770508, 0.035487743377685545, 0.035746849060058594, 0.03564643096923828, 0.03544073486328125, 0.03538934326171875, 0.035342334747314456, 0.03562700653076172, 0.035522560119628906, 0.03542630386352539, 0.03546316909790039, 0.03649740982055664, 0.03643600082397461, 0.03576345443725586, 0.035360832214355466, 0.03558671951293945, 0.035280895233154294, 0.03549766540527344, 0.03572563171386719, 0.03574774551391602, 0.03542230224609375, 0.035229022979736326, 0.0354428482055664, 0.0352825927734375, 0.035562335968017576, 0.03649097442626953, 0.03581622314453125, 0.035388736724853515, 0.03541609573364258, 0.035599006652832034, 0.035381248474121094, 0.03527993774414063, 0.03580409622192383, 0.035612224578857425, 0.03546771240234375, 0.03548713684082031, 0.03583587265014648, 0.03545753479003906, 0.035315841674804685, 0.035517822265625, 0.03603740692138672, 0.03619935989379883, 0.035896224975585936, 0.03566940689086914, 0.03544944000244141, 0.03559795379638672, 0.03544102478027344, 0.0353546257019043, 0.03537641525268555, 0.03569465637207031, 0.0355519027709961, 0.0353177604675293, 0.035185791015625, 0.035857280731201174, 0.035700321197509766, 0.035262176513671875, 0.03522969436645508, 0.03619247817993164, 0.03537062454223633, 0.03517059326171875, 0.03532166290283203, 0.03574649429321289, 0.03569884872436523, 0.035528606414794925, 0.035732959747314455, 0.03556163024902344, 0.03541167831420899, 0.03536553573608398, 0.03540787124633789, 0.035487743377685545, 0.03530547332763672, 0.0353259506225586, 0.0353546257019043, 0.035434497833251956, 0.035390655517578126, 0.03519366455078125, 0.03546480178833008, 0.03553936004638672, 0.03545292663574219, 0.03563724899291992, 0.03590371322631836, 0.035593536376953124, 0.03577318572998047, 0.03565334320068359, 0.03556534576416016, 0.03579926300048828, 0.03575091171264649, 0.035460094451904296, 0.03666435241699219, 0.036004993438720705, 0.035810718536376955, 0.03575852966308594, 0.03555737686157227, 0.03569868850708008, 0.03559628677368164, 0.03544633483886719, 0.03548611068725586, 0.035415233612060545, 0.03620703887939453, 0.03720796966552734, 0.0358548469543457, 0.03578995132446289, 0.03555622482299805, 0.03583760070800781, 0.03578230285644531, 0.03531017684936524, 0.035358814239501955, 0.03546726226806641, 0.03557580947875977, 0.03564716720581055, 0.03552083206176758, 0.03795929718017578, 0.03591350555419922, 0.03573539352416992, 0.03523385620117187, 0.03592419052124023, 0.03546979141235351, 0.035727134704589845, 0.03536899185180664, 0.035229888916015625, 0.03538739013671875, 0.03578879928588867, 0.03516211318969727, 0.03542406463623047, 0.03549612808227539, 0.035844097137451174, 0.035291454315185544, 0.03543008041381836, 0.03535785675048828, 0.035939167022705075, 0.03545292663574219, 0.03569782257080078, 0.03548617553710937, 0.035381633758544924, 0.0357130241394043, 0.03863731384277344, 0.0355494384765625, 
0.035249248504638675, 0.036102207183837894, 0.035682590484619144, 0.035639873504638674, 0.03564739227294922, 0.03539363098144531, 0.03560009765625, 0.035428382873535155, 0.03541836929321289, 0.03543983840942383, 0.03572739028930664, 0.035595008850097656, 0.03545702362060547, 0.035776512145996094, 0.0368067512512207, 0.035727359771728515, 0.03572246551513672, 0.035699489593505856, 0.035757152557373044, 0.03545775985717774, 0.03523158264160156, 0.035461536407470705, 0.03557984161376953, 0.03557340621948242, 0.03543804931640625, 0.03544153594970703, 0.0353177604675293, 0.03553267288208008, 0.03536089706420899, 0.035436256408691406, 0.03536310577392578, 0.0355491828918457, 0.035354080200195315, 0.0353073616027832, 0.03550278472900391, 0.03560857772827149, 0.03562470245361328, 0.03548963165283203, 0.03529974365234375, 0.0355503044128418, 0.035697086334228516, 0.03528508758544922, 0.03617798233032227, 0.03563894271850586, 0.03545564651489258, 0.03530044937133789, 0.03528796768188477, 0.03577398300170898, 0.03549763107299805, 0.03560940933227539, 0.03606249618530273, 0.03558380889892578, 0.03545180892944336, 0.03554003143310547, 0.03533715057373047, 0.03553279876708984, 0.03559219360351563, 0.03534438323974609, 0.03551635360717773, 0.035444320678710936, 0.035956321716308595, 0.03550707244873047, 0.035459007263183594, 0.03550419235229492, 0.03571843338012695, 0.03659439849853516, 0.03622067260742187, 0.035555583953857425, 0.03573075103759766, 0.0354290542602539, 0.035745792388916016, 0.03529523086547852, 0.03542764663696289, 0.03524473571777344, 0.03541196823120117, 0.03528908920288086, 0.035487167358398436, 0.03619839859008789, 0.03547526550292969, 0.03564358520507813, 0.03526860809326172, 0.03509862518310547, 0.03512873458862305, 0.035006881713867184, 0.03524832153320313, 0.03527609634399414, 0.035626945495605467, 0.035486465454101564, 0.03552460861206055, 0.03542832183837891, 0.03613699340820312, 0.03582511901855469, 0.03528976058959961, 0.03554496002197265, 0.035954048156738284, 0.03669657516479492, 0.044693153381347654, 0.04464627075195313, 0.04454051208496094, 0.04532624053955078, 0.04445785522460938, 0.044528926849365234, 0.04446867370605469, 0.04438272094726563, 0.04444707107543945, 0.04459539031982422, 0.044841438293457034, 0.04494540786743164, 0.044521472930908204, 0.0444040641784668, 0.0443419189453125, 0.04480579376220703, 0.044720481872558594, 0.04465049743652344, 0.04456995010375977, 0.04453647994995117, 0.04441497421264649, 0.04491059112548828, 0.04472217559814453, 0.0445296630859375, 0.044623680114746093, 0.045356639862060545, 0.04539654541015625, 0.04511747360229492, 0.04534886550903321, 0.044746078491210935, 0.04455801773071289, 0.044624862670898435, 0.044620960235595704, 0.04584534454345703, 0.04451327896118164, 0.04436611175537109, 0.04423855972290039, 0.04473161697387695, 0.044722976684570315, 0.04457398223876953, 0.04482080078125, 0.03772003173828125, 0.03532195281982422, 0.035337921142578124, 0.03619430541992188, 0.035399105072021486, 0.03563993453979492, 0.035657665252685544, 0.036049983978271485, 0.03536537551879883, 0.03504399871826172, 0.03520000076293945, 0.03505846405029297, 0.03493603134155274, 0.0347979850769043, 0.034901439666748045, 0.03500537490844727, 0.03523174285888672, 0.03515395355224609, 0.035571136474609376, 0.03527673721313476, 0.0353587532043457, 0.03554537582397461, 0.03506409454345703, 0.035124801635742185, 0.03501305770874023, 0.035323902130126955, 0.03553996658325195, 0.03511603164672852, 0.035100318908691405, 0.035004768371582035, 
0.03517440032958984, 0.03497507095336914, 0.035220127105712894, 0.035098464965820315, 0.035296798706054684, 0.03526512145996094, 0.03525414276123047, 0.03511856079101563, 0.03523769760131836, 0.03544972610473633, 0.03547040176391601, 0.03533100891113281, 0.035272705078125, 0.03527475357055664, 0.03523174285888672, 0.03530342483520508, 0.03531980895996094, 0.035315711975097655, 0.03534787368774414, 0.03513814544677735, 0.035256607055664066, 0.03508390426635742, 0.0350302734375, 0.035279136657714844, 0.03561939239501953, 0.03541571044921875, 0.03530083084106445, 0.0351855354309082, 0.035672065734863284, 0.0353546257019043, 0.03516191864013672, 0.035145919799804685, 0.035678207397460936, 0.035188800811767576, 0.035159584045410155, 0.03528348922729492]",tokens/s,27.72764880536565,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in from_pretrained dispatch_model(model, **device_map_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 3 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 805, in _apply param_applied = fn(param) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.55 GiB is free. Process 224629 has 13.19 GiB memory in use. Of the allocated memory 13.09 GiB is allocated by PyTorch, and 880.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File 
""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,7334.526976,9673.048064,0.0,9277.800448,8679.633408,s,1,14.6659150390625,14.6659150390625,0.0,14.6659150390625,14.6659150390625,14.6659150390625,14.6659150390625,[14.6659150390625],,kWh,0.00022638449367916183,2.496456093734959e-05,0.00010217091507000453,0.0003535199696865159,,MB,1633.624064,9687.728128,0.0,9277.800448,8206.444544,s,10,3.639275939941406,0.3639275939941406,0.0009292388946834563,0.3641373748779297,0.36494803466796877,0.36504625244140626,0.3651248266601563,"[0.36514447021484375, 0.3631889343261719, 0.36265203857421874, 0.363911865234375, 0.36459429931640625, 0.362269287109375, 0.36436288452148435, 0.36359759521484375, 0.36492620849609375, 0.36462835693359374]",tokens/s,703.4366292216954,kWh,1.0648591585416512e-05,1.1743468569112036e-06,7.045541350713927e-06,1.886847979304164e-05,tokens/kWh,13567600.718655048,MB,1637.482496,9687.728128,0.0,9277.800448,8483.104256,s,10,24.1464248046875,2.41464248046875,0.0032581539207504878,2.4154171142578127,2.4186192626953122,2.418891174316406,2.419108703613281,"[2.409552490234375, 2.41047900390625, 2.411179931640625, 2.415583984375, 2.4128466796875, 2.415250244140625, 2.418558837890625, 2.416442138671875, 2.4191630859375, 2.417368408203125]",tokens/s,26.09081903825776,kWh,7.050408184916646e-05,7.776659843467544e-06,4.701366856328689e-05,0.00012529441025592087,tokens/kWh,502815.7271447223,,s,630,24.142780403137223,0.038321873655773345,0.0003929093673359112,0.038308799743652346,0.03883981666564942,0.038901848220825194,0.03900824077606201,"[0.03857583999633789, 0.037775646209716796, 0.037539840698242184, 0.037614879608154295, 0.03764025497436523, 0.03761129760742187, 0.037563007354736326, 0.0376814079284668, 0.03783785629272461, 0.037826976776123046, 0.03775955200195313, 0.037741825103759764, 0.03778412628173828, 0.037865665435791014, 0.03782656097412109, 0.03779919815063477, 0.03781500625610352, 0.037804031372070314, 0.03781017684936523, 0.03796582412719727, 0.03805712127685547, 0.038060897827148436, 0.03808844757080078, 0.03813750457763672, 0.03818761444091797, 0.03812895965576172, 0.038124031066894534, 0.0381049919128418, 0.038050079345703126, 0.03806585693359375, 0.038029632568359374, 0.03810841751098633, 0.03817753601074219, 0.03830771255493164, 0.03822195053100586, 0.03827014541625977, 0.03833119964599609, 0.03832831954956055, 0.03827228927612305, 0.03833516693115235, 0.038515968322753905, 0.03848886489868164, 0.038408191680908206, 0.03844095993041992, 0.03840732955932617, 0.03849097442626953, 0.03859628677368164, 
0.03869289779663086, 0.03867219161987305, 0.03879894256591797, 0.03871324920654297, 0.038925121307373044, 0.03890192031860352, 0.03879731369018555, 0.0386761589050293, 0.03863724899291992, 0.03876927947998047, 0.038811168670654296, 0.03883465576171875, 0.03886000061035156, 0.0388267822265625, 0.03886016082763672, 0.03880409622192383, 0.038560031890869144, 0.03792035293579102, 0.03766518402099609, 0.037703231811523436, 0.03769708633422852, 0.037668800354003905, 0.037635009765625, 0.0376995849609375, 0.037720062255859374, 0.03783200073242188, 0.03784569549560547, 0.037787647247314454, 0.037754878997802735, 0.037852607727050784, 0.037782081604003905, 0.037795360565185544, 0.03777788925170898, 0.03786751937866211, 0.03798777770996094, 0.03805606460571289, 0.037980609893798825, 0.03801657485961914, 0.03796217727661133, 0.037973281860351565, 0.038141918182373044, 0.03822665786743164, 0.03819113540649414, 0.03807392120361328, 0.038121921539306644, 0.03818905639648437, 0.0381376953125, 0.03819084930419922, 0.0383553581237793, 0.038458366394042966, 0.03842876815795898, 0.03840911865234375, 0.03836921691894531, 0.038541057586669925, 0.038426944732666016, 0.03845939254760742, 0.038413761138916015, 0.0383776969909668, 0.03852886581420899, 0.03859260940551758, 0.03858620834350586, 0.03861670303344727, 0.03856889724731445, 0.0385445442199707, 0.038742881774902344, 0.038599681854248044, 0.03875449752807617, 0.03874803161621094, 0.038712257385253905, 0.03868163299560547, 0.038642559051513675, 0.03861872100830078, 0.038615550994873044, 0.03878092956542969, 0.038745121002197264, 0.03875939178466797, 0.0387209587097168, 0.03877536010742187, 0.03879024124145508, 0.0385816650390625, 0.03784960174560547, 0.03766396713256836, 0.03768809509277344, 0.03763824081420898, 0.03768524932861328, 0.037779457092285154, 0.037894142150878905, 0.03788390350341797, 0.03782860946655273, 0.03786483383178711, 0.03784767913818359, 0.03784425735473633, 0.037884449005126955, 0.03784096145629883, 0.037844768524169924, 0.03785311889648438, 0.03789148712158203, 0.037956607818603515, 0.03802316665649414, 0.03811238479614258, 0.03809686279296875, 0.038056865692138675, 0.03800883102416992, 0.03812761688232422, 0.038400001525878906, 0.03826278305053711, 0.03814140701293945, 0.03809689712524414, 0.0380994873046875, 0.03811123275756836, 0.03815423965454102, 0.03824012756347656, 0.03822195053100586, 0.03821977615356445, 0.038299648284912106, 0.038305728912353516, 0.03838547134399414, 0.0384697265625, 0.03834812927246094, 0.03830252838134766, 0.0383631362915039, 0.03833651351928711, 0.03841228866577148, 0.038507553100585935, 0.03856067276000977, 0.03850451278686524, 0.038556926727294924, 0.03853388977050781, 0.03863273620605469, 0.03856662368774414, 0.038555648803710936, 0.038645759582519534, 0.038844417572021485, 0.038811168670654296, 0.03885513687133789, 0.03876454544067383, 0.03890176010131836, 0.03891795349121094, 0.0389183349609375, 0.03883612823486328, 0.03889775848388672, 0.03902000045776367, 0.03836476898193359, 0.03778601455688477, 0.03763814544677734, 0.03763600158691406, 0.03767305755615234, 0.03779542541503906, 0.037720481872558595, 0.03773235321044922, 0.03770307159423828, 0.0378201904296875, 0.037788478851318356, 0.03791462326049805, 0.03794124984741211, 0.038031295776367186, 0.03792083358764648, 0.03790643310546875, 0.03790848159790039, 0.037904384613037106, 0.037887649536132814, 0.038109535217285155, 0.038117374420166016, 0.038158302307128904, 0.0379692497253418, 0.03799488067626953, 0.03808492660522461, 0.03811894226074219, 
0.03819308853149414, 0.038140449523925785, 0.038365184783935545, 0.03837747192382813, 0.038340576171875, 0.03835292816162109, 0.038373374938964845, 0.03845119857788086, 0.03844255828857422, 0.03856838226318359, 0.038506240844726564, 0.03853952026367188, 0.03864371109008789, 0.03854524612426758, 0.03859676742553711, 0.038723201751708985, 0.038613121032714845, 0.03854975891113281, 0.03865375900268555, 0.038691009521484375, 0.03868672180175781, 0.03864985656738281, 0.038741214752197266, 0.03875100708007812, 0.03874816131591797, 0.0387677116394043, 0.038679454803466795, 0.03884163284301758, 0.038849056243896486, 0.038797504425048826, 0.03875136184692383, 0.038851039886474606, 0.03899334335327148, 0.03891299057006836, 0.03889926528930664, 0.0392053108215332, 0.038793216705322264, 0.038599807739257815, 0.03794742584228516, 0.037808990478515624, 0.037717727661132815, 0.03768348693847656, 0.037782913208007814, 0.037835391998291015, 0.03790643310546875, 0.037889022827148434, 0.03793407821655274, 0.03789004898071289, 0.037953536987304685, 0.037904384613037106, 0.03793920135498047, 0.03791027069091797, 0.037932510375976565, 0.03788880157470703, 0.03806345748901367, 0.03818972778320313, 0.03833446502685547, 0.038188255310058594, 0.03811724853515625, 0.03809782409667969, 0.03800883102416992, 0.038141822814941405, 0.038258209228515624, 0.038255199432373044, 0.03830579376220703, 0.03829759979248047, 0.03824585723876953, 0.03820393753051758, 0.038153343200683594, 0.03818585586547851, 0.03819497680664063, 0.038291679382324216, 0.03830537414550781, 0.03832867050170898, 0.03832633590698242, 0.03831398391723633, 0.038414337158203124, 0.03837094497680664, 0.038420032501220704, 0.03846227264404297, 0.03860883331298828, 0.03855980682373047, 0.038569534301757816, 0.03856995010375976, 0.038523361206054686, 0.03858227157592774, 0.03865190505981445, 0.0385986557006836, 0.03870851135253906, 0.03870105743408203, 0.03878780746459961, 0.038739456176757815, 0.03871139144897461, 0.03865977478027344, 0.03868745422363281, 0.038688606262207034, 0.03881795120239258, 0.03883433532714844, 0.038784862518310544, 0.03876249694824219, 0.0386866569519043, 0.03793423843383789, 0.03774556732177734, 0.03773980712890625, 0.03768937683105469, 0.037708480834960936, 0.03769343948364258, 0.03774185562133789, 0.03777199935913086, 0.037766143798828124, 0.03773478317260742, 0.03776752090454102, 0.03787190246582031, 0.03793222427368164, 0.03795795059204102, 0.03791414260864258, 0.03792585754394531, 0.03788390350341797, 0.0379576301574707, 0.0379576301574707, 0.03812063980102539, 0.03811564636230469, 0.038091262817382815, 0.03802649688720703, 0.038123329162597655, 0.03820640182495117, 0.03821763229370117, 0.038227935791015626, 0.03820556640625, 0.0382597770690918, 0.03828627014160156, 0.038295040130615236, 0.03822611236572265, 0.0382938232421875, 0.03823206329345703, 0.03833651351928711, 0.038424575805664066, 0.038504447937011715, 0.038483585357666016, 0.038496639251708986, 0.03849622344970703, 0.03851779174804688, 0.03848908615112305, 0.03850239944458008, 0.038537025451660156, 0.03875235366821289, 0.03876464080810547, 0.03860684967041016, 0.038688385009765625, 0.03978812789916992, 0.0387380485534668, 0.038707489013671874, 0.03882841491699219, 0.03878297424316406, 0.03895199966430664, 0.03881228637695312, 0.03893065643310547, 0.03897967910766602, 0.03890380859375, 0.03888742446899414, 0.03885039901733398, 0.03895657730102539, 0.03888355255126953, 0.0388485107421875, 0.038019073486328124, 0.03775849533081055, 0.03778607940673828, 
0.03777452850341797, 0.03779024124145508, 0.03786323165893555, 0.037937503814697265, 0.03796329498291016, 0.03796819305419922, 0.03789648056030273, 0.03792819213867187, 0.0379788818359375, 0.037986305236816405, 0.03788800048828125, 0.03787273788452149, 0.03795036697387695, 0.037988353729248046, 0.03794124984741211, 0.03805152130126953, 0.03819142532348633, 0.03813776016235351, 0.0381952018737793, 0.03828745651245117, 0.03833366394042969, 0.03841417694091797, 0.03828627014160156, 0.03823820877075195, 0.03815119934082031, 0.038257633209228516, 0.03829542541503906, 0.038319839477539065, 0.038340641021728517, 0.03844124984741211, 0.0384697265625, 0.03842009735107422, 0.03842700958251953, 0.038558719635009765, 0.03851161575317383, 0.038629344940185543, 0.038543392181396484, 0.03850239944458008, 0.038704769134521484, 0.038683006286621094, 0.038768638610839845, 0.038795265197753906, 0.03874816131591797, 0.038690174102783206, 0.03865663909912109, 0.03880550384521484, 0.038838241577148436, 0.03886633682250976, 0.03892428970336914, 0.03886959838867188, 0.03875433731079102, 0.038836223602294925, 0.038795265197753906, 0.038905567169189456, 0.03894822311401367, 0.038818401336669923, 0.03891436767578125, 0.03883961486816406, 0.03888172912597656, 0.03857555389404297, 0.03783523178100586, 0.03770582580566406, 0.03772825622558594, 0.03785279846191406, 0.037900543212890624, 0.0377918701171875, 0.03775897598266602, 0.03776921463012695, 0.03794124984741211, 0.037857086181640624, 0.03787094497680664, 0.03787452697753906, 0.03787776184082031, 0.03786073684692383, 0.037910465240478516, 0.037880512237548826, 0.037959136962890626, 0.03795817565917969, 0.03826454544067383, 0.03820982360839844, 0.03823616027832031, 0.03818905639648437, 0.03822182464599609, 0.0382355842590332, 0.0382490234375, 0.03820748901367187, 0.03820748901367187, 0.03829350280761719, 0.03837542343139649, 0.038332416534423826, 0.038294784545898436, 0.03825945663452148, 0.03829759979248047, 0.03841558456420899, 0.03854528045654297, 0.03848604965209961, 0.03850739288330078, 0.038540481567382816, 0.038555904388427736, 0.03858812713623047, 0.03860771179199219, 0.038637569427490234, 0.03857126235961914, 0.03859532928466797, 0.03871670532226563, 0.038693599700927735, 0.038637569427490234, 0.038741310119628905, 0.0387303352355957, 0.038670337677001954, 0.03871084976196289, 0.038724128723144534, 0.03889670562744141, 0.038917057037353514, 0.03887923049926758, 0.038782878875732424, 0.038803424835205075, 0.03886297607421875, 0.03889120101928711, 0.03887472152709961, 0.038849246978759765, 0.03888496017456055, 0.03861094284057617, 0.037934432983398436, 0.03767907333374024, 0.037690048217773435, 0.03764217758178711, 0.037719486236572265, 0.03771660614013672, 0.03784499359130859, 0.03787980651855469, 0.037925983428955076, 0.03794627380371094, 0.03792473602294922, 0.037934303283691406, 0.037990814208984376, 0.03799091339111328, 0.03794281768798828, 0.03799702453613281, 0.037966976165771486, 0.03808345413208008, 0.038223648071289064, 0.03821369552612305, 0.03816259384155273, 0.0381952018737793, 0.03815190505981445, 0.038317569732666014, 0.038294303894042966, 0.03829116821289062, 0.03835728073120117, 0.03827711868286133, 0.03829283142089844, 0.03829779052734375, 0.03833699035644531, 0.03842015838623047, 0.03851443099975586, 0.03854188919067383, 0.038785022735595705, 0.03859251022338867, 0.03862694549560547, 0.03858060836791992, 0.03860275268554687, 0.038550559997558596, 0.03857670211791992, 0.038544830322265626, 0.03858844757080078, 0.038646720886230466, 
0.03863929748535156, 0.038723648071289064, 0.03874550247192383, 0.038797504425048826, 0.0388590087890625, 0.038801311492919925, 0.038832065582275394, 0.038763072967529295, 0.03876454544067383, 0.03877478408813476, 0.038903457641601566, 0.03889187240600586, 0.038965248107910154, 0.039003265380859374, 0.03894771194458008, 0.03888032150268555, 0.039010272979736325, 0.03905020904541016, 0.038674720764160155, 0.03798697662353516, 0.03767705535888672, 0.03768323135375977, 0.03772134399414063, 0.0377303695678711, 0.03779033660888672, 0.03785862350463867, 0.03780022430419922, 0.03791097640991211, 0.037881118774414066, 0.037868255615234374, 0.038002689361572264, 0.037975391387939456, 0.037949119567871094, 0.037999584197998044, 0.03797401428222656, 0.037961505889892576, 0.03807664108276367, 0.038133758544921875, 0.03818086242675781, 0.03814368057250977, 0.038072032928466795, 0.03815033721923828, 0.038236576080322264, 0.03830988693237305, 0.03832012939453125, 0.03822796630859375, 0.03823820877075195, 0.03829340744018555, 0.03823011016845703, 0.038266880035400394, 0.03830748748779297, 0.03845094299316406, 0.0384436149597168, 0.038438911437988284, 0.03844300842285156, 0.038419807434082034, 0.038369953155517576, 0.03854742431640625, 0.03843411254882813, 0.03844579315185547, 0.038532958984375, 0.03861315155029297, 0.03862694549560547, 0.038736255645751956, 0.03876995086669922, 0.03872227096557617, 0.03864166259765625, 0.03870300674438477, 0.038750049591064456, 0.03889120101928711, 0.038923934936523436, 0.038886302947998046, 0.03882169723510742, 0.038895809173583984, 0.03897679901123047, 0.038994625091552736, 0.03893660736083984, 0.03890108871459961, 0.03902531051635742, 0.039041023254394534, 0.03895827102661133]",tokens/s,26.09475750018152,,, 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking 
context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-exllama-v2-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3874, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1770, in _check_and_enable_sdpa 
raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` " 4bit-awq-exllama-v2-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,awq,4,exllama,2,64,1,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1030.61504,965.67296,0.0,570.425344,536.326656,s,1,8.4580712890625,8.4580712890625,0.0,8.4580712890625,8.4580712890625,8.4580712890625,8.4580712890625,[8.4580712890625],,kWh,3.686940990834652e-05,4.05941187368778e-06,1.2865010291990275e-05,5.3793832074024575e-05,,MB,1262.75584,1034.878976,0.0,624.951296,594.377728,s,10,0.20789846420288088,0.020789846420288086,0.000156543809506488,0.020772943496704104,0.020961072158813477,0.02104437599182129,0.02111101905822754,"[0.0211276798248291, 0.020942560195922853, 0.02068841552734375, 0.02059040069580078, 0.020784351348876955, 0.020589759826660156, 0.02076153564453125, 0.02089727973937988, 0.02071865653991699, 0.02079782485961914]",tokens/s,12313.703277296869,kWh,6.076229467082312e-07,6.701004061386872e-08,3.9350112330144745e-07,1.0681341106235472e-06,tokens/kWh,239670278.71673742,MB,1275.990016,1049.55904,0.0,639.63136,607.71072,s,10,11.622058959960938,1.162205895996094,0.011934721498417466,1.1644623413085937,1.176913916015625,1.1784804565429687,1.1797336889648438,"[1.1765657958984375, 1.1506571044921876, 1.1425592041015624, 1.1505360107421876, 1.15258935546875, 1.1618824462890625, 1.168671630859375, 1.1715081787109376, 1.1800469970703125, 1.167042236328125]",tokens/s,54.207262428319105,kWh,3.2591786253707696e-05,3.594396489437822e-06,1.304845407469995e-05,4.9234636817845455e-05,tokens/kWh,1279586.9751833975,,s,630,11.615323753356947,0.018437021830725292,0.0004221994916844008,0.018455056190490725,0.018704652023315428,0.018967825794219968,0.01997489570617676,"[0.01814137649536133, 0.018436960220336914, 0.018668479919433593, 0.021637664794921876, 0.018501983642578126, 0.01927382469177246, 0.01839286422729492, 0.018638431549072267, 0.018555423736572266, 0.018327680587768555, 0.018392959594726564, 0.019791040420532226, 0.01818623924255371, 0.018191295623779295, 0.018612064361572266, 0.01861039924621582, 0.018245695114135742, 0.01832310485839844, 0.020714847564697266, 0.019788543701171876, 0.018700000762939453, 0.018786592483520506, 0.018552831649780274, 0.018560831069946288, 0.018687328338623046, 0.018686464309692383, 0.01859574317932129, 0.018573759078979492, 0.018503231048583986, 0.01861881637573242, 0.018733055114746093, 0.01859107208251953, 0.018687679290771485, 0.018704479217529296, 0.018702335357666015, 
0.01866172790527344, 0.018683647155761717, 0.01862326431274414, 0.0186200008392334, 0.01868227195739746, 0.018569311141967772, 0.018708383560180664, 0.018476800918579103, 0.018690496444702148, 0.018503135681152343, 0.018526336669921876, 0.018546239852905273, 0.018535072326660157, 0.01859174346923828, 0.01841971206665039, 0.018475072860717773, 0.018436159133911133, 0.018484512329101564, 0.018520288467407227, 0.01853273582458496, 0.018518016815185546, 0.018453567504882813, 0.018423807144165038, 0.01841862487792969, 0.018448383331298827, 0.01851087951660156, 0.01847769546508789, 0.018246240615844726, 0.018312543869018556, 0.01865180778503418, 0.018472959518432617, 0.018333696365356447, 0.018339296340942383, 0.018265663146972658, 0.01824787139892578, 0.018158143997192382, 0.01814860725402832, 0.018253952026367187, 0.018168352127075196, 0.01827257537841797, 0.01827187156677246, 0.018235647201538085, 0.01822537612915039, 0.018236928939819336, 0.018172319412231446, 0.01814886474609375, 0.018061088562011718, 0.017932607650756837, 0.017897344589233397, 0.017904159545898437, 0.0177674560546875, 0.017744863510131836, 0.017612800598144532, 0.01760630416870117, 0.01762156867980957, 0.017600288391113283, 0.01761075210571289, 0.01765817642211914, 0.017761184692382814, 0.01796339225769043, 0.01787107276916504, 0.01774015998840332, 0.01849235153198242, 0.018161855697631835, 0.018285247802734376, 0.018273759841918945, 0.018332191467285156, 0.01842995262145996, 0.018417503356933592, 0.018472576141357423, 0.01843452835083008, 0.018575424194335936, 0.018757055282592774, 0.018626367568969727, 0.01847372817993164, 0.018288639068603514, 0.018353759765625, 0.018434463500976564, 0.018537919998168947, 0.018538175582885744, 0.018408319473266602, 0.01855232048034668, 0.018626527786254884, 0.018547231674194337, 0.018550783157348632, 0.01864089584350586, 0.018515968322753908, 0.019095552444458007, 0.018570655822753905, 0.01895689582824707, 0.018382848739624022, 0.018041536331176757, 0.018332927703857423, 0.018342144012451173, 0.018625120162963867, 0.018433664321899416, 0.01838688087463379, 0.018428447723388672, 0.018414527893066406, 0.018557823181152344, 0.018448383331298827, 0.01845248031616211, 0.018509824752807616, 0.01835411262512207, 0.018421823501586915, 0.018565120697021483, 0.01840358352661133, 0.01851545524597168, 0.01838719940185547, 0.01831679916381836, 0.018274816513061523, 0.018544607162475586, 0.018613920211791993, 0.01849996757507324, 0.01862006378173828, 0.018917760848999023, 0.018393056869506836, 0.018556575775146484, 0.018534143447875975, 0.01841971206665039, 0.018281055450439454, 0.018175167083740236, 0.018112319946289063, 0.018340864181518556, 0.018423807144165038, 0.01856716728210449, 0.018294368743896484, 0.017826047897338868, 0.01760643196105957, 0.017516927719116213, 0.017448352813720702, 0.017452959060668946, 0.017502143859863283, 0.01749478340148926, 0.017512128829956054, 0.017539392471313475, 0.017547168731689454, 0.017939807891845704, 0.017789695739746095, 0.01778892707824707, 0.017614112854003907, 0.017615583419799803, 0.017639167785644533, 0.01763555145263672, 0.017545055389404297, 0.017825632095336913, 0.01796131134033203, 0.01790496063232422, 0.01801491165161133, 0.017912895202636718, 0.017820159912109376, 0.01772812843322754, 0.01799260711669922, 0.018246559143066405, 0.01741804885864258, 0.017850656509399414, 0.017981407165527343, 0.017821887969970703, 0.017759647369384766, 0.01788105583190918, 0.017848031997680664, 0.017801984786987305, 0.017777727127075194, 0.01802876853942871, 
0.018083967208862305, 0.017859167098999023, 0.017731584548950196, 0.017741695404052734, 0.017727615356445313, 0.017735679626464843, 0.018192384719848635, 0.018007295608520508, 0.018130847930908203, 0.01792086410522461, 0.017840192794799804, 0.017778303146362303, 0.017754432678222656, 0.017734848022460937, 0.01777542304992676, 0.01799977684020996, 0.018331392288208008, 0.018027040481567384, 0.017932096481323243, 0.018182144165039063, 0.018317440032958984, 0.018465856552124023, 0.01850364875793457, 0.018402143478393553, 0.0183767032623291, 0.018401119232177736, 0.018423967361450196, 0.018915327072143554, 0.018581504821777343, 0.018607711791992186, 0.018714048385620116, 0.01973062324523926, 0.018649887084960938, 0.019044479370117186, 0.01852150344848633, 0.01854863929748535, 0.018510400772094728, 0.018463775634765624, 0.018576351165771485, 0.018486335754394533, 0.01843315124511719, 0.01847420883178711, 0.01857391929626465, 0.018582656860351564, 0.018587648391723634, 0.018570112228393554, 0.018606048583984375, 0.01865897560119629, 0.01846886444091797, 0.018542976379394532, 0.01845248031616211, 0.01845452880859375, 0.01855036735534668, 0.01822537612915039, 0.01846272087097168, 0.018472320556640626, 0.018452735900878908, 0.018336128234863282, 0.018264064788818358, 0.018151424407958985, 0.018257823944091797, 0.01803468894958496, 0.018148576736450196, 0.018166656494140624, 0.018067455291748045, 0.01781760025024414, 0.01780940818786621, 0.017831487655639647, 0.017868896484375, 0.0179531192779541, 0.017663999557495116, 0.01760256004333496, 0.017662176132202147, 0.01799068832397461, 0.018075584411621094, 0.01852694320678711, 0.018358367919921875, 0.01845849609375, 0.018352256774902344, 0.018533504486083985, 0.018500736236572266, 0.018941568374633788, 0.018478336334228514, 0.018442527770996094, 0.018327903747558594, 0.01837491226196289, 0.01843715286254883, 0.018349023818969728, 0.018380800247192384, 0.01845644760131836, 0.018333152770996095, 0.018385568618774415, 0.018382816314697265, 0.01847817611694336, 0.018551168441772462, 0.018683904647827147, 0.018634368896484375, 0.018545440673828125, 0.018419136047363283, 0.01851055908203125, 0.01839836883544922, 0.01840246391296387, 0.018357984542846678, 0.01827862358093262, 0.018330368041992187, 0.018365440368652345, 0.0182476806640625, 0.01835219192504883, 0.018302656173706053, 0.01815888023376465, 0.018254816055297853, 0.018376768112182616, 0.018245248794555663, 0.01813667106628418, 0.018274080276489257, 0.018305856704711913, 0.017779392242431642, 0.018277727127075195, 0.018225248336791993, 0.018135616302490234, 0.01786675262451172, 0.01803878402709961, 0.018112415313720702, 0.018244800567626954, 0.018092960357666017, 0.018167808532714845, 0.01800956726074219, 0.017949216842651366, 0.01789132881164551, 0.017740928649902343, 0.017985471725463865, 0.01824444770812988, 0.017868896484375, 0.018159616470336915, 0.018520063400268554, 0.018454143524169922, 0.019347360610961914, 0.01880873680114746, 0.018600032806396483, 0.018535072326660157, 0.019037471771240235, 0.01850831985473633, 0.018556640625, 0.018651296615600586, 0.018550912857055665, 0.01860812759399414, 0.018548128128051757, 0.018538591384887695, 0.019111776351928712, 0.018538848876953125, 0.018444000244140626, 0.018395360946655274, 0.0185532169342041, 0.01846272087097168, 0.01843404769897461, 0.018452127456665038, 0.018458976745605468, 0.018423807144165038, 0.01844428825378418, 0.018460800170898437, 0.018521760940551756, 0.018577056884765623, 0.018436288833618163, 0.018385343551635742, 
0.019048383712768555, 0.0182969913482666, 0.018485088348388672, 0.018357280731201173, 0.01826095962524414, 0.01837238311767578, 0.018289119720458984, 0.018351871490478514, 0.01828598403930664, 0.0183253116607666, 0.0182685432434082, 0.018264415740966797, 0.019470399856567382, 0.020112512588500976, 0.018842496871948243, 0.01804697608947754, 0.018122400283813477, 0.018231903076171875, 0.018262847900390625, 0.018332639694213868, 0.018440160751342773, 0.018314783096313476, 0.019291711807250977, 0.019452608108520508, 0.018624319076538085, 0.01853081512451172, 0.0199781436920166, 0.0183306884765625, 0.018428863525390624, 0.018472959518432617, 0.018579456329345705, 0.0184520320892334, 0.01842835235595703, 0.018327360153198243, 0.018374847412109374, 0.018308128356933594, 0.018311424255371092, 0.01833238410949707, 0.018296831130981444, 0.018403295516967774, 0.018409503936767577, 0.018562431335449218, 0.01846131134033203, 0.018374656677246092, 0.018391040802001952, 0.01927168083190918, 0.018388992309570314, 0.01843404769897461, 0.018431711196899413, 0.018401567459106444, 0.018472959518432617, 0.01835212707519531, 0.01848121643066406, 0.019966943740844727, 0.01867286491394043, 0.01858665657043457, 0.018655967712402344, 0.018644960403442382, 0.018509855270385744, 0.018554943084716797, 0.018646272659301757, 0.01882307243347168, 0.018313472747802734, 0.018315807342529297, 0.018503679275512695, 0.01854863929748535, 0.018514015197753905, 0.018907360076904297, 0.018380479812622072, 0.018579551696777344, 0.018488576889038086, 0.018434816360473633, 0.018744895935058594, 0.018504127502441407, 0.01844428825378418, 0.018372608184814454, 0.01832262420654297, 0.018455360412597658, 0.01829478454589844, 0.018794048309326173, 0.018706880569458007, 0.01884547233581543, 0.018403104782104492, 0.01839084815979004, 0.018829343795776367, 0.01860259246826172, 0.01841766357421875, 0.01846681594848633, 0.018465824127197265, 0.01841219139099121, 0.019022207260131836, 0.019781312942504882, 0.018579168319702147, 0.01846940803527832, 0.018601984024047852, 0.01845248031616211, 0.01841152000427246, 0.01857151985168457, 0.018562816619873048, 0.018536447525024414, 0.018547807693481445, 0.018551712036132813, 0.01859686470031738, 0.018723424911499024, 0.01868671989440918, 0.018708127975463867, 0.018808832168579103, 0.01862838363647461, 0.01850595283508301, 0.01848320007324219, 0.018554880142211915, 0.018550783157348632, 0.01845577621459961, 0.0182772159576416, 0.018271360397338867, 0.01820719909667969, 0.018255552291870116, 0.018322431564331054, 0.01846646308898926, 0.018495487213134765, 0.018912511825561522, 0.018602880477905273, 0.01863257598876953, 0.01860406494140625, 0.018608095169067383, 0.01857535934448242, 0.01855062484741211, 0.01892572784423828, 0.01857961654663086, 0.018634592056274414, 0.01855238342285156, 0.01864067268371582, 0.018600608825683592, 0.01861625671386719, 0.018556320190429687, 0.018697887420654296, 0.018534400939941405, 0.018496768951416016, 0.01860736083984375, 0.018625024795532227, 0.01851753616333008, 0.018356767654418946, 0.018487295150756835, 0.018544607162475586, 0.018276704788208007, 0.01832124710083008, 0.018431840896606447, 0.018421024322509767, 0.01837948799133301, 0.018308767318725584, 0.019818944931030272, 0.018652639389038085, 0.019240800857543944, 0.018862688064575195, 0.018568672180175782, 0.01851651191711426, 0.018499584197998048, 0.018505727767944336, 0.018976768493652343, 0.019820383071899414, 0.01951145553588867, 0.018425567626953125, 0.018493728637695314, 0.018411359786987304, 
0.02023644828796387, 0.01848464012145996, 0.018332160949707032, 0.018507648468017578, 0.018454751968383788, 0.01839891242980957, 0.018481056213378907, 0.01882383918762207, 0.018414655685424806, 0.019628799438476563, 0.019998655319213868, 0.02098966407775879, 0.01866156768798828, 0.01859190368652344, 0.01848512077331543, 0.018663551330566405, 0.018822399139404297, 0.018500288009643553, 0.018470848083496094, 0.018470975875854494, 0.01843814468383789, 0.018739423751831054, 0.018706207275390626, 0.018481151580810547, 0.018382623672485353, 0.018378976821899415, 0.018280607223510742, 0.01989347267150879, 0.018350719451904297, 0.018524160385131837, 0.01861417579650879, 0.018813024520874022, 0.018706623077392577, 0.01850553512573242, 0.01856719970703125, 0.018668800354003905, 0.018504032135009764, 0.018473344802856444, 0.018520063400268554, 0.01856265640258789, 0.018309120178222657, 0.018538496017456055, 0.01865497589111328, 0.01854841613769531, 0.01862713623046875, 0.018569215774536133, 0.018452064514160156, 0.01877382469177246, 0.018606719970703126, 0.01859152030944824, 0.01856940841674805, 0.01842918395996094, 0.0185097599029541, 0.018459680557250977, 0.018411296844482422, 0.01835775947570801, 0.018416128158569335, 0.018506784439086914, 0.018501760482788086, 0.018303840637207032, 0.018425504684448243, 0.018261663436889647, 0.01843235206604004, 0.018546815872192382, 0.018388511657714844, 0.018496192932128907, 0.01856233596801758, 0.018448896408081054, 0.018493631362915038, 0.01850067138671875, 0.018537343978881834, 0.018540351867675782, 0.018409311294555666, 0.018444639205932617, 0.01835215950012207, 0.018268159866333008, 0.018411136627197264, 0.018358720779418945, 0.01857974433898926, 0.01840867233276367, 0.01846668815612793, 0.018501792907714844, 0.018554912567138673, 0.018583936691284178, 0.018594015121459962, 0.01849123191833496, 0.018507232666015627, 0.01851651191711426, 0.018524160385131837, 0.01846873664855957, 0.018489471435546877, 0.01852604866027832, 0.01856528091430664, 0.01862406349182129, 0.018548608779907227, 0.01854707145690918, 0.01853785514831543, 0.018666303634643555, 0.01861030387878418, 0.018593568801879883, 0.018575071334838867, 0.019274080276489258, 0.018583263397216797]",tokens/s,54.23869479470379,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File 
""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,2172.416,2198.798336,0.0,1820.327936,1730.89792,s,1,8.8145322265625,8.8145322265625,0.0,8.8145322265625,8.8145322265625,8.8145322265625,8.8145322265625,[8.8145322265625],,kWh,5.4517574154160305e-05,6.00652971744652e-06,1.8165570088005123e-05,7.868967395961195e-05,,MB,2246.254592,2343.501824,0.0,1935.671296,1893.834752,s,10,1.6642487487792967,0.16642487487792965,0.0006596255032235849,0.16620549011230468,0.1671137954711914,0.16756105575561525,0.1679188639831543,"[0.16800831604003907, 0.16573887634277343, 0.16613337707519532, 0.16627760314941406, 0.16562451171875, 0.167014404296875, 0.16610520935058593, 0.1661298522949219, 0.166712890625, 0.16650370788574217]",tokens/s,1538.2315906067076,kWh,4.944369268124862e-06,5.452589416985818e-07,3.2784424375666374e-06,8.768070647390081e-06,tokens/kWh,29196845.040955663,MB,2252.017664,2490.302464,0.0,2082.471936,1953.4336,s,10,15.85852490234375,1.585852490234375,0.014475317429753993,1.5874735717773438,1.6008872436523438,1.601325762939453,1.6016765783691407,"[1.6017642822265625, 1.5851982421875, 1.600789794921875, 1.5940562744140625, 1.58705517578125, 1.5878919677734376, 1.5962750244140624, 1.57608447265625, 1.5796583251953125, 1.5497513427734375]",tokens/s,39.72626734702744,kWh,4.576581528312513e-05,5.047719653414422e-06,2.790886029003318e-05,7.872239522657272e-05,tokens/kWh,800280.5277796523,,s,630,15.855024709701548,0.025166705888415138,0.0006164887478600973,0.025137167930603027,0.025476288795471192,0.025661361122131347,0.02766415050506593,"[0.025282880783081055, 0.02501523208618164, 0.0251013126373291, 0.025157632827758788, 0.025169919967651368, 0.02531942367553711, 0.027202655792236328, 0.032416671752929685, 0.025204736709594725, 0.02534809684753418, 0.0252620792388916, 0.025031967163085936, 0.025336544036865236, 0.025215200424194336, 0.02540028762817383, 0.025381984710693358, 0.025124576568603514, 0.025089408874511717, 0.025366943359375, 0.025346271514892577, 0.0259420166015625, 0.02517168045043945, 0.025024799346923827, 0.025298944473266603, 0.025257984161376954, 0.024887296676635744, 0.02493382453918457, 0.02548419189453125, 0.025253536224365235, 0.02515782356262207, 0.025106239318847656, 0.025001983642578125, 0.025075647354125978, 0.025120832443237304, 0.02519363212585449, 0.0250849609375, 0.025177888870239258, 0.025137184143066406, 0.025325439453125, 0.025603904724121093, 0.025297216415405274, 0.025186304092407227, 0.025095359802246094, 0.025078432083129883, 0.025792671203613282, 0.025382911682128906, 0.025382400512695313, 0.025339967727661134, 0.025913791656494142, 0.025481088638305664, 0.02530112075805664, 0.025445695877075195, 0.02531193542480469, 
0.025251840591430662, 0.025341951370239257, 0.025184255599975586, 0.025653247833251954, 0.025274368286132814, 0.025276416778564452, 0.02656051254272461, 0.024985151290893556, 0.02497987174987793, 0.024931903839111327, 0.02748236846923828, 0.025526304244995118, 0.027293376922607422, 0.02734876823425293, 0.02484480094909668, 0.02480227279663086, 0.024827968597412108, 0.02485321617126465, 0.025049087524414062, 0.025184255599975586, 0.025241695404052734, 0.02501523208618164, 0.024949728012084962, 0.025001535415649413, 0.02511097526550293, 0.024938079833984376, 0.025055648803710938, 0.025067520141601563, 0.02513315200805664, 0.025143199920654297, 0.025058847427368164, 0.025028928756713868, 0.024919904708862305, 0.025123136520385742, 0.025329376220703127, 0.025128639221191407, 0.02511929512023926, 0.025126943588256834, 0.024895488739013674, 0.024866783142089843, 0.024999744415283204, 0.024813791275024415, 0.025025760650634766, 0.025106847763061522, 0.025014463424682616, 0.02506582450866699, 0.02498944091796875, 0.02514246368408203, 0.025431968688964843, 0.025017343521118163, 0.02511257553100586, 0.024997888565063478, 0.025068704605102538, 0.024923328399658204, 0.02498684883117676, 0.024953279495239258, 0.025013792037963868, 0.024924127578735352, 0.024961055755615233, 0.02509257507324219, 0.025319072723388673, 0.025124767303466796, 0.024914400100708008, 0.02531849670410156, 0.025819263458251952, 0.02512563133239746, 0.025177696228027343, 0.025131328582763672, 0.025075551986694335, 0.024833759307861327, 0.024703519821166992, 0.024635391235351564, 0.024619007110595705, 0.02555721664428711, 0.025305984497070312, 0.02543609619140625, 0.02524460792541504, 0.02567350387573242, 0.025628480911254883, 0.02556153678894043, 0.025190399169921874, 0.025476863861083984, 0.02528281593322754, 0.0252105598449707, 0.024985919952392577, 0.025100160598754882, 0.025005823135375978, 0.02521945571899414, 0.025047040939331053, 0.025122528076171876, 0.025084192276000977, 0.025054784774780275, 0.025182655334472656, 0.025161727905273438, 0.02513865661621094, 0.025037343978881837, 0.025008352279663085, 0.024942047119140626, 0.024854496002197267, 0.025038656234741212, 0.02493615913391113, 0.024777023315429688, 0.024924671173095703, 0.024919679641723633, 0.025128896713256837, 0.025135360717773437, 0.025548992156982423, 0.025320575714111327, 0.02523971176147461, 0.025069664001464844, 0.028514944076538085, 0.028008447647094727, 0.02507776069641113, 0.025203807830810547, 0.02510531234741211, 0.02514739227294922, 0.02550169563293457, 0.02527824020385742, 0.02532745552062988, 0.025348480224609376, 0.02525388717651367, 0.025856000900268555, 0.02539926338195801, 0.025372703552246093, 0.025314367294311524, 0.02527519989013672, 0.025110687255859375, 0.025724000930786133, 0.029483903884887697, 0.02532352066040039, 0.025354143142700195, 0.025460832595825194, 0.025622528076171876, 0.025268224716186522, 0.025282560348510744, 0.02524313545227051, 0.025537919998168946, 0.025334144592285158, 0.02544233512878418, 0.025211103439331056, 0.025165855407714845, 0.025168991088867186, 0.025169855117797853, 0.0251975040435791, 0.025309183120727538, 0.025211999893188477, 0.02512361526489258, 0.025100128173828125, 0.025096063613891603, 0.025557024002075195, 0.025354623794555664, 0.025235456466674806, 0.025298944473266603, 0.025548736572265626, 0.025765600204467772, 0.025552383422851564, 0.025232223510742186, 0.025214975357055663, 0.025113887786865234, 0.025213663101196288, 0.025366527557373047, 0.02513715171813965, 0.02512406349182129, 
0.0251362247467041, 0.02526585578918457, 0.02534169578552246, 0.025207040786743164, 0.025161407470703126, 0.025196863174438477, 0.025233407974243165, 0.0251343994140625, 0.02503152084350586, 0.025329504013061523, 0.025241600036621094, 0.02548940849304199, 0.02509119987487793, 0.02515657615661621, 0.025414976119995117, 0.025334367752075194, 0.025207071304321288, 0.025192287445068358, 0.025266048431396484, 0.025165504455566406, 0.025181631088256835, 0.02550668716430664, 0.025377920150756836, 0.025262975692749025, 0.02521660804748535, 0.025330400466918944, 0.02554969596862793, 0.025289535522460938, 0.025264127731323242, 0.025350143432617187, 0.025298944473266603, 0.02562825584411621, 0.025667999267578127, 0.025448448181152345, 0.025649152755737304, 0.025300991058349608, 0.02551558494567871, 0.025245920181274414, 0.02535001564025879, 0.02581760025024414, 0.025167680740356444, 0.025196512222290038, 0.02520684814453125, 0.025185407638549803, 0.02509913635253906, 0.025163455963134764, 0.02515817642211914, 0.025059104919433594, 0.025243583679199218, 0.025122400283813476, 0.02506985664367676, 0.024960800170898436, 0.024977567672729493, 0.02502681541442871, 0.025018367767333984, 0.02488934326171875, 0.02505107116699219, 0.025094207763671876, 0.025100288391113282, 0.024860416412353516, 0.024793407440185548, 0.024753408432006838, 0.024978111267089844, 0.02507699203491211, 0.025071552276611328, 0.025709375381469727, 0.025208831787109375, 0.02520028877258301, 0.02514963150024414, 0.025104543685913087, 0.025223392486572266, 0.025155168533325195, 0.025045120239257812, 0.025104448318481444, 0.025165279388427733, 0.025325727462768555, 0.025029247283935546, 0.025125984191894532, 0.025100095748901367, 0.025237823486328127, 0.025053728103637696, 0.025198591232299804, 0.02524310493469238, 0.02510620880126953, 0.02539132881164551, 0.025106975555419922, 0.025173120498657227, 0.0251646728515625, 0.025198591232299804, 0.025345056533813477, 0.025312000274658204, 0.025243711471557618, 0.02727952003479004, 0.025312768936157228, 0.0250863037109375, 0.02523673629760742, 0.025066144943237306, 0.02505244827270508, 0.02500912094116211, 0.02553638458251953, 0.02530646324157715, 0.02535503959655762, 0.025149440765380858, 0.025044992446899415, 0.024975360870361327, 0.025491455078125, 0.025476224899291994, 0.025039487838745118, 0.02510873603820801, 0.02509993553161621, 0.02512345504760742, 0.025593568801879883, 0.025769952774047852, 0.02521049690246582, 0.025129472732543946, 0.025180192947387697, 0.025138208389282227, 0.02539401626586914, 0.025048416137695314, 0.025055904388427735, 0.025036800384521486, 0.025110111236572266, 0.025051551818847655, 0.024930303573608398, 0.024968351364135742, 0.02516399955749512, 0.025137792587280272, 0.025157632827758788, 0.025221120834350585, 0.025103967666625978, 0.025153951644897463, 0.025190048217773438, 0.02514364814758301, 0.025080928802490233, 0.0251125431060791, 0.025035072326660156, 0.025092735290527343, 0.02513248062133789, 0.02528927993774414, 0.02553670310974121, 0.025296703338623047, 0.025337600708007814, 0.025184576034545898, 0.02522515106201172, 0.02520412826538086, 0.025183935165405274, 0.025153696060180666, 0.02507804870605469, 0.02503932762145996, 0.02518016052246094, 0.025105567932128908, 0.025214111328125, 0.025253568649291992, 0.02513715171813965, 0.02529689598083496, 0.02507334327697754, 0.025210432052612305, 0.025266944885253908, 0.025288415908813477, 0.025343904495239256, 0.025250175476074218, 0.025316736221313477, 0.025461984634399415, 0.025258975982666014, 
0.025334848403930663, 0.025379583358764647, 0.025235456466674806, 0.025117759704589845, 0.025217887878417968, 0.02500364875793457, 0.024977184295654296, 0.025023168563842773, 0.025398431777954103, 0.02517897605895996, 0.026425344467163086, 0.025206432342529297, 0.025395103454589844, 0.02516537666320801, 0.025205120086669922, 0.02521958351135254, 0.02511257553100586, 0.02501955223083496, 0.02520355224609375, 0.02503670310974121, 0.025143392562866212, 0.025609695434570312, 0.02508025550842285, 0.02515772819519043, 0.02508185577392578, 0.025071807861328125, 0.02514134407043457, 0.025134111404418947, 0.02522700881958008, 0.025098751068115235, 0.025182111740112305, 0.025141504287719725, 0.025700639724731446, 0.02501158332824707, 0.02496780776977539, 0.025022464752197264, 0.02591107177734375, 0.02494220733642578, 0.024701536178588866, 0.024805376052856445, 0.0249487361907959, 0.02492838478088379, 0.02510630416870117, 0.025231424331665038, 0.025169984817504883, 0.025087839126586915, 0.025133087158203126, 0.026482688903808595, 0.029913087844848633, 0.027738399505615234, 0.025261791229248046, 0.025392480850219726, 0.025154367446899414, 0.02521683120727539, 0.02526166343688965, 0.025190656661987304, 0.025135295867919922, 0.025075712203979493, 0.025020416259765626, 0.025515647888183595, 0.025232927322387695, 0.025396671295166016, 0.02518572807312012, 0.025437183380126953, 0.02529280090332031, 0.02500559997558594, 0.024928735733032226, 0.02496512031555176, 0.025135103225708007, 0.02533705520629883, 0.025125024795532226, 0.02519718360900879, 0.024964799880981447, 0.02491628837585449, 0.025004032135009766, 0.025036800384521486, 0.025145471572875975, 0.025138336181640623, 0.02509903907775879, 0.024936384201049804, 0.024714975357055663, 0.02480531120300293, 0.02482809638977051, 0.024735103607177733, 0.024806175231933594, 0.024725055694580077, 0.02458892822265625, 0.024631103515625, 0.024475648880004884, 0.024647743225097656, 0.024709056854248047, 0.024530048370361327, 0.024501247406005858, 0.02477462387084961, 0.02466396713256836, 0.024517663955688475, 0.024463615417480468, 0.024629535675048827, 0.02460905647277832, 0.02550559997558594, 0.02499350357055664, 0.025049407958984374, 0.025020736694335938, 0.02508297538757324, 0.025118751525878905, 0.025070144653320314, 0.025262432098388674, 0.02501180839538574, 0.025166208267211915, 0.02508799934387207, 0.02517356872558594, 0.025217472076416017, 0.025208768844604493, 0.025550912857055665, 0.025299104690551757, 0.02534281539916992, 0.02526518440246582, 0.025288671493530274, 0.025257984161376954, 0.02511894416809082, 0.025259807586669923, 0.025233407974243165, 0.025233407974243165, 0.025296255111694334, 0.025465791702270507, 0.025202463150024414, 0.025182271957397463, 0.025093696594238282, 0.02511680030822754, 0.02505958366394043, 0.025077695846557616, 0.02523993682861328, 0.02505638313293457, 0.0250067195892334, 0.02516547203063965, 0.02489993667602539, 0.02510767936706543, 0.025145536422729493, 0.025010112762451173, 0.02533033561706543, 0.026093568801879883, 0.029319168090820313, 0.02516691207885742, 0.02520364761352539, 0.02515711975097656, 0.024951295852661134, 0.024870912551879884, 0.0246965446472168, 0.02466003227233887, 0.02454924774169922, 0.02453334426879883, 0.02432204818725586, 0.024156160354614258, 0.024413440704345705, 0.024505088806152344, 0.02434611129760742, 0.024433151245117187, 0.02470911979675293, 0.026217599868774415, 0.024747135162353516, 0.02479692840576172, 0.02492416000366211, 0.0249487361907959, 0.024747392654418946, 
0.024699552536010742, 0.024712543487548828, 0.02505513572692871, 0.02504572868347168, 0.024827775955200197, 0.025365951538085938, 0.027249343872070314, 0.024896703720092773, 0.02485331153869629, 0.024473600387573242, 0.024606719970703125, 0.024772064208984375, 0.025100000381469728, 0.02524652862548828, 0.02507980728149414, 0.025049087524414062, 0.025069055557250978, 0.025132640838623047, 0.02508896064758301, 0.02513711929321289, 0.025061376571655275, 0.02506883239746094, 0.025086368560791016, 0.025300416946411133, 0.025221696853637697, 0.024961376190185548, 0.02487612724304199, 0.0247445125579834, 0.024440160751342775, 0.024236799240112305, 0.024745887756347656, 0.02453708839416504, 0.024279327392578126, 0.024166112899780275, 0.023985151290893555, 0.024092927932739257, 0.02411392021179199, 0.02402521514892578, 0.024016767501831054, 0.02405171203613281, 0.02402659225463867, 0.02419152069091797, 0.024276992797851563, 0.02408963203430176, 0.024182975769042967, 0.024179487228393554, 0.024137727737426756, 0.024191999435424806, 0.024050304412841797, 0.02405414390563965, 0.023988224029541014, 0.024287008285522462, 0.024226015090942382, 0.024341791152954102, 0.024380128860473634, 0.02431795120239258, 0.024334144592285157, 0.024277151107788084, 0.02430953598022461, 0.024330495834350586, 0.02432137680053711, 0.02436777687072754, 0.02445516777038574, 0.02458163261413574, 0.024381952285766603, 0.024369152069091796, 0.02447670364379883, 0.0246363525390625, 0.024960159301757812, 0.025069759368896483, 0.025055936813354492, 0.025146944046020508, 0.025020864486694334, 0.025178016662597655, 0.02517411231994629, 0.025185823440551758, 0.025405536651611327, 0.02541401672363281, 0.025100160598754882, 0.02526630401611328, 0.025140575408935547, 0.025305952072143555, 0.025333568572998046, 0.025616384506225585, 0.025214975357055663, 0.02522217559814453]",tokens/s,39.73503741148439,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 205, in 
run_text_generation_memory_tracking _ = backend.generate(self.inputs, self.config.generate_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 454, in generate return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 362, in forward query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 260, in apply_rotary_pos_emb k_embed = (k * cos) + (rotate_half(k) * sin) RuntimeError: CUDA error: an illegal memory access was encountered CUDA kernel errors might be asynchronously reported at some other API call, so the stacktrace below might be incorrect. For debugging consider passing CUDA_LAUNCH_BLOCKING=1 Compile with `TORCH_USE_CUDA_DSA` to enable device-side assertions. 
" 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4356.624384,4562.28864,0.0,4183.81824,4182.069248,s,1,9.8477255859375,9.8477255859375,0.0,9.8477255859375,9.8477255859375,9.8477255859375,9.8477255859375,[9.8477255859375],,kWh,8.873898320000061e-05,9.781352760492478e-06,2.9000856533999923e-05,0.00012752119249449302,,MB,1599.602688,4715.380736,0.0,4307.550208,4281.174016,s,10,3.7143634033203123,0.3714363403320312,0.0021803293657190388,0.37187149047851564,0.373250830078125,0.37330784912109377,0.37335346435546873,"[0.3652881774902344, 0.37182183837890626, 0.371921142578125, 0.3733648681640625, 
0.3726637268066406, 0.3722630615234375, 0.3711059875488281, 0.3711553039550781, 0.3732381591796875, 0.3715411376953125]",tokens/s,689.2163533895435,kWh,1.0779766479166616e-05,1.1880993018705978e-06,7.134093008857145e-06,1.910195878989436e-05,tokens/kWh,13401766.950488525,MB,1605.07904,4730.0608,0.0,4322.230272,4281.176576,s,10,18.6219794921875,1.86219794921875,0.009161592099191089,1.8625853881835939,1.871479797363281,1.873463653564453,1.8750507385253905,"[1.8694849853515625, 1.85217333984375, 1.8689837646484375, 1.8710389404296874, 1.870054443359375, 1.8488328857421874, 1.8560394287109374, 1.85618701171875, 1.8537371826171876, 1.875447509765625]",tokens/s,33.83098989365253,kWh,5.4357870552083466e-05,5.9959499420913065e-06,3.603005263354278e-05,9.638387312771752e-05,tokens/kWh,653636.3185625378,,s,630,18.61888622093199,0.029553787652273025,0.0006580003981161237,0.029398816108703613,0.029977926063537597,0.030449244976043698,0.03256973907470704,"[0.030482912063598634, 0.029822080612182618, 0.02954444885253906, 0.02929676818847656, 0.02929305648803711, 0.02929414367675781, 0.029298879623413085, 0.029274368286132814, 0.029339296340942383, 0.029404895782470703, 0.029364864349365236, 0.029224960327148438, 0.029251359939575197, 0.02938697624206543, 0.029298688888549803, 0.02939289665222168, 0.029327360153198243, 0.029404256820678713, 0.029612960815429686, 0.029558656692504882, 0.029438079833984374, 0.029304832458496095, 0.029226272583007813, 0.029287071228027345, 0.02920800018310547, 0.029280576705932617, 0.029329727172851563, 0.029425664901733397, 0.034744129180908204, 0.03146563148498535, 0.029431840896606446, 0.029241247177124022, 0.02941756820678711, 0.029624319076538085, 0.02933363151550293, 0.029220735549926758, 0.029247264862060546, 0.029114591598510743, 0.029079551696777343, 0.02934169578552246, 0.02898476791381836, 0.02940985679626465, 0.02918604850769043, 0.032671745300292966, 0.02939084815979004, 0.029706239700317383, 0.02942972755432129, 0.029808671951293945, 0.029558784484863283, 0.02977382469177246, 0.030174943923950197, 0.029894176483154296, 0.029827840805053712, 0.0333616943359375, 0.029632736206054687, 0.029714719772338867, 0.029402816772460937, 0.029367904663085937, 0.029554752349853514, 0.029526496887207033, 0.029449535369873048, 0.029434463500976563, 0.029308479309082033, 0.031189664840698243, 0.030099071502685548, 0.029792608261108397, 0.02973676872253418, 0.02947711944580078, 0.029155328750610353, 0.029117919921875, 0.029744800567626954, 0.029596223831176757, 0.02944236755371094, 0.029327360153198243, 0.029237247467041014, 0.02923654365539551, 0.02950009536743164, 0.02940928077697754, 0.029128704071044922, 0.02912665557861328, 0.029473983764648437, 0.029635040283203126, 0.029486528396606447, 0.029324031829833983, 0.029132959365844726, 0.02897648048400879, 0.028914335250854493, 0.029145023345947266, 0.029339712142944337, 0.029236448287963866, 0.029108287811279297, 0.02910416030883789, 0.029239967346191408, 0.029679647445678713, 0.029318143844604492, 0.02916454315185547, 0.029414495468139647, 0.029267072677612305, 0.029241119384765625, 0.029317312240600586, 0.029511295318603515, 0.029216320037841796, 0.02935251235961914, 0.029397056579589843, 0.029404287338256837, 0.03014950370788574, 0.02940342330932617, 0.0295830078125, 0.02920044708251953, 0.029449663162231444, 0.029331872940063477, 0.02970060729980469, 0.02935772705078125, 0.029190143585205077, 0.02920857620239258, 0.02912665557861328, 0.02928553581237793, 0.029288320541381835, 0.029336736679077147, 0.029085504531860353, 
0.029447872161865233, 0.02923347282409668, 0.029433504104614257, 0.029501792907714843, 0.02940108871459961, 0.029453344345092773, 0.030838464736938475, 0.02966953659057617, 0.029659263610839842, 0.029404895782470703, 0.02925391960144043, 0.02943180847167969, 0.02933558464050293, 0.02935625648498535, 0.02912214469909668, 0.029366399765014647, 0.029460479736328125, 0.029773664474487305, 0.029768991470336913, 0.029858848571777345, 0.029502368927001952, 0.02967977523803711, 0.029542367935180665, 0.029950815200805662, 0.030129568099975586, 0.02993212890625, 0.02962816047668457, 0.029544704437255858, 0.02955228805541992, 0.02958950424194336, 0.029671039581298828, 0.030005983352661133, 0.02990287971496582, 0.02981270408630371, 0.029863391876220703, 0.029786624908447266, 0.030042144775390626, 0.029818143844604492, 0.029898624420166015, 0.029967199325561522, 0.03089971160888672, 0.029875968933105467, 0.030124799728393555, 0.029784063339233398, 0.030670944213867186, 0.029798080444335937, 0.029724895477294924, 0.029487104415893556, 0.02937651252746582, 0.029310592651367186, 0.02929088020324707, 0.029476896286010742, 0.029275968551635743, 0.02969820785522461, 0.029896703720092774, 0.029838783264160156, 0.029401344299316408, 0.029534528732299805, 0.02917580795288086, 0.029442047119140623, 0.02920243263244629, 0.029294591903686523, 0.0292044792175293, 0.0290631046295166, 0.030123136520385743, 0.029388736724853516, 0.029397472381591797, 0.029454879760742188, 0.029360128402709962, 0.030927135467529298, 0.0300516471862793, 0.029977279663085936, 0.030060543060302734, 0.029882368087768556, 0.029742464065551758, 0.029702783584594727, 0.03033087921142578, 0.029612031936645508, 0.029920799255371094, 0.029626848220825196, 0.029646848678588866, 0.029710336685180663, 0.02960588836669922, 0.029380607604980468, 0.029322975158691405, 0.02905116844177246, 0.02940675163269043, 0.02930940818786621, 0.029313024520874024, 0.029280256271362305, 0.02936947250366211, 0.029607967376708986, 0.029684576034545898, 0.029456384658813478, 0.029456192016601563, 0.03030035209655762, 0.03010259246826172, 0.03063596725463867, 0.029228031158447267, 0.029222911834716796, 0.029305919647216797, 0.029508544921875, 0.029247488021850586, 0.029517568588256837, 0.02932262420654297, 0.029190080642700195, 0.029260255813598632, 0.029295072555541993, 0.02928223991394043, 0.029411392211914064, 0.02944758415222168, 0.029235231399536134, 0.0295850887298584, 0.029579967498779298, 0.029421695709228514, 0.02922604751586914, 0.029809663772583008, 0.02953753662109375, 0.029340415954589843, 0.03168047904968262, 0.030681119918823243, 0.029556991577148438, 0.029794048309326172, 0.029875776290893555, 0.029831743240356444, 0.029855743408203125, 0.029958015441894532, 0.03005580711364746, 0.030210687637329103, 0.03012784004211426, 0.029782079696655275, 0.029851615905761717, 0.030861312866210938, 0.029816768646240235, 0.02987740707397461, 0.03382160186767578, 0.033925121307373046, 0.029743104934692382, 0.029755392074584962, 0.029746240615844726, 0.029781951904296874, 0.029831167221069335, 0.02998374366760254, 0.02967065620422363, 0.029706720352172852, 0.029290752410888674, 0.029259807586669923, 0.02922710418701172, 0.029548095703125, 0.029360479354858398, 0.029542272567749023, 0.029122655868530273, 0.029177888870239258, 0.029162687301635744, 0.029078336715698243, 0.029263744354248045, 0.029292671203613282, 0.029226112365722656, 0.02975993537902832, 0.029712831497192383, 0.029665279388427734, 0.029812736511230467, 0.02964873504638672, 0.029595808029174806, 
0.029644351959228515, 0.029726272583007814, 0.02995859146118164, 0.02978665542602539, 0.030074783325195312, 0.029724672317504884, 0.029831167221069335, 0.029642751693725586, 0.029386751174926756, 0.029454336166381836, 0.02957049560546875, 0.029456384658813478, 0.029440479278564455, 0.02940889549255371, 0.029466400146484373, 0.02923766326904297, 0.029240800857543946, 0.029278207778930664, 0.029205312728881837, 0.029437952041625977, 0.029310976028442383, 0.02959564781188965, 0.029752607345581054, 0.029512416839599608, 0.02914739227294922, 0.02927292823791504, 0.029311679840087892, 0.029155551910400392, 0.02927552032470703, 0.0299400634765625, 0.029259136199951172, 0.0313474235534668, 0.029718624114990235, 0.030408096313476563, 0.03231999969482422, 0.029546560287475584, 0.029366207122802735, 0.029491199493408202, 0.029290496826171877, 0.029314304351806642, 0.02959791946411133, 0.029464544296264647, 0.029254016876220704, 0.029106367111206056, 0.029157152175903322, 0.029077728271484374, 0.029230943679809572, 0.029140640258789062, 0.02937468719482422, 0.0292825927734375, 0.029272064208984375, 0.02907151985168457, 0.029235040664672852, 0.029120512008666992, 0.02914303970336914, 0.029101728439331054, 0.029091968536376953, 0.029325536727905274, 0.02916147232055664, 0.029181024551391602, 0.029111200332641602, 0.029061119079589845, 0.029083648681640626, 0.029079551696777343, 0.029233152389526368, 0.029296640396118165, 0.029120512008666992, 0.029149183273315428, 0.02992742347717285, 0.02924937629699707, 0.029204639434814453, 0.029124607086181642, 0.029081600189208984, 0.029159648895263672, 0.029228832244873045, 0.02938470458984375, 0.029145280838012697, 0.029417280197143555, 0.029058528900146485, 0.029055519104003905, 0.029040767669677735, 0.029191808700561525, 0.029296287536621092, 0.029418079376220704, 0.02942300796508789, 0.029239904403686522, 0.02923654365539551, 0.029213375091552734, 0.02913484764099121, 0.029239295959472656, 0.02910428810119629, 0.029220703125, 0.02917580795288086, 0.029256799697875976, 0.030508991241455077, 0.029890560150146486, 0.03144710350036621, 0.029822912216186524, 0.03004323196411133, 0.029637535095214843, 0.029461824417114257, 0.02947961616516113, 0.02975948715209961, 0.029648319244384765, 0.029802400588989256, 0.029738880157470702, 0.0294420166015625, 0.029440832138061524, 0.029321216583251954, 0.02920038414001465, 0.02934307289123535, 0.029227680206298828, 0.02910207939147949, 0.02920857620239258, 0.029257055282592773, 0.029462879180908202, 0.029767679214477538, 0.029349536895751954, 0.02918467140197754, 0.029206079483032228, 0.029251935958862305, 0.029085792541503907, 0.029376447677612303, 0.02918111991882324, 0.02897599983215332, 0.02921228790283203, 0.02909555244445801, 0.029125375747680662, 0.029335744857788087, 0.029679487228393555, 0.029890495300292967, 0.029439807891845703, 0.029317312240600586, 0.02906675148010254, 0.029107776641845703, 0.02909417533874512, 0.029211296081542968, 0.02945155143737793, 0.029543136596679686, 0.029425664901733397, 0.029519872665405275, 0.029386751174926756, 0.029467647552490234, 0.029258752822875978, 0.02911846351623535, 0.02921062469482422, 0.029050527572631837, 0.02937059211730957, 0.029331167221069335, 0.029296863555908204, 0.029095455169677733, 0.029260448455810547, 0.029219039916992186, 0.029441343307495118, 0.029493728637695314, 0.030680192947387695, 0.02986422348022461, 0.03139433670043945, 0.03000115203857422, 0.029487104415893556, 0.02940108871459961, 0.029429759979248047, 0.029180927276611326, 0.029434879302978514, 
0.029631744384765624, 0.029811296463012695, 0.029596832275390624, 0.0291727352142334, 0.02931616020202637, 0.02926643180847168, 0.029243072509765624, 0.028989408493041994, 0.029124448776245117, 0.029125408172607423, 0.0292325439453125, 0.02928102493286133, 0.02931711959838867, 0.029190143585205077, 0.02923423957824707, 0.029182912826538086, 0.029258975982666014, 0.029413375854492187, 0.029338367462158205, 0.0295199031829834, 0.029302047729492187, 0.029332191467285156, 0.029617792129516603, 0.029155168533325195, 0.030354080200195314, 0.030721920013427734, 0.02936422348022461, 0.02943935966491699, 0.029634719848632814, 0.029512159347534178, 0.029734912872314452, 0.029550592422485353, 0.030027776718139648, 0.029367807388305665, 0.02947532844543457, 0.029379903793334963, 0.029911167144775392, 0.029476896286010742, 0.029106719970703125, 0.029527616500854493, 0.029325056076049804, 0.029465280532836913, 0.029214719772338867, 0.029472768783569334, 0.029382623672485352, 0.029462560653686524, 0.029265504837036133, 0.02923356819152832, 0.029261823654174804, 0.02917523193359375, 0.02937913513183594, 0.029312511444091797, 0.02945884895324707, 0.029208671569824218, 0.02937446403503418, 0.029345279693603517, 0.030140735626220702, 0.029562944412231444, 0.02918400001525879, 0.02921062469482422, 0.02937651252746582, 0.02917580795288086, 0.02924310493469238, 0.029196575164794923, 0.02924367904663086, 0.02931273651123047, 0.02926972770690918, 0.03281094360351562, 0.029489343643188476, 0.02947088050842285, 0.029130752563476563, 0.029169599533081056, 0.029124671936035157, 0.029360128402709962, 0.029138944625854493, 0.02921062469482422, 0.029073087692260743, 0.02959312057495117, 0.0290548152923584, 0.029123199462890624, 0.029110784530639647, 0.029187551498413088, 0.029196640014648438, 0.029157215118408204, 0.02928451156616211, 0.02924060821533203, 0.029567712783813475, 0.02953625679016113, 0.029474815368652343, 0.029303968429565428, 0.02926380729675293, 0.029600383758544922, 0.029255519866943358, 0.029292991638183594, 0.02934169578552246, 0.029306047439575194, 0.02927084732055664, 0.030701759338378907, 0.02956211280822754, 0.02931273651123047, 0.02908678436279297, 0.02934752082824707, 0.029497215270996094, 0.029335775375366212, 0.02939846420288086, 0.029215072631835936, 0.029343135833740236, 0.029209407806396484, 0.029393024444580078, 0.029175680160522462, 0.029081600189208984, 0.02959676742553711, 0.02916035270690918, 0.029263872146606446, 0.029212671279907225, 0.029357952117919923, 0.02952739143371582, 0.030978719711303712, 0.029614112854003907, 0.030810111999511718, 0.029646879196166993, 0.029475936889648436, 0.029499488830566405, 0.029602783203125, 0.029327167510986327, 0.029335552215576172, 0.02934163284301758, 0.029548608779907226, 0.029647008895874023, 0.029529951095581056, 0.02958131217956543, 0.029472768783569334, 0.03018057632446289, 0.029399168014526366, 0.029589471817016603, 0.02950147247314453, 0.029395263671875, 0.029284255981445313, 0.02960223960876465, 0.029488895416259767, 0.029419776916503906, 0.02928179168701172, 0.02962892723083496, 0.030008960723876953, 0.03782486343383789, 0.02988035202026367, 0.029473056793212892, 0.02937740707397461, 0.029492063522338866, 0.030115840911865234, 0.02937446403503418, 0.029233152389526368, 0.029384607315063475, 0.029388160705566407, 0.029301471710205078, 0.02936832046508789, 0.02936729621887207, 0.029432512283325194, 0.030513343811035157, 0.030652799606323243, 0.030134016036987305, 0.030004480361938476, 0.030009376525878907, 0.029935808181762696, 
0.029714975357055664, 0.03034636878967285, 0.030593536376953126, 0.02963007926940918, 0.029301504135131835, 0.029403200149536134, 0.029374176025390625, 0.029468896865844727, 0.029474815368652343, 0.02931711959838867, 0.02931292724609375, 0.029266016006469726, 0.029366111755371092, 0.029606048583984374, 0.02931711959838867, 0.029130752563476563, 0.030665023803710938, 0.029875904083251952]",tokens/s,33.8366104462109,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most 
recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,2042.904576,2090.729472,0.0,1704.984576,1584.582656,s,1,9.21046484375,9.21046484375,0.0,9.21046484375,9.21046484375,9.21046484375,9.21046484375,[9.21046484375],,kWh,4.7607886637471586e-05,5.237542919873092e-06,1.659917994600857e-05,6.944460950335325e-05,,MB,1606.8608,2222.850048,0.0,1807.745024,1740.566528,s,10,1.5413435974121095,0.15413435974121092,0.0006466721792513922,0.15396320343017578,0.15451668243408204,0.15523184432983397,0.15580397384643555,"[0.15594700622558594, 0.15371490478515626, 0.1541998748779297, 0.1537677459716797, 0.153865478515625, 0.1536513214111328, 0.15435775756835937, 0.1541134338378906, 0.15406092834472657, 0.15366514587402344]",tokens/s,1660.8885937556026,kWh,4.633877077994815e-06,5.107095466066841e-07,3.0793731232189696e-06,8.223959747820468e-06,tokens/kWh,31128557.027269702,MB,1612.972032,2369.650688,0.0,1954.545664,1805.569024,s,10,14.226609375,1.4226609375,0.008867347347852135,1.4230558471679688,1.4315916625976564,1.436672235107422,1.4407366931152343,"[1.4165667724609374, 1.4417528076171875, 1.4230211181640624, 1.4152039794921876, 1.4263116455078124, 1.4267042236328125, 1.41170263671875, 1.423090576171875, 1.41179296875, 
1.430462646484375]",tokens/s,44.28321488232329,kWh,5.256855610033655e-05,5.7983965339957e-06,2.8673652279178913e-05,8.704060491351119e-05,tokens/kWh,723800.1167684968,,s,630,14.218644268035879,0.02256927661592998,0.0003756135765653414,0.022491007804870608,0.02295808639526367,0.02312665615081787,0.023848766117095948,"[0.022953983306884765, 0.022816543579101563, 0.02247817611694336, 0.022401920318603517, 0.022392223358154297, 0.02239958381652832, 0.02229043197631836, 0.022339584350585938, 0.022388736724853517, 0.022371967315673827, 0.022587583541870116, 0.0225133113861084, 0.02253878402709961, 0.022274240493774414, 0.02218783950805664, 0.022287616729736327, 0.02251852798461914, 0.022687744140625, 0.022455711364746094, 0.022325855255126953, 0.02224332809448242, 0.022271615982055664, 0.022290815353393556, 0.022265344619750976, 0.022259840011596678, 0.022280288696289063, 0.022434080123901367, 0.022320608139038085, 0.022303455352783202, 0.022439968109130858, 0.022548255920410157, 0.02255462455749512, 0.02264678382873535, 0.02273811149597168, 0.022725439071655272, 0.022771711349487304, 0.02264838409423828, 0.02247881507873535, 0.02258745574951172, 0.022382463455200195, 0.022499872207641602, 0.022511615753173828, 0.02248089599609375, 0.022566272735595704, 0.02252249526977539, 0.02262835121154785, 0.0225482234954834, 0.02242563247680664, 0.022601024627685547, 0.022530176162719726, 0.02229327964782715, 0.02239446449279785, 0.02235638427734375, 0.02231609535217285, 0.022248384475708007, 0.022417247772216795, 0.02268739128112793, 0.022493696212768553, 0.022478431701660157, 0.02255094337463379, 0.022533727645874024, 0.022572799682617186, 0.022716543197631837, 0.02308572769165039, 0.02347007942199707, 0.022980607986450196, 0.022874111175537108, 0.022915071487426757, 0.022740991592407226, 0.02288844871520996, 0.022834943771362304, 0.0227574405670166, 0.02267359924316406, 0.022585344314575196, 0.022708223342895507, 0.022816768646240236, 0.02294988822937012, 0.022921215057373046, 0.022800384521484376, 0.023191551208496093, 0.02291916847229004, 0.02293494415283203, 0.02291164779663086, 0.022810527801513672, 0.022765600204467773, 0.022806432723999022, 0.022990943908691407, 0.0228635196685791, 0.023555711746215822, 0.0227806396484375, 0.02277939224243164, 0.022727167129516602, 0.022667200088500976, 0.02335545539855957, 0.02304819107055664, 0.02288364791870117, 0.02282975959777832, 0.02269171142578125, 0.022775936126708984, 0.022743040084838868, 0.022861824035644532, 0.0234703369140625, 0.023863040924072265, 0.02288640022277832, 0.02291916847229004, 0.023037120819091796, 0.02305311965942383, 0.02291097640991211, 0.022819936752319334, 0.022891679763793946, 0.023039743423461913, 0.022904640197753907, 0.0227674560546875, 0.022615455627441407, 0.022591999053955078, 0.022538496017456056, 0.022673568725585937, 0.02282089614868164, 0.02276543998718262, 0.02266326332092285, 0.022509599685668947, 0.022517696380615234, 0.02263462448120117, 0.022893888473510742, 0.022806848526000977, 0.022938047409057617, 0.023766016006469725, 0.023180288314819338, 0.02302876853942871, 0.022951967239379884, 0.023181280136108397, 0.02318339157104492, 0.022783008575439453, 0.022658624649047852, 0.02261225509643555, 0.02239289665222168, 0.02244607925415039, 0.022328384399414064, 0.022936511993408203, 0.022445375442504883, 0.022703840255737306, 0.02226880073547363, 0.022265151977539064, 0.022416160583496093, 0.02231500816345215, 0.022392831802368163, 0.02224947166442871, 0.022149120330810547, 0.022262943267822265, 0.02213190460205078, 
0.02222982406616211, 0.022213184356689453, 0.02242412757873535, 0.022247135162353517, 0.022195327758789064, 0.022180511474609376, 0.022548511505126954, 0.022832544326782226, 0.02281737518310547, 0.02260963249206543, 0.022950239181518554, 0.022674911499023436, 0.02276419258117676, 0.022845375061035157, 0.022657087326049805, 0.022562368392944336, 0.022798784255981447, 0.02266316795349121, 0.02245622444152832, 0.02238371276855469, 0.02249395179748535, 0.02241971206665039, 0.02235171127319336, 0.02237660789489746, 0.022548479080200197, 0.022597631454467772, 0.022456031799316406, 0.022349695205688476, 0.022444448471069335, 0.022310272216796875, 0.02241548728942871, 0.022213119506835938, 0.022165088653564452, 0.02253388786315918, 0.023833248138427736, 0.023287263870239258, 0.022642656326293944, 0.022352319717407226, 0.02231318473815918, 0.02296905517578125, 0.02294169616699219, 0.022998464584350585, 0.0226309757232666, 0.022618240356445312, 0.02262777519226074, 0.022526079177856446, 0.022591808319091796, 0.022495296478271483, 0.022492607116699218, 0.022548320770263672, 0.022588064193725586, 0.022648832321166993, 0.022581375122070313, 0.022486047744750978, 0.022229856491088867, 0.023625919342041016, 0.0222608642578125, 0.02228499221801758, 0.022239231109619142, 0.022166816711425782, 0.022196800231933593, 0.022249631881713867, 0.022373600006103514, 0.02259542465209961, 0.0224749755859375, 0.022354015350341795, 0.02230726432800293, 0.022622400283813477, 0.0224399356842041, 0.022319263458251953, 0.023012895584106446, 0.022453887939453125, 0.0222174072265625, 0.02242918395996094, 0.02244588851928711, 0.022460479736328125, 0.02240073585510254, 0.022224031448364257, 0.02211609649658203, 0.02214067268371582, 0.022154592514038087, 0.022151487350463867, 0.02207823944091797, 0.022144832611083985, 0.022165023803710937, 0.022248031616210938, 0.02216703987121582, 0.022159744262695312, 0.022175743103027345, 0.025411584854125976, 0.0223191032409668, 0.02228223991394043, 0.022464736938476563, 0.022314783096313476, 0.022350912094116212, 0.022272960662841797, 0.022177791595458983, 0.022190080642700196, 0.02225766372680664, 0.02215433692932129, 0.022137760162353515, 0.022245536804199217, 0.02347417640686035, 0.02307891273498535, 0.023030879974365235, 0.023122432708740235, 0.02281430435180664, 0.022731584548950197, 0.022299903869628906, 0.023470848083496094, 0.022566911697387695, 0.022382591247558595, 0.022552576065063477, 0.02228953552246094, 0.022665279388427734, 0.022274816513061523, 0.022254783630371092, 0.02225008010864258, 0.024123680114746093, 0.022353120803833008, 0.02235635185241699, 0.02234000015258789, 0.022315359115600585, 0.02233718490600586, 0.022207712173461912, 0.0222523193359375, 0.022286336898803712, 0.02236809539794922, 0.022288799285888672, 0.02217523193359375, 0.022707616806030274, 0.02231532859802246, 0.022364095687866212, 0.022360511779785156, 0.022292640686035155, 0.02224892807006836, 0.022322975158691406, 0.02290768051147461, 0.022659040451049803, 0.02267136001586914, 0.022627840042114256, 0.022477376937866212, 0.02259903907775879, 0.023404287338256835, 0.022519712448120118, 0.022418495178222655, 0.022773664474487306, 0.022527936935424805, 0.02240233612060547, 0.022677568435668944, 0.0224116153717041, 0.022441280364990233, 0.023069696426391603, 0.02247200012207031, 0.022604543685913085, 0.022591424942016602, 0.022675455093383787, 0.022665216445922853, 0.022890495300292968, 0.02287615966796875, 0.022965856552124023, 0.022941600799560546, 0.022929759979248048, 0.02308230400085449, 
0.022846303939819335, 0.022957632064819336, 0.022994911193847656, 0.02283776092529297, 0.022794240951538085, 0.0227061767578125, 0.022591487884521484, 0.02245427131652832, 0.02467635154724121, 0.023855104446411132, 0.023130111694335938, 0.022810272216796875, 0.022747488021850587, 0.02267465591430664, 0.022838048934936524, 0.022543903350830077, 0.022428064346313475, 0.022433855056762694, 0.022579200744628908, 0.022314559936523436, 0.022932992935180665, 0.022490047454833986, 0.02252390480041504, 0.02248255920410156, 0.022560447692871095, 0.022589311599731446, 0.02243577575683594, 0.02303273582458496, 0.02618992042541504, 0.02274086380004883, 0.022566591262817383, 0.022638496398925782, 0.022612192153930663, 0.022409696578979493, 0.022382303237915038, 0.02249728012084961, 0.022409215927124023, 0.02223535919189453, 0.0223187198638916, 0.022284448623657225, 0.02227302360534668, 0.022286783218383788, 0.022346303939819335, 0.02227132797241211, 0.02230748748779297, 0.022398303985595704, 0.02227676773071289, 0.022777856826782225, 0.02224742317199707, 0.02248099136352539, 0.022464160919189454, 0.0223767032623291, 0.022401311874389648, 0.022250240325927734, 0.02318012809753418, 0.022321279525756837, 0.02225881576538086, 0.0223056640625, 0.02234067153930664, 0.022207424163818358, 0.022331392288208008, 0.022224063873291015, 0.02226464080810547, 0.022566911697387695, 0.023230464935302734, 0.023098560333251954, 0.02255241584777832, 0.022735071182250977, 0.022358144760131836, 0.022389215469360353, 0.022279552459716797, 0.022226783752441408, 0.02237945556640625, 0.022302207946777345, 0.022446495056152344, 0.02230076789855957, 0.02226380729675293, 0.022218303680419924, 0.02231324768066406, 0.022157024383544922, 0.022235679626464843, 0.022337440490722657, 0.02228428840637207, 0.022283615112304686, 0.022174367904663084, 0.022519807815551757, 0.022517759323120116, 0.022837247848510742, 0.022433792114257813, 0.0224399356842041, 0.022492992401123048, 0.02225324821472168, 0.022463232040405273, 0.02230179214477539, 0.022342239379882813, 0.022332672119140626, 0.02220729637145996, 0.022194175720214843, 0.022246976852416993, 0.022232959747314453, 0.02226563262939453, 0.022342432022094728, 0.022261760711669923, 0.02254643249511719, 0.022448287963867188, 0.022435199737548827, 0.022325727462768556, 0.022709312438964843, 0.022254528045654295, 0.022230623245239257, 0.0223176326751709, 0.022203519821166993, 0.02229248046875, 0.022137279510498046, 0.022327360153198243, 0.02229033660888672, 0.02229216003417969, 0.02230646324157715, 0.022350496292114257, 0.02236038398742676, 0.022555967330932618, 0.022553279876708986, 0.022574848175048828, 0.02265727996826172, 0.02249318313598633, 0.0224532470703125, 0.022491968154907227, 0.023230464935302734, 0.023056095123291015, 0.02278953552246094, 0.023276416778564454, 0.02261382484436035, 0.022585535049438478, 0.02248067283630371, 0.022612192153930663, 0.02262124824523926, 0.022459327697753908, 0.022601728439331056, 0.022884159088134765, 0.022773311614990233, 0.022475391387939452, 0.02306662368774414, 0.022757375717163086, 0.02289468765258789, 0.022675359725952148, 0.023024959564208983, 0.02265340805053711, 0.022831327438354494, 0.022771007537841798, 0.02271113586425781, 0.022552608489990234, 0.022553407669067382, 0.022426143646240234, 0.02244041633605957, 0.022343008041381836, 0.022358688354492187, 0.02246246337890625, 0.022691551208496093, 0.022624544143676758, 0.022718463897705078, 0.02262620735168457, 0.02249884796142578, 0.022569536209106445, 0.022419008255004882, 
0.022442304611206054, 0.02242367935180664, 0.022271007537841798, 0.022521888732910156, 0.02237740707397461, 0.022628000259399414, 0.022880672454833984, 0.02262214469909668, 0.022300479888916015, 0.02235103988647461, 0.022620479583740236, 0.022282943725585938, 0.022282079696655275, 0.022307104110717773, 0.022298496246337892, 0.022347776412963868, 0.02222003173828125, 0.02221683120727539, 0.022248064041137695, 0.022249343872070313, 0.02217171287536621, 0.02214918327331543, 0.022992799758911133, 0.02275462341308594, 0.022666015625, 0.02248294448852539, 0.023462432861328125, 0.022749183654785156, 0.022533599853515624, 0.022559263229370116, 0.022443935394287108, 0.022265792846679688, 0.02229852867126465, 0.02227840042114258, 0.022782079696655272, 0.022516639709472656, 0.022309856414794924, 0.02226380729675293, 0.02226361656188965, 0.022266080856323242, 0.022267808914184572, 0.02250553512573242, 0.022239231109619142, 0.02232080078125, 0.022337888717651366, 0.022239231109619142, 0.022783039093017578, 0.022315967559814454, 0.02326527976989746, 0.022380544662475587, 0.02231705665588379, 0.02228223991394043, 0.02224127960205078, 0.022394079208374024, 0.02223142433166504, 0.0222906551361084, 0.022307008743286134, 0.022705759048461914, 0.022231456756591796, 0.022147071838378905, 0.022210559844970702, 0.022365407943725588, 0.022399744033813476, 0.02284454345703125, 0.022354848861694337, 0.02230886459350586, 0.022347776412963868, 0.022380544662475587, 0.02212633514404297, 0.02215750312805176, 0.02223548889160156, 0.022143871307373046, 0.022264863967895506, 0.02218988800048828, 0.02213596725463867, 0.022192447662353516, 0.022213151931762695, 0.022203615188598633, 0.022365983963012696, 0.022451200485229493, 0.022609920501708985, 0.022751359939575194, 0.022484575271606445, 0.02239446449279785, 0.022208511352539064, 0.022170431137084962, 0.022397087097167968, 0.02264803123474121, 0.02252851104736328, 0.02270207977294922, 0.022861824035644532, 0.02253183937072754, 0.022489343643188477, 0.02249932861328125, 0.02230067253112793, 0.02229248046875, 0.02221670341491699, 0.022511232376098634, 0.022222848892211915, 0.02243212890625, 0.022321151733398437, 0.022931455612182617, 0.022681600570678712, 0.022996543884277344, 0.022653152465820312, 0.0226492805480957, 0.022500864028930666, 0.022405120849609376, 0.022362239837646486, 0.022452384948730468, 0.022250911712646485, 0.022244064331054688, 0.022201696395874024, 0.022288095474243163, 0.022274335861206054, 0.022329824447631836, 0.02247596740722656, 0.02231180763244629, 0.022360416412353517, 0.022703775405883787, 0.022732799530029296, 0.022724096298217773, 0.02260403251647949, 0.022661376953125, 0.022833152770996092, 0.02295359992980957, 0.022876544952392577, 0.022943744659423827, 0.022775808334350587, 0.022965471267700197, 0.02407030487060547, 0.022987327575683593, 0.023035327911376954, 0.022829248428344728, 0.022813119888305665, 0.023076864242553712, 0.022789440155029296, 0.023590784072875976, 0.02267206382751465, 0.022631647109985352, 0.022792383193969725, 0.022604288101196288, 0.022657247543334962, 0.022823936462402345, 0.022871040344238282, 0.023142400741577147, 0.022962175369262695, 0.02306662368774414, 0.02283014488220215, 0.022852256774902345, 0.022726943969726562, 0.023217376708984376]",tokens/s,44.30802178631521,,, 
4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1858.260992,2820.538368,0.0,2434.793472,2407.822848,s,1,8.8084072265625,8.8084072265625,0.0,8.8084072265625,8.8084072265625,8.8084072265625,8.8084072265625,[8.8084072265625],,kWh,5.120098608329802e-05,5.635867549746195e-06,1.631417971800775e-05,7.315103335105196e-05,,MB,1956.2496,3871.21152,0.0,3451.912192,2999.957504,s,10,5.177409851074218,0.5177409851074218,0.06991661313111333,0.5512180480957032,0.5552003295898437,0.5568318542480468,0.5581370739746093,"[0.3788780822753906, 0.37709445190429686, 0.55138671875, 0.5548377685546875, 
0.5527754516601563, 0.5500377807617187, 0.5510493774414063, 0.5520761108398438, 0.5508107299804688, 0.55846337890625]",tokens/s,494.4557362923174,kWh,1.666209497523478e-05,1.837543809381452e-06,7.878740870889072e-06,2.6378379655505307e-05,tokens/kWh,9704917.562916774,MB,1964.838912,3873.308672,0.0,3454.009344,2999.960064,s,10,30.109125244140625,3.0109125244140627,0.2710568465089674,3.13614306640625,3.1505343017578125,3.179036682128906,3.2018385864257812,"[2.46539501953125, 2.475647705078125, 3.13516796875, 3.14079443359375, 3.124468017578125, 3.1371181640625, 3.13467333984375, 3.144200439453125, 3.14412109375, 3.2075390625]",tokens/s,20.92388918281845,kWh,9.401137071601894e-05,1.036987105003129e-05,4.551135739670815e-05,0.00014989259916275838,tokens/kWh,420300.9378174335,,s,630,30.107398406982412,0.04778952128092448,0.0044781567185291915,0.049549312591552735,0.0506115535736084,0.05099746398925781,0.05316491729736331,"[0.03956121444702149, 0.039016063690185544, 0.039247360229492184, 0.03892723083496094, 0.038819839477539066, 0.03885055923461914, 0.03909971237182617, 0.03906220626831055, 0.03923251342773437, 0.03915673446655273, 0.03961407852172852, 0.03885094451904297, 0.03920076751708984, 0.03864985656738281, 0.038578174591064454, 0.0396736946105957, 0.04556972885131836, 0.0386646728515625, 0.038580223083496096, 0.038670337677001954, 0.03901440048217773, 0.03884236907958984, 0.03895462417602539, 0.03928623962402344, 0.03917001724243164, 0.039146335601806644, 0.03880259323120117, 0.03865209579467774, 0.039176959991455075, 0.03918643188476562, 0.03892019271850586, 0.039031967163085934, 0.03977824020385742, 0.03880550384521484, 0.039123870849609374, 0.039061344146728516, 0.0387790412902832, 0.03860275268554687, 0.038484992980957033, 0.038363391876220704, 0.038602527618408204, 0.03905804824829102, 0.0383614387512207, 0.03840716934204102, 0.03838422393798828, 0.038582687377929685, 0.03897139358520508, 0.03914547348022461, 0.039212799072265624, 0.039534847259521486, 0.039411712646484375, 0.03905737686157226, 0.039012382507324216, 0.03912499237060547, 0.03898080062866211, 0.039107391357421875, 0.04205276870727539, 0.038963264465332034, 0.03924854278564453, 0.03906480026245117, 0.03861363220214844, 0.03889180755615235, 0.039215103149414066, 0.03895920181274414, 0.03854940795898437, 0.03842867279052734, 0.03839161682128906, 0.038426815032958986, 0.038335742950439455, 0.03836595153808594, 0.038354942321777344, 0.03831558227539063, 0.038602657318115234, 0.038340641021728517, 0.03840256118774414, 0.038938465118408205, 0.03830169677734375, 0.03814825439453125, 0.03832767868041992, 0.038347393035888674, 0.0381684799194336, 0.03833865737915039, 0.03815423965454102, 0.03864518356323242, 0.03820342254638672, 0.03819164657592773, 0.03840204620361328, 0.038397953033447264, 0.03829471969604492, 0.03817529678344726, 0.03832592010498047, 0.03832380676269531, 0.038295616149902345, 0.038314014434814456, 0.03810806274414062, 0.03835289764404297, 0.038176769256591796, 0.03833651351928711, 0.038266880035400394, 0.038125568389892575, 0.038215679168701173, 0.038378719329833985, 0.03835120010375977, 0.038351295471191406, 0.038438911437988284, 0.03822387313842773, 0.038647743225097654, 0.038402111053466796, 0.038330368041992184, 0.039066654205322265, 0.03940041732788086, 0.038789310455322266, 0.03838886260986328, 0.03852057647705078, 0.03854227066040039, 0.038647903442382815, 0.03844291305541992, 0.03866934585571289, 0.03841737747192383, 0.0385263671875, 0.03846316909790039, 0.048419681549072266, 0.04941161727905274, 
0.04943427276611328, 0.04934924697875977, 0.05051596832275391, 0.04993638229370117, 0.049926143646240234, 0.04947455978393555, 0.04948688125610352, 0.04933785629272461, 0.04952633666992187, 0.0500233268737793, 0.049756160736083986, 0.049517566680908204, 0.05016195297241211, 0.049833793640136716, 0.049535903930664066, 0.049408000946044923, 0.049401439666748044, 0.04976588821411133, 0.04966083145141602, 0.04972886276245117, 0.049576446533203124, 0.04952892684936523, 0.04945721435546875, 0.04954111862182617, 0.04972748947143555, 0.0495300178527832, 0.04964028930664063, 0.04938956832885742, 0.04962918472290039, 0.04962691116333008, 0.04960483169555664, 0.049677631378173825, 0.05016582489013672, 0.049422977447509765, 0.04936832046508789, 0.04927884674072266, 0.050412158966064456, 0.04949343872070312, 0.05036115264892578, 0.04965580749511719, 0.04962284851074219, 0.04973766326904297, 0.050939422607421875, 0.04951523208618164, 0.04976025772094726, 0.049364990234375, 0.04974387359619141, 0.04928278350830078, 0.04963151931762695, 0.049469280242919925, 0.052058143615722655, 0.05063692855834961, 0.04996054458618164, 0.04959065628051758, 0.04968191909790039, 0.04943107223510742, 0.049656993865966795, 0.04934064102172851, 0.04957241439819336, 0.05213504028320313, 0.05018207931518555, 0.04974233627319336, 0.049643585205078125, 0.04949446487426758, 0.04965286254882813, 0.04958297729492187, 0.05012899017333984, 0.05056143951416016, 0.04958003234863281, 0.049637374877929685, 0.04946476745605469, 0.04952323150634766, 0.0494510383605957, 0.049498111724853515, 0.04948521423339844, 0.04965846252441406, 0.04967401504516602, 0.05017827224731446, 0.050504863739013674, 0.05059670257568359, 0.05066457748413086, 0.05028134536743164, 0.05038694381713867, 0.0505425910949707, 0.049928192138671876, 0.04961011123657227, 0.04945132827758789, 0.04979129409790039, 0.05337449645996094, 0.05036483383178711, 0.050337215423583985, 0.05001855850219727, 0.049513919830322266, 0.049439678192138674, 0.04925395202636719, 0.04935110473632812, 0.04927267074584961, 0.04960192108154297, 0.049392414093017575, 0.049555454254150394, 0.04957183837890625, 0.049339744567871095, 0.04931161499023438, 0.04974262237548828, 0.049549312591552735, 0.04961280059814453, 0.049544895172119144, 0.0497869758605957, 0.04954748916625976, 0.04955337524414063, 0.0500469741821289, 0.05001155090332031, 0.04985257720947266, 0.04966412734985352, 0.049504161834716794, 0.049816001892089845, 0.049726463317871096, 0.04973433685302735, 0.049961280822753903, 0.04982886505126953, 0.049715423583984376, 0.05038931274414062, 0.050155681610107423, 0.05003091049194336, 0.04979507064819336, 0.0495852165222168, 0.04969772720336914, 0.04980121612548828, 0.0496715202331543, 0.050606975555419924, 0.049702911376953124, 0.04965990447998047, 0.0506798095703125, 0.04998553466796875, 0.049465343475341796, 0.04932313537597656, 0.0493515510559082, 0.049323902130126954, 0.0492374382019043, 0.049271198272705076, 0.04962128067016602, 0.049595745086669925, 0.04937590408325195, 0.04957388687133789, 0.049547039031982425, 0.04923392105102539, 0.04928508758544922, 0.04931190490722656, 0.0493158073425293, 0.04914188766479492, 0.049342464447021485, 0.04919705581665039, 0.04938310241699219, 0.049459518432617186, 0.04921939086914062, 0.049172542572021485, 0.04999795150756836, 0.05690982437133789, 0.04976761627197265, 0.0497031364440918, 0.049439327239990234, 0.04949606323242187, 0.0495022087097168, 0.0493568000793457, 0.04934860610961914, 0.049216575622558593, 0.04926265716552734, 
0.04921760177612305, 0.049301856994628905, 0.05035804748535156, 0.04948652648925781, 0.049274784088134765, 0.04941788864135742, 0.04942598342895508, 0.049316318511962894, 0.04915036773681641, 0.04953702545166016, 0.04929536056518555, 0.049237152099609376, 0.04929417419433594, 0.04914585494995117, 0.049307647705078124, 0.04972118377685547, 0.04946448135375976, 0.049296382904052735, 0.04952883148193359, 0.04967356872558594, 0.04953897476196289, 0.04954326248168945, 0.049466014862060544, 0.04953190231323242, 0.0493837776184082, 0.05058153533935547, 0.04966630554199219, 0.04945100784301758, 0.04983967971801758, 0.05265180969238281, 0.04984390258789063, 0.05072995376586914, 0.04964761734008789, 0.04955539321899414, 0.04938457489013672, 0.049587135314941404, 0.04945305633544922, 0.04944044876098633, 0.049594688415527347, 0.04954316711425781, 0.04932636642456055, 0.049370849609375, 0.04964556884765625, 0.049948257446289064, 0.050004383087158204, 0.049966079711914066, 0.049931262969970705, 0.04969472122192383, 0.049547264099121094, 0.05231820678710938, 0.050105438232421876, 0.050829856872558594, 0.0499672966003418, 0.049391136169433594, 0.04933270263671875, 0.049395072937011716, 0.04953580856323242, 0.04955750274658203, 0.04951039886474609, 0.05218099212646484, 0.05057247924804688, 0.04959929656982422, 0.0494202880859375, 0.049425662994384764, 0.04955376052856445, 0.04958428955078125, 0.04976569747924805, 0.04967520141601563, 0.04959641647338867, 0.049737728118896485, 0.04954684829711914, 0.04926614379882813, 0.04961520004272461, 0.04929596710205078, 0.04929536056518555, 0.04947148895263672, 0.0497677116394043, 0.04975075149536133, 0.049833984375, 0.04957295989990235, 0.04935939025878906, 0.04940428924560547, 0.0494837760925293, 0.04938956832885742, 0.049446910858154294, 0.04942233657836914, 0.04982515335083008, 0.04973632049560547, 0.05008076858520508, 0.05454643249511719, 0.049524127960205076, 0.049605216979980465, 0.049415359497070314, 0.0493139533996582, 0.04940662384033203, 0.049438720703125, 0.04935270309448242, 0.049713153839111325, 0.04971628952026367, 0.04950646209716797, 0.049463489532470706, 0.05000252914428711, 0.04986675262451172, 0.0501739501953125, 0.05026803207397461, 0.05035212707519531, 0.050321537017822264, 0.05045379257202148, 0.05034384155273437, 0.04993308639526367, 0.04950396728515625, 0.049436351776123044, 0.049264705657958985, 0.04934041595458984, 0.04982003021240235, 0.049926143646240234, 0.050220512390136716, 0.049580768585205076, 0.05073673629760742, 0.04962694549560547, 0.04930620956420898, 0.04937267303466797, 0.049455615997314455, 0.049458465576171874, 0.049380062103271484, 0.049430526733398435, 0.04957388687133789, 0.04971724700927734, 0.04943667221069336, 0.04934860610961914, 0.0493809928894043, 0.04934899139404297, 0.04927897644042969, 0.04975820922851563, 0.04941398239135742, 0.04942367935180664, 0.049422462463378905, 0.04959920120239258, 0.0494284782409668, 0.049651329040527346, 0.04938380813598633, 0.04987033462524414, 0.04944128036499024, 0.05005852890014648, 0.04976303863525391, 0.04945100784301758, 0.0498974723815918, 0.04995436859130859, 0.05011705780029297, 0.04952883148193359, 0.04959641647338867, 0.051142368316650394, 0.05035820770263672, 0.05003504180908203, 0.049788097381591796, 0.04983395385742188, 0.049744735717773436, 0.05173759841918945, 0.050678848266601566, 0.049933727264404294, 0.04993225479125977, 0.04989804840087891, 0.04988927841186523, 0.04984832000732422, 0.050255870819091795, 0.04988278579711914, 0.049964641571044924, 0.04988198471069336, 
0.049530303955078125, 0.04982006454467774, 0.05068979263305664, 0.04949001693725586, 0.049499774932861326, 0.049893985748291014, 0.05012271881103516, 0.04994595336914062, 0.049860992431640626, 0.049882816314697265, 0.04979977416992187, 0.04955340957641602, 0.049399200439453124, 0.04938560104370117, 0.049280574798583984, 0.04990019226074219, 0.04955775833129883, 0.04947558212280274, 0.0493568000793457, 0.04941727828979492, 0.04948064041137695, 0.04960870361328125, 0.05049116897583008, 0.04980553436279297, 0.049460960388183595, 0.04951683044433594, 0.049877025604248046, 0.04985852813720703, 0.05083315277099609, 0.05255807876586914, 0.05013708877563477, 0.05030912017822266, 0.050170143127441405, 0.04969443130493164, 0.04946944046020508, 0.04951228713989258, 0.0497808952331543, 0.049575679779052736, 0.04944044876098633, 0.04940963363647461, 0.04966009521484375, 0.04963817596435547, 0.04995916748046875, 0.050208511352539065, 0.04941619110107422, 0.049495296478271486, 0.0502476806640625, 0.04956159973144531, 0.04949724960327148, 0.04955129623413086, 0.051597728729248046, 0.05236095809936524, 0.05169615936279297, 0.05022079849243164, 0.04951804733276367, 0.0492327995300293, 0.04931148910522461, 0.04967049789428711, 0.049549312591552735, 0.051795391082763674, 0.04998611068725586, 0.049702465057373045, 0.04958009719848633, 0.04946777725219727, 0.049393665313720705, 0.05091328048706055, 0.050974720001220705, 0.04990566253662109, 0.04945420837402344, 0.049423072814941404, 0.04964163208007812, 0.050536449432373044, 0.05087353515625, 0.050199359893798826, 0.0495753288269043, 0.04959417724609375, 0.04937603378295898, 0.049549312591552735, 0.04993228912353516, 0.04953036880493164, 0.049490432739257816, 0.0494090576171875, 0.04963836669921875, 0.049452415466308595, 0.0496376953125, 0.049772640228271485, 0.049559135437011716, 0.04953561782836914, 0.04972339248657227, 0.04966185760498047, 0.0495750732421875, 0.049625408172607424, 0.04948646545410156, 0.04970684814453125, 0.050122303009033205, 0.050084449768066405, 0.04955871963500977, 0.04951327896118164, 0.04978483200073242, 0.04945286560058594, 0.05414112091064453, 0.04972339248657227, 0.04941619110107422, 0.049235969543457034, 0.049360897064208986, 0.04940329742431641, 0.04942006301879883, 0.04939449691772461, 0.049631233215332034, 0.050776065826416014, 0.050528255462646485, 0.049618400573730466, 0.049603103637695316, 0.04954092788696289, 0.04992428970336914, 0.049993728637695314, 0.0497597770690918, 0.0498713264465332, 0.051027553558349606, 0.052547393798828126, 0.050608734130859374, 0.05025996780395508, 0.050546688079833986, 0.04969247817993164, 0.04992633438110351, 0.05025296020507813, 0.049554271697998045, 0.049774528503417965, 0.04967599868774414, 0.05077436828613281, 0.05075353622436524, 0.05064089584350586, 0.05085299301147461, 0.05055068969726562, 0.050807167053222656, 0.05043465423583984, 0.05090304183959961, 0.05068185424804687, 0.05038396835327148, 0.05033052825927734, 0.050640705108642575, 0.05058563232421875, 0.05056313705444336, 0.05077411270141602, 0.05076377487182617, 0.05072694396972656, 0.050603519439697264, 0.05088713455200195, 0.05087846374511719, 0.05100457763671875, 0.05046470260620117, 0.050502273559570314, 0.050417953491210935, 0.050513313293457034, 0.05086489486694336, 0.051066463470458984, 0.051259647369384764, 0.05133926391601563, 0.0521844482421875, 0.05239052963256836, 0.05148262405395508, 0.051288288116455076, 0.0513328971862793, 0.05100953674316406, 0.050755870819091796, 0.05098876953125, 0.05433510589599609, 
0.05913817596435547, 0.051069087982177734, 0.0555904655456543, 0.05079040145874023, 0.050547134399414065]",tokens/s,20.925089291471036,,, 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,3722.653696,4371.447808,0.0,3992.977408,3875.045888,s,1,9.69694921875,9.69694921875,0.0,9.69694921875,9.69694921875,9.69694921875,9.69694921875,[9.69694921875],,kWh,7.936803180833749e-05,8.743291083237318e-06,2.7014743834000154e-05,0.00011512606672557496,,MB,1954.742272,4595.843072,0.0,4188.012544,4099.587072,s,10,3.046258911132813,0.30462589111328126,0.0025102218270380726,0.30477621459960935,0.3076242462158203,0.3078436019897461,0.3080190866088867,"[0.2989281311035156, 0.3049571838378906, 0.30459524536132815, 0.3032857666015625, 0.30345806884765625, 0.30550613403320315, 0.30324420166015625, 0.3080629577636719, 0.30757550048828125, 0.30664572143554686]",tokens/s,840.3750550041108,kWh,8.844502419949459e-06,9.753858381427302e-07,5.889070367818137e-06,1.5708958625910327e-05,tokens/kWh,16296433.525374118,MB,1963.761664,4700.700672,0.0,4292.870144,4197.763584,s,10,22.24019262695312,2.224019262695312,0.013936060472504645,2.222382568359375,2.2395323730468752,2.245690869140625,2.250617666015625,"[2.212475830078125, 2.20854931640625, 2.238163818359375, 2.220225341796875, 2.21496533203125, 2.205560302734375, 2.23595654296875, 2.227906982421875, 2.224539794921875, 2.251849365234375]",tokens/s,28.32709278050481,kWh,6.514719932088363e-05,7.185694272140852e-06,4.2981329839581884e-05,0.00011531422343260636,tokens/kWh,546333.2980499096,,s,630,22.23730520248411,0.035297309845212906,0.0004976233514561854,0.03523739242553711,0.035796012878417965,0.03608641014099121,0.03681490692138672,"[0.03545292663574219, 0.03514486312866211, 0.034976608276367185, 0.034837696075439455, 0.035070304870605466, 0.03481238555908203, 0.034871295928955076, 0.03514531326293945, 0.034859424591064454, 0.03491193771362305, 0.03500678253173828, 0.03504489517211914, 0.03498566436767578, 0.03528169631958008, 0.03475167846679687, 0.034859039306640624, 0.03480656051635742, 0.035128833770751954, 0.03493529510498047, 0.035416065216064455, 0.03500543975830078, 0.03509145736694336, 0.034936832427978515, 0.035309089660644534, 0.03483081436157227, 0.034912254333496096, 0.03482352066040039, 0.03494355010986328, 0.034760032653808594, 0.03491507339477539, 0.035334144592285156, 0.035530750274658206, 0.03545209503173828, 0.03553363037109375, 0.03545702362060547, 0.03541196823120117, 0.035522079467773436, 0.03609852981567383, 0.035501953125, 0.03554828643798828, 0.035371391296386716, 0.03516476821899414, 0.03552259063720703, 0.03531980895996094, 0.035110912322998046, 0.03514291381835938, 0.034960128784179687, 0.03520716857910156, 0.034904064178466795, 0.03493622589111328, 0.034981857299804686, 0.03550886535644531, 0.03535871887207031, 0.03513753509521484, 0.035077407836914064, 0.03481631851196289, 0.034984096527099606, 
0.03472000122070312, 0.03484652709960938, 0.03513158416748047, 0.03496345520019531, 0.03492454528808594, 0.03490374374389649, 0.03561062240600586, 0.03522864151000977, 0.03503513717651367, 0.034920448303222655, 0.03504294586181641, 0.03482457733154297, 0.03504742431640625, 0.034920318603515625, 0.03493219375610351, 0.034915199279785154, 0.03477187347412109, 0.03473702239990235, 0.03483225631713867, 0.034847904205322265, 0.03475500869750976, 0.034867710113525394, 0.034582561492919925, 0.03467059326171875, 0.034719585418701175, 0.034871295928955076, 0.034683361053466796, 0.03461907196044922, 0.0348812141418457, 0.03545119857788086, 0.03478489685058594, 0.03511251068115234, 0.034673118591308595, 0.03482790374755859, 0.036085952758789064, 0.03491279983520508, 0.03479062271118164, 0.03503376007080078, 0.03535788726806641, 0.03496646499633789, 0.03487238311767578, 0.03511123275756836, 0.03488832092285156, 0.03499008178710938, 0.03488915252685547, 0.03497824096679687, 0.03533631896972656, 0.03506175994873047, 0.0349793930053711, 0.036022689819335936, 0.03498713684082031, 0.03490057754516602, 0.03483679962158203, 0.03477840042114258, 0.0349703369140625, 0.035373054504394534, 0.03530342483520508, 0.035381248474121094, 0.035250175476074216, 0.035520511627197264, 0.03567001724243164, 0.0353259506225586, 0.035184513092041014, 0.035152000427246095, 0.035127296447753906, 0.03529449462890625, 0.0352918701171875, 0.035206401824951175, 0.03528985595703125, 0.0358570556640625, 0.035509727478027345, 0.03552719879150391, 0.035620864868164064, 0.03548364639282227, 0.0354317741394043, 0.036676254272460934, 0.03561062240600586, 0.035846145629882815, 0.035281982421875, 0.03558291244506836, 0.03546102523803711, 0.03530556869506836, 0.03501670455932617, 0.03539475250244141, 0.0350052490234375, 0.03500435256958008, 0.03495328140258789, 0.035104351043701174, 0.035682720184326173, 0.03566339111328125, 0.0353546257019043, 0.035232223510742185, 0.03508224105834961, 0.03531545639038086, 0.03539174270629883, 0.03533004760742187, 0.03554079818725586, 0.03543008041381836, 0.035526336669921874, 0.035563743591308594, 0.035570270538330076, 0.03548915100097656, 0.03533824157714844, 0.03517504119873047, 0.03540991973876953, 0.03550207901000976, 0.03539763259887695, 0.035299327850341795, 0.03522710418701172, 0.035138080596923825, 0.035149120330810545, 0.03523244857788086, 0.03494022369384766, 0.03508070373535156, 0.03590163040161133, 0.03742051315307617, 0.03576886367797852, 0.035676158905029294, 0.03567001724243164, 0.03574784088134766, 0.035454975128173825, 0.035672065734863284, 0.03543040084838867, 0.03564287948608398, 0.03529369735717774, 0.0352542724609375, 0.03501875305175781, 0.039215103149414066, 0.03560195159912109, 0.0357743034362793, 0.035394336700439455, 0.035202560424804685, 0.03569907379150391, 0.03517747116088867, 0.03496857452392578, 0.034944286346435545, 0.03503792190551758, 0.03485203170776367, 0.03534112167358398, 0.03491635131835937, 0.034869247436523435, 0.034884830474853516, 0.03479632186889649, 0.034844001770019534, 0.034835105895996095, 0.03467059326171875, 0.03468492889404297, 0.035672065734863284, 0.03465830230712891, 0.03475225448608398, 0.03486745452880859, 0.034764801025390625, 0.03499187088012695, 0.03499647903442383, 0.03486684799194336, 0.03486547088623047, 0.03494297790527344, 0.03489590454101563, 0.03508019256591797, 0.0352542724609375, 0.03544374465942383, 0.03936355209350586, 0.035767425537109376, 0.035494785308837894, 0.035438591003417966, 0.035659584045410156, 0.03550636672973633, 
0.035700382232666014, 0.03523004913330078, 0.03546112060546875, 0.03521478271484375, 0.03535520172119141, 0.03515760040283203, 0.034982078552246096, 0.03499030303955078, 0.03492428970336914, 0.035643646240234375, 0.034973056793212894, 0.035289119720458985, 0.03535436630249023, 0.03515068817138672, 0.036234848022460936, 0.03504579162597656, 0.034885887145996095, 0.034933502197265626, 0.034854080200195314, 0.03534652709960937, 0.035122913360595705, 0.03525174331665039, 0.03535488128662109, 0.03533846282958984, 0.03539734268188476, 0.03541376113891601, 0.03607196807861328, 0.03545702362060547, 0.03643801498413086, 0.03532185745239258, 0.035186302185058596, 0.03571340942382813, 0.035389438629150394, 0.03516416168212891, 0.03514883041381836, 0.03497264099121094, 0.03492659378051758, 0.035073856353759765, 0.03471379089355469, 0.03480335998535156, 0.034574687957763674, 0.034968929290771486, 0.03492665481567383, 0.034951774597167966, 0.03494063949584961, 0.03482038497924805, 0.03483443069458008, 0.03500851058959961, 0.034887680053710936, 0.03470064163208008, 0.03468764877319336, 0.03466649627685547, 0.03485625457763672, 0.034982593536376956, 0.03466854476928711, 0.034797569274902344, 0.034778656005859374, 0.03574012756347656, 0.035448833465576174, 0.03494297790527344, 0.03493030548095703, 0.03471807861328125, 0.03506332778930664, 0.035402496337890624, 0.03485795211791992, 0.0347770881652832, 0.03516460800170899, 0.03495532989501953, 0.03507020950317383, 0.0353704948425293, 0.035460609436035156, 0.035500961303710936, 0.03539344024658203, 0.03555715179443359, 0.03549020767211914, 0.035573760986328126, 0.035646751403808595, 0.035214046478271484, 0.035092479705810545, 0.03542425537109375, 0.03506995010375977, 0.035178497314453126, 0.03507382583618164, 0.03500566482543945, 0.036086784362792966, 0.035844097137451174, 0.034950496673583985, 0.03520783996582031, 0.03550003051757813, 0.035514366149902346, 0.03550185775756836, 0.03628998565673828, 0.035383872985839844, 0.03612876892089844, 0.035788734436035155, 0.035295295715332034, 0.035135486602783206, 0.03503827285766602, 0.03497465515136719, 0.03482624053955078, 0.03502684783935547, 0.03521750259399414, 0.03516211318969727, 0.03492659378051758, 0.03482828903198242, 0.035775646209716794, 0.03483513641357422, 0.034875232696533205, 0.034533695220947264, 0.03461529541015625, 0.03471769714355469, 0.0357130241394043, 0.034721569061279295, 0.034945247650146484, 0.03507199859619141, 0.03469311904907227, 0.03591167831420899, 0.0345945930480957, 0.034652385711669925, 0.03483356857299805, 0.03470761489868164, 0.0348842887878418, 0.0347380142211914, 0.034842784881591794, 0.035094528198242186, 0.03489756774902344, 0.034795295715332034, 0.034896446228027345, 0.03473721694946289, 0.0347309455871582, 0.03478937530517578, 0.03666124725341797, 0.03503308868408203, 0.035106143951416015, 0.03573417663574219, 0.03476025772094726, 0.03482022476196289, 0.034793792724609376, 0.03476643371582031, 0.034789505004882815, 0.03473027038574219, 0.034701057434082035, 0.034763008117675784, 0.0348221435546875, 0.03477088165283203, 0.03476486587524414, 0.03475235366821289, 0.03479334259033203, 0.0347977294921875, 0.034707584381103516, 0.034955265045166016, 0.03473583984375, 0.03490639877319336, 0.035004417419433595, 0.03623481750488281, 0.03502739334106445, 0.03476224136352539, 0.03468566513061523, 0.03477481460571289, 0.03539286422729492, 0.03538995361328125, 0.03538095855712891, 0.03508268737792969, 0.035148799896240236, 0.03522457504272461, 0.03536185455322265, 0.03539616012573242, 
0.0354758415222168, 0.03549321746826172, 0.03536553573608398, 0.035356670379638674, 0.035342334747314456, 0.035563518524169925, 0.035804767608642575, 0.035387809753417966, 0.035261566162109376, 0.035494785308837894, 0.03555433654785156, 0.0349988784790039, 0.03512153625488281, 0.03498758316040039, 0.03526291275024414, 0.03605871963500976, 0.035510688781738284, 0.035547134399414065, 0.03566796875, 0.035755168914794924, 0.03567292785644531, 0.03559219360351563, 0.03565465545654297, 0.03573030471801758, 0.03590351867675781, 0.03568239974975586, 0.03589897537231445, 0.035516830444335935, 0.035573760986328126, 0.03569427108764649, 0.03682892990112305, 0.03611091232299805, 0.03592739105224609, 0.035334815979003904, 0.03569868850708008, 0.03515142440795899, 0.03517715072631836, 0.03544409561157227, 0.0353012809753418, 0.03521379089355469, 0.03549900817871094, 0.035531230926513675, 0.03678057479858399, 0.036896446228027346, 0.03538358306884765, 0.035084320068359376, 0.035272705078125, 0.03503308868408203, 0.035225601196289064, 0.03500233459472656, 0.035856449127197265, 0.035598175048828125, 0.03556556701660156, 0.03605100631713867, 0.035741470336914063, 0.03559088134765625, 0.03526006317138672, 0.03515631866455078, 0.035447967529296874, 0.03536124801635742, 0.03524236679077149, 0.035185791015625, 0.035016830444335935, 0.03594931030273438, 0.035082527160644535, 0.03524323272705078, 0.03525795364379883, 0.03524291229248047, 0.03540377426147461, 0.03542611312866211, 0.03494924926757813, 0.03505942535400391, 0.03496502304077148, 0.03477791976928711, 0.034742111206054686, 0.03490409469604492, 0.03472198486328125, 0.037865089416503905, 0.03513286590576172, 0.035187583923339844, 0.03531161499023437, 0.03535257720947266, 0.03535436630249023, 0.03520127868652344, 0.03504272079467773, 0.035207775115966795, 0.03549385452270508, 0.03549520111083984, 0.035242336273193356, 0.03513753509521484, 0.03499836730957031, 0.03482246398925781, 0.03577043151855469, 0.035076000213623046, 0.03484374237060547, 0.03529619216918945, 0.03574784088134766, 0.035899391174316404, 0.03495529556274414, 0.03511497497558594, 0.03507523345947266, 0.03528508758544922, 0.03541648101806641, 0.035409793853759766, 0.03540425491333008, 0.035440254211425784, 0.03558233642578125, 0.03550207901000976, 0.035467456817626954, 0.035571521759033206, 0.03559158325195313, 0.036123233795166014, 0.035388607025146485, 0.03594044876098633, 0.03558860778808594, 0.035274879455566406, 0.03612044906616211, 0.035102718353271486, 0.03538534545898438, 0.035200542449951175, 0.03529366302490235, 0.035778560638427735, 0.03569631958007812, 0.03572137451171875, 0.03577052688598633, 0.035506175994873046, 0.03578243255615234, 0.03556169509887695, 0.03570687866210937, 0.035606529235839846, 0.03558399963378906, 0.03570278549194336, 0.03555942535400391, 0.03537715148925781, 0.03547500610351562, 0.03531411361694336, 0.035332096099853515, 0.0353259506225586, 0.03544268798828125, 0.03610403060913086, 0.03527423858642578, 0.03518940734863281, 0.03542425537109375, 0.03568377685546875, 0.036299327850341796, 0.03547955322265625, 0.03545087814331055, 0.03527475357055664, 0.03525836944580078, 0.03505881500244141, 0.035089279174804686, 0.0348671989440918, 0.034961185455322265, 0.03492047882080078, 0.034876991271972656, 0.03492108917236328, 0.03491430282592774, 0.03487744140625, 0.0351539192199707, 0.03501567840576172, 0.034923519134521484, 0.0349917106628418, 0.034892192840576174, 0.03485696029663086, 0.0349224967956543, 0.03478732681274414, 0.034994174957275394, 
0.034834144592285156, 0.03588534545898438, 0.0348590087890625, 0.03521033477783203, 0.03499827194213867, 0.035172382354736326, 0.034827136993408205, 0.034928638458251955, 0.0349306869506836, 0.036214656829833984, 0.035624897003173825, 0.03568582534790039, 0.035453697204589844, 0.03509436798095703, 0.03489807891845703, 0.03489785766601562, 0.03476076889038086, 0.03500851058959961, 0.03525836944580078, 0.03530547332763672, 0.03535171127319336, 0.036683998107910155, 0.03518323135375977, 0.03533004760742187, 0.036251903533935544, 0.035923168182373046, 0.035674655914306644, 0.03565785598754883, 0.03567174530029297, 0.03627027130126953, 0.036773792266845705, 0.035690593719482425, 0.0357130241394043, 0.03582156753540039, 0.03590956878662109, 0.03588227081298828, 0.03616438293457031, 0.036003551483154296, 0.03576860809326172, 0.03604889678955078, 0.035813377380371096, 0.03566748809814453, 0.03534236907958985, 0.03526496124267578, 0.03547676849365235, 0.036526817321777344, 0.035624576568603517, 0.03564287948608398, 0.03608870315551758, 0.03662416076660156, 0.03751715087890625, 0.03652851104736328, 0.03592396926879883, 0.03569036865234375, 0.035795040130615234, 0.03571036911010742, 0.03565145492553711, 0.03589321517944336, 0.035844894409179685, 0.03555670547485352, 0.03538191986083984, 0.035332000732421875, 0.03602851104736328, 0.03552025604248047, 0.03565798568725586, 0.035850238800048825, 0.03546931076049804, 0.03572326278686523, 0.035659774780273434, 0.03567776107788086, 0.03549638366699219, 0.035542015075683595]",tokens/s,28.33077093935027,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in 
__enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File 
""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7159.865344,10248.650752,0.0,9862.905856,9797.835264,s,1,13.036337890625,13.036337890625,0.0,13.036337890625,13.036337890625,13.036337890625,13.036337890625,[13.036337890625],,kWh,0.0001601455375041572,1.7655413268169356e-05,5.424448783999947e-05,0.00023204543861232602,,MB,2928.021504,10626.138112,0.0,10211.033088,10097.478144,s,10,7.080755126953125,0.7080755126953125,0.0021852999069200663,0.708653076171875,0.7098954345703126,0.7101543823242188,0.7103615405273438,"[0.702265625, 0.7067350463867188, 0.708219970703125, 0.707822509765625, 0.7079814453125, 0.7092393188476562, 0.709837890625, 0.709086181640625, 0.70915380859375, 0.710413330078125]",tokens/s,361.5433600090584,kWh,2.0654328066945408e-05,2.2778309918260785e-06,1.3706399853999814e-05,3.66385589127713e-05,tokens/kWh,6987174.375757577,MB,2932.051968,10630.332416,0.0,10213.13024,10097.480704,s,10,37.07304760742188,3.7073047607421876,0.0052103151914074265,3.7071820068359376,3.7146004150390626,3.716242102050781,3.717555451660156,"[3.708646484375, 3.704712890625, 3.714235595703125, 3.7004345703125, 3.708013671875, 3.7178837890625, 3.707966064453125, 3.7007197265625, 3.704036865234375, 
3.70639794921875]",tokens/s,16.99347748993467,kWh,9.782617883013852e-05,1.0789192504527572e-05,6.500438533679996e-05,0.000173619756671466,tokens/kWh,362861.9300464317,,s,630,37.06995254516605,0.05884119451613652,0.0006394524138849767,0.058709760665893555,0.05922449226379395,0.05971522789001465,0.061702710342407235,"[0.059774974822998046, 0.059101184844970706, 0.05900502395629883, 0.05902121734619141, 0.05874697494506836, 0.05884236907958985, 0.05944553756713867, 0.05892118453979492, 0.06093344116210937, 0.059485023498535156, 0.058933246612548826, 0.058947582244873044, 0.05889011383056641, 0.0589169921875, 0.05895577621459961, 0.06150870513916016, 0.058915969848632815, 0.05879580688476563, 0.05876128005981445, 0.05875296020507813, 0.0585011215209961, 0.058431488037109375, 0.05849292755126953, 0.058517505645751956, 0.05834272003173828, 0.05849715042114258, 0.058714176177978514, 0.0586060791015625, 0.05867897415161133, 0.058491199493408204, 0.058744831085205076, 0.05909635162353516, 0.05887411117553711, 0.0585302734375, 0.05857228851318359, 0.05884924697875977, 0.05863375854492187, 0.05853696060180664, 0.05842739105224609, 0.05859088134765625, 0.05866940689086914, 0.058515167236328124, 0.058625984191894534, 0.058537982940673826, 0.05854854583740234, 0.05851091384887695, 0.05871196746826172, 0.058729022979736326, 0.05890652847290039, 0.05876540756225586, 0.05853507232666016, 0.058590015411376956, 0.05934425735473633, 0.058884353637695314, 0.05870198440551758, 0.05868364715576172, 0.0587243537902832, 0.05905203247070313, 0.058705921173095706, 0.05870182418823242, 0.05924678421020508, 0.05903523254394531, 0.058839488983154296, 0.059620769500732425, 0.05904646301269531, 0.05915849685668945, 0.05875868988037109, 0.058595550537109374, 0.0586366081237793, 0.05845305633544922, 0.0586577262878418, 0.058904159545898435, 0.058753246307373046, 0.05857033538818359, 0.05854620742797852, 0.05875081634521485, 0.0586899185180664, 0.05848099136352539, 0.058742782592773435, 0.058807552337646486, 0.058641151428222654, 0.0587789421081543, 0.05909779357910156, 0.05884723281860352, 0.058826751708984375, 0.05871615982055664, 0.05897625732421875, 0.05902272033691406, 0.059199840545654296, 0.05853555297851563, 0.05881711959838867, 0.05855599975585937, 0.05853411102294922, 0.05882511901855469, 0.05874016189575195, 0.05851587295532226, 0.0587243537902832, 0.05863238525390625, 0.05859513473510742, 0.05895577621459961, 0.05879379272460938, 0.05848432159423828, 0.058754688262939454, 0.058440383911132814, 0.05871030426025391, 0.062083263397216794, 0.058690559387207034, 0.058657470703125, 0.058622081756591796, 0.059211681365966794, 0.05918102264404297, 0.05884735870361328, 0.05847449493408203, 0.05866495895385742, 0.05836800003051758, 0.05918064117431641, 0.05892284774780274, 0.0589051513671875, 0.058052608489990234, 0.058515457153320315, 0.05864857482910156, 0.058840511322021484, 0.058913345336914065, 0.05856662368774414, 0.05868137741088867, 0.058480640411376954, 0.059736640930175784, 0.059768447875976564, 0.05863087844848633, 0.059154430389404294, 0.05850726318359375, 0.05845971298217773, 0.05850156784057617, 0.05863993453979492, 0.05856099319458008, 0.05866873550415039, 0.06027907180786133, 0.05872127914428711, 0.05865903854370117, 0.058477344512939455, 0.058355712890625, 0.05841100692749023, 0.058447872161865234, 0.059222015380859375, 0.059138046264648435, 0.059156478881835936, 0.058652000427246095, 0.05866121673583984, 0.0587421760559082, 0.058665889739990235, 0.05874595260620117, 0.059398334503173826, 
0.06028307342529297, 0.060660255432128905, 0.06094956970214844, 0.059345855712890624, 0.058531841278076174, 0.05861075210571289, 0.05848294448852539, 0.058778305053710934, 0.058818336486816405, 0.06199456024169922, 0.058990943908691404, 0.05917318344116211, 0.05880009460449219, 0.05869366455078125, 0.05894863891601562, 0.058768352508544924, 0.05898758316040039, 0.05879043197631836, 0.05865718460083008, 0.05974016189575195, 0.05928879928588867, 0.058874015808105466, 0.05864857482910156, 0.05858777618408203, 0.058584991455078124, 0.059047134399414065, 0.058627201080322267, 0.05872150421142578, 0.05844022369384766, 0.05849087905883789, 0.058492992401123045, 0.058560447692871095, 0.058431137084960935, 0.05857519912719727, 0.058789344787597654, 0.05878758239746094, 0.058620193481445315, 0.05974259185791016, 0.05899433517456055, 0.05899472045898437, 0.058945632934570315, 0.05968905639648438, 0.059572673797607424, 0.05868323135375977, 0.0586399040222168, 0.05862899017333984, 0.05859932708740234, 0.05886547088623047, 0.058792224884033205, 0.05880944061279297, 0.05879641723632813, 0.05847001647949219, 0.058753952026367184, 0.058436702728271485, 0.05845801544189453, 0.058211326599121094, 0.058570751190185545, 0.05872150421142578, 0.05845446395874023, 0.05855417633056641, 0.059103328704833986, 0.058447616577148434, 0.05832160186767578, 0.05831884765625, 0.05846835327148438, 0.05835779190063477, 0.05865264129638672, 0.05872409439086914, 0.058423553466796875, 0.05944319915771484, 0.0583741455078125, 0.05845196914672852, 0.05848883056640625, 0.05893632125854492, 0.05862707138061524, 0.05900032043457031, 0.05874124908447265, 0.05868268966674805, 0.05868803024291992, 0.05840297698974609, 0.05849856185913086, 0.05886323165893555, 0.058655200958251955, 0.0587421760559082, 0.05876406478881836, 0.05883484649658203, 0.05843379211425781, 0.0585055046081543, 0.05844899368286133, 0.05847110366821289, 0.058894241333007816, 0.05856265640258789, 0.058464256286621094, 0.05907455825805664, 0.05860940933227539, 0.05877376174926758, 0.05853772735595703, 0.05837030410766601, 0.059066112518310544, 0.06052428817749023, 0.05958633422851563, 0.058863712310791017, 0.058318687438964845, 0.05855913543701172, 0.058581214904785156, 0.058823806762695316, 0.05861260986328125, 0.058589183807373046, 0.05842339324951172, 0.058881950378417966, 0.06003526306152344, 0.05871187210083008, 0.05900697708129883, 0.058759166717529294, 0.0586915512084961, 0.058882080078125, 0.05871516799926758, 0.05862403106689453, 0.05870892715454101, 0.05877958297729492, 0.058848960876464844, 0.058788288116455076, 0.059018177032470705, 0.05902438354492188, 0.058942527770996095, 0.058596286773681644, 0.05911142349243164, 0.058605567932128906, 0.0584089584350586, 0.058582080841064456, 0.058915775299072264, 0.059015167236328124, 0.05901625442504883, 0.05892192077636719, 0.05865267181396484, 0.058535934448242184, 0.058447872161865234, 0.05900672149658203, 0.05848831939697265, 0.05854489517211914, 0.05872780990600586, 0.058775585174560545, 0.05839014434814453, 0.05838499069213867, 0.058361793518066404, 0.05832953643798828, 0.05825641632080078, 0.058702560424804685, 0.062163200378417965, 0.05917900848388672, 0.05898380661010742, 0.05886220932006836, 0.05851136016845703, 0.05894144058227539, 0.05865859222412109, 0.06253385543823242, 0.05875116729736328, 0.0588941421508789, 0.05857484817504883, 0.05849497604370117, 0.05831248092651367, 0.05843123245239258, 0.05884771347045899, 0.059908096313476565, 0.05893033599853516, 0.05851004791259766, 0.05842956924438476, 
0.05889775848388672, 0.05876201629638672, 0.05860073471069336, 0.058990753173828125, 0.058996448516845705, 0.05855920028686523, 0.05858303833007812, 0.058836513519287106, 0.058738689422607425, 0.05863801574707031, 0.05881862258911133, 0.05859340667724609, 0.05870857620239258, 0.058789344787597654, 0.05848118209838867, 0.05862809753417969, 0.05917695999145508, 0.05856367874145508, 0.05872275161743164, 0.05888582229614258, 0.05950678253173828, 0.05929439926147461, 0.05867046356201172, 0.059136638641357424, 0.05890252685546875, 0.058589183807373046, 0.0592026252746582, 0.058837566375732425, 0.058724735260009764, 0.058619232177734376, 0.05884934234619141, 0.05887241744995117, 0.058660255432128904, 0.0585693130493164, 0.05841715240478516, 0.06811965179443359, 0.0596591682434082, 0.059643905639648435, 0.058887680053710936, 0.058699745178222656, 0.05875763320922851, 0.05899817657470703, 0.05888678359985351, 0.058789249420166015, 0.05866953659057617, 0.05864668655395508, 0.05869977569580078, 0.059494400024414064, 0.05881587219238281, 0.058931838989257815, 0.05913705444335938, 0.05878195190429687, 0.05872057723999023, 0.058823070526123046, 0.05978112030029297, 0.059133438110351565, 0.0586833610534668, 0.05878838348388672, 0.05843337631225586, 0.0596701774597168, 0.058657470703125, 0.058584545135498045, 0.05858595275878906, 0.058726398468017575, 0.058660160064697264, 0.06025484848022461, 0.059482177734375, 0.05885081481933594, 0.05886767959594726, 0.05888668823242187, 0.05897129440307617, 0.061781951904296875, 0.05884169769287109, 0.05885318374633789, 0.05848320007324219, 0.05843088150024414, 0.058544734954833984, 0.058421150207519534, 0.05835171127319336, 0.05830620956420898, 0.05842160034179687, 0.058380287170410154, 0.05938790512084961, 0.05866416168212891, 0.058780448913574215, 0.05869120025634766, 0.05882819366455078, 0.05898649597167969, 0.05871305465698242, 0.05986435317993164, 0.05868003082275391, 0.058705726623535154, 0.05900511932373047, 0.0586217269897461, 0.05893756866455078, 0.058671104431152345, 0.05879308700561523, 0.05872870254516602, 0.05878761672973633, 0.058522174835205075, 0.059264991760253904, 0.05883526229858398, 0.05866099166870117, 0.05877337646484375, 0.059254783630371094, 0.058775550842285154, 0.058660511016845704, 0.058685249328613284, 0.05875334548950195, 0.05850543975830078, 0.058828800201416016, 0.05872844696044922, 0.058584545135498045, 0.05849520111083984, 0.05850143814086914, 0.05847590255737305, 0.05901910400390625, 0.0589628791809082, 0.058920799255371095, 0.058617313385009764, 0.058898975372314456, 0.05907046508789063, 0.06258278274536133, 0.05930108642578125, 0.0587743034362793, 0.05889168167114258, 0.059095008850097656, 0.05864716720581055, 0.0584007682800293, 0.05850080108642578, 0.05834783935546875, 0.058460224151611326, 0.05890028762817383, 0.05876339340209961, 0.058358848571777346, 0.05825203323364258, 0.05861600112915039, 0.05875225448608398, 0.05914291381835937, 0.05909299087524414, 0.05861151885986328, 0.05895395278930664, 0.0586640625, 0.058966880798339845, 0.058824478149414064, 0.05844137573242188, 0.05847900772094727, 0.05870739364624023, 0.058597503662109376, 0.05879571151733398, 0.0587305908203125, 0.0584835205078125, 0.05854163360595703, 0.05829843139648438, 0.05839651107788086, 0.059092960357666015, 0.058698177337646484, 0.05843695831298828, 0.058547008514404295, 0.058763137817382814, 0.05900912094116211, 0.05878579330444336, 0.058548225402832034, 0.058508766174316405, 0.058655200958251955, 0.05844588851928711, 0.05887180709838867, 
0.05867472076416016, 0.05830425643920899, 0.05847683334350586, 0.05856512069702149, 0.05858707046508789, 0.059184417724609376, 0.05864214324951172, 0.058606239318847654, 0.058671455383300784, 0.05898649597167969, 0.058773086547851565, 0.05884150314331055, 0.058611358642578125, 0.0584185905456543, 0.05867200088500977, 0.05846172714233398, 0.058507137298583985, 0.05869366455078125, 0.05956307220458985, 0.05890963363647461, 0.05859532928466797, 0.05901251220703125, 0.05887241744995117, 0.058533889770507816, 0.0583842887878418, 0.0584617919921875, 0.0584967041015625, 0.0591748161315918, 0.05858710479736328, 0.05866796875, 0.058508449554443356, 0.05837705612182617, 0.05875711822509765, 0.0587694091796875, 0.0585011215209961, 0.05836800003051758, 0.05854844665527344, 0.05833049774169922, 0.05825763320922851, 0.0583251838684082, 0.05850547027587891, 0.058406654357910155, 0.058343425750732425, 0.05840825653076172, 0.060211902618408204, 0.05907455825805664, 0.0595148811340332, 0.05856166458129883, 0.05837913513183594, 0.058638336181640625, 0.059829566955566404, 0.05954995346069336, 0.05859721755981445, 0.05911795043945312, 0.058611934661865234, 0.05867033767700195, 0.05862067031860352, 0.05867929458618164, 0.0587097282409668, 0.058528030395507816, 0.05816902542114258, 0.05959507369995117, 0.05845171356201172, 0.05870198440551758, 0.058350719451904294, 0.05848121643066406, 0.05862236785888672, 0.059154239654541016, 0.060373184204101565, 0.05870121765136719, 0.05929596710205078, 0.058675582885742185, 0.05859894561767578, 0.059275745391845706, 0.05849660873413086, 0.058667423248291016, 0.05879369735717774, 0.059015457153320315, 0.0597988166809082, 0.05858582305908203, 0.05894960021972656, 0.05973689651489258, 0.058867263793945315, 0.059333057403564454, 0.058638336181640625, 0.05861548614501953, 0.05858745574951172, 0.058517505645751956, 0.058515457153320315, 0.05876736068725586, 0.0592097282409668, 0.05881996917724609, 0.0591855354309082, 0.06104089736938476, 0.059906047821044923, 0.0594659538269043, 0.05892591857910156, 0.05877446365356445, 0.058799232482910156, 0.05863955307006836, 0.059067329406738284, 0.06032806396484375, 0.05911750411987305, 0.05903833770751953, 0.05913401412963867, 0.05865267181396484, 0.058700927734375, 0.05921862411499024, 0.059029697418212894, 0.05852569580078125, 0.05845401763916016, 0.058898368835449216, 0.05891692733764648, 0.058603553771972655, 0.05843270492553711, 0.058808223724365234, 0.058565502166748044, 0.05904159927368164, 0.0585709457397461, 0.05836713409423828, 0.058420063018798825, 0.05870979309082031, 0.05851286315917969, 0.058571521759033206, 0.05898553466796875, 0.0588922233581543, 0.05886387252807617, 0.058540702819824215, 0.058728542327880856, 0.05882060623168945, 0.058451774597167966, 0.05851286315917969, 0.05838716888427734, 0.05816729736328125, 0.058666240692138674, 0.058545024871826175, 0.05835663986206055, 0.05839152145385742, 0.05844172668457031, 0.05826355361938477, 0.058506526947021485, 0.058786304473876956, 0.05868566513061523, 0.05903481674194336]",tokens/s,16.99489631750697,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) 
Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 4.12 MiB is free. Process 110512 has 14.73 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 242.96 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,869.863424,556.72832,0.0,178.25792,176.546816,s,1,7.713306640625,7.713306640625,0.0,7.713306640625,7.713306640625,7.713306640625,7.713306640625,[7.713306640625],,kWh,2.018954481249201e-05,2.2169501482602328e-06,6.545560791992866e-06,2.895205575274511e-05,,MB,1167.09376,661.58592,0.0,253.755392,221.106688,s,16,0.22666860866546631,0.014166788041591645,7.366024324280652e-05,0.014159088134765626,0.014224016189575195,0.01426743197441101,0.01435408148765564,"[0.014181344032287598, 0.014150176048278808, 0.014187104225158691, 0.01410108757019043, 0.014125760078430177, 0.014212160110473633, 0.014231328010559083, 0.014052319526672363, 0.014131360054016114, 0.014070048332214355, 0.014132672309875489, 0.014129983901977538, 0.014375743865966796, 0.014202816009521484, 0.014216704368591309, 0.014168000221252442]",tokens/s,18070.43341429412,kWh,4.245650116763654e-07,4.6821627602398355e-08,2.809361097616254e-07,7.523227490403892e-07,tokens/kWh,340279488.19377834,MB,1201.364992,676.265984,0.0,268.435456,221.109248,s,16,10.183209472656252,0.6364505920410156,0.006485617121469593,0.6356436767578124,0.6450568542480469,0.646190658569336,0.6476012664794921,"[0.6382002563476562, 0.6479539184570312, 0.6319490966796875, 0.6264183959960937, 0.6369337768554687, 0.6389736328125, 0.6244791259765625, 0.6310694580078124, 0.6333386840820312, 0.6341722412109375, 0.6456029052734376, 0.639483642578125, 0.6445108032226563, 0.6432012329101563, 0.6343535766601562, 0.6325687255859375]",tokens/s,98.98647402929906,kWh,1.8477347904209184e-05,2.0375971334138364e-06,7.2614310352382455e-06,2.777637607286127e-05,tokens/kWh,2268114.4521784373,,s,1008,10.174912675857541,0.010094159400652326,0.00023526298569966127,0.01006612777709961,0.010324982166290284,0.010412306880950928,0.01069016888618469,"[0.009861120223999023, 0.010414079666137695, 0.009979904174804688, 0.00999443244934082, 0.009987903594970702, 0.010186752319335938, 0.010055071830749511, 0.010114912033081055, 0.010099136352539062, 0.010152095794677735, 0.010123200416564941, 0.010078847885131836, 0.010112607955932617, 0.01022976016998291, 0.01031987190246582, 0.010203071594238281, 0.010119071960449219, 0.010082464218139648, 0.010083711624145508, 0.01004361629486084, 0.01006015968322754, 0.01005951976776123, 0.010055968284606934, 0.010016575813293457, 0.010135744094848633, 0.010137439727783203, 0.010311840057373046, 0.01031372833251953, 0.010389535903930665, 0.010439680099487305, 0.01033340835571289, 0.010251615524291992, 0.010205087661743164, 0.01019545555114746, 0.010227007865905762, 0.010268447875976562, 0.010138848304748534, 0.010229439735412597, 
0.010141247749328614, 0.010085984230041504, 0.01012822437286377, 0.010097920417785644, 0.010217696189880372, 0.010148320198059082, 0.010129216194152833, 0.010099295616149903, 0.010123968124389648, 0.010046431541442871, 0.010053631782531738, 0.010096351623535156, 0.01026863956451416, 0.009992511749267578, 0.010031328201293946, 0.010026592254638672, 0.009978048324584961, 0.009887807846069336, 0.009973695755004883, 0.010045760154724121, 0.010106240272521973, 0.010027327537536622, 0.00999014377593994, 0.009934847831726074, 0.010046527862548829, 0.009910079956054687, 0.01032032012939453, 0.0103088321685791, 0.010349311828613282, 0.010326527595520019, 0.010358431816101073, 0.010336383819580078, 0.010327232360839845, 0.01022432041168213, 0.010190848350524903, 0.010346495628356933, 0.010219648361206054, 0.010196864128112793, 0.010554431915283203, 0.010563872337341308, 0.010399616241455079, 0.010332415580749512, 0.010281791687011719, 0.010198752403259277, 0.01021939182281494, 0.01028825569152832, 0.010473952293395997, 0.010359328269958496, 0.010303263664245605, 0.010283231735229492, 0.010337823867797852, 0.0102423677444458, 0.01025654411315918, 0.010231807708740234, 0.010190848350524903, 0.01010483169555664, 0.010465567588806152, 0.010211039543151855, 0.010157952308654785, 0.010056896209716797, 0.010228832244873047, 0.010055392265319825, 0.010091808319091797, 0.010249343872070313, 0.010220800399780273, 0.010250271797180176, 0.010377152442932129, 0.010487584114074707, 0.010420960426330566, 0.010375167846679688, 0.010466783523559571, 0.010422271728515625, 0.010435232162475586, 0.010432383537292481, 0.010298751831054687, 0.010276512145996093, 0.010095583915710448, 0.010268671989440918, 0.010164223670959472, 0.010092543601989747, 0.010117407798767089, 0.010167072296142578, 0.01025324821472168, 0.010211647987365723, 0.010186431884765625, 0.010287103652954101, 0.010282527923583985, 0.010229472160339356, 0.009875455856323241, 0.010094592094421387, 0.011318943977355956, 0.010265055656433106, 0.010109888076782227, 0.010261407852172852, 0.010049216270446778, 0.01019878387451172, 0.010219552040100098, 0.010252287864685058, 0.010283583641052247, 0.010244128227233886, 0.010175647735595704, 0.010064703941345215, 0.009984000205993653, 0.00993391990661621, 0.009972640037536621, 0.00988588809967041, 0.010098496437072753, 0.009881216049194336, 0.009777183532714845, 0.009850751876831055, 0.009832927703857422, 0.009841823577880859, 0.009816512107849121, 0.009830304145812988, 0.00983296012878418, 0.009891839981079101, 0.009851103782653808, 0.009885312080383301, 0.009861215591430664, 0.009910335540771484, 0.009914591789245605, 0.009835488319396972, 0.009877856254577638, 0.009834400177001953, 0.009929344177246094, 0.009834752082824708, 0.009954943656921386, 0.009838239669799805, 0.009841055870056152, 0.009830016136169434, 0.009788127899169921, 0.009835583686828613, 0.009785311698913574, 0.00984124755859375, 0.009768992424011231, 0.009850624084472657, 0.009799648284912109, 0.009819552421569825, 0.010087295532226563, 0.009842144012451173, 0.009804320335388184, 0.009795167922973632, 0.009779616355895996, 0.009825375556945801, 0.010572064399719238, 0.013265536308288575, 0.010495776176452637, 0.009938400268554687, 0.009855520248413087, 0.009866496086120606, 0.009823007583618164, 0.00959718418121338, 0.009935199737548828, 0.009860639572143554, 0.009914496421813964, 0.009828351974487304, 0.009793536186218262, 0.00991641616821289, 0.009865216255187988, 0.009873408317565918, 0.009887743949890136, 0.009887743949890136, 
0.009973759651184083, 0.010074111938476562, 0.009979968070983886, 0.009965503692626953, 0.009919615745544433, 0.009878399848937989, 0.009861023902893066, 0.00987564754486084, 0.009848735809326171, 0.009885696411132813, 0.009814016342163086, 0.009862208366394043, 0.009872320175170899, 0.009747648239135742, 0.009795968055725097, 0.009869631767272949, 0.009906304359436036, 0.009934847831726074, 0.010026080131530762, 0.009939743995666503, 0.009968768119812012, 0.010019840240478516, 0.0101396484375, 0.010088095664978028, 0.010088735580444336, 0.010030143737792969, 0.009984416007995605, 0.009937151908874512, 0.009976351737976074, 0.00992204761505127, 0.009888256072998047, 0.00987936019897461, 0.009834495544433594, 0.009840895652770996, 0.009884767532348633, 0.00983516788482666, 0.009951231956481933, 0.00991427230834961, 0.010010335922241211, 0.009986207962036133, 0.010010784149169921, 0.010004672050476074, 0.010059552192687988, 0.009930944442749024, 0.009956992149353028, 0.010073920249938965, 0.01005350399017334, 0.010084287643432617, 0.010075072288513183, 0.010046751976013183, 0.01001638412475586, 0.010025792121887207, 0.009690655708312988, 0.010018783569335938, 0.009960255622863769, 0.010053343772888183, 0.009957440376281738, 0.010059743881225587, 0.01009996795654297, 0.01018329620361328, 0.01012339210510254, 0.010123167991638184, 0.010166367530822755, 0.010168319702148437, 0.010145792007446289, 0.010141759872436524, 0.010081536293029785, 0.010014944076538086, 0.00998265552520752, 0.009842656135559081, 0.009916288375854492, 0.010153599739074707, 0.010182975769042969, 0.010069600105285645, 0.010280896186828613, 0.010145440101623535, 0.010197823524475097, 0.01009455966949463, 0.0101048641204834, 0.01005891227722168, 0.010107744216918945, 0.010026335716247559, 0.009987903594970702, 0.00990511989593506, 0.009917632102966308, 0.00987615966796875, 0.00999833583831787, 0.009973504066467286, 0.009924863815307617, 0.009909664154052734, 0.009908576011657715, 0.009847040176391602, 0.009885120391845702, 0.01007260799407959, 0.009881631851196289, 0.009901663780212403, 0.009922975540161133, 0.009930751800537109, 0.009945088386535645, 0.010000384330749512, 0.010071519851684571, 0.0099901123046875, 0.01005615997314453, 0.0100414400100708, 0.011681792259216308, 0.010293439865112304, 0.010350111961364747, 0.010420767784118653, 0.010315520286560059, 0.010378463745117188, 0.010541152000427247, 0.01032431983947754, 0.010314111709594726, 0.010307552337646485, 0.01035654354095459, 0.009958208084106445, 0.01022383975982666, 0.010217184066772462, 0.010540575981140136, 0.010324095726013184, 0.010307807922363282, 0.010330240249633789, 0.01032102394104004, 0.010195551872253418, 0.01031942367553711, 0.01023692798614502, 0.01034937572479248, 0.010135519981384277, 0.01014470386505127, 0.010149888038635254, 0.010059231758117676, 0.010079839706420898, 0.010078944206237794, 0.00998243236541748, 0.010070048332214355, 0.009999808311462402, 0.010107135772705078, 0.00998198413848877, 0.010114751815795898, 0.01101036834716797, 0.010404959678649902, 0.01118835163116455, 0.01003996753692627, 0.010096768379211425, 0.010348544120788575, 0.010207136154174805, 0.010116607666015624, 0.01010371208190918, 0.010158047676086426, 0.01009763240814209, 0.010316703796386719, 0.010350432395935059, 0.010317888259887695, 0.010193023681640625, 0.01027462387084961, 0.009965567588806153, 0.010042688369750977, 0.010007519721984864, 0.009975520133972168, 0.009852288246154785, 0.009939583778381347, 0.009948639869689942, 0.009957056045532226, 
0.009851743698120117, 0.010210463523864746, 0.009913311958312989, 0.009926527976989745, 0.009943360328674317, 0.009894720077514648, 0.00983948802947998, 0.009881119728088378, 0.009872896194458008, 0.00997001552581787, 0.009964159965515137, 0.010047264099121093, 0.010073568344116211, 0.009992608070373535, 0.00995475196838379, 0.009736191749572755, 0.010069408416748048, 0.010011136054992676, 0.010172415733337402, 0.010296480178833008, 0.010412351608276367, 0.010320799827575684, 0.010171680450439454, 0.01008499240875244, 0.010004063606262208, 0.010023136138916016, 0.010212960243225098, 0.009865632057189941, 0.009903776168823241, 0.00991049575805664, 0.009969792366027831, 0.009881600379943848, 0.009796607971191406, 0.009792799949645997, 0.009809632301330567, 0.009801792144775391, 0.009767999649047851, 0.009752639770507813, 0.009798399925231934, 0.010280832290649414, 0.0098920316696167, 0.009856351852416992, 0.009790111541748046, 0.009803775787353516, 0.009875455856323241, 0.009797632217407226, 0.009987968444824219, 0.009846912384033204, 0.00981606388092041, 0.009791487693786622, 0.009852928161621094, 0.00987980842590332, 0.009807616233825684, 0.01001471996307373, 0.009775103569030762, 0.009802911758422852, 0.009771424293518067, 0.009803199768066406, 0.009833215713500976, 0.009825887680053711, 0.009839263916015625, 0.009959615707397462, 0.009814911842346192, 0.009830495834350587, 0.00981283187866211, 0.009780287742614746, 0.009751168251037598, 0.009760576248168945, 0.009798144340515137, 0.009844736099243164, 0.00976089572906494, 0.009920384407043457, 0.009911359786987305, 0.009825216293334962, 0.009846879959106445, 0.00994489574432373, 0.009947232246398926, 0.009981951713562011, 0.009586591720581055, 0.009936991691589356, 0.009971808433532715, 0.009973759651184083, 0.009965567588806153, 0.009904288291931152, 0.00998588752746582, 0.009958975791931153, 0.009912032127380371, 0.009917152404785157, 0.009954784393310547, 0.009982144355773926, 0.010090911865234375, 0.00989583969116211, 0.009985119819641113, 0.01002182388305664, 0.009960800170898438, 0.009951231956481933, 0.00992527961730957, 0.009977984428405761, 0.009943103790283203, 0.010172063827514649, 0.010049759864807129, 0.009993887901306153, 0.01000271987915039, 0.010123231887817382, 0.010366815567016602, 0.009945152282714843, 0.01003651237487793, 0.009945631980895996, 0.010010687828063965, 0.010076416015625, 0.010082559585571289, 0.009977791786193848, 0.010041152000427246, 0.009930751800537109, 0.010017024040222169, 0.009959168434143066, 0.009968864440917968, 0.010025152206420898, 0.010009247779846192, 0.009965184211730958, 0.01005190372467041, 0.00999785614013672, 0.00996953582763672, 0.009964287757873535, 0.009999296188354493, 0.009957951545715332, 0.01020963191986084, 0.010037247657775878, 0.010076160430908204, 0.010074272155761718, 0.010032863616943359, 0.010086624145507813, 0.010049728393554688, 0.01006982421875, 0.010056768417358398, 0.010089311599731445, 0.010051839828491211, 0.010041088104248046, 0.010048704147338867, 0.010153087615966797, 0.010026880264282226, 0.009779680252075195, 0.010229984283447265, 0.01021343994140625, 0.010170304298400878, 0.010073344230651855, 0.010033408164978027, 0.009996800422668458, 0.010057727813720703, 0.010012672424316407, 0.010040448188781739, 0.010168800354003906, 0.009985535621643067, 0.010013888359069825, 0.010174176216125489, 0.010280960083007813, 0.010116288185119629, 0.01008518409729004, 0.00996947193145752, 0.010144288063049317, 0.009989983558654785, 0.009981887817382813, 0.00996947193145752, 
0.009957056045532226, 0.009927103996276856, 0.00993785572052002, 0.009903103828430175, 0.009922335624694825, 0.009916576385498047, 0.009952832221984863, 0.010025312423706054, 0.009955648422241212, 0.010092063903808594, 0.009941568374633789, 0.009959168434143066, 0.010026528358459472, 0.010041824340820312, 0.00998307228088379, 0.009987071990966797, 0.010039551734924317, 0.010110624313354492, 0.010174015998840331, 0.010096799850463868, 0.010041631698608399, 0.009973759651184083, 0.010051103591918945, 0.010122847557067872, 0.010112992286682129, 0.01010700798034668, 0.01018992042541504, 0.010104512214660645, 0.010210368156433105, 0.010130304336547852, 0.01004963207244873, 0.010090016365051269, 0.010090944290161132, 0.010068256378173828, 0.010272480010986329, 0.010137248039245605, 0.00990447998046875, 0.00992972755432129, 0.009947135925292968, 0.009962207794189453, 0.00992636775970459, 0.009659616470336915, 0.00990287971496582, 0.009897983551025391, 0.009928671836853028, 0.009817279815673829, 0.009830368041992188, 0.009819007873535157, 0.009740608215332031, 0.009842368125915527, 0.009857024192810059, 0.009827584266662597, 0.009788448333740235, 0.009805536270141602, 0.009774592399597168, 0.009938528060913086, 0.010105759620666503, 0.010258560180664062, 0.009937984466552734, 0.00994591999053955, 0.01010812759399414, 0.009948287963867188, 0.009870495796203613, 0.010078495979309082, 0.00993667221069336, 0.010001184463500977, 0.010025952339172363, 0.009907999992370605, 0.00989247989654541, 0.009916223526000977, 0.010693087577819824, 0.010110495567321777, 0.010797087669372558, 0.011152095794677734, 0.010643168449401856, 0.01012451171875, 0.010148639678955079, 0.010169471740722656, 0.010208127975463867, 0.010249919891357422, 0.01011900806427002, 0.010138015747070312, 0.010233920097351074, 0.01012707233428955, 0.010223936080932618, 0.010166080474853515, 0.010301600456237793, 0.01016755199432373, 0.010113375663757324, 0.010069984436035156, 0.010186304092407226, 0.010043807983398437, 0.010044032096862793, 0.010116640090942383, 0.010000800132751465, 0.009969599723815918, 0.00998192024230957, 0.009940447807312012, 0.009947680473327636, 0.01005568027496338, 0.009977855682373048, 0.010038304328918456, 0.01003974437713623, 0.010002976417541503, 0.009648384094238282, 0.010227295875549316, 0.010117216110229492, 0.010055808067321778, 0.01007817554473877, 0.01004748821258545, 0.009969568252563477, 0.010036607742309571, 0.010410240173339844, 0.012155263900756837, 0.011288352012634277, 0.010651391983032226, 0.010070143699645996, 0.010055871963500976, 0.010213536262512207, 0.010379263877868652, 0.010612735748291016, 0.01030784034729004, 0.01034768009185791, 0.010402463912963867, 0.010422207832336427, 0.01023795223236084, 0.010166367530822755, 0.010167360305786133, 0.010134495735168458, 0.010264320373535156, 0.010514752388000488, 0.010232640266418456, 0.010228672027587891, 0.010432191848754883, 0.010406496047973633, 0.01044480037689209, 0.010198816299438477, 0.010166080474853515, 0.010348735809326172, 0.010114720344543457, 0.01031817626953125, 0.01028332805633545, 0.010159808158874512, 0.010197152137756347, 0.01020911979675293, 0.010173600196838379, 0.010132575988769531, 0.010145183563232421, 0.010202816009521484, 0.010084320068359375, 0.010043647766113281, 0.01008841609954834, 0.009973407745361328, 0.009968447685241699, 0.01006287956237793, 0.009987199783325195, 0.010469311714172364, 0.010004287719726562, 0.009936991691589356, 0.010004480361938477, 0.010119168281555176, 0.010288288116455078, 0.010297727584838868, 
0.010171072006225586, 0.010130687713623047, 0.010068511962890625, 0.01003110408782959, 0.009707615852355958, 0.010123167991638184, 0.010076160430908204, 0.010195232391357422, 0.010052831649780273, 0.009981535911560058, 0.010019743919372558, 0.010123264312744141, 0.010067968368530274, 0.010024448394775391, 0.01006438446044922, 0.009959424018859863, 0.009953056335449219, 0.010029631614685059, 0.010182016372680664, 0.010113311767578124, 0.009998271942138672, 0.01013152027130127, 0.010057567596435547, 0.010106304168701171, 0.010031071662902832, 0.009974464416503907, 0.009913663864135742, 0.00996224021911621, 0.009977855682373048, 0.010282688140869141, 0.010264896392822265, 0.010178560256958008, 0.010192288398742675, 0.010123680114746094, 0.010102975845336913, 0.010309632301330567, 0.010233856201171876, 0.010194944381713868, 0.01022156810760498, 0.01028003215789795, 0.010249055862426758, 0.010230912208557128, 0.010164640426635741, 0.010094431877136231, 0.010158816337585449, 0.010149855613708497, 0.01017420768737793, 0.010315711975097657, 0.010154623985290528, 0.010159808158874512, 0.010102784156799317, 0.010186880111694337, 0.010162015914916993, 0.010233983993530274, 0.010207263946533204, 0.010339776039123534, 0.010243840217590332, 0.010232159614562988, 0.010223679542541504, 0.010170304298400878, 0.010109248161315918, 0.010104415893554687, 0.010227359771728516, 0.0101046724319458, 0.010439616203308105, 0.01027609634399414, 0.01044159984588623, 0.009976544380187989, 0.010237792015075683, 0.010256511688232422, 0.010419936180114745, 0.010379584312438964, 0.010550559997558594, 0.010304384231567383, 0.010344287872314454, 0.01039568042755127, 0.010283200263977051, 0.01026643180847168, 0.010202752113342286, 0.010147744178771973, 0.010226112365722656, 0.010288352012634278, 0.010314528465270995, 0.010201087951660156, 0.0101910400390625, 0.010270591735839843, 0.010205120086669921, 0.010248224258422851, 0.010226655960083007, 0.010971872329711915, 0.010174752235412598, 0.010110591888427735, 0.010094976425170899, 0.01012268829345703, 0.009974335670471192, 0.010055135726928711, 0.009966015815734863, 0.010018143653869629, 0.010044159889221192, 0.010084351539611817, 0.010036800384521484, 0.010027520179748535, 0.009913311958312989, 0.009974111557006836, 0.01014844799041748, 0.010090784072875976, 0.010149567604064942, 0.010231871604919434, 0.010392736434936524, 0.010314751625061035, 0.01015497589111328, 0.010331007957458496, 0.010320896148681641, 0.010259455680847167, 0.01026153564453125, 0.010345439910888671, 0.010292767524719238, 0.010311327934265136, 0.010279071807861329, 0.01035536003112793, 0.010358624458312989, 0.010423871994018554, 0.01024675178527832, 0.010225664138793946, 0.010098688125610352, 0.010235903739929199, 0.010170368194580079, 0.010110976219177246, 0.010164511680603028, 0.010188223838806153, 0.009944319725036621, 0.010488287925720215, 0.010380831718444825, 0.01035542392730713, 0.010346752166748047, 0.010420000076293946, 0.010294367790222168, 0.010204416275024414, 0.010092191696166992, 0.010035136222839355, 0.01000864028930664, 0.009928095817565917, 0.009942975997924805, 0.009953760147094726, 0.010049856185913086, 0.009955039978027344, 0.009959584236145019, 0.010001888275146484, 0.009970208168029786, 0.009941023826599121, 0.010055904388427734, 0.010137184143066406, 0.010170528411865234, 0.010106271743774414, 0.010056320190429688, 0.010069727897644043, 0.010080672264099122, 0.009998175621032715, 0.009975808143615723, 0.010051360130310058, 0.010092767715454101, 0.010151488304138184, 
0.010239935874938965, 0.010401439666748048, 0.010414560317993165, 0.010181280136108398, 0.010233119964599609, 0.010262944221496583, 0.010207263946533204, 0.010553471565246582, 0.010388895988464356, 0.010472991943359374, 0.010353599548339844, 0.010283007621765136, 0.010309344291687011, 0.010209440231323242, 0.010189184188842773, 0.010272512435913086, 0.010291199684143066, 0.010553407669067382, 0.010279935836791992, 0.010269632339477539, 0.010452128410339356, 0.010348575592041016, 0.010249119758605957, 0.010272671699523925, 0.010489055633544922, 0.010276991844177246, 0.010268896102905274, 0.010243552207946777, 0.010173407554626465, 0.010169695854187012, 0.010139936447143555, 0.009957088470458984, 0.010216383934020997, 0.01022156810760498, 0.010188608169555665, 0.010209471702575683, 0.01020041561126709, 0.01024182415008545, 0.01015283203125, 0.010108927726745605, 0.010202848434448242, 0.010403679847717286, 0.010285504341125488, 0.010283200263977051, 0.010321696281433106, 0.010254015922546387, 0.010267007827758788, 0.010247296333312988, 0.010238816261291504, 0.01023795223236084, 0.010160127639770507, 0.010412223815917969, 0.010519424438476562, 0.009982912063598633, 0.009995391845703125, 0.009971776008605958, 0.0099268798828125, 0.009943296432495117, 0.00992086410522461, 0.009904224395751953, 0.00997977638244629, 0.009976096153259277, 0.009964384078979493, 0.01011631965637207, 0.010073599815368652, 0.010031295776367187, 0.010046527862548829, 0.01002387237548828, 0.010090496063232422, 0.010090496063232422, 0.00998140811920166, 0.01003600025177002, 0.010057696342468262, 0.009977631568908692, 0.01002086353302002, 0.009967776298522949, 0.009953120231628417, 0.00987679958343506, 0.009894880294799804, 0.009997983932495118, 0.009982015609741211, 0.009920512199401856, 0.009989472389221191, 0.009924960136413575, 0.009879776000976562, 0.009941087722778321, 0.009861120223999023, 0.009914048194885254, 0.009863167762756348, 0.009879487991333008, 0.009930399894714356, 0.009847455978393555, 0.009875200271606445, 0.009910592079162598, 0.009927935600280761, 0.010320896148681641, 0.010408639907836914, 0.01033625602722168, 0.0101397123336792, 0.01012342357635498, 0.01040886402130127, 0.009882495880126954, 0.009893695831298828, 0.010173824310302735, 0.009904543876647949, 0.009979392051696777, 0.009921440124511719, 0.009943039894104003, 0.009926655769348144, 0.009946240425109863, 0.009927552223205566, 0.009932703971862793, 0.00983356761932373, 0.010013728141784669, 0.00999830436706543, 0.009975968360900879, 0.009983648300170898, 0.009927071571350098, 0.009922335624694825, 0.010004192352294923, 0.009997759819030762, 0.010265695571899413, 0.010173791885375977, 0.01008681583404541, 0.010175616264343261, 0.010047552108764648, 0.01007481575012207, 0.010160032272338868, 0.010088352203369141, 0.010101056098937989, 0.010162176132202149, 0.010196991920471191, 0.010112544059753419, 0.010031776428222656, 0.010000191688537597, 0.009996288299560547, 0.009928704261779785, 0.009963520050048828, 0.009957056045532226, 0.010084735870361327, 0.009988032341003417, 0.009965151786804198, 0.010004511833190918, 0.009938336372375489, 0.009948127746582032, 0.010036319732666015, 0.009919391632080079, 0.009904128074645996, 0.009862239837646485, 0.010056608200073243, 0.01008841609954834, 0.009953439712524414, 0.009928288459777833, 0.010012351989746093, 0.009976415634155274, 0.010035167694091797, 0.010067551612854005]",tokens/s,99.06718928327761,,, 
4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1139.027968,1075.707904,0.0,710.934528,686.03904,s,1,8.6485009765625,8.6485009765625,0.0,8.6485009765625,8.6485009765625,8.6485009765625,8.6485009765625,[8.6485009765625],,kWh,2.4731579891666418e-05,2.720960264943441e-06,8.533617937983573e-06,3.598615809459343e-05,,MB,1501.863936,1436.418048,0.0,1021.313024,985.00096,s,10,0.34727174377441405,0.034727174377441404,0.00020048635533280468,0.03472707176208496,0.034873468017578124,0.03501990203857422,0.035137049255371096,"[0.034793983459472655, 0.03432419204711914, 0.03464787292480469, 0.03471615982055664, 0.03516633605957031, 0.03484092712402344, 0.03473798370361328, 0.03478505706787109, 0.03461593627929688, 0.034643295288085935]",tokens/s,7371.748625949144,kWh,1.0597837427537235e-06,1.1687371799714404e-07,7.063552349709435e-07,1.8830126957218112e-06,tokens/kWh,135952349.43536484,MB,1534.697472,1486.749696,0.0,1071.644672,985.00352,s,10,12.299974487304686,1.2299974487304686,0.006145748553420057,1.2299616088867187,1.2381208618164063,1.2388652404785157,1.239460743408203,"[1.21961376953125, 1.2272628173828124, 1.2351158447265624, 1.23228369140625, 1.239609619140625, 1.2379554443359375, 1.2316201171875, 1.2283031005859375, 1.224498779296875, 1.2237113037109375]",tokens/s,51.219618435001564,kWh,3.557996262016117e-05,3.924023964454741e-06,1.5841796892428184e-05,5.53457834770441e-05,tokens/kWh,1138298.0968393132,,s,630,12.296335512161262,0.019517992876446438,0.0002630778216794924,0.019482864379882815,0.019722240447998047,0.019802812480926513,0.020368323764801032,"[0.01948297691345215, 0.019397823333740235, 0.019278656005859374, 0.01969152069091797, 0.019234304428100587, 0.01920595169067383, 0.01921686363220215, 0.01926371192932129, 0.019158592224121095, 0.01917977523803711, 0.019249343872070314, 0.019258495330810546, 0.019157247543334963, 0.01908915138244629, 0.01919615936279297, 0.019229440689086913, 0.019299392700195313, 0.01940153694152832, 0.019148319244384766, 0.01922470474243164, 0.019220832824707032, 0.019201471328735353, 0.019294784545898436, 0.019203071594238282, 0.019266464233398437, 0.01912022399902344, 0.019152576446533204, 0.01927609634399414, 0.019187551498413086, 0.01936639976501465, 0.019291807174682617, 0.019705856323242187, 0.019572736740112305, 0.01941094398498535, 0.01942678451538086, 0.019431039810180663, 0.019735456466674805, 0.019580928802490235, 0.01964166450500488, 0.019479232788085936, 0.019424415588378905, 0.01934124755859375, 0.019493791580200197, 0.0193702392578125, 0.019424800872802735, 0.01941100883483887, 0.01935478401184082, 0.01939699172973633, 0.01954060745239258, 0.019400703430175782, 0.01935468864440918, 0.019544416427612305, 0.01935420799255371, 0.019349504470825195, 0.019268800735473633, 0.01958585548400879, 0.019380191802978515, 0.01939859199523926, 0.01934499168395996, 0.019337728500366212, 0.019310592651367187, 0.0194969596862793, 
0.01947238349914551, 0.01983283233642578, 0.019424640655517578, 0.01955075263977051, 0.01950271987915039, 0.01938684844970703, 0.019455135345458983, 0.019394432067871094, 0.019391456604003907, 0.019535871505737306, 0.01943552017211914, 0.019366943359375, 0.019251232147216798, 0.019450239181518555, 0.019494848251342775, 0.019401344299316406, 0.019290111541748048, 0.019423135757446287, 0.0194899845123291, 0.01938115119934082, 0.019371103286743165, 0.019752927780151367, 0.019544832229614256, 0.019650751113891602, 0.019469568252563477, 0.01946291160583496, 0.019722240447998047, 0.019369728088378908, 0.01948847961425781, 0.019407295227050782, 0.019417184829711914, 0.019337215423583985, 0.019331071853637697, 0.01927577590942383, 0.019449855804443358, 0.019517215728759765, 0.019455392837524413, 0.019404928207397462, 0.019360000610351563, 0.019519935607910155, 0.01957459259033203, 0.019415231704711915, 0.019408895492553712, 0.019556352615356445, 0.019722240447998047, 0.019656896591186523, 0.01975276756286621, 0.019459999084472657, 0.01942947196960449, 0.019334815979003907, 0.019446111679077147, 0.019396608352661132, 0.019597312927246095, 0.019548160552978516, 0.019519264221191407, 0.019544288635253905, 0.019723583221435546, 0.019390783309936523, 0.019474815368652344, 0.019404800415039062, 0.01946339225769043, 0.01938102340698242, 0.019416671752929687, 0.01954243278503418, 0.01972652816772461, 0.019496768951416017, 0.01946006393432617, 0.019563199996948243, 0.019428960800170897, 0.01951785659790039, 0.0194969596862793, 0.01946009635925293, 0.019499008178710937, 0.019493152618408203, 0.019492576599121094, 0.019568639755249022, 0.020105215072631837, 0.01947238349914551, 0.019554304122924804, 0.019541343688964843, 0.019784000396728514, 0.019666528701782225, 0.019380992889404296, 0.01953990364074707, 0.019404863357543944, 0.0194703369140625, 0.01956991958618164, 0.01945471954345703, 0.01985536003112793, 0.019824384689331054, 0.020134143829345703, 0.019684608459472657, 0.019478303909301758, 0.019493120193481445, 0.019604095458984373, 0.019759199142456055, 0.01998409652709961, 0.019562784194946288, 0.019517440795898438, 0.01961369514465332, 0.019562496185302734, 0.019698688507080078, 0.019790239334106445, 0.019553184509277344, 0.019504480361938477, 0.019609952926635744, 0.019377824783325195, 0.01970604705810547, 0.019563680648803712, 0.019675519943237303, 0.019588895797729492, 0.01978041648864746, 0.019595264434814453, 0.01955753517150879, 0.019450752258300782, 0.01956800079345703, 0.01956710433959961, 0.019519615173339843, 0.01946396827697754, 0.019364063262939452, 0.0194703369140625, 0.019621631622314454, 0.019683839797973633, 0.019580768585205077, 0.019531679153442384, 0.01997209548950195, 0.01964563179016113, 0.01951708793640137, 0.019691232681274415, 0.019657344818115235, 0.01957209587097168, 0.019702112197875977, 0.019460384368896484, 0.019380224227905272, 0.019394176483154297, 0.01958745574951172, 0.019464319229125976, 0.01970572853088379, 0.019525503158569334, 0.01938003158569336, 0.019637632369995116, 0.019616640090942383, 0.019556415557861327, 0.019492799758911133, 0.01953593635559082, 0.019514463424682618, 0.019495840072631835, 0.019484031677246095, 0.01968160057067871, 0.019518848419189452, 0.019489728927612304, 0.019396608352661132, 0.019688671112060546, 0.01947238349914551, 0.01958787155151367, 0.019369983673095705, 0.019331071853637697, 0.019360992431640626, 0.01938640022277832, 0.019544511795043944, 0.019341695785522462, 0.019466175079345702, 0.019494911193847657, 0.019512928009033204, 
0.019669408798217772, 0.01946041679382324, 0.01957436752319336, 0.019467552185058593, 0.02001798439025879, 0.019650144577026366, 0.01945062446594238, 0.019436960220336915, 0.019574560165405274, 0.019558879852294923, 0.019613119125366212, 0.019362367630004883, 0.01967919921875, 0.01965648078918457, 0.01957913589477539, 0.019972448348999024, 0.01997558403015137, 0.019785888671875, 0.01964041519165039, 0.019458047866821288, 0.019506399154663085, 0.0195850887298584, 0.01933785629272461, 0.019565664291381835, 0.01967001533508301, 0.019652448654174804, 0.019784992218017577, 0.01977827262878418, 0.019559616088867186, 0.019675968170166015, 0.0196177921295166, 0.01957683181762695, 0.019372032165527343, 0.01946739196777344, 0.019587936401367186, 0.01939254379272461, 0.019340768814086914, 0.01921059226989746, 0.019275840759277345, 0.019447328567504883, 0.019460704803466795, 0.019283967971801756, 0.019224159240722655, 0.019452320098876954, 0.02325299263000488, 0.01962508773803711, 0.01948057556152344, 0.019488704681396483, 0.019620031356811524, 0.01939263916015625, 0.019791744232177735, 0.019394655227661133, 0.01985807991027832, 0.021024032592773436, 0.01955913543701172, 0.019877887725830077, 0.019732479095458985, 0.020934751510620117, 0.019762975692749023, 0.019546239852905274, 0.01944329643249512, 0.019617759704589843, 0.019704256057739258, 0.019601119995117187, 0.019420543670654298, 0.019403263092041014, 0.019567007064819335, 0.019666015625, 0.01941391944885254, 0.019414112091064452, 0.019454431533813477, 0.01951705551147461, 0.019650976181030275, 0.019675552368164064, 0.019666336059570313, 0.019984991073608398, 0.019568639755249022, 0.01956038475036621, 0.019679136276245117, 0.019650720596313478, 0.019510976791381834, 0.019600959777832033, 0.01947110366821289, 0.01965827178955078, 0.019588895797729492, 0.019681919097900392, 0.019733728408813475, 0.019644895553588868, 0.019859840393066406, 0.01976518440246582, 0.019708576202392577, 0.01952934455871582, 0.01939484786987305, 0.019460191726684572, 0.019357696533203125, 0.019482624053955077, 0.019519487380981446, 0.01944883155822754, 0.01945497512817383, 0.01963212776184082, 0.019935232162475586, 0.019426368713378907, 0.02085481643676758, 0.019786655426025392, 0.020024896621704102, 0.01956012725830078, 0.019546335220336913, 0.019601375579833984, 0.019550783157348633, 0.019445119857788087, 0.019455808639526367, 0.019688255310058595, 0.019668991088867188, 0.019398656845092774, 0.019598335266113282, 0.019479551315307618, 0.019587072372436523, 0.0196212158203125, 0.019507871627807618, 0.01952467155456543, 0.0195532169342041, 0.0194150390625, 0.019719200134277345, 0.01968035125732422, 0.019724159240722655, 0.02021504020690918, 0.02128060722351074, 0.01983942413330078, 0.01955273628234863, 0.01975872039794922, 0.019433792114257813, 0.019600799560546875, 0.019581600189208983, 0.01949286460876465, 0.019564544677734375, 0.019568639755249022, 0.019552255630493166, 0.020242431640625, 0.019697664260864257, 0.019556352615356445, 0.019529727935791014, 0.01954803276062012, 0.019654783248901367, 0.019656160354614257, 0.01966102409362793, 0.01952367973327637, 0.01940912055969238, 0.019441600799560546, 0.019510751724243165, 0.0194671688079834, 0.019596128463745116, 0.019548927307128906, 0.0198371524810791, 0.019533439636230467, 0.02008073616027832, 0.01957676887512207, 0.01972220802307129, 0.01951091194152832, 0.019532800674438477, 0.0194150390625, 0.019963903427124022, 0.019529727935791014, 0.01949056053161621, 0.01940915107727051, 0.01939558410644531, 
0.019528287887573242, 0.019544479370117187, 0.01959321594238281, 0.019615743637084963, 0.019417087554931642, 0.019606592178344727, 0.019569599151611328, 0.0196177921295166, 0.019781183242797852, 0.019593055725097657, 0.0193603515625, 0.01961369514465332, 0.019367935180664063, 0.019563711166381836, 0.01957356834411621, 0.01948057556152344, 0.01941913604736328, 0.019494207382202148, 0.01949910354614258, 0.019474016189575196, 0.019534847259521485, 0.01943961524963379, 0.019418624877929686, 0.019599872589111327, 0.019473695755004884, 0.019448543548583986, 0.019412992477416992, 0.019482624053955077, 0.019564544677734375, 0.01941913604736328, 0.019406528472900392, 0.019349407196044922, 0.019380640029907227, 0.01935500717163086, 0.01959385681152344, 0.019505151748657225, 0.019615392684936523, 0.019515743255615236, 0.01951686477661133, 0.019554880142211913, 0.0194368953704834, 0.019374624252319336, 0.019462272644042967, 0.01938969612121582, 0.019519519805908204, 0.01952227210998535, 0.020419744491577147, 0.019579584121704102, 0.019592992782592772, 0.019652992248535155, 0.019706783294677736, 0.019726335525512697, 0.019736576080322265, 0.019670463562011718, 0.01943516731262207, 0.019476991653442383, 0.01955580711364746, 0.019454912185668947, 0.019589120864868165, 0.019437215805053712, 0.0194051513671875, 0.019586399078369142, 0.019369728088378908, 0.019551136016845702, 0.019388416290283202, 0.019406848907470704, 0.01944166374206543, 0.019358976364135742, 0.01942959976196289, 0.01924764823913574, 0.019298112869262696, 0.019412639617919922, 0.01939900779724121, 0.01979769515991211, 0.01962828826904297, 0.01942963218688965, 0.019333120346069335, 0.019404607772827147, 0.019402240753173827, 0.019190719604492187, 0.019241888046264647, 0.01940768051147461, 0.01932598304748535, 0.019317663192749024, 0.019243104934692383, 0.019306079864501953, 0.01936774444580078, 0.01963222312927246, 0.019278335571289062, 0.0194268798828125, 0.020761024475097655, 0.0192491512298584, 0.019340448379516602, 0.019296159744262697, 0.019546911239624022, 0.019441823959350586, 0.019766687393188476, 0.01977334403991699, 0.01981839942932129, 0.019798271179199217, 0.019640127182006837, 0.019503839492797853, 0.019337215423583985, 0.019656095504760742, 0.019552255630493166, 0.01938060760498047, 0.019561887741088867, 0.01977836799621582, 0.019373760223388672, 0.01947270393371582, 0.019341312408447265, 0.019296255111694336, 0.01941689682006836, 0.019669952392578124, 0.019564544677734375, 0.01957446479797363, 0.019540288925170898, 0.019367935180664063, 0.019384479522705077, 0.019375072479248048, 0.01954844856262207, 0.019806528091430665, 0.019298591613769532, 0.01925734329223633, 0.01942643165588379, 0.019377023696899413, 0.019478527069091797, 0.019258655548095704, 0.01941289520263672, 0.019147584915161133, 0.019279712677001952, 0.01940870475769043, 0.019667295455932616, 0.019505151748657225, 0.019478303909301758, 0.01962825584411621, 0.01924710464477539, 0.019367935180664063, 0.01927743911743164, 0.019482751846313477, 0.019360000610351563, 0.019191648483276365, 0.019298463821411132, 0.019316064834594728, 0.019380640029907227, 0.019417152404785157, 0.019394752502441406, 0.0194150390625, 0.019205888748168944, 0.019542335510253906, 0.019650432586669923, 0.019202272415161134, 0.019354656219482423, 0.019446592330932617, 0.019514751434326173, 0.01933171272277832, 0.01927577590942383, 0.019523584365844726, 0.019528959274291994, 0.01950796890258789, 0.019381248474121093, 0.019426464080810547, 0.0196496639251709, 0.019505088806152343, 
0.019475231170654295, 0.01972364807128906, 0.01943404769897461, 0.019238143920898437, 0.01960838317871094, 0.019400703430175782, 0.019359039306640624, 0.019460927963256835, 0.01940671920776367, 0.019599359512329103, 0.019357696533203125, 0.01945142364501953, 0.019742687225341796, 0.019685375213623048, 0.019388416290283202, 0.01918297576904297, 0.019455936431884764, 0.0193371524810791, 0.0193625602722168, 0.019596479415893556, 0.019364671707153322, 0.019394208908081054, 0.019403104782104493, 0.01941913604736328, 0.01925529670715332, 0.019663871765136717, 0.019370304107666016, 0.019415456771850585, 0.01925542449951172, 0.01926918411254883, 0.019298912048339844, 0.019514848709106445, 0.019326847076416017, 0.01933737564086914, 0.019478496551513673, 0.01932956886291504, 0.019357696533203125, 0.019453983306884765, 0.01937942314147949, 0.019431488037109375, 0.019296064376831054, 0.019386943817138673, 0.01966854476928711, 0.019519392013549804, 0.019630943298339844, 0.01920204734802246, 0.01934694480895996, 0.01934332847595215, 0.019403295516967775, 0.019992256164550783, 0.019375551223754884, 0.01939708709716797, 0.01930633544921875, 0.019335744857788086, 0.019636224746704102, 0.019367935180664063, 0.019239967346191406, 0.01925766372680664, 0.01950172805786133, 0.019428447723388673, 0.019295488357543945, 0.019393632888793946, 0.019322559356689452, 0.0192992000579834, 0.019369855880737304, 0.019500160217285158, 0.01958710479736328, 0.01965769577026367, 0.019301984786987306, 0.01940220832824707, 0.019485631942749024, 0.01937129592895508, 0.019420896530151367, 0.019324960708618163, 0.019506143569946288]",tokens/s,51.23477635893399,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if 
self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1501.093888,1324.285952,0.0,945.815552,943.480832,s,1,8.4035087890625,8.4035087890625,0.0,8.4035087890625,8.4035087890625,8.4035087890625,8.4035087890625,[8.4035087890625],,kWh,4.1327311754164e-05,4.550933413764803e-06,1.3277510622005523e-05,5.9155755789934325e-05,,MB,1589.542912,1487.863808,0.0,1080.03328,1046.51776,s,10,0.8268167724609374,0.08268167724609374,0.0004368021488074932,0.08262483215332031,0.08331148834228516,0.08346441802978516,0.08358676177978516,"[0.08361734771728516, 0.08257561492919922, 0.0824674530029297, 0.08216486358642579, 0.08275836944580078, 0.08218051147460938, 0.08327750396728516, 0.08237484741210938, 0.08272621154785156, 0.08267404937744141]",tokens/s,3096.211984646146,kWh,2.5114694163461434e-06,2.768441387566843e-07,1.6708014316068285e-06,4.459114986709656e-06,tokens/kWh,57410495.30299291,MB,1593.438208,1573.84704,0.0,1166.016512,1082.822656,s,10,11.529435424804689,1.1529435424804686,0.002247484354686342,1.1528699340820312,1.155279248046875,1.1558662475585937,1.1563358471679688,"[1.152810791015625, 1.15289111328125, 1.1527913818359374, 1.1551488037109374, 1.1535712890625, 1.1528487548828126, 1.1502054443359375, 1.1481671142578125, 1.1545474853515625, 1.1564532470703126]",tokens/s,54.64274500767001,kWh,3.383504718865373e-05,3.731713839729946e-06,1.7744325211593323e-05,5.531108623997701e-05,tokens/kWh,1139012.16343254,,s,630,11.527169504165656,0.018297094451056584,0.0003323471075669016,0.018204559326171876,0.01853084735870361,0.018927657604217527,0.01959991451263428,"[0.018458911895751953, 0.018184671401977538, 0.01805721664428711, 0.018102272033691406, 0.01833776092529297, 0.01850774383544922, 0.01816172790527344, 0.018067455291748045, 0.01827014350891113, 0.01813100814819336, 0.018089984893798827, 0.01811359977722168, 0.018141984939575195, 0.018163007736206056, 0.01816643142700195, 0.018217151641845702, 0.01816582489013672, 0.01832748794555664, 0.018210111618041994, 0.018090368270874024, 0.018047103881835936, 0.01814134407043457, 0.018185375213623046, 0.018227615356445313, 0.018135520935058595, 0.018077695846557617, 0.018153663635253905, 0.0181463680267334, 0.01811327934265137, 0.018124479293823242, 0.018143552780151367, 0.018131231307983397, 0.018112224578857423, 0.019215391159057616, 0.018207712173461912, 0.018198528289794923, 0.01816783905029297, 0.01830499267578125, 0.018237760543823242, 0.018340736389160155, 0.01835647964477539, 0.018370784759521485, 0.018574975967407228, 0.018284351348876952, 0.018283424377441407, 0.018289920806884765, 0.018270879745483398, 0.018410751342773438, 0.018221023559570313, 0.01821526336669922, 0.02178102493286133, 0.01844428825378418, 0.01826188850402832, 0.018288768768310548, 0.018212480545043944, 0.018347391128540037, 0.018267135620117187, 0.01818601608276367, 0.01817545509338379, 0.018150144577026368, 0.01830694389343262, 0.018187936782836915, 0.018325023651123047, 0.018329919815063475, 0.018243263244628907, 0.018178400039672853, 0.018134016036987305, 0.018115360260009764, 0.018322784423828124, 0.01823593521118164, 0.018228832244873046, 0.01825846481323242, 0.018108415603637695, 0.01819196891784668, 0.018594207763671874, 0.018593791961669923, 0.018307071685791015, 0.01830473518371582, 0.01831760025024414, 0.01831248092651367, 0.018244352340698242, 0.018257919311523436, 0.01826812744140625, 0.019224576950073242, 0.018817024230957033, 0.018292383193969728, 0.01817795181274414, 0.01817612838745117, 0.01817964744567871, 0.018199296951293947, 0.01824563217163086, 
0.018126848220825196, 0.02006380844116211, 0.018229183197021485, 0.018243392944335936, 0.018264768600463867, 0.01822105598449707, 0.01829033660888672, 0.018224672317504884, 0.018184959411621095, 0.01829449653625488, 0.018214975357055664, 0.018216928482055663, 0.018229440689086916, 0.01820419120788574, 0.018227807998657225, 0.01825984001159668, 0.018193727493286134, 0.018231840133666993, 0.018173280715942382, 0.018167871475219727, 0.018250207901000976, 0.01849734306335449, 0.018170175552368165, 0.018157855987548828, 0.018214176177978516, 0.01827840042114258, 0.01819926452636719, 0.01826201629638672, 0.018142784118652344, 0.018322879791259766, 0.018147520065307617, 0.01825027275085449, 0.01816316795349121, 0.018301759719848633, 0.018184255599975586, 0.018536447525024414, 0.020339744567871094, 0.018977760314941406, 0.018364639282226564, 0.01815456008911133, 0.018121152877807616, 0.018124191284179688, 0.019116928100585937, 0.018251775741577148, 0.018329599380493163, 0.01815283203125, 0.01810291290283203, 0.01816329574584961, 0.0181231689453125, 0.01819366455078125, 0.018489599227905273, 0.018395647048950196, 0.018157888412475585, 0.018266048431396485, 0.018083583831787108, 0.018453727722167967, 0.01830108833312988, 0.018354816436767576, 0.018195711135864257, 0.01816204833984375, 0.01823196792602539, 0.018122463226318358, 0.018199552536010744, 0.01808896064758301, 0.018122047424316407, 0.01813164710998535, 0.018241439819335938, 0.01849558448791504, 0.01815894317626953, 0.018057504653930665, 0.018123136520385743, 0.018269664764404298, 0.018210336685180663, 0.01810527992248535, 0.018109760284423827, 0.01804569625854492, 0.018170080184936523, 0.018112287521362305, 0.018077695846557617, 0.018218751907348632, 0.018163839340209962, 0.018945600509643553, 0.01824211120605469, 0.018207935333251952, 0.01816156768798828, 0.018216096878051757, 0.01815936088562012, 0.01819443130493164, 0.01834320068359375, 0.018526815414428712, 0.018234752655029298, 0.018184831619262695, 0.01828611183166504, 0.018092639923095705, 0.018224544525146484, 0.018080352783203125, 0.018097280502319336, 0.01900364875793457, 0.019517183303833008, 0.01814121627807617, 0.01832364845275879, 0.018444608688354493, 0.018110944747924806, 0.018131967544555663, 0.01816268730163574, 0.018298879623413086, 0.018296735763549805, 0.019089120864868164, 0.018239519119262696, 0.01820707130432129, 0.018239488601684572, 0.01840127944946289, 0.018167327880859375, 0.018141664505004883, 0.01804035186767578, 0.018601791381835937, 0.018159456253051758, 0.018228031158447264, 0.018255872726440428, 0.018140832901000978, 0.01810047912597656, 0.01815510368347168, 0.018727424621582032, 0.019129440307617186, 0.01823187255859375, 0.01822550392150879, 0.01834614372253418, 0.018201696395874024, 0.01804569625854492, 0.018069503784179687, 0.018096128463745118, 0.018128896713256838, 0.018136735916137695, 0.01819683265686035, 0.018108415603637695, 0.018143232345581056, 0.018130367279052734, 0.018119232177734375, 0.018134016036987305, 0.018107391357421874, 0.018136415481567383, 0.018121376037597656, 0.018134271621704102, 0.018194944381713866, 0.01815577507019043, 0.018099903106689453, 0.0180513916015625, 0.018144895553588867, 0.019227008819580078, 0.018339935302734374, 0.018433055877685546, 0.018467008590698244, 0.01865388870239258, 0.019785375595092775, 0.018686304092407225, 0.018758655548095703, 0.018766847610473633, 0.018288639068603514, 0.01821286392211914, 0.01822550392150879, 0.018158464431762694, 0.018840576171875, 0.01864838409423828, 0.01853068733215332, 
0.018528575897216796, 0.01886115264892578, 0.018275232315063478, 0.018259967803955078, 0.01819443130493164, 0.01827833557128906, 0.018384960174560545, 0.01844380760192871, 0.018581535339355467, 0.018335391998291015, 0.018287391662597657, 0.018315168380737306, 0.01824127960205078, 0.018141408920288087, 0.01818796730041504, 0.01819411277770996, 0.018368799209594725, 0.018131423950195312, 0.01817190361022949, 0.018080896377563476, 0.018100576400756838, 0.01813763236999512, 0.018239488601684572, 0.018214912414550782, 0.018884607315063476, 0.019231935501098633, 0.01859231948852539, 0.01823468780517578, 0.018214879989624025, 0.01808403205871582, 0.018115360260009764, 0.018100223541259765, 0.01804697608947754, 0.018155519485473632, 0.019152576446533204, 0.018188831329345703, 0.018183967590332032, 0.01823289680480957, 0.018118751525878905, 0.018186368942260743, 0.01818047904968262, 0.01818828773498535, 0.018452320098876953, 0.01818137550354004, 0.018196672439575196, 0.01815545654296875, 0.018137727737426758, 0.01817964744567871, 0.01826246452331543, 0.018233343124389647, 0.018192384719848635, 0.018446176528930665, 0.01822934341430664, 0.018151487350463867, 0.018255807876586913, 0.018133056640625, 0.018435903549194336, 0.018211008071899414, 0.01813484764099121, 0.018280832290649415, 0.018132640838623048, 0.01813360023498535, 0.018048799514770508, 0.018155744552612305, 0.018067455291748045, 0.018102144241333006, 0.018178176879882813, 0.01812623977661133, 0.018176607131958008, 0.018135072708129883, 0.018182111740112306, 0.018146623611450197, 0.01815622329711914, 0.018094079971313477, 0.018259904861450196, 0.018199712753295898, 0.01820150375366211, 0.018167808532714845, 0.018089984893798827, 0.01809436798095703, 0.018036447525024413, 0.018067455291748045, 0.018010271072387694, 0.018120672225952147, 0.019124095916748046, 0.01838447952270508, 0.018578880310058592, 0.018086431503295898, 0.01808633613586426, 0.01815465545654297, 0.018120735168457032, 0.0181092472076416, 0.018100223541259765, 0.019085119247436524, 0.018468416213989258, 0.01957142448425293, 0.018171808242797852, 0.01821273612976074, 0.018339712142944335, 0.018204927444458008, 0.018092031478881835, 0.018184192657470705, 0.01828156852722168, 0.018115583419799804, 0.018116512298583985, 0.018198751449584962, 0.01919977569580078, 0.01832364845275879, 0.01815737533569336, 0.018137088775634767, 0.01808950424194336, 0.01817558479309082, 0.01807244873046875, 0.018186176300048828, 0.018296895980834962, 0.01815670394897461, 0.018111328125, 0.01943142318725586, 0.020368671417236327, 0.01828118324279785, 0.018219104766845705, 0.01828223991394043, 0.018253984451293944, 0.018263551712036134, 0.01809049606323242, 0.018333696365356447, 0.01807910346984863, 0.01809062385559082, 0.01818623924255371, 0.019267839431762697, 0.018130687713623046, 0.018045087814331055, 0.018081632614135743, 0.018044927597045898, 0.01912556838989258, 0.01822358322143555, 0.018090208053588866, 0.01803500747680664, 0.01812233543395996, 0.018038240432739258, 0.018216991424560548, 0.01812441635131836, 0.018094303131103516, 0.0180948486328125, 0.01819443130493164, 0.018335744857788085, 0.018184192657470705, 0.018101343154907225, 0.018055999755859375, 0.018136159896850586, 0.018117631912231445, 0.01807360076904297, 0.018276639938354492, 0.0182554874420166, 0.019285472869873047, 0.01890572738647461, 0.01878201675415039, 0.018249120712280274, 0.018245439529418945, 0.01823632049560547, 0.01809324836730957, 0.01811974334716797, 0.01807459259033203, 0.01840640068054199, 0.018245695114135742, 
0.018132511138916015, 0.018178207397460937, 0.018110559463500975, 0.018280448913574218, 0.018081792831420897, 0.018083423614501954, 0.018102399826049803, 0.018208608627319336, 0.01841606330871582, 0.018617631912231446, 0.018381471633911132, 0.018239551544189454, 0.018135135650634765, 0.01818943977355957, 0.018121503829956056, 0.018358591079711915, 0.018089567184448242, 0.01803856086730957, 0.018123071670532228, 0.018040416717529296, 0.018899360656738282, 0.01822163200378418, 0.018304832458496095, 0.018153919219970702, 0.018112159729003905, 0.018280799865722657, 0.018151552200317382, 0.018143104553222655, 0.01839308738708496, 0.01856060791015625, 0.018280864715576172, 0.018225151062011717, 0.018229248046875, 0.01821900749206543, 0.0181628475189209, 0.018346111297607423, 0.0181624641418457, 0.01832748794555664, 0.018137216567993164, 0.018130815505981446, 0.018118656158447266, 0.01817804718017578, 0.018167808532714845, 0.01818556785583496, 0.018120672225952147, 0.01816441535949707, 0.018224672317504884, 0.018209247589111327, 0.01828160095214844, 0.018090879440307617, 0.018147327423095702, 0.01822719955444336, 0.01818780708312988, 0.0181560001373291, 0.018135232925415037, 0.018153280258178712, 0.018140607833862305, 0.01806015968322754, 0.018144927978515624, 0.018175296783447266, 0.018117055892944336, 0.018047264099121094, 0.018100223541259765, 0.01809110450744629, 0.018254751205444335, 0.01873459243774414, 0.018355743408203125, 0.018221183776855467, 0.018170175552368165, 0.01812944030761719, 0.018137088775634767, 0.018170015335083008, 0.018192256927490234, 0.01827734375, 0.018215072631835937, 0.01917411231994629, 0.01824985694885254, 0.018208671569824218, 0.018268255233764647, 0.018097824096679687, 0.01819100761413574, 0.018132671356201172, 0.018138816833496094, 0.018460800170898437, 0.018995071411132814, 0.018319168090820313, 0.01818864059448242, 0.018505727767944336, 0.018464767456054687, 0.01840447998046875, 0.018367359161376953, 0.018149215698242186, 0.018110496520996094, 0.018118783950805663, 0.01814691162109375, 0.018225568771362305, 0.018134559631347656, 0.01815395164489746, 0.01825382423400879, 0.018159616470336915, 0.018128896713256838, 0.01814240074157715, 0.01810256004333496, 0.018141727447509765, 0.01818979263305664, 0.018147296905517578, 0.018289087295532226, 0.01817344093322754, 0.018137727737426758, 0.01816985511779785, 0.0184965763092041, 0.018289600372314453, 0.01828985595703125, 0.01826464080810547, 0.018267488479614256, 0.018408191680908202, 0.01818003273010254, 0.018272192001342773, 0.018140512466430662, 0.01811520004272461, 0.018172224044799803, 0.01818320083618164, 0.01808483123779297, 0.01821696090698242, 0.018051071166992186, 0.019091455459594727, 0.018181535720825197, 0.018107135772705077, 0.018171743392944337, 0.018146400451660157, 0.01813596725463867, 0.01840742492675781, 0.01837494468688965, 0.018650848388671874, 0.018485023498535157, 0.018392416000366212, 0.018357152938842772, 0.018194400787353515, 0.018093984603881837, 0.018399391174316406, 0.018079776763916016, 0.018304607391357423, 0.020652351379394532, 0.01961155128479004, 0.018309215545654296, 0.018264064788818358, 0.01815331268310547, 0.018258848190307618, 0.01819443130493164, 0.01843177604675293, 0.018120927810668944, 0.018182144165039063, 0.018152448654174806, 0.01815449523925781, 0.01806780815124512, 0.01811814308166504, 0.01858531188964844, 0.01825584030151367, 0.018252191543579103, 0.018177536010742186, 0.018208480834960936, 0.018352991104125978, 0.01829478454589844, 0.01830297660827637, 
0.018431999206542968, 0.01918156814575195, 0.01852544021606445, 0.018491264343261718, 0.018478111267089845, 0.01845631980895996, 0.018456640243530272, 0.018409503936767577, 0.018526208877563476, 0.01858905601501465, 0.01851158332824707, 0.019485631942749024, 0.01853228759765625, 0.01837264060974121, 0.018249408721923828, 0.018217632293701172, 0.018124448776245118, 0.01826959991455078, 0.01834377670288086, 0.018706687927246092, 0.01859382438659668, 0.018280576705932618, 0.01829859161376953, 0.018192127227783204, 0.01827724838256836, 0.018171743392944337, 0.018148704528808592, 0.018279232025146485, 0.018366111755371093, 0.018315616607666015, 0.018210464477539063, 0.018288991928100587, 0.018193599700927734, 0.018225120544433593, 0.018207584381103516, 0.01837648010253906, 0.018501792907714844, 0.018472991943359374, 0.018415647506713866, 0.018571264266967775, 0.018308576583862306, 0.018246431350708008, 0.01821980857849121, 0.01815648078918457, 0.018156608581542968, 0.018278656005859376]",tokens/s,54.653486250231055,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 96.12 MiB is free. Process 290696 has 14.64 GiB memory in use. Of the allocated memory 14.23 GiB is allocated by PyTorch, and 323.24 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3894, in from_pretrained hf_quantizer.preprocess_model( File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/base.py"", 
line 182, in preprocess_model return self._process_model_before_weight_loading(model, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/quantizers/quantizer_awq.py"", line 85, in _process_model_before_weight_loading model, has_been_replaced = replace_with_awq_linear( File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear _, has_been_replaced = replace_with_awq_linear( File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear _, has_been_replaced = replace_with_awq_linear( File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 179, in replace_with_awq_linear _, has_been_replaced = replace_with_awq_linear( [Previous line repeated 1 more time] File ""/usr/local/lib/python3.10/dist-packages/transformers/integrations/awq.py"", line 166, in replace_with_awq_linear model._modules[name] = target_cls( File ""/usr/local/lib/python3.10/dist-packages/awq/modules/linear/gemv.py"", line 42, in __init__ assert out_features % (32 // self.w_bit) == 0 AssertionError " 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1491.501056,1546.584064,0.0,1168.113664,1154.613248,s,1,8.3148095703125,8.3148095703125,0.0,8.3148095703125,8.3148095703125,8.3148095703125,8.3148095703125,[8.3148095703125],,kWh,3.861054563332876e-05,4.251723312599966e-06,1.2329732086008294e-05,5.5192001031937024e-05,,MB,1591.00928,1796.145152,0.0,1388.314624,1334.065152,s,10,0.8245529479980468,0.08245529479980468,0.00039060264817304067,0.08240457534790038,0.0828208740234375,0.08305651702880859,0.08324503143310547,"[0.08329216003417969, 0.08210860443115234, 0.08276850891113281, 0.08228358459472657, 0.08231641387939453, 0.08249273681640625, 0.08180339050292969, 0.08251830291748047, 0.08223340606689453, 0.08273583984375]",tokens/s,3104.7126885125926,kWh,2.4979876080481665e-06,2.7548447445572607e-07,1.655366946324919e-06,4.428839028828811e-06,tokens/kWh,57802958.81914186,MB,1599.61088,1796.145152,0.0,1388.314624,1372.847616,s,10,14.184140502929688,1.4184140502929687,0.012717605394795299,1.4124118041992189,1.4413192993164063,1.441889324951172,1.4423453454589845,"[1.441192626953125, 1.41733544921875, 1.424362548828125, 1.4087288818359376, 1.4124964599609375, 1.4068421630859376, 1.40653955078125, 1.4424593505859375, 1.4123271484375, 1.4118563232421875]",tokens/s,44.41580368368994,kWh,4.158395035028572e-05,4.5863410420389415e-06,2.066892869087587e-05,6.683922008320053e-05,tokens/kWh,942560.369818476,,s,630,14.181650554656967,0.022510556435963464,0.000441573484618649,0.022355904579162596,0.023032106399536133,0.023229604244232177,0.024400959815979003,"[0.022883968353271486, 0.022775615692138672, 0.022372928619384766, 0.02328134346008301, 0.022628671646118165, 0.02272390365600586, 0.02265567970275879, 0.022644384384155274, 
0.022833503723144532, 0.022494848251342774, 0.02241574478149414, 0.022583295822143554, 0.02249932861328125, 0.022908992767333984, 0.023009248733520508, 0.022877920150756837, 0.023116031646728517, 0.02322003173828125, 0.02311187171936035, 0.023651840209960938, 0.023056896209716796, 0.02309529685974121, 0.02294304084777832, 0.02310147285461426, 0.02301728057861328, 0.024599391937255858, 0.024070144653320313, 0.023216127395629883, 0.023259136199951173, 0.02327302360534668, 0.023402944564819336, 0.023218175888061524, 0.022996992111206056, 0.02312396812438965, 0.023259136199951173, 0.02307276725769043, 0.025007776260375977, 0.023136608123779295, 0.022815967559814455, 0.0225020809173584, 0.022383808135986328, 0.02230067253112793, 0.022448736190795897, 0.022437728881835938, 0.022378463745117188, 0.022277887344360352, 0.022255552291870116, 0.022253631591796875, 0.02218060874938965, 0.022359167098999023, 0.022243200302124025, 0.022194303512573243, 0.022563711166381836, 0.022208511352539064, 0.022335039138793946, 0.022378496170043945, 0.02266124725341797, 0.024013120651245116, 0.02289023971557617, 0.02280473518371582, 0.022786048889160155, 0.022952959060668944, 0.022753311157226563, 0.02301644706726074, 0.02279612731933594, 0.022786048889160155, 0.02294988822937012, 0.02255264091491699, 0.022235071182250977, 0.022177791595458983, 0.02229043197631836, 0.022593536376953126, 0.022623872756958006, 0.02232940864562988, 0.02225388717651367, 0.022257568359375, 0.022298719406127928, 0.022323200225830078, 0.022971935272216797, 0.0224116153717041, 0.022853759765625, 0.022228384017944337, 0.02239139175415039, 0.02236595153808594, 0.022364416122436524, 0.022529535293579102, 0.022729215621948243, 0.022755327224731444, 0.022562271118164064, 0.022388832092285156, 0.022342079162597655, 0.022296480178833008, 0.02247065544128418, 0.02235638427734375, 0.022286016464233397, 0.02224028778076172, 0.022258655548095703, 0.022404640197753907, 0.02227862358093262, 0.022374399185180666, 0.02251571273803711, 0.022420576095581055, 0.023034784317016603, 0.02244607925415039, 0.022343423843383788, 0.02227132797241211, 0.022188959121704103, 0.022254751205444335, 0.022262624740600586, 0.02230646324157715, 0.02229078483581543, 0.022312416076660156, 0.02226211166381836, 0.022220991134643556, 0.02251366424560547, 0.022529439926147463, 0.022677087783813478, 0.022727231979370117, 0.022767263412475584, 0.02290934371948242, 0.02287449645996094, 0.022814176559448243, 0.022614559173583983, 0.022638336181640625, 0.022840991973876953, 0.022696544647216797, 0.022761823654174805, 0.022866464614868163, 0.023133951187133787, 0.022542591094970702, 0.02263654327392578, 0.022433792114257813, 0.02242723274230957, 0.023140159606933594, 0.02424687957763672, 0.025407487869262696, 0.02250752067565918, 0.02267750358581543, 0.02267750358581543, 0.022466144561767577, 0.02253046417236328, 0.022842912673950194, 0.022785791397094725, 0.02378620719909668, 0.022408384323120117, 0.022247392654418944, 0.022202816009521485, 0.022208320617675782, 0.022264415740966798, 0.022228448867797852, 0.022065696716308595, 0.022181888580322266, 0.022411231994628907, 0.02440809631347656, 0.022855680465698244, 0.022275455474853517, 0.022387327194213866, 0.02217740821838379, 0.022122400283813477, 0.02222537612915039, 0.022192127227783204, 0.022598783493041993, 0.02252067184448242, 0.023130144119262695, 0.022355840682983397, 0.023351423263549803, 0.024786943435668944, 0.022586719512939453, 0.02228291130065918, 0.022366079330444336, 0.022209888458251954, 0.022217472076416014, 
0.022179519653320313, 0.02222729682922363, 0.022106111526489256, 0.022106111526489256, 0.022194175720214843, 0.022514879226684572, 0.02235475158691406, 0.02232691192626953, 0.02224166488647461, 0.022228992462158204, 0.022435104370117188, 0.02245091247558594, 0.022202144622802733, 0.022455808639526367, 0.02236079978942871, 0.022337535858154296, 0.022275455474853517, 0.022552576065063477, 0.02245631980895996, 0.02229043197631836, 0.02243328094482422, 0.02217011260986328, 0.022390783309936522, 0.022234176635742186, 0.02214393615722656, 0.022216575622558594, 0.022159488677978515, 0.022279712677001955, 0.022272480010986327, 0.02224336051940918, 0.02218364715576172, 0.022210208892822266, 0.022238815307617187, 0.022316064834594727, 0.022187999725341797, 0.022181184768676757, 0.022153919219970702, 0.022237184524536133, 0.022147071838378905, 0.022197311401367187, 0.022188192367553712, 0.02226655960083008, 0.022278240203857422, 0.022312959671020507, 0.022340991973876952, 0.022227136611938477, 0.022311199188232423, 0.022413471221923827, 0.022147071838378905, 0.022304031372070314, 0.02323324775695801, 0.02351103973388672, 0.022396928787231447, 0.022261760711669923, 0.022554079055786134, 0.02232304000854492, 0.0223024959564209, 0.02263484764099121, 0.02215497589111328, 0.02224844741821289, 0.02221142387390137, 0.022231168746948242, 0.022216800689697266, 0.02222774314880371, 0.022278144836425783, 0.02220185661315918, 0.022161184310913087, 0.022171392440795898, 0.02208867263793945, 0.022404767990112304, 0.022303071975708008, 0.02246396827697754, 0.022314592361450194, 0.022388927459716795, 0.022864639282226564, 0.022550527572631835, 0.0226810245513916, 0.02312153625488281, 0.022612640380859375, 0.022675519943237306, 0.02293609619140625, 0.022534271240234376, 0.022812671661376953, 0.022734848022460938, 0.022807743072509764, 0.022569280624389648, 0.02236057662963867, 0.02228428840637207, 0.022380544662475587, 0.02233545684814453, 0.022393951416015623, 0.02222585678100586, 0.022371679306030273, 0.02229520034790039, 0.022417280197143556, 0.02248422431945801, 0.022621055603027344, 0.022996959686279298, 0.022722591400146486, 0.02252390480041504, 0.022364160537719727, 0.02224742317199707, 0.022351871490478514, 0.022318111419677735, 0.022352863311767578, 0.022171392440795898, 0.022233184814453126, 0.02237385559082031, 0.022264511108398437, 0.022242624282836913, 0.02220921516418457, 0.02227801513671875, 0.02235100746154785, 0.02218288040161133, 0.02227519989013672, 0.022331552505493166, 0.02224371147155762, 0.022357919692993163, 0.022319551467895507, 0.022358015060424806, 0.022296575546264647, 0.02227609634399414, 0.022405120849609376, 0.022280128479003906, 0.022296640396118166, 0.022261760711669923, 0.02225667190551758, 0.02222172737121582, 0.022300735473632812, 0.02230271911621094, 0.022466560363769532, 0.022486143112182617, 0.022637439727783204, 0.022380544662475587, 0.022543872833251953, 0.022311424255371092, 0.02237593650817871, 0.02243619155883789, 0.022775840759277344, 0.022409343719482423, 0.022288383483886717, 0.02306662368774414, 0.022550527572631835, 0.0224768009185791, 0.022357088088989258, 0.02243881607055664, 0.02226959991455078, 0.02234409523010254, 0.022258752822875975, 0.02229747200012207, 0.022132736206054687, 0.02219411277770996, 0.022327360153198243, 0.02229827117919922, 0.022258016586303712, 0.022382591247558595, 0.022209823608398436, 0.02227631950378418, 0.023505407333374022, 0.022734336853027344, 0.02234828758239746, 0.022292255401611328, 0.02248521614074707, 0.022220703125, 
0.022324832916259765, 0.02221516799926758, 0.022218751907348632, 0.02225702476501465, 0.02223276710510254, 0.02211686325073242, 0.02212499237060547, 0.022165504455566407, 0.02225971221923828, 0.022423360824584963, 0.022300416946411133, 0.022218496322631835, 0.022205312728881835, 0.022292415618896486, 0.02246847915649414, 0.022203807830810548, 0.02239548873901367, 0.02224723243713379, 0.022436031341552733, 0.02213430404663086, 0.022295007705688475, 0.02215936088562012, 0.022222015380859376, 0.02226464080810547, 0.022247264862060547, 0.02241142463684082, 0.02251276779174805, 0.022524383544921874, 0.022315423965454103, 0.022278144836425783, 0.02227916717529297, 0.02233772850036621, 0.022348608016967773, 0.022204416275024414, 0.02225152015686035, 0.0224399356842041, 0.022355968475341798, 0.02222857666015625, 0.022685440063476562, 0.02230108833312988, 0.022335744857788085, 0.022274015426635742, 0.022337535858154296, 0.022419296264648437, 0.022288543701171875, 0.02225766372680664, 0.0222959041595459, 0.022289056777954102, 0.022306623458862303, 0.02239619255065918, 0.022319807052612304, 0.022438079833984374, 0.022249504089355467, 0.02230067253112793, 0.022187360763549803, 0.022196895599365236, 0.02234297561645508, 0.022249183654785155, 0.022603967666625976, 0.02241779136657715, 0.02238649559020996, 0.02240358352661133, 0.022175840377807617, 0.0221759033203125, 0.022234975814819338, 0.022204416275024414, 0.022409215927124023, 0.022200288772583007, 0.022195680618286133, 0.022104639053344727, 0.02212396812438965, 0.02225388717651367, 0.022100223541259765, 0.02213478469848633, 0.022417407989501953, 0.02242355155944824, 0.02229043197631836, 0.022122495651245116, 0.022286336898803712, 0.02224947166442871, 0.023358623504638673, 0.022686559677124022, 0.022351871490478514, 0.022351392745971678, 0.02268169593811035, 0.022258047103881837, 0.022191232681274414, 0.02223107147216797, 0.022283103942871092, 0.02229792022705078, 0.022344383239746093, 0.022216224670410158, 0.022256095886230468, 0.022359615325927736, 0.022346048355102538, 0.022358144760131836, 0.02227609634399414, 0.022253568649291993, 0.02231091117858887, 0.02246659278869629, 0.022317024230957033, 0.02225257682800293, 0.02235696029663086, 0.02237446403503418, 0.02234566307067871, 0.022310335159301756, 0.022362688064575195, 0.022379711151123048, 0.023225151062011718, 0.02478489685058594, 0.022353567123413087, 0.02247292709350586, 0.02226736068725586, 0.022384639739990234, 0.022495904922485353, 0.022478847503662108, 0.022670495986938478, 0.022686559677124022, 0.026043968200683595, 0.023142847061157226, 0.024383487701416014, 0.02275328063964844, 0.02280243110656738, 0.022683647155761717, 0.02246451187133789, 0.022357696533203124, 0.02231737518310547, 0.022276224136352538, 0.022463552474975584, 0.022343679428100584, 0.022423519134521484, 0.02236198425292969, 0.02231395149230957, 0.022306400299072264, 0.022262176513671874, 0.022358015060424806, 0.022908672332763672, 0.022225151062011717, 0.022382272720336913, 0.022355487823486328, 0.022855680465698244, 0.022528064727783202, 0.022604511260986327, 0.022816768646240236, 0.02287820816040039, 0.02285977554321289, 0.0228286075592041, 0.023067071914672853, 0.023119871139526366, 0.022996639251708983, 0.023184831619262696, 0.023116640090942383, 0.02337798309326172, 0.023250944137573244, 0.023222080230712892, 0.023189695358276367, 0.023220224380493162, 0.023193151473999023, 0.023140800476074218, 0.02314035224914551, 0.023031808853149413, 0.0230645751953125, 0.02331443214416504, 0.023328800201416016, 
0.02333692741394043, 0.024162303924560546, 0.02351923179626465, 0.023011264801025392, 0.023147584915161133, 0.022755712509155274, 0.022620576858520508, 0.022452384948730468, 0.022826528549194335, 0.02290928077697754, 0.022496639251708986, 0.0223874568939209, 0.02249728012084961, 0.022355615615844728, 0.022369632720947264, 0.022172672271728516, 0.02227609634399414, 0.022181631088256836, 0.02245043182373047, 0.022451616287231444, 0.022602336883544922, 0.022378496170043945, 0.022425600051879883, 0.022458368301391602, 0.022343679428100584, 0.022512832641601564, 0.022276927947998047, 0.02226563262939453, 0.022411136627197265, 0.02228873634338379, 0.022285600662231446, 0.022278879165649416, 0.022312959671020507, 0.022218719482421875, 0.02222697639465332, 0.022318496704101562, 0.022311519622802735, 0.022251487731933594, 0.02218377685546875, 0.022349119186401367, 0.022256511688232422, 0.02223468780517578, 0.022096319198608397, 0.02228428840637207, 0.02230179214477539, 0.022287040710449218, 0.0222589111328125, 0.022162431716918944, 0.022220800399780274, 0.022228992462158204, 0.02246451187133789, 0.02233263969421387, 0.022315807342529297, 0.022304767608642577, 0.02233296012878418, 0.022170080184936523, 0.02267955207824707, 0.0243056640625, 0.022999040603637694, 0.02231491279602051, 0.022304224014282226, 0.022551168441772462, 0.022258880615234376, 0.022278976440429688, 0.02251087951660156, 0.02230659294128418, 0.02228495979309082, 0.02249318313598633, 0.02240716743469238, 0.022392192840576173, 0.02232588768005371, 0.022392831802368163, 0.02227609634399414, 0.024000511169433594, 0.022401023864746093, 0.022353919982910156, 0.02224742317199707, 0.02232729530334473, 0.02242473602294922, 0.022266752243041994, 0.022249439239501952, 0.02230067253112793, 0.022335487365722655, 0.02275846481323242, 0.022360832214355468, 0.022363391876220703, 0.022344640731811524, 0.02225702476501465, 0.022260032653808593, 0.02222105598449707, 0.02230806350708008, 0.022258527755737306, 0.02214694404602051, 0.022241312026977537, 0.022275264739990235, 0.02226700782775879, 0.022187423706054688, 0.02218022346496582, 0.02249318313598633, 0.022268159866333008, 0.022181312561035157, 0.022305088043212892, 0.022245216369628906, 0.022288543701171875, 0.022231039047241212, 0.02251087951660156, 0.022242015838623046, 0.022273855209350588, 0.02231100845336914, 0.022851680755615233, 0.022468608856201173, 0.02234163284301758, 0.022269952774047853, 0.022300640106201173, 0.022312032699584962, 0.02226483154296875, 0.022200096130371095, 0.02236128044128418, 0.02227020835876465, 0.022468927383422852, 0.02255036735534668, 0.022633056640625, 0.022695903778076173, 0.022629695892333983, 0.02271504020690918, 0.022581600189208986, 0.02269264030456543, 0.022758272171020506, 0.0226345272064209, 0.02259552001953125]",tokens/s,44.423602004007925,,, 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1550.217216,1546.584064,0.0,1168.113664,1154.613248,s,1,8.64842578125,8.64842578125,0.0,8.64842578125,8.64842578125,8.64842578125,8.64842578125,[8.64842578125],,kWh,4.103143184585557e-05,4.51875467082263e-06,1.3285010628000249e-05,5.8835197144678446e-05,,MB,1614.45888,1796.145152,0.0,1388.314624,1334.065152,s,10,0.8219226303100586,0.08219226303100585,0.0004280390602370377,0.08214347076416015,0.08276129989624023,0.08278773918151856,0.08280889060974121,"[0.08281417846679688, 0.0819373779296875, 0.08164854431152344, 0.08275542449951172, 0.08214393615722657, 0.082610595703125, 0.08180518341064454, 0.08247196960449218, 0.08159241485595703, 0.08214300537109374]",tokens/s,3114.6483934069033,kWh,2.5020421523860822e-06,2.759293593684368e-07,1.6584438718633522e-06,4.4364153836178716e-06,tokens/kWh,57704244.950848915,MB,1623.748608,1796.145152,0.0,1388.314624,1372.847616,s,10,14.96638134765625,1.496638134765625,0.009406909194957883,1.4962703857421875,1.5054152587890626,1.5100327026367186,1.5137266577148436,"[1.49269677734375, 1.50438916015625, 1.514650146484375, 1.4816754150390625, 1.48654833984375, 1.499749755859375, 1.5007628173828125, 1.50422900390625, 1.492791015625, 1.488888916015625]",tokens/s,42.09434367370698,kWh,4.24842924109445e-05,4.684985023969847e-06,2.0886351941735733e-05,6.805562937665008e-05,tokens/kWh,925713.2815763119,,s,630,14.96271859169006,0.023750346970936607,0.0003661580478043759,0.023750319480895996,0.024078694343566895,0.02421709747314453,0.024989873924255383,"[0.02380633544921875, 0.023762943267822266, 0.023607391357421875, 0.02354380798339844, 0.023275264739990233, 0.023283008575439454, 0.023229375839233398, 0.023183359146118163, 0.023592960357666014, 0.023416831970214845, 0.024385536193847656, 0.024706239700317382, 0.023783615112304687, 0.023743104934692384, 0.024041471481323243, 0.02397132873535156, 0.023676639556884767, 0.023415584564208985, 0.023458911895751954, 0.023972768783569336, 0.02384486389160156, 0.023412736892700195, 0.023745920181274412, 0.02337980842590332, 0.023310527801513672, 0.023598751068115233, 0.023786239624023438, 0.023750783920288086, 0.02358278465270996, 0.02364112091064453, 0.02396463966369629, 0.023725568771362306, 0.023619903564453124, 0.023675071716308595, 0.023586559295654296, 0.023671039581298826, 0.023658496856689453, 0.023844415664672852, 0.023769535064697266, 0.0236727352142334, 0.023636064529418944, 0.02361510467529297, 0.023546207427978517, 0.023480127334594727, 0.023560415267944335, 0.023631872177124022, 0.023518335342407225, 0.023503488540649414, 0.023717567443847655, 0.02366111946105957, 0.02370355224609375, 0.023769088745117187, 0.023799808502197265, 0.023985504150390625, 0.023896095275878906, 0.02388140869140625, 0.02381100845336914, 0.02373017692565918, 0.023791616439819335, 0.02364998435974121, 0.02373868751525879, 0.023752704620361328, 0.023810047149658203, 0.023736255645751953, 0.023730367660522462, 0.023744319915771483, 0.02378384017944336, 0.02387763214111328, 0.02389811134338379, 0.023823711395263673, 0.023720895767211914, 0.02379952049255371, 0.02370332717895508, 0.023720159530639648, 0.02366979217529297, 0.02381923294067383, 0.02386124801635742, 0.023922016143798828, 0.02406671905517578, 0.024233503341674803, 0.02417856025695801, 0.023800159454345705, 0.024000448226928713, 0.024113183975219728, 0.024078559875488282, 0.02417056083679199, 0.024141727447509767, 0.02396940803527832, 0.023980512619018554, 0.02400592041015625, 0.024105152130126952, 
0.024222240447998047, 0.0241213436126709, 0.02386105537414551, 0.023830720901489258, 0.023958528518676758, 0.02396236801147461, 0.02398147201538086, 0.023927648544311522, 0.02386332893371582, 0.024174591064453126, 0.023848928451538087, 0.023760896682739258, 0.023787519454956055, 0.023777280807495117, 0.023807071685791017, 0.023802719116210937, 0.0242729606628418, 0.023896032333374024, 0.02367647933959961, 0.023789215087890624, 0.023743135452270508, 0.02366192054748535, 0.02374291229248047, 0.02383091163635254, 0.023754079818725585, 0.023728479385375978, 0.02387990379333496, 0.023610944747924804, 0.0235762882232666, 0.023685951232910157, 0.02365235137939453, 0.023635967254638672, 0.02370560073852539, 0.023918079376220702, 0.023871999740600586, 0.023883775711059572, 0.023834272384643553, 0.024004831314086914, 0.02396563148498535, 0.02396793556213379, 0.023924736022949217, 0.023982080459594726, 0.023940576553344726, 0.024035871505737303, 0.024410400390625, 0.024126304626464843, 0.02394179153442383, 0.02369990348815918, 0.023778783798217774, 0.02382265663146973, 0.02380985641479492, 0.02381756782531738, 0.023816864013671876, 0.023861663818359375, 0.023836448669433595, 0.023840160369873048, 0.023806560516357423, 0.023803903579711915, 0.023821887969970704, 0.023792064666748047, 0.023911903381347657, 0.024098623275756837, 0.02398896026611328, 0.023910400390625, 0.023988224029541014, 0.0241582088470459, 0.023795711517333985, 0.02389017677307129, 0.023876480102539063, 0.023923583984375, 0.024029184341430664, 0.024104223251342774, 0.024167135238647462, 0.023954591751098632, 0.023878496170043947, 0.023975936889648438, 0.024077375411987303, 0.024036287307739258, 0.024095903396606444, 0.023894847869873045, 0.023740447998046876, 0.023742464065551756, 0.023820255279541017, 0.024213119506835936, 0.02391859245300293, 0.024068511962890626, 0.025285888671875, 0.027642431259155272, 0.023999935150146486, 0.024421119689941408, 0.023995807647705078, 0.023908895492553713, 0.024059968948364256, 0.024195072174072265, 0.023985599517822264, 0.02398908805847168, 0.024044895172119142, 0.023935359954833986, 0.02414240074157715, 0.024014848709106446, 0.023961599349975587, 0.024110271453857423, 0.02407302474975586, 0.024231903076171873, 0.023986207962036134, 0.02397536087036133, 0.02382480049133301, 0.023787935256958007, 0.023869184494018553, 0.023887744903564455, 0.02401638412475586, 0.0240230712890625, 0.023997024536132814, 0.023961599349975587, 0.024037376403808593, 0.02372403144836426, 0.023621631622314454, 0.023901695251464843, 0.024130048751831053, 0.023625728607177734, 0.023442527770996095, 0.023337440490722658, 0.02329644775390625, 0.023389728546142578, 0.023187744140625, 0.023165119171142577, 0.02304204750061035, 0.023128095626831054, 0.023194751739501952, 0.02319856071472168, 0.023181312561035155, 0.023144447326660156, 0.023308000564575194, 0.023361824035644532, 0.023322784423828125, 0.02353545570373535, 0.02351103973388672, 0.02352249526977539, 0.02346668815612793, 0.023432735443115235, 0.023351903915405273, 0.02332057571411133, 0.02345084762573242, 0.023435295104980467, 0.023268096923828124, 0.02326323127746582, 0.0232857608795166, 0.023318527221679687, 0.023307327270507813, 0.023241664886474608, 0.023283231735229493, 0.02329030418395996, 0.02319977569580078, 0.023179264068603517, 0.023080608367919923, 0.023216480255126952, 0.023215232849121095, 0.02312895965576172, 0.023166976928710937, 0.023171072006225587, 0.023169023513793945, 0.02349679946899414, 0.023485599517822267, 0.023452512741088866, 
0.023613439559936524, 0.02354380798339844, 0.0234967041015625, 0.024188127517700195, 0.023703935623168946, 0.02372150421142578, 0.0236712646484375, 0.023754175186157227, 0.023684064865112306, 0.023672256469726562, 0.023607872009277345, 0.023785375595092775, 0.023607391357421875, 0.023754751205444336, 0.023651647567749023, 0.023724159240722655, 0.023788095474243164, 0.023684736251831054, 0.023550336837768554, 0.023697216033935545, 0.023654592514038085, 0.02369740867614746, 0.023481632232666017, 0.023756607055664063, 0.02366556739807129, 0.023613311767578124, 0.0235807991027832, 0.02368409538269043, 0.02379395294189453, 0.023722719192504883, 0.023650304794311523, 0.023556095123291015, 0.023433311462402344, 0.024747520446777343, 0.024240383148193358, 0.023642175674438475, 0.023421024322509764, 0.02331430435180664, 0.023183488845825197, 0.02322617530822754, 0.02335353660583496, 0.023288864135742188, 0.023264223098754883, 0.023234016418457033, 0.02329631996154785, 0.023570655822753906, 0.023371776580810546, 0.023267616271972658, 0.024314815521240235, 0.023368480682373047, 0.023322463989257813, 0.02323468780517578, 0.023203872680664064, 0.023279424667358398, 0.023338720321655272, 0.023559839248657226, 0.023535423278808594, 0.023660831451416016, 0.023567071914672853, 0.02373436737060547, 0.02372822380065918, 0.023723360061645507, 0.023646783828735352, 0.023539039611816408, 0.023618175506591798, 0.02343129539489746, 0.02372198486328125, 0.023570432662963867, 0.02367647933959961, 0.023873600006103515, 0.023658912658691408, 0.023586175918579103, 0.023499359130859376, 0.02361100769042969, 0.023609695434570314, 0.023631904602050783, 0.023734272003173826, 0.023580671310424805, 0.023528511047363282, 0.023518144607543947, 0.023740415573120118, 0.02370355224609375, 0.02369270324707031, 0.02389462471008301, 0.024393728256225586, 0.023952959060668945, 0.023883232116699217, 0.023796703338623045, 0.023836383819580077, 0.02393110466003418, 0.023908287048339843, 0.023781503677368164, 0.02381814384460449, 0.023865440368652343, 0.023836544036865234, 0.023779199600219725, 0.023758207321166992, 0.023693695068359374, 0.023654495239257813, 0.023775487899780273, 0.023580831527709963, 0.023658496856689453, 0.02366873550415039, 0.02368307113647461, 0.023826431274414063, 0.02384486389160156, 0.023758848190307616, 0.023838720321655273, 0.023917631149291994, 0.023966655731201172, 0.02394316864013672, 0.024379392623901368, 0.02391561508178711, 0.023954336166381835, 0.02388582420349121, 0.023918079376220702, 0.024045984268188478, 0.024041151046752928, 0.023981855392456054, 0.02397657585144043, 0.0239649600982666, 0.024201183319091796, 0.024169216156005858, 0.023879871368408204, 0.023948831558227537, 0.023875776290893554, 0.0238144645690918, 0.023680864334106447, 0.02360704040527344, 0.02357904052734375, 0.023479711532592772, 0.023871936798095704, 0.023535776138305663, 0.023825504302978515, 0.023738847732543946, 0.023808576583862304, 0.02430544090270996, 0.024410015106201173, 0.024332639694213867, 0.0238057918548584, 0.02375017547607422, 0.02368355178833008, 0.02352889633178711, 0.023537599563598632, 0.0235546875, 0.023547103881835937, 0.023415584564208985, 0.023390207290649414, 0.02355583953857422, 0.023529727935791014, 0.023467199325561523, 0.023388704299926757, 0.023362943649291992, 0.023393184661865234, 0.023750463485717774, 0.02434681510925293, 0.0253798713684082, 0.02373731231689453, 0.023505088806152343, 0.023314016342163086, 0.023388320922851563, 0.02332415962219238, 0.02338604736328125, 0.023805856704711914, 
0.024320735931396484, 0.024049663543701173, 0.023772159576416017, 0.023532543182373047, 0.025851903915405275, 0.02457804870605469, 0.02367283248901367, 0.023801023483276368, 0.023797855377197266, 0.02368953514099121, 0.023625120162963868, 0.023758975982666016, 0.023931743621826172, 0.02377110481262207, 0.02384492874145508, 0.023842815399169923, 0.024127071380615234, 0.023869535446166993, 0.023980096817016603, 0.023963615417480467, 0.024010751724243166, 0.023977472305297853, 0.02386387252807617, 0.02387353515625, 0.02371513557434082, 0.023756864547729493, 0.02378816032409668, 0.023883295059204102, 0.02382896041870117, 0.024020639419555664, 0.023609695434570314, 0.02364825630187988, 0.023536895751953123, 0.02363849639892578, 0.023666976928710937, 0.023756799697875978, 0.023948831558227537, 0.023799936294555665, 0.02363369560241699, 0.023728063583374023, 0.023757408142089844, 0.023689247131347655, 0.02391244888305664, 0.023740415573120118, 0.023842815399169923, 0.023830528259277343, 0.02382956886291504, 0.023901119232177734, 0.02407539176940918, 0.02398201560974121, 0.0239520320892334, 0.02390768051147461, 0.024017375946044923, 0.023982559204101563, 0.023887136459350585, 0.023816352844238282, 0.023881536483764648, 0.024017663955688478, 0.023812095642089845, 0.02390969657897949, 0.024220352172851563, 0.024079904556274415, 0.023921119689941407, 0.024071231842041015, 0.023952320098876954, 0.02401590347290039, 0.023935968399047852, 0.023992319107055664, 0.02388172721862793, 0.024201215744018553, 0.02388991928100586, 0.0240676155090332, 0.023807615280151368, 0.023935903549194337, 0.023981023788452148, 0.023804479598999024, 0.02379155158996582, 0.023772863388061522, 0.023798559188842775, 0.023611391067504883, 0.023610528945922853, 0.025088863372802736, 0.023813600540161132, 0.023751136779785156, 0.023517248153686523, 0.023779327392578126, 0.023602336883544923, 0.023452512741088866, 0.023428735733032228, 0.023343488693237303, 0.02348195266723633, 0.02354854393005371, 0.02337887954711914, 0.02349888038635254, 0.02341142463684082, 0.023400447845458985, 0.02371788787841797, 0.02406399917602539, 0.023939008712768554, 0.023869472503662108, 0.024172224044799805, 0.023875072479248048, 0.023824832916259767, 0.024041439056396486, 0.024080928802490235, 0.02406108856201172, 0.02413849639892578, 0.024035327911376952, 0.024495744705200197, 0.024295808792114258, 0.024159231185913087, 0.024738815307617186, 0.0242457275390625, 0.024043968200683594, 0.02411734390258789, 0.024096031188964844, 0.023892704010009765, 0.023932928085327147, 0.02389561653137207, 0.023828927993774413, 0.02373222351074219, 0.023793024063110353, 0.023661184310913085, 0.023434911727905273, 0.023347551345825196, 0.023389888763427735, 0.023303647994995118, 0.023432031631469726, 0.023459840774536132, 0.02354368019104004, 0.0234149112701416, 0.023345151901245118, 0.023390207290649414, 0.023360511779785157, 0.023419904708862304, 0.023230464935302734, 0.023053632736206055, 0.023214784622192383, 0.02334851264953613, 0.023274208068847658, 0.023371776580810546, 0.02328985595703125, 0.023617536544799804, 0.02353705596923828, 0.02362019157409668, 0.02373161506652832, 0.02366320037841797, 0.023580671310424805, 0.023758495330810547, 0.023785408020019532, 0.023663232803344727, 0.02367081642150879, 0.02370345687866211, 0.023670879364013672, 0.02366054344177246, 0.023463615417480467, 0.02348988723754883, 0.02595948791503906, 0.025237375259399415, 0.023840576171875, 0.023843040466308595, 0.023504896163940428, 0.023326719284057617, 0.023255136489868163, 
0.02325289535522461, 0.023154239654541015, 0.023198144912719727, 0.023203840255737306, 0.023257087707519532, 0.02306172752380371, 0.02313091278076172, 0.023111679077148437, 0.023408639907836915, 0.02411075210571289, 0.023654048919677734, 0.023642240524291994, 0.023669599533081054, 0.02380975914001465, 0.023785472869873047, 0.023991296768188477, 0.023809024810791016, 0.023653375625610353, 0.023579647064208984, 0.023441247940063477, 0.023471935272216797, 0.023531871795654295, 0.02371295928955078, 0.02347292709350586, 0.02338991928100586, 0.023412256240844725, 0.02339859199523926, 0.023742111206054687, 0.023665599822998047, 0.023830528259277343, 0.02357040023803711, 0.02346518325805664, 0.02381702423095703, 0.023586816787719726, 0.023599103927612306, 0.023371103286743165, 0.02350966453552246, 0.023410688400268553, 0.023365631103515624, 0.02360540771484375, 0.02351702308654785, 0.023782848358154297, 0.024240703582763673, 0.02364112091064453, 0.02354275131225586, 0.02353971290588379, 0.023638015747070314]",tokens/s,42.10464803835093,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4091, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2958, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 2 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 854, in _apply self._buffers[key] = fn(buf) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 70.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 22.12 MiB is free. Process 25849 has 14.72 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 193.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,6729.650176,7525.564416,0.0,7147.094016,7138.9184,s,1,11.4090703125,11.4090703125,0.0,11.4090703125,11.4090703125,11.4090703125,11.4090703125,[11.4090703125],,kWh,0.00012896135883750805,1.4217553013790054e-05,4.2785312005994e-05,0.0001859642238572921,,MB,1605.595136,8226.013184,0.0,7818.182656,7724.300288,s,10,6.541907592773438,0.6541907592773437,0.0010968707736659236,0.6545454406738281,0.6554325134277343,0.6555990386962891,0.6557322589111328,"[0.6523662719726563, 0.6524636840820313, 0.6538348388671875, 0.6533842163085938, 0.6545287475585938, 0.6553955078125, 0.6548684692382812, 0.6547381591796875, 0.6545621337890625, 0.6557655639648438]",tokens/s,391.32316739354764,kWh,1.9139248714321864e-05,2.110738477930151e-06,1.2704662941499106e-05,3.395465013375112e-05,tokens/kWh,7539468.054937621,MB,1611.640832,8372.813824,0.0,7964.983296,7904.605696,s,10,30.872391845703124,3.087239184570312,0.009571757553960468,3.0865322265625,3.094320703125,3.1031818359375,3.1102707421875,"[3.077819091796875, 3.076754638671875, 3.08461279296875, 3.07926171875, 3.087107666015625, 3.087256103515625, 3.089228515625, 3.085956787109375, 
3.0923515625, 3.11204296875]",tokens/s,20.40658213813403,kWh,8.99548742615112e-05,9.922422753871875e-06,5.9754061692101484e-05,0.00015963135870748454,tokens/kWh,394659.29821122397,,s,630,30.868402111053467,0.04899746366833883,0.000977710370331209,0.048934560775756836,0.04963838310241699,0.04993952903747558,0.05121825778961182,"[0.0511861457824707, 0.04846723175048828, 0.04803587341308594, 0.04802835083007812, 0.048527359008789066, 0.048216064453125, 0.04802969741821289, 0.047993854522705076, 0.047874656677246094, 0.04905615997314453, 0.04876697540283203, 0.048246784210205076, 0.04849200057983399, 0.04856681442260742, 0.04820102310180664, 0.04856902313232422, 0.048574462890625, 0.04836761474609375, 0.049616897583007816, 0.04921548843383789, 0.04869036865234375, 0.0485153923034668, 0.04882688140869141, 0.04887065505981445, 0.04970985412597656, 0.048368927001953124, 0.04843727874755859, 0.0487922248840332, 0.04877721786499024, 0.04836966323852539, 0.04904755020141602, 0.04895334243774414, 0.04922067260742188, 0.04842793655395508, 0.04895452880859375, 0.048632190704345706, 0.04861731338500976, 0.04861609649658203, 0.04899020767211914, 0.048936958312988284, 0.04855801773071289, 0.04964966583251953, 0.049079456329345704, 0.04900751876831055, 0.04865024185180664, 0.0493383674621582, 0.04933817672729492, 0.04878281784057617, 0.04896380615234375, 0.04905625534057617, 0.04910079956054687, 0.049235233306884764, 0.04918534469604492, 0.04887670516967774, 0.04871433639526367, 0.04894927978515625, 0.04920563125610351, 0.04914585494995117, 0.04892470550537109, 0.049538463592529294, 0.04931142425537109, 0.049592288970947265, 0.049414623260498045, 0.05143142318725586, 0.04864614486694336, 0.04810137557983398, 0.047768638610839846, 0.047700927734375, 0.047508575439453124, 0.04803062438964844, 0.048639999389648435, 0.0488812141418457, 0.04853219223022461, 0.0483691520690918, 0.048172286987304684, 0.04848534393310547, 0.04876435089111328, 0.04869174575805664, 0.04877132797241211, 0.04833052825927735, 0.04869094467163086, 0.04916044616699219, 0.04934860610961914, 0.04927078247070313, 0.04863155364990234, 0.04868531036376953, 0.04806198501586914, 0.04839267349243164, 0.04816656112670899, 0.04837615966796875, 0.04834678268432617, 0.048882015228271486, 0.04875836944580078, 0.04839670562744141, 0.04872185516357422, 0.04912271881103516, 0.048615806579589846, 0.04923183822631836, 0.04884921646118164, 0.04869055938720703, 0.04894518280029297, 0.049310302734375, 0.04903500747680664, 0.04870374298095703, 0.0489697265625, 0.049006591796875, 0.048811840057373046, 0.04921772766113281, 0.04910079956054687, 0.04883660888671875, 0.04878934478759766, 0.04894121551513672, 0.04865433502197265, 0.04894841766357422, 0.04905657577514649, 0.049414142608642575, 0.049119232177734375, 0.04881139373779297, 0.04918540954589844, 0.04933631896972656, 0.04925395202636719, 0.050059486389160156, 0.04977062225341797, 0.0490599365234375, 0.049209342956542966, 0.04957388687133789, 0.05131222534179687, 0.04826124954223633, 0.04803968048095703, 0.04798252868652344, 0.04846448135375977, 0.04786380767822265, 0.04809318542480469, 0.04869852828979492, 0.04877091217041016, 0.048288352966308595, 0.04828966522216797, 0.04813059234619141, 0.048807937622070315, 0.04840038299560547, 0.04871145629882812, 0.04855356979370117, 0.048415359497070314, 0.049408000946044923, 0.04879897689819336, 0.04953523254394531, 0.04994508743286133, 0.049167903900146484, 0.04876502227783203, 0.04884310531616211, 0.04845340728759766, 0.04849395370483398, 0.04879756927490234, 
0.04872499084472656, 0.04825804901123047, 0.04849356842041016, 0.048840320587158204, 0.049053825378417966, 0.0487056655883789, 0.048711521148681644, 0.04950374221801758, 0.048509185791015624, 0.04899657440185547, 0.04925881576538086, 0.04916428756713867, 0.04893286514282227, 0.0501104621887207, 0.04923593521118164, 0.04928460693359375, 0.049207839965820316, 0.0486495361328125, 0.049056449890136716, 0.04882022476196289, 0.0488199348449707, 0.04887567901611328, 0.049078399658203126, 0.04951039886474609, 0.049065662384033204, 0.04898233413696289, 0.0491640625, 0.04934230422973633, 0.04891686248779297, 0.04921343994140625, 0.049470783233642575, 0.04921798324584961, 0.04960691070556641, 0.04971321487426758, 0.05123065567016601, 0.049169857025146486, 0.05111603164672852, 0.048981822967529294, 0.048506175994873044, 0.04861017608642578, 0.048903358459472655, 0.04848630523681641, 0.04821228790283203, 0.04822390365600586, 0.0485549430847168, 0.048164161682128906, 0.048470497131347656, 0.0485781135559082, 0.0484334716796875, 0.048255329132080076, 0.04819353485107422, 0.048963584899902345, 0.04873625564575195, 0.04829289627075195, 0.049212318420410156, 0.04910662460327148, 0.04905612945556641, 0.048707584381103515, 0.04871782302856445, 0.04911260986328125, 0.04900230407714844, 0.04855875015258789, 0.04885689544677734, 0.048596446990966796, 0.04895388793945313, 0.04880099105834961, 0.048567264556884766, 0.04838399887084961, 0.049030399322509764, 0.04850777435302735, 0.04925558471679688, 0.049080543518066407, 0.04886783981323242, 0.04876809692382812, 0.048630657196044924, 0.04999900817871094, 0.04873846435546875, 0.049291999816894534, 0.04926252746582031, 0.04882751846313477, 0.04877580642700195, 0.04850755310058594, 0.04886428833007812, 0.048845439910888674, 0.04898611068725586, 0.048963584899902345, 0.04871782302856445, 0.04867891311645508, 0.04907827377319336, 0.04914169692993164, 0.04917049789428711, 0.049251487731933594, 0.048747360229492186, 0.04904531097412109, 0.04924403381347656, 0.04900614547729492, 0.049422592163085935, 0.049388031005859374, 0.04950812911987305, 0.052891456604003906, 0.049322177886962894, 0.04810137557983398, 0.04811126327514648, 0.048656734466552734, 0.048159744262695314, 0.0477416000366211, 0.04853987121582031, 0.04840163040161133, 0.04803267288208008, 0.04841062545776367, 0.04832361602783203, 0.04863616180419922, 0.04891302490234375, 0.048783454895019535, 0.04860313415527344, 0.0484598388671875, 0.04877260971069336, 0.048925121307373046, 0.05010419082641602, 0.049852542877197266, 0.04863180923461914, 0.04856217575073242, 0.04828364944458008, 0.0487927360534668, 0.049496318817138674, 0.04850339126586914, 0.04856787109375, 0.04844287872314453, 0.04887033462524414, 0.049012607574462894, 0.048457183837890626, 0.04921001434326172, 0.05062646484375, 0.048293087005615236, 0.04891532897949219, 0.04912300872802734, 0.04872351837158203, 0.04964745712280273, 0.04958505630493164, 0.04912646484375, 0.04903769683837891, 0.04907267379760742, 0.04899023818969726, 0.048773342132568356, 0.04880534362792969, 0.04886707305908203, 0.04899283218383789, 0.049235969543457034, 0.04890828704833984, 0.04890419387817383, 0.04903923034667969, 0.04901811218261719, 0.04900259017944336, 0.04909648132324219, 0.049510944366455076, 0.04944883346557617, 0.049310302734375, 0.04950425720214844, 0.05018624114990235, 0.049768001556396484, 0.04933004760742187, 0.04927936172485352, 0.05104703903198242, 0.048719070434570313, 0.04796495819091797, 0.04776335906982422, 0.04804412841796875, 0.047808158874511716, 
0.04817750549316406, 0.04833280181884766, 0.04878950500488281, 0.0486657600402832, 0.04810838317871094, 0.04840604782104492, 0.048329185485839844, 0.04839648056030273, 0.048406272888183596, 0.048191551208496095, 0.04813603210449219, 0.04840367889404297, 0.04976236724853516, 0.04993273544311523, 0.04977094268798828, 0.04938751983642578, 0.04915126419067383, 0.04871440124511719, 0.048398399353027345, 0.048361473083496094, 0.04888115310668945, 0.04869171142578125, 0.049018878936767575, 0.049168384552001954, 0.049005599975585935, 0.04871478271484375, 0.04920035171508789, 0.04913398361206055, 0.048908607482910156, 0.048742401123046876, 0.049176513671875, 0.05066950225830078, 0.049285247802734376, 0.04984009552001953, 0.04973366546630859, 0.049248062133789065, 0.04934265518188476, 0.048844223022460935, 0.04896416091918945, 0.0493568000793457, 0.049342369079589846, 0.04939785766601563, 0.04928489685058594, 0.04860540771484375, 0.05005231857299805, 0.04913011169433594, 0.04909891128540039, 0.048814079284667966, 0.04909260940551758, 0.04934009552001953, 0.049600833892822264, 0.04904550552368164, 0.04884604644775391, 0.04943731307983398, 0.04972150421142578, 0.049637374877929685, 0.04930748748779297, 0.05118790435791016, 0.048519168853759766, 0.04838396835327148, 0.048095264434814454, 0.04855807876586914, 0.048123649597167965, 0.0484477424621582, 0.04867839813232422, 0.04843366241455078, 0.04828160095214844, 0.048936256408691405, 0.04914019012451172, 0.0491517105102539, 0.048978431701660156, 0.04936281585693359, 0.04815990447998047, 0.04883350372314453, 0.04862905502319336, 0.048855743408203124, 0.04910038375854492, 0.049094047546386715, 0.04894412612915039, 0.04866361618041992, 0.04857952117919922, 0.04869062423706055, 0.04855174255371094, 0.04889267349243164, 0.04840857696533203, 0.048451583862304685, 0.04866457748413086, 0.04885488128662109, 0.04917059326171875, 0.049426433563232425, 0.04930559921264648, 0.04916630554199219, 0.04914790344238281, 0.04915203094482422, 0.04899391937255859, 0.049092510223388675, 0.048964065551757814, 0.04936284637451172, 0.04968790435791016, 0.04949478530883789, 0.04914329528808594, 0.0492509765625, 0.04882979202270508, 0.04915484619140625, 0.04869692611694336, 0.04895756912231446, 0.0491069450378418, 0.0490250244140625, 0.04947087860107422, 0.04923657608032227, 0.049075519561767575, 0.0494024658203125, 0.049425792694091794, 0.04912815856933594, 0.049825790405273435, 0.049601665496826174, 0.049593215942382814, 0.04960255813598633, 0.050003360748291016, 0.04965193557739258, 0.051533824920654295, 0.04860518264770508, 0.04825222396850586, 0.04814713668823242, 0.04884659194946289, 0.0482918701171875, 0.04817737579345703, 0.04801257705688477, 0.04853833770751953, 0.04871987152099609, 0.04885504150390625, 0.04856137466430664, 0.0486379508972168, 0.04865718460083008, 0.048535552978515625, 0.048551937103271485, 0.048527359008789066, 0.049000415802001956, 0.049261985778808595, 0.04945980834960938, 0.04997328186035156, 0.04969267272949219, 0.0490332145690918, 0.04899020767211914, 0.048898048400878906, 0.04841830444335937, 0.04846745681762695, 0.04866128158569336, 0.0488870735168457, 0.04851504135131836, 0.04893795013427735, 0.04899225616455078, 0.04874176025390625, 0.04910704040527344, 0.04927926254272461, 0.049333568572998046, 0.04859795379638672, 0.04883660888671875, 0.04918272018432617, 0.04894105529785156, 0.0495816650390625, 0.04944284820556641, 0.04896710586547852, 0.04921235275268555, 0.04878908920288086, 0.048720287322998046, 0.04848230361938476, 0.049209342956542966, 
0.04861737442016602, 0.048750686645507815, 0.049235969543457034, 0.049069854736328126, 0.04893718338012695, 0.04892038345336914, 0.04893920135498047, 0.04843724822998047, 0.04907379150390625, 0.04921177673339844, 0.049733345031738284, 0.0496888656616211, 0.050151134490966795, 0.050014495849609375, 0.04971686553955078, 0.05080665588378906, 0.048567935943603514, 0.048161598205566404, 0.048510974884033206, 0.048492542266845705, 0.048366592407226565, 0.04816793441772461, 0.048244384765625, 0.04878780746459961, 0.048280960083007814, 0.048366207122802735, 0.04940390396118164, 0.049582080841064455, 0.048691200256347655, 0.048123905181884766, 0.04846131134033203, 0.04896521759033203, 0.048487327575683595, 0.048844799041748044, 0.049127422332763675, 0.04952463912963867, 0.049031265258789064, 0.049069950103759766, 0.048752574920654296, 0.048764575958251954, 0.04925494384765625, 0.0495797119140625, 0.048582912445068356, 0.0487916145324707, 0.049152000427246094, 0.04909875106811523, 0.049772544860839846, 0.049342369079589846, 0.04884489440917969, 0.04896710586547852, 0.049138240814208985, 0.04909875106811523, 0.04876406478881836, 0.04973791885375976, 0.04949795150756836, 0.0493408317565918, 0.04932854461669922, 0.0498581771850586, 0.049398143768310546, 0.04897587203979492, 0.049143585205078125, 0.048844799041748044, 0.04898633575439453, 0.049205249786376956, 0.04951788711547851, 0.049464065551757815, 0.048850879669189454, 0.0491879997253418, 0.04884348678588867, 0.04933849716186523, 0.04928102493286133, 0.04943667221069336, 0.04958380889892578, 0.04959059143066406, 0.04967628860473633, 0.049377281188964846, 0.04979075241088867, 0.04970124816894531, 0.05191968154907227, 0.06838175964355468, 0.04760870361328125, 0.04880393600463867, 0.04853142547607422, 0.048424415588378907, 0.04827795028686523, 0.047958110809326174, 0.048469631195068356, 0.047973793029785154, 0.04838889694213867, 0.04820601654052734, 0.04816886520385742, 0.04845577621459961, 0.048465633392333986, 0.04883280181884766, 0.04856217575073242, 0.04833420944213867, 0.049429119110107424, 0.050740673065185544, 0.050119232177734375, 0.0493568000793457, 0.049211006164550784, 0.04855152130126953, 0.04902377700805664, 0.049084415435791014, 0.048889854431152346, 0.048732158660888675, 0.04864745712280273, 0.048523998260498045, 0.04969062423706055, 0.04896460723876953, 0.048950271606445314, 0.048658432006835936, 0.04900864028930664, 0.0486596794128418, 0.04889782333374024, 0.04958652877807617, 0.05011727905273437, 0.05005855941772461, 0.04968719863891601, 0.04950982284545898, 0.049707294464111325, 0.04937350463867188, 0.04914995193481445, 0.0489238395690918, 0.049003326416015625, 0.048976993560791014, 0.04897270584106445, 0.04929478454589844, 0.04914172744750977, 0.04891459274291992, 0.04880428695678711, 0.048928768157958984, 0.049014785766601565, 0.0486707534790039, 0.0494601936340332, 0.04949913787841797, 0.05009612655639648, 0.04970086288452148, 0.05040332794189453, 0.05008544158935547, 0.049711231231689454]",tokens/s,20.409219684695227,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,3020.546048,3548.250112,0.0,3164.602368,3152.265216,s,1,10.2295244140625,10.2295244140625,0.0,10.2295244140625,10.2295244140625,10.2295244140625,10.2295244140625,[10.2295244140625],,kWh,8.221966643335084e-05,9.062363400607163e-06,2.791974455798596e-05,0.00011920177439194396,,MB,2831.183872,4038.98368,0.0,3623.878656,3526.724608,s,10,2.3884106292724607,0.23884106292724608,0.0006271116772395458,0.23892778015136718,0.23947232360839846,0.23971328125,0.23990604736328125,"[0.23995423889160156, 0.23832998657226562, 0.23845826721191407, 0.23767117309570313, 0.23918150329589843, 0.23941877746582033, 0.23878518676757812, 0.2383223114013672, 0.23907037353515626, 0.23921881103515624]",tokens/s,1071.8424916656008,kWh,7.153438551624898e-06,7.888870872878097e-07,4.768303272633816e-06,1.2710628911546522e-05,tokens/kWh,20140624.180086467,MB,2832.908288,4041.080832,0.0,3625.975808,3526.727168,s,10,23.358369384765624,2.3358369384765623,0.07301654287851123,2.3163145751953125,2.3523681884765626,2.451006848144531,2.529917775878906,"[2.5496455078125, 2.319209228515625, 2.26911279296875, 2.310240966796875, 2.330448486328125, 2.3273447265625, 2.315181884765625, 2.308947265625, 2.310791259765625, 2.317447265625]",tokens/s,26.97106076295237,kWh,6.814667446878866e-05,7.5165164795497575e-06,4.188701182936723e-05,0.00011755020277770561,tokens/kWh,535941.2277589749,,s,630,23.35399295806885,0.03706983009217278,0.002120859377414921,0.03660966300964355,0.03741806449890137,0.04440777988433834,0.046247140083312994,"[0.04576028823852539, 0.0462845458984375, 0.04628960037231445, 0.04640761566162109, 0.0460904655456543, 0.046089599609375, 0.04625267028808594, 0.04623360061645508, 0.045939838409423825, 0.04592636871337891, 0.04613622283935547, 0.04590374374389648, 0.04593673706054688, 0.04629008102416992, 0.04595094299316406, 0.046031776428222655, 0.04572153472900391, 0.046636161804199217, 0.046035457611083984, 0.04602515029907227, 0.045811073303222656, 0.04598009490966797, 0.04571356964111328, 0.04631343841552735, 0.04549331283569336, 0.037323776245117186, 0.0368438720703125, 0.036875648498535155, 0.0368612174987793, 0.036746238708496096, 0.03667686462402344, 0.03741772842407227, 0.036536319732666016, 0.03651583862304687, 0.03674726486206055, 0.03657932662963867, 0.03676956939697266, 0.03678249740600586, 0.03676870346069336, 0.03664371109008789, 0.03638272094726563, 0.03651168060302734, 0.03646572875976563, 0.03713737487792969, 0.03643190383911133, 0.03638592147827149, 0.03691167831420898, 0.03649977493286133, 0.03763932800292969, 0.036483840942382814, 0.03652207946777344, 0.036691967010498046, 0.0364769287109375, 0.03640524673461914, 0.03650566482543945, 0.036605888366699216, 0.03631923294067383, 0.0379205436706543, 0.038223392486572264, 0.03687046432495117, 0.037136768341064455, 0.036867233276367185, 0.03649577713012695, 0.036931102752685546, 0.036570976257324216, 0.036599712371826174, 0.03661497497558594, 0.03649631881713867, 0.036467681884765624, 0.036501502990722655, 0.0364769287109375, 0.036724735260009765, 0.037048320770263675, 0.03694182586669922, 0.03648921585083008, 0.036495361328125, 0.036661087036132814, 0.03657743835449219, 0.03702982330322266, 0.037083198547363284, 0.03783065414428711, 0.03718288040161133, 0.03660198211669922, 0.03653459167480469, 0.03796598434448242, 0.03692252731323242, 0.036608863830566406, 0.03651369476318359, 
0.036638816833496096, 0.03754323196411133, 0.03655955123901367, 0.036834465026855466, 0.03744822311401367, 0.03668204879760742, 0.03667763137817383, 0.037103614807128905, 0.03662988662719727, 0.0366181755065918, 0.04015718460083008, 0.0367209587097168, 0.03721372985839844, 0.03673123168945312, 0.03655715179443359, 0.03665935897827149, 0.0366929931640625, 0.03709772872924805, 0.03637939071655273, 0.03687833786010742, 0.03669606399536133, 0.036715839385986326, 0.03665142440795898, 0.03663491058349609, 0.036689823150634765, 0.036668800354003904, 0.03640393447875977, 0.03657328033447266, 0.0364312629699707, 0.036921215057373044, 0.03670694351196289, 0.036853759765625, 0.036431873321533206, 0.03644416046142578, 0.036523487091064455, 0.03669046401977539, 0.03649126434326172, 0.03660166549682617, 0.036937633514404294, 0.03642348861694336, 0.03625807952880859, 0.03608694458007813, 0.0354947509765625, 0.03580723190307617, 0.03546726226806641, 0.03591136169433594, 0.03567647933959961, 0.03626598358154297, 0.03599359893798828, 0.03617792129516602, 0.035824703216552733, 0.036167903900146486, 0.035957695007324215, 0.03616886520385742, 0.036104831695556644, 0.03619039916992187, 0.036036415100097655, 0.036229118347167966, 0.03591987228393555, 0.03621478271484375, 0.03622662353515625, 0.03627824020385742, 0.03606371307373047, 0.03595468902587891, 0.03600588989257812, 0.036388961791992185, 0.036724639892578126, 0.0364312629699707, 0.036374111175537106, 0.036461246490478515, 0.03574716949462891, 0.03602748870849609, 0.03570991897583008, 0.03645500946044922, 0.03595910263061523, 0.035921600341796874, 0.03571334457397461, 0.03607747268676758, 0.0358331184387207, 0.035832000732421876, 0.03552726364135742, 0.035799072265625, 0.0354952621459961, 0.035697311401367185, 0.035864032745361325, 0.03639289474487305, 0.03616140747070312, 0.03645052719116211, 0.03612899017333984, 0.03638412857055664, 0.03602320098876953, 0.03620489501953125, 0.035490848541259765, 0.03579292678833008, 0.03589318466186524, 0.03596582412719727, 0.035574783325195314, 0.035791648864746096, 0.035452865600585935, 0.03572742462158203, 0.03539558410644531, 0.035487167358398436, 0.03545395278930664, 0.035614334106445315, 0.035759647369384764, 0.03562758255004883, 0.03556083297729492, 0.03566019058227539, 0.03569014358520508, 0.0354736328125, 0.03574643325805664, 0.035477344512939456, 0.035569438934326174, 0.035670398712158206, 0.035813377380371096, 0.03555737686157227, 0.03658883285522461, 0.03575062561035156, 0.03608883285522461, 0.03565897750854492, 0.035843006134033205, 0.03561318588256836, 0.03598575973510742, 0.035621952056884766, 0.035630016326904296, 0.035506175994873046, 0.035466625213623044, 0.03606195068359375, 0.03605696105957031, 0.035619873046875, 0.03587116622924805, 0.035551776885986326, 0.03551663970947266, 0.035507102966308594, 0.036188545227050783, 0.0359958724975586, 0.03610425567626953, 0.036134273529052734, 0.036350814819335935, 0.03554067230224609, 0.03647353744506836, 0.03586751937866211, 0.03607014465332031, 0.03570892715454101, 0.03569049453735352, 0.03633059310913086, 0.036258720397949216, 0.0363721923828125, 0.036366622924804685, 0.03618406295776367, 0.03671859359741211, 0.03622905731201172, 0.036200511932373045, 0.03600585556030273, 0.036139041900634765, 0.03753779220581055, 0.03802828979492188, 0.036125694274902344, 0.036071487426757816, 0.04392339324951172, 0.04579734420776367, 0.045158241271972654, 0.045019329071044924, 0.04496297454833984, 0.04480409622192383, 0.04509286499023438, 0.045160030364990236, 
0.038508255004882815, 0.03586307144165039, 0.03619651031494141, 0.03557580947875977, 0.0357498893737793, 0.03570483016967774, 0.035942401885986325, 0.036165153503417966, 0.03589168167114258, 0.03577036666870117, 0.03600921630859375, 0.03651808166503906, 0.03674911880493164, 0.03638262557983398, 0.036557662963867185, 0.0365865592956543, 0.03659872055053711, 0.03668172836303711, 0.03666048049926758, 0.0365885124206543, 0.03655452728271484, 0.03659110260009766, 0.03660646438598633, 0.03657523345947265, 0.036611713409423825, 0.03647116851806641, 0.036421951293945314, 0.03655238342285156, 0.037005313873291014, 0.03708886337280273, 0.0366104621887207, 0.036519935607910156, 0.036749313354492184, 0.036413440704345705, 0.036574623107910154, 0.03645289611816406, 0.03658348846435547, 0.03711081695556641, 0.0375032958984375, 0.036620960235595704, 0.03703603363037109, 0.03689267349243164, 0.03658652877807617, 0.03665404891967773, 0.036485118865966795, 0.036318527221679685, 0.03642972946166992, 0.03661904144287109, 0.0364356803894043, 0.03684172821044922, 0.0366448974609375, 0.036675582885742186, 0.03688201522827148, 0.03668624114990234, 0.03669401550292969, 0.03749833679199219, 0.037231136322021484, 0.036947967529296875, 0.03640524673461914, 0.03666873550415039, 0.036376033782958984, 0.036649375915527346, 0.03678822326660156, 0.03696025466918945, 0.036786239624023435, 0.03650553512573242, 0.036560897827148435, 0.036708351135253905, 0.03705855941772461, 0.03719372940063476, 0.036550655364990234, 0.03724835205078125, 0.040157470703125, 0.037023967742919925, 0.03676790237426758, 0.03762771224975586, 0.03712390518188476, 0.03719619369506836, 0.03667555236816406, 0.03653222274780273, 0.036501502990722655, 0.036478977203369144, 0.0365588493347168, 0.03783679962158203, 0.03773171234130859, 0.03673356628417969, 0.03664799880981445, 0.03707206344604492, 0.036849407196044924, 0.03671449661254883, 0.03881795120239258, 0.03686383819580078, 0.03675686264038086, 0.03687488174438477, 0.036703712463378904, 0.03687839889526367, 0.03751369476318359, 0.03687833786010742, 0.03670822525024414, 0.03731795120239258, 0.036975135803222654, 0.03655660629272461, 0.036741344451904294, 0.03671065521240234, 0.0365404167175293, 0.03683097457885742, 0.03658572769165039, 0.03732233428955078, 0.03656265640258789, 0.037644126892089846, 0.03672505569458008, 0.03698742294311524, 0.03677552032470703, 0.03671231842041016, 0.03678876876831055, 0.03673648071289062, 0.03662243270874024, 0.03665145492553711, 0.036431873321533206, 0.03670425415039062, 0.03656438446044922, 0.03691088104248047, 0.03688735961914062, 0.03642726516723633, 0.036767841339111325, 0.036590206146240235, 0.03684966278076172, 0.03665510559082031, 0.036822463989257814, 0.03649337768554688, 0.036593311309814455, 0.036423904418945316, 0.036647552490234374, 0.036396671295166015, 0.03652851104736328, 0.03655398559570312, 0.03669833755493164, 0.03666179275512695, 0.03650559997558594, 0.03655641555786133, 0.03673052978515625, 0.03648175811767578, 0.03670425415039062, 0.03659945678710937, 0.036972320556640625, 0.03673350524902344, 0.03685990524291992, 0.03682432174682617, 0.03687091064453125, 0.03677743911743164, 0.03676623916625977, 0.036604961395263674, 0.03683609771728515, 0.03685305786132813, 0.03671920013427735, 0.036775585174560546, 0.03679299163818359, 0.037029441833496095, 0.03703548812866211, 0.03646358489990234, 0.03671244812011719, 0.036462593078613284, 0.03694960021972656, 0.036577983856201174, 0.036959102630615234, 0.03663520050048828, 0.03678604888916016, 
0.036488929748535154, 0.03664761734008789, 0.036567039489746093, 0.03680255889892578, 0.03693296051025391, 0.03892083358764648, 0.037588768005371094, 0.03699257659912109, 0.0365467529296875, 0.036824928283691404, 0.03663657760620117, 0.0368520622253418, 0.036301216125488284, 0.036788032531738284, 0.03655654525756836, 0.036716705322265626, 0.03663251113891602, 0.03666543960571289, 0.03660416030883789, 0.03632547378540039, 0.036517887115478515, 0.036634624481201174, 0.03646476745605469, 0.03661971282958985, 0.03697094345092773, 0.036618240356445314, 0.036544513702392575, 0.03649548721313477, 0.03658124923706055, 0.03686195373535156, 0.03651926422119141, 0.03662902450561523, 0.03669964981079102, 0.03642764663696289, 0.03681679916381836, 0.03661091232299805, 0.03681280136108398, 0.036494400024414064, 0.036916160583496095, 0.036485118865966795, 0.03676774215698242, 0.036362239837646484, 0.03671449661254883, 0.036689918518066404, 0.03670220947265625, 0.03666534423828125, 0.03686195373535156, 0.03653257751464844, 0.03662847900390625, 0.036386463165283205, 0.03663203048706055, 0.03653196716308594, 0.03693183898925781, 0.03650809478759766, 0.03677171325683594, 0.03635958480834961, 0.03662112045288086, 0.03625958251953125, 0.03634815979003906, 0.036560321807861326, 0.03623379135131836, 0.03662985610961914, 0.03657712173461914, 0.03649113464355469, 0.03662329483032226, 0.036413055419921875, 0.0385662727355957, 0.036999008178710935, 0.03654671859741211, 0.036996990203857424, 0.0370115852355957, 0.036534271240234374, 0.03675545501708984, 0.036646846771240235, 0.03655276870727539, 0.036491329193115235, 0.036472766876220704, 0.03659996795654297, 0.03640860748291016, 0.03650588989257812, 0.03691059112548828, 0.03665907287597656, 0.03655744171142578, 0.03694374465942383, 0.03686617660522461, 0.036756702423095707, 0.036776737213134764, 0.03701500701904297, 0.03664336013793945, 0.03668310546875, 0.03643868637084961, 0.03682918548583984, 0.03642777633666992, 0.03671244812011719, 0.0364617919921875, 0.03650435256958008, 0.036334911346435544, 0.03657388687133789, 0.03670191955566406, 0.036669055938720704, 0.03648092651367187, 0.0365667839050293, 0.036567073822021484, 0.0366143684387207, 0.036396961212158206, 0.03643273544311523, 0.036503551483154296, 0.036557823181152346, 0.03627676773071289, 0.036657630920410154, 0.03626598358154297, 0.03666329574584961, 0.036443359375, 0.03675625610351563, 0.036450302124023434, 0.03644575881958008, 0.03648966217041016, 0.03710153579711914, 0.03681625747680664, 0.03695683288574219, 0.03649740982055664, 0.03671449661254883, 0.03655388641357422, 0.03692556762695313, 0.03642572784423828, 0.036512096405029296, 0.03641097640991211, 0.036759681701660156, 0.03670278549194336, 0.03665667343139648, 0.036784481048583985, 0.038708671569824216, 0.036711200714111325, 0.036634624481201174, 0.037855232238769534, 0.036523582458496094, 0.03690278244018555, 0.03665081787109375, 0.036383487701416015, 0.03657318496704102, 0.03711590576171875, 0.036647071838378904, 0.03650857543945313, 0.03632015991210937, 0.036550785064697264, 0.03630899047851562, 0.03675455856323242, 0.03635289764404297, 0.03657727813720703, 0.03652608108520508, 0.03701964950561523, 0.03802435302734375, 0.0369672622680664, 0.03659145736694336, 0.03692665481567383, 0.03664086532592774, 0.03648601531982422, 0.036395008087158204, 0.036190208435058595, 0.03638211059570313, 0.03635580825805664, 0.03652390289306641, 0.03644294357299805, 0.03907583999633789, 0.03681244659423828, 0.03644675064086914, 0.03698390579223633, 
0.036488094329833985, 0.03662169647216797, 0.03642227172851562, 0.037466110229492186, 0.03799625778198242, 0.03684995269775391, 0.03656499099731445, 0.0368928337097168, 0.03648019027709961, 0.03690524673461914, 0.036654815673828126, 0.03725993728637695, 0.03699302291870117, 0.03677004623413086, 0.03649228668212891, 0.03663977432250977, 0.03644768142700195, 0.036952350616455076, 0.03667283248901367, 0.03692153549194336, 0.03685219192504883, 0.03742108917236328, 0.03718953704833984, 0.03717907333374024, 0.036682144165039066, 0.03680665588378906, 0.03671039962768555, 0.03698662567138672, 0.03659500885009766, 0.03677280044555664, 0.036329345703125, 0.03662041473388672, 0.036515071868896486, 0.03649407958984375, 0.03656403350830078, 0.03683164978027344, 0.03655049514770508, 0.036673473358154296, 0.036714561462402345, 0.03671072006225586, 0.03654803085327148]",tokens/s,26.976115010873706,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4253.396992,6121.455616,0.0,5737.807872,5464.489984,s,1,11.1370986328125,11.1370986328125,0.0,11.1370986328125,11.1370986328125,11.1370986328125,11.1370986328125,[11.1370986328125],,kWh,9.913709324173395e-05,1.0927141918626338e-05,3.3056415334020706e-05,0.000143120650494381,,MB,1551.040512,6526.205952,0.0,6111.100928,5872.503808,s,10,3.383427368164062,0.33834273681640625,0.0021296630611245385,0.3386007080078125,0.3404492706298828,0.3408719284057617,0.3412100546264648,"[0.33328350830078124, 0.336724609375, 0.3380541076660156, 0.33739666748046876, 0.3403553466796875, 0.33861480712890624, 0.33858660888671877, 0.33995111083984375, 0.339166015625, 0.3412945861816406]",tokens/s,756.6292168964525,kWh,9.949896978191646e-06,1.0973009906221288e-06,6.634542344666206e-06,1.768174031347998e-05,tokens/kWh,14478212.86035029,MB,1583.517696,6528.303104,0.0,6113.19808,5872.506368,s,10,22.584103271484377,2.2584103271484377,0.0029572598348577023,2.2585858154296874,2.2611031494140623,2.2620704956054687,2.2628443725585936,"[2.258884033203125, 2.256121337890625, 2.260457763671875, 2.26088818359375, 2.25828759765625, 2.257409912109375, 2.25820654296875, 2.263037841796875, 2.259349853515625, 2.251460205078125]",tokens/s,27.895727912095765,kWh,6.593585236972862e-05,7.272708329452639e-06,4.366797011953472e-05,0.00011687653081871597,tokens/kWh,539030.3729815321,,s,630,22.58029593276978,0.03584173957582504,0.0007527005517548154,0.035854927062988276,0.0365166259765625,0.03672447566986084,0.039795316467285166,"[0.04026892852783203, 0.037012351989746096, 0.03570390319824219, 0.034816928863525394, 0.03529235076904297, 0.035650302886962894, 0.0359417610168457, 0.03554172897338867, 0.03502486419677735, 0.034811233520507814, 0.03534665679931641, 0.034534912109375, 0.03519968032836914, 0.03605027389526367, 0.035676673889160154, 0.0363092155456543, 0.03577670288085937, 0.0351723518371582, 0.034764801025390625, 0.035778560638427735, 0.036052574157714845, 
0.03580979156494141, 0.0356943359375, 0.0350079345703125, 0.03450271987915039, 0.035508895874023436, 0.03744134521484375, 0.03662198257446289, 0.036087615966796875, 0.03604876708984375, 0.035842079162597656, 0.035856224060058596, 0.03523065567016601, 0.03511907196044922, 0.035980640411376955, 0.03616844940185547, 0.03593622589111328, 0.03555686569213867, 0.034844928741455075, 0.03574195098876953, 0.036036609649658206, 0.03606937789916992, 0.035947521209716796, 0.03585740661621094, 0.03556515121459961, 0.03629097747802734, 0.03584172821044922, 0.03582598495483398, 0.0364031982421875, 0.03593625640869141, 0.03591683197021484, 0.03578979110717773, 0.03600384140014649, 0.03583795166015625, 0.03585551834106445, 0.03626070404052734, 0.03639420700073242, 0.03658329772949219, 0.03576473617553711, 0.03642819213867188, 0.03615334320068359, 0.036055038452148434, 0.03596083068847656, 0.04007292938232422, 0.0373364143371582, 0.035676673889160154, 0.036004287719726566, 0.03539724731445312, 0.03601036834716797, 0.035170238494873045, 0.03516748809814453, 0.03519510269165039, 0.03543715286254883, 0.035872447967529295, 0.03588294219970703, 0.035020767211914064, 0.03476895904541016, 0.03563494491577148, 0.03572592163085937, 0.03606528091430664, 0.035929473876953125, 0.03511929702758789, 0.034740062713623045, 0.03543920135498047, 0.03464524841308594, 0.03570150375366211, 0.03556147384643555, 0.036345855712890625, 0.03611983871459961, 0.03647273635864258, 0.03629548645019531, 0.03562905502319336, 0.03469891357421875, 0.03566012954711914, 0.03705145645141601, 0.03670719909667969, 0.0356163215637207, 0.03589785766601562, 0.035504127502441404, 0.035211265563964846, 0.03546112060546875, 0.03521331024169922, 0.035413406372070313, 0.036600414276123046, 0.03578060913085938, 0.03586624145507813, 0.03544307327270508, 0.03543417739868164, 0.03492419052124023, 0.03567683029174805, 0.035845470428466794, 0.03578537750244141, 0.035916065216064455, 0.03591449737548828, 0.03557270431518555, 0.03616115188598633, 0.03637919998168945, 0.03597907257080078, 0.03523379135131836, 0.03498355102539062, 0.03672716903686524, 0.036329471588134765, 0.035757888793945314, 0.036208831787109375, 0.03649516677856445, 0.03583932876586914, 0.039860607147216794, 0.0367674560546875, 0.035099967956542966, 0.03447907257080078, 0.03485081481933594, 0.034936767578125, 0.03607558441162109, 0.03592396926879883, 0.035588096618652344, 0.03483443069458008, 0.03567542266845703, 0.03571331024169922, 0.035310016632080075, 0.034366783142089845, 0.03544921493530274, 0.03645391845703125, 0.03571980667114258, 0.035512481689453125, 0.03612438583374023, 0.03582799911499023, 0.035844097137451174, 0.0353353271484375, 0.034818912506103514, 0.035443840026855467, 0.035789470672607425, 0.03562435150146484, 0.03549676895141601, 0.036706302642822264, 0.03676160049438477, 0.03661619186401367, 0.035695903778076174, 0.03586659240722656, 0.03640147018432617, 0.03582207870483398, 0.03616352081298828, 0.036031936645507814, 0.035749568939208984, 0.03611532974243164, 0.03567187118530273, 0.035757568359375, 0.03554169464111328, 0.03543782424926758, 0.03526028823852539, 0.03523168182373047, 0.034831295013427736, 0.03580268859863281, 0.036530624389648436, 0.035901439666748046, 0.035866622924804685, 0.03590758514404297, 0.03598745727539063, 0.03618323135375977, 0.036303359985351565, 0.03671900939941406, 0.03637648010253906, 0.03585843276977539, 0.0360142707824707, 0.03631494522094727, 0.03667967987060547, 0.03609952163696289, 0.03581923294067383, 0.036483840942382814, 
0.03662448120117188, 0.03939123153686523, 0.03673040008544922, 0.03585244750976563, 0.03582803344726562, 0.035708545684814456, 0.034805984497070314, 0.0345843505859375, 0.03533184051513672, 0.036170368194580076, 0.035595455169677735, 0.036098880767822264, 0.03615670394897461, 0.03613359832763672, 0.0353177604675293, 0.034561408996582034, 0.034990623474121095, 0.03658575820922851, 0.036012928009033204, 0.03572627258300781, 0.03532185745239258, 0.035001758575439454, 0.03604732894897461, 0.03547763061523437, 0.034962718963623046, 0.035017440795898434, 0.03567756652832031, 0.036523712158203124, 0.03660281753540039, 0.036132865905761716, 0.03692544174194336, 0.035885055541992186, 0.03592380905151367, 0.03540124893188477, 0.03516092681884766, 0.034477790832519534, 0.035679615020751954, 0.035361473083496096, 0.03537510299682617, 0.03626393508911133, 0.03610009765625, 0.035984638214111325, 0.03576409530639649, 0.03638972854614258, 0.03591990280151367, 0.03634995269775391, 0.03634995269775391, 0.03560857772827149, 0.03505097579956055, 0.03568057632446289, 0.03560675048828125, 0.03580108642578125, 0.03618611145019531, 0.03623731231689453, 0.036343040466308596, 0.03664767837524414, 0.036324832916259764, 0.036170272827148436, 0.0365404167175293, 0.036544513702392575, 0.03623468780517578, 0.03579142379760742, 0.035639297485351565, 0.036450145721435546, 0.03997903823852539, 0.03708982467651367, 0.035708606719970705, 0.03599529647827148, 0.03532444763183594, 0.035874462127685545, 0.035676513671875, 0.034987262725830075, 0.03489049530029297, 0.03491430282592774, 0.036035839080810546, 0.03577532958984375, 0.036095680236816405, 0.03605526351928711, 0.035366912841796876, 0.0348671989440918, 0.035178497314453126, 0.03490921783447266, 0.03490908813476563, 0.03505110549926758, 0.03575155258178711, 0.03567497634887695, 0.03521529769897461, 0.034793537139892576, 0.035489791870117186, 0.036259166717529295, 0.03643622589111328, 0.03609027099609375, 0.03572326278686523, 0.03637238311767578, 0.03595683288574219, 0.03575398254394531, 0.036337631225585934, 0.03556259155273438, 0.03559628677368164, 0.036082366943359374, 0.0358175048828125, 0.03605116653442383, 0.03607107162475586, 0.03630508804321289, 0.03617398452758789, 0.03552678298950195, 0.03644575881958008, 0.03612294387817383, 0.03593625640869141, 0.035501407623291015, 0.03518454360961914, 0.03497552108764648, 0.03627721786499023, 0.03595008087158203, 0.035273216247558595, 0.034912254333496096, 0.03658956909179688, 0.03630284881591797, 0.036257793426513675, 0.03582540893554687, 0.036288639068603516, 0.03627225494384766, 0.036138816833496096, 0.03579062271118164, 0.03616400146484375, 0.03590348815917969, 0.03606041717529297, 0.03901043319702149, 0.03637510299682617, 0.03525379180908203, 0.03537763214111328, 0.0346808967590332, 0.03559999847412109, 0.03594790267944336, 0.036001983642578124, 0.03586470413208008, 0.035164798736572266, 0.034778816223144535, 0.03611475372314453, 0.0361082878112793, 0.03551027297973633, 0.0347586555480957, 0.036000831604003906, 0.03581024169921875, 0.035345569610595706, 0.03610844802856445, 0.03592832183837891, 0.035688766479492186, 0.0348361930847168, 0.03476220703125, 0.03500681686401367, 0.03532803344726562, 0.03480838394165039, 0.036928993225097656, 0.036721183776855466, 0.03668076705932617, 0.03597574234008789, 0.0356110725402832, 0.034979774475097654, 0.034754558563232424, 0.035147232055664064, 0.03679081726074219, 0.0363397102355957, 0.035844097137451174, 0.03587676620483399, 0.03568822479248047, 0.03643423843383789, 
0.03560979080200195, 0.03533907318115234, 0.034609153747558595, 0.035547134399414065, 0.03556966400146484, 0.035694591522216795, 0.03630899047851562, 0.036070846557617185, 0.035912254333496096, 0.035991199493408205, 0.03604940795898438, 0.036011871337890626, 0.03671446228027344, 0.03630825424194336, 0.03613897705078125, 0.03626220703125, 0.03594287872314453, 0.03632896041870117, 0.03629308700561523, 0.03595177459716797, 0.03563718414306641, 0.03644079971313476, 0.03637680053710937, 0.039676513671875, 0.036846817016601564, 0.035727455139160154, 0.035213504791259766, 0.03512140655517578, 0.03600521469116211, 0.0354436149597168, 0.034947006225585935, 0.03485673522949219, 0.034652446746826174, 0.03544438552856445, 0.034942913055419925, 0.03609161758422851, 0.03581350326538086, 0.0358073616027832, 0.034996673583984374, 0.0345456657409668, 0.034709217071533204, 0.03487363052368164, 0.03588892745971679, 0.03600809478759766, 0.03570060729980469, 0.03609004974365235, 0.03594035339355469, 0.03540278244018555, 0.03564156723022461, 0.036378528594970705, 0.036561248779296875, 0.0362130241394043, 0.03626416015625, 0.035745441436767576, 0.035862110137939454, 0.035798015594482424, 0.036369632720947266, 0.036579872131347654, 0.03597663879394531, 0.035947135925292965, 0.03544623947143555, 0.036131294250488284, 0.03593011093139648, 0.03607756805419922, 0.03620793533325195, 0.03638687896728516, 0.0359224967956543, 0.035461185455322265, 0.036478977203369144, 0.036313087463378906, 0.03565286254882812, 0.03539839935302734, 0.034960670471191405, 0.03594723129272461, 0.03593203353881836, 0.03587676620483399, 0.035647102355957035, 0.03661004638671875, 0.036009822845458984, 0.036042976379394534, 0.035969566345214844, 0.035299327850341795, 0.035272705078125, 0.035237472534179685, 0.03686576080322266, 0.03664751815795898, 0.0402459831237793, 0.03680153656005859, 0.035756702423095706, 0.03490031814575195, 0.0347658576965332, 0.03466924667358398, 0.0353111686706543, 0.0356338882446289, 0.035231166839599606, 0.03511523056030273, 0.03619443130493164, 0.03560265731811523, 0.03626598358154297, 0.03604185485839844, 0.03577740859985352, 0.035593505859375, 0.034804447174072266, 0.03575193786621094, 0.03603046417236328, 0.03589324951171875, 0.03536268615722656, 0.03470867156982422, 0.035195457458496095, 0.03482624053955078, 0.03618563079833984, 0.03497814559936523, 0.03673500823974609, 0.036760032653808596, 0.03616348648071289, 0.036457950592041016, 0.035881599426269534, 0.03577446365356445, 0.03595161437988281, 0.03587078475952148, 0.03534534454345703, 0.035065792083740235, 0.03574790573120117, 0.0356864013671875, 0.03538534545898438, 0.03620044708251953, 0.03630486297607422, 0.03582777786254883, 0.03594585418701172, 0.035809886932373046, 0.035932159423828124, 0.036171775817871094, 0.03653798294067383, 0.036039039611816405, 0.036030719757080075, 0.035974910736083984, 0.035989185333251954, 0.03621510314941406, 0.036155391693115234, 0.03613695907592773, 0.03651583862304687, 0.036345470428466795, 0.03604684829711914, 0.03602447891235352, 0.03721142578125, 0.0364389762878418, 0.03616153717041016, 0.036247200012207034, 0.035948799133300784, 0.040158432006835935, 0.037268096923828126, 0.03605929565429687, 0.03571507263183594, 0.03546480178833008, 0.03518057632446289, 0.035592575073242184, 0.03577027130126953, 0.03506159973144531, 0.03452054214477539, 0.03501718521118164, 0.035940128326416014, 0.035551776885986326, 0.0350145263671875, 0.03522111892700195, 0.03591433715820312, 0.035403678894042966, 0.03514102554321289, 
0.03554364776611328, 0.036042240142822264, 0.03589139175415039, 0.035873088836669925, 0.03568230438232422, 0.035323902130126955, 0.034934078216552734, 0.03676972961425781, 0.03632352066040039, 0.03601820755004883, 0.036069919586181644, 0.03623455810546875, 0.03617628860473633, 0.0356453742980957, 0.03620703887939453, 0.03621468734741211, 0.0361431999206543, 0.035676063537597655, 0.035966590881347654, 0.036104576110839844, 0.03602742385864258, 0.03571958541870117, 0.035244609832763674, 0.035931678771972654, 0.035473888397216796, 0.03539555358886719, 0.035149856567382814, 0.03571712112426758, 0.035589855194091795, 0.035682590484619144, 0.03585433578491211, 0.03566320037841797, 0.03624003219604492, 0.03657523345947265, 0.03643801498413086, 0.03700668716430664, 0.03566044616699219, 0.035315711975097655, 0.03535257720947266, 0.035003936767578125, 0.036587135314941406, 0.03643888092041016, 0.035620864868164064, 0.036212417602539064, 0.0362314567565918, 0.03984384155273438, 0.03673088073730469, 0.03537510299682617, 0.035270015716552736, 0.03468352127075195, 0.03544403076171875, 0.03510275268554688, 0.03460982513427734, 0.035130401611328126, 0.036233440399169925, 0.036072193145751955, 0.03592784118652344, 0.036542049407958986, 0.035508544921875, 0.03613727951049805, 0.036050945281982424, 0.035688449859619144, 0.03501875305175781, 0.03430604934692383, 0.03531097412109375, 0.03539382553100586, 0.03552892684936523, 0.035989887237548826, 0.035909374237060546, 0.03585843276977539, 0.034953216552734374, 0.034557247161865236, 0.03589164733886719, 0.036065536499023436, 0.03550940704345703, 0.035093215942382815, 0.034768798828125, 0.035496158599853514, 0.03482624053955078, 0.03614720153808594, 0.03653017425537109, 0.036016033172607424, 0.035294368743896486, 0.03539654541015625, 0.03565740966796875, 0.03540614318847656, 0.0366451530456543, 0.03609161758422851, 0.035817470550537106, 0.0357130241394043, 0.0359095344543457, 0.03548534393310547, 0.03592607879638672, 0.03572076797485352, 0.03523667144775391, 0.034699264526367186, 0.03638681411743164, 0.036111713409423825, 0.03586134338378906, 0.036673343658447266, 0.03635200119018555, 0.03625939178466797, 0.03623535919189453, 0.035931583404541015, 0.03548604965209961, 0.034980415344238285, 0.03618307113647461, 0.036088191986083984]",tokens/s,27.900431503455593,,, 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,8197.681152,11080.237056,0.0,10701.766656,10468.923392,s,1,13.2618525390625,13.2618525390625,0.0,13.2618525390625,13.2618525390625,13.2618525390625,13.2618525390625,[13.2618525390625],,kWh,0.00018762591863748335,2.0689324372002834e-05,6.120893785599435e-05,0.0002695241808654805,,MB,3997.712384,11570.970624,0.0,11163.140096,10923.364352,s,10,7.304953063964844,0.7304953063964844,0.002164870958327621,0.7312315673828125,0.7322960510253906,0.7326491241455078,0.7329315826416015,"[0.7250454711914063, 0.7301515502929687, 
0.7313854370117188, 0.72986328125, 0.7315874633789062, 0.7287291870117187, 0.7318931884765625, 0.733002197265625, 0.7310776977539063, 0.7322175903320313]",tokens/s,350.4471524435137,kWh,2.1300824997918728e-05,2.3490922201286624e-06,1.4110725574286878e-05,3.7760642792334264e-05,tokens/kWh,6779545.60805226,MB,4002.03776,11573.067776,0.0,11165.237248,10923.366912,s,10,34.147839843750006,3.414783984375,0.005954342253382606,3.4163756103515626,3.4219365234374997,3.42217822265625,3.42237158203125,"[3.404217041015625, 3.408157958984375, 3.4100400390625, 3.410267333984375, 3.41900244140625, 3.414662109375, 3.418089111328125, 3.41910107421875, 3.422419921875, 3.4218828125]",tokens/s,18.449190428521568,kWh,0.00010015927759166512,1.104791151118098e-05,6.658178342411099e-05,0.00017778897252695712,tokens/kWh,354352.68624688,,s,630,34.1449394950867,0.05419831665886773,0.0008717218262891059,0.054098608016967774,0.05497198028564453,0.05529226551055908,0.0588721688079834,"[0.058912704467773434, 0.05374745559692383, 0.05303049468994141, 0.05383411026000977, 0.05320127868652344, 0.053007904052734374, 0.05378879928588867, 0.05372518539428711, 0.05328736114501953, 0.05348953628540039, 0.052907806396484375, 0.05330697631835937, 0.053582462310791015, 0.05273955154418945, 0.05393052673339844, 0.053833728790283204, 0.053700641632080076, 0.054357662200927734, 0.05469734573364258, 0.05388911819458008, 0.0541124496459961, 0.05401615905761719, 0.05353932952880859, 0.05382064056396484, 0.05440182495117187, 0.053671070098876957, 0.05362511825561524, 0.05384431838989258, 0.0531541748046875, 0.053620574951171875, 0.053876190185546874, 0.053762462615966795, 0.05390348815917969, 0.05429353713989258, 0.053787071228027346, 0.054445919036865235, 0.05604422378540039, 0.055314430236816405, 0.05389046478271484, 0.05391215896606445, 0.05425689697265625, 0.05386111831665039, 0.053884895324707034, 0.054284320831298825, 0.05343641662597656, 0.05402009582519531, 0.05435391998291016, 0.05342351913452149, 0.053999713897705075, 0.05416806411743164, 0.0539504623413086, 0.05399465560913086, 0.05442361450195313, 0.05445302581787109, 0.054099071502685545, 0.054674015045166016, 0.05449552154541016, 0.054300670623779294, 0.05418739318847656, 0.05455116653442383, 0.05406860733032227, 0.054517631530761716, 0.05453286361694336, 0.059359745025634764, 0.053949600219726564, 0.05318127822875977, 0.053311134338378904, 0.05312124633789062, 0.05278323364257813, 0.05350358581542969, 0.05320745468139648, 0.053476577758789064, 0.053381824493408205, 0.05306585693359375, 0.053928001403808594, 0.05418384170532226, 0.05351225662231445, 0.05430793762207031, 0.053991870880126955, 0.05392243194580078, 0.05623580932617187, 0.05609804916381836, 0.055782142639160155, 0.05396889495849609, 0.05352566528320313, 0.05322633743286133, 0.052937889099121095, 0.053522880554199216, 0.05347369766235351, 0.053217281341552736, 0.05351424026489258, 0.05342403030395508, 0.05407344055175781, 0.053840961456298825, 0.05401696014404297, 0.05386444854736328, 0.05429452896118164, 0.0547729606628418, 0.05405344009399414, 0.055820510864257815, 0.05496207809448242, 0.05411849594116211, 0.05346713638305664, 0.05389648056030273, 0.054086368560791014, 0.053394912719726566, 0.05381584167480469, 0.053808673858642575, 0.05351881790161133, 0.054335136413574216, 0.05401430511474609, 0.053561214447021485, 0.054986495971679684, 0.054745471954345704, 0.054142112731933596, 0.054557537078857424, 0.05500723266601563, 0.05447884750366211, 0.05493350219726562, 0.0546602897644043, 0.05416633605957031, 
0.054085792541503905, 0.053671775817871095, 0.053944255828857424, 0.05356345748901367, 0.05412659072875976, 0.05954150390625, 0.05396835327148437, 0.05328131103515625, 0.05309027099609375, 0.05330243301391602, 0.05285315322875977, 0.05304947280883789, 0.05320671844482422, 0.05288003158569336, 0.0533043212890625, 0.05348863983154297, 0.05351542282104492, 0.05453500747680664, 0.054470497131347655, 0.053963966369628906, 0.05402710342407226, 0.05407712173461914, 0.0556036491394043, 0.055894016265869144, 0.05419417572021484, 0.05345075225830078, 0.05343231964111328, 0.05320697784423828, 0.05344668960571289, 0.0534600944519043, 0.053214111328125, 0.053143520355224606, 0.05354828643798828, 0.053590656280517575, 0.053665950775146486, 0.05412249755859375, 0.054736351013183596, 0.054198814392089845, 0.05436972808837891, 0.054958049774169924, 0.05567343902587891, 0.05393203353881836, 0.05496329498291016, 0.054535072326660154, 0.053792896270751955, 0.05355507278442383, 0.05439897537231445, 0.05386582565307617, 0.05376476669311524, 0.053940223693847655, 0.05385363388061523, 0.053994049072265626, 0.0539504623413086, 0.05465292739868164, 0.05458905410766601, 0.05444230270385742, 0.0550503044128418, 0.05508729553222656, 0.055305568695068356, 0.05503392028808594, 0.05446902465820312, 0.054451969146728514, 0.05412451171875, 0.05449091339111328, 0.053622753143310546, 0.05394803237915039, 0.053911872863769535, 0.05355785751342773, 0.05910028839111328, 0.053558143615722656, 0.05332787322998047, 0.05288345718383789, 0.053430145263671874, 0.05327171325683594, 0.05264499282836914, 0.0534628791809082, 0.05339750289916992, 0.053278785705566406, 0.0539068489074707, 0.05454083251953125, 0.053564800262451175, 0.05398585510253906, 0.054146720886230466, 0.05356380844116211, 0.05424089431762695, 0.0550516471862793, 0.05475753784179688, 0.05419913482666015, 0.05403238296508789, 0.05357904052734375, 0.053553760528564455, 0.05366182327270508, 0.05359552001953125, 0.05350668716430664, 0.0541712646484375, 0.05384368133544922, 0.053659904479980466, 0.05419440078735351, 0.05412384033203125, 0.05375065612792969, 0.054363872528076174, 0.05438006210327148, 0.0542031364440918, 0.05463859176635742, 0.054849281311035156, 0.054690048217773436, 0.053811294555664066, 0.05401795196533203, 0.054261760711669924, 0.05386380767822266, 0.05377654266357422, 0.05429296112060547, 0.053884929656982425, 0.05438032150268555, 0.05414524841308594, 0.053544960021972655, 0.054301887512207034, 0.054303550720214845, 0.05436620712280273, 0.05455241775512695, 0.054562271118164064, 0.05504697418212891, 0.054392704010009764, 0.054458366394042966, 0.05479401779174805, 0.054290462493896484, 0.05450156784057617, 0.0541921272277832, 0.05415484619140625, 0.05426217651367188, 0.054666400909423825, 0.059961856842041014, 0.05433375930786133, 0.053098495483398435, 0.05336576080322265, 0.0533265266418457, 0.05286848068237305, 0.05323462295532227, 0.05332787322998047, 0.05348777770996094, 0.05357542419433594, 0.0539257926940918, 0.05396003341674805, 0.05468793487548828, 0.054233760833740235, 0.053905441284179685, 0.053972766876220706, 0.054026432037353515, 0.05496627044677734, 0.05482700729370117, 0.05414096069335937, 0.05411836624145508, 0.05413475036621094, 0.05379244613647461, 0.05353871917724609, 0.053129024505615234, 0.05357017517089844, 0.053710849761962894, 0.05391974258422851, 0.054281471252441406, 0.054226753234863284, 0.05436524963378906, 0.05457052612304687, 0.0542006721496582, 0.05401190567016602, 0.055113601684570315, 0.05528793716430664, 
0.054875839233398435, 0.05382099151611328, 0.054917793273925784, 0.05421884918212891, 0.05374771118164062, 0.053833728790283204, 0.05436415863037109, 0.05361648178100586, 0.053913761138916015, 0.05458060836791992, 0.05366041564941406, 0.054499137878417966, 0.05434579086303711, 0.05401152038574219, 0.054661502838134764, 0.055093246459960936, 0.05503180694580078, 0.05451571273803711, 0.05507276916503906, 0.054970367431640625, 0.05446656036376953, 0.05434502410888672, 0.054346431732177736, 0.055459712982177736, 0.05410940933227539, 0.05424390411376953, 0.05480483245849609, 0.05947939300537109, 0.053601119995117186, 0.05336288070678711, 0.05308601760864258, 0.05309868621826172, 0.05319456100463867, 0.05349504089355469, 0.05345356750488281, 0.05353577423095703, 0.05387964630126953, 0.053467262268066404, 0.05391769790649414, 0.05455276870727539, 0.05381439971923828, 0.05412457656860352, 0.054010528564453125, 0.05400092697143555, 0.0553045768737793, 0.05467171096801758, 0.054234302520751954, 0.0539799690246582, 0.05370470428466797, 0.05351001739501953, 0.05346112060546875, 0.054077438354492184, 0.05378047943115234, 0.0533135986328125, 0.05384969711303711, 0.05409814453125, 0.05395059204101563, 0.054755199432373045, 0.05432252883911133, 0.05350051116943359, 0.05466681671142578, 0.05584550476074219, 0.05523251342773437, 0.05424662399291992, 0.05419702529907226, 0.0538787841796875, 0.053808895111083985, 0.05419638442993164, 0.05421596908569336, 0.053743488311767576, 0.053898174285888674, 0.05429967880249023, 0.05416016006469727, 0.05388102340698242, 0.0549150390625, 0.054574718475341795, 0.05431132888793945, 0.05403763198852539, 0.05468044662475586, 0.0544351692199707, 0.056000415802001956, 0.05517388916015625, 0.05433958435058594, 0.05394204711914063, 0.054499359130859376, 0.054143169403076175, 0.05351628875732422, 0.05455270385742188, 0.054429569244384766, 0.053972160339355466, 0.058772926330566404, 0.053666656494140624, 0.05321712112426758, 0.0528416633605957, 0.053312480926513674, 0.05324537658691406, 0.05285257720947266, 0.05370675277709961, 0.05401264190673828, 0.053340160369873046, 0.05450096130371094, 0.05402582550048828, 0.054395423889160154, 0.053967041015625, 0.054098014831542966, 0.05406281661987305, 0.05387424087524414, 0.05482700729370117, 0.05479497528076172, 0.05413888168334961, 0.05398454284667969, 0.05349004745483398, 0.05347689437866211, 0.05342816162109375, 0.05342201614379883, 0.05376467132568359, 0.05387203216552734, 0.054196800231933594, 0.054006145477294924, 0.05450140762329102, 0.0548752326965332, 0.05456943893432617, 0.05396934509277344, 0.05504000091552735, 0.05532467269897461, 0.05433724975585937, 0.05436361694335937, 0.05466156768798828, 0.0542314224243164, 0.05371686553955078, 0.053897342681884765, 0.05376176071166992, 0.05383168029785156, 0.05451395034790039, 0.054317054748535154, 0.05412822341918945, 0.054319232940673826, 0.05486211013793945, 0.054494239807128905, 0.05454947280883789, 0.05506867218017578, 0.05449852752685547, 0.05453084945678711, 0.0551629753112793, 0.05479414367675781, 0.054370304107666016, 0.05588281631469726, 0.05455353546142578, 0.05425551986694336, 0.05413817596435547, 0.05429328155517578, 0.05457676696777344, 0.0541082878112793, 0.05835190582275391, 0.053450782775878905, 0.053184513092041016, 0.0531578254699707, 0.05326787185668945, 0.05318060684204102, 0.05335871887207031, 0.05339078521728516, 0.053908382415771484, 0.05402009582519531, 0.05396438217163086, 0.05452560043334961, 0.05352495956420898, 0.05409222412109375, 0.054273887634277346, 
0.0534015998840332, 0.05475708770751953, 0.055295806884765625, 0.054903263092041014, 0.05426995086669922, 0.05382963180541992, 0.05423865509033203, 0.05362540817260742, 0.053766143798828124, 0.05368435287475586, 0.05433536148071289, 0.05417331314086914, 0.05389936065673828, 0.05422835159301758, 0.05403535842895508, 0.054199295043945314, 0.053907711029052736, 0.054698753356933597, 0.055260990142822264, 0.05483292770385742, 0.054505889892578124, 0.05528579330444336, 0.054693599700927735, 0.05399577713012695, 0.05425539016723633, 0.05402796936035156, 0.053508159637451175, 0.054335968017578125, 0.05392745590209961, 0.05432163238525391, 0.05439078521728516, 0.053884929656982425, 0.05481369781494141, 0.05404569625854492, 0.05443791961669922, 0.054426593780517576, 0.0541736946105957, 0.05548339080810547, 0.05486524963378906, 0.055231136322021486, 0.054417407989501954, 0.05450758361816406, 0.05512524795532227, 0.05412524795532227, 0.05403647994995117, 0.05449728012084961, 0.054489086151123044, 0.05404022216796875, 0.058654720306396485, 0.054024192810058595, 0.05357468795776367, 0.052875328063964847, 0.053297950744628904, 0.05327065658569336, 0.05304288101196289, 0.053655872344970705, 0.0538419189453125, 0.05377433776855469, 0.054063102722167966, 0.054542335510253906, 0.05402153778076172, 0.05564604949951172, 0.054986495971679684, 0.05351068878173828, 0.05411888122558594, 0.05566463851928711, 0.05509312057495117, 0.05405913543701172, 0.05351216125488281, 0.053593982696533204, 0.05327385711669922, 0.054053375244140625, 0.05402256011962891, 0.053720191955566404, 0.05390835189819336, 0.0541921272277832, 0.05385171127319336, 0.05434400177001953, 0.05522819137573242, 0.054450431823730466, 0.05432710266113281, 0.054507808685302736, 0.05499059295654297, 0.054917057037353514, 0.0543721923828125, 0.05539683151245117, 0.05400073623657226, 0.05385683059692383, 0.05431331253051758, 0.05384966278076172, 0.0544277114868164, 0.05406278228759766, 0.05337363052368164, 0.054443809509277345, 0.054296321868896484, 0.054344158172607425, 0.054566913604736325, 0.054844768524169925, 0.05551785659790039, 0.0542023696899414, 0.0551893424987793, 0.0547465934753418, 0.05512851333618164, 0.054581504821777344, 0.0542658576965332, 0.05458848190307617, 0.05460838317871094, 0.05412704086303711, 0.05456281661987305, 0.053866497039794924, 0.05394800186157227, 0.05939129638671875, 0.053627582550048826, 0.05331513595581055, 0.05310508728027344, 0.05358313751220703, 0.0533240966796875, 0.05340147018432617, 0.05342057418823242, 0.05391155242919922, 0.05346028900146484, 0.05396140670776367, 0.05395251083374023, 0.05431628799438477, 0.0541396484375, 0.05458451080322266, 0.05399951934814453, 0.05457193756103516, 0.0549450569152832, 0.05433216094970703, 0.05409584045410156, 0.05395878219604492, 0.053571071624755856, 0.053682334899902345, 0.054145153045654294, 0.05420041656494141, 0.05389311981201172, 0.05388035202026367, 0.053639583587646485, 0.054185535430908205, 0.05405952072143555, 0.05455257415771484, 0.05467136001586914, 0.05459267044067383, 0.0545145263671875, 0.05528284835815429, 0.05444489669799805, 0.054926559448242186, 0.054854015350341796, 0.054304512023925784, 0.053932510375976565, 0.054612159729003906, 0.053833728790283204, 0.053921791076660154, 0.054454113006591795, 0.054327457427978516, 0.053804512023925784, 0.05437807846069336, 0.05437916946411133, 0.05446684646606445, 0.0551464958190918, 0.054679550170898435, 0.05440441513061523, 0.05502588653564453, 0.05489286422729492, 0.054423583984375, 0.054343841552734376, 
0.05505020904541016, 0.05473459243774414, 0.05401011276245117, 0.054236705780029294, 0.05481929779052734, 0.05461510467529297, 0.054268863677978514]",tokens/s,18.450757544632776,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = 
launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) 
Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.751424,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,8557.34272,9684.516864,0.0,9298.771968,9263.222784,s,1,12.1748349609375,12.1748349609375,0.0,12.1748349609375,12.1748349609375,12.1748349609375,12.1748349609375,[12.1748349609375],,kWh,0.00015162622932917504,1.67181384463455e-05,5.129726325998896e-05,0.0002196416310355095,,MB,1665.196032,10212.999168,0.0,9795.796992,9630.892032,s,10,8.682466796875,0.8682466796874999,0.0010660658831856036,0.8682899780273436,0.8690290100097656,0.869810220336914,0.8704351885986328,"[0.8661140747070313, 0.8673870849609375, 0.8683126220703125, 0.868531982421875, 0.8679371337890625, 0.8683518676757812, 0.868267333984375, 0.8681178588867188, 0.8688554077148437, 
0.8705914306640625]",tokens/s,294.84708204371105,kWh,2.539169216839999e-05,2.8002430535894016e-06,1.6820175493165862e-05,4.501211071515525e-05,tokens/kWh,5687358.26720089,MB,1673.515008,10212.999168,0.0,9795.796992,9630.894592,s,10,37.203754638671874,3.7203754638671875,0.018398588235630488,3.720713623046875,3.7395328613281253,3.7441312988281252,3.747810048828125,"[3.692738037109375, 3.69881689453125, 3.704166259765625, 3.710311279296875, 3.709472412109375, 3.731115966796875, 3.738510986328125, 3.748729736328125, 3.735549072265625, 3.734343994140625]",tokens/s,16.933774725660058,kWh,0.00010892902932576629,1.2014567777306433e-05,7.249450706963493e-05,0.00019343810417270766,tokens/kWh,325685.5740467328,,s,630,37.200874935150175,0.059049007833571664,0.001081663972702892,0.05890227317810058,0.060517172622680665,0.060964065742492676,0.06187439182281494,"[0.06360739135742187, 0.05749737548828125, 0.0567072639465332, 0.05746278381347656, 0.05717401504516602, 0.05719804763793945, 0.05747926330566406, 0.05710800170898438, 0.05784390258789063, 0.05813116836547851, 0.05804220962524414, 0.057743518829345704, 0.05797241592407226, 0.05978144073486328, 0.0583191032409668, 0.058654464721679685, 0.058294273376464846, 0.058748126983642575, 0.05741795349121094, 0.057794944763183594, 0.05769814300537109, 0.057960800170898434, 0.058208255767822265, 0.05775766372680664, 0.057648193359375, 0.05809775924682617, 0.05807807922363281, 0.059924480438232425, 0.058329280853271485, 0.058157920837402344, 0.06023267364501953, 0.06071001434326172, 0.05843654251098633, 0.05798287963867187, 0.05812332916259766, 0.058869728088378905, 0.058031105041503904, 0.0587786865234375, 0.05798601531982422, 0.05805852890014648, 0.06077468872070312, 0.057505695343017575, 0.058115425109863283, 0.057885345458984376, 0.05900492858886719, 0.05993267059326172, 0.06035456085205078, 0.059649024963378906, 0.05803724670410156, 0.05964559936523438, 0.058853729248046875, 0.06029107284545898, 0.05936947250366211, 0.05856224060058594, 0.05878406524658203, 0.0581671028137207, 0.05922016143798828, 0.05944649505615234, 0.058947521209716795, 0.0587149772644043, 0.058514720916748045, 0.05924323272705078, 0.06138819122314453, 0.061265918731689455, 0.056866817474365235, 0.057524223327636716, 0.057960094451904295, 0.05779286575317383, 0.05824716949462891, 0.057966400146484375, 0.058499263763427733, 0.05808127975463867, 0.057489566802978516, 0.05764838409423828, 0.059105182647705076, 0.060410560607910155, 0.060001502990722655, 0.05877135848999023, 0.057613185882568356, 0.05816915130615234, 0.05784320068359375, 0.0575714225769043, 0.058243648529052734, 0.05995727920532227, 0.060106273651123046, 0.058829280853271486, 0.05822873687744141, 0.05852569580078125, 0.05821235275268555, 0.05848899078369141, 0.05782102584838867, 0.05800726318359375, 0.05852188873291016, 0.05881564712524414, 0.05841961669921875, 0.057874847412109375, 0.05794819259643555, 0.058314750671386716, 0.05856000137329102, 0.05815497589111328, 0.06034864044189453, 0.059482048034667966, 0.058890625, 0.060233055114746095, 0.06072732925415039, 0.05883763122558594, 0.05854617691040039, 0.05812761688232422, 0.058256126403808596, 0.05837561416625976, 0.058047039031982425, 0.059995967864990236, 0.05785744094848633, 0.058579105377197266, 0.05962815856933594, 0.06015385437011719, 0.05872844696044922, 0.06029107284545898, 0.060179744720458984, 0.059697887420654294, 0.05856595230102539, 0.058237632751464846, 0.05809561538696289, 0.05972707366943359, 0.05847324752807617, 0.05852774429321289, 
0.06113193511962891, 0.05683174514770508, 0.05740339279174805, 0.0571800651550293, 0.05706662368774414, 0.057793502807617185, 0.05818889617919922, 0.058055583953857424, 0.05829587173461914, 0.06047699356079102, 0.059038593292236326, 0.058175487518310545, 0.058380287170410154, 0.05785804748535156, 0.05802947235107422, 0.05851513671875, 0.05939420700073242, 0.05856742477416992, 0.057556991577148435, 0.05779385757446289, 0.05771948623657226, 0.05799116897583008, 0.05778230285644531, 0.05765273666381836, 0.05811248016357422, 0.060439777374267575, 0.059176929473876955, 0.057445152282714844, 0.06028086471557617, 0.05982617568969727, 0.059875328063964846, 0.05972275161743164, 0.06020329666137696, 0.060157886505126955, 0.059267871856689455, 0.0582737922668457, 0.056989662170410155, 0.05731094360351562, 0.059361278533935545, 0.05987360000610351, 0.05970483016967774, 0.05808998489379883, 0.05952451324462891, 0.0582825927734375, 0.058502590179443356, 0.059570751190185546, 0.0587325439453125, 0.05812819290161133, 0.058861759185791014, 0.05837004852294922, 0.060066848754882815, 0.058303009033203124, 0.059060672760009765, 0.060477439880371096, 0.05910240173339844, 0.05840963363647461, 0.05987753677368164, 0.060487041473388674, 0.05986368179321289, 0.059544639587402345, 0.058094528198242186, 0.060516353607177734, 0.05916262435913086, 0.06220844650268555, 0.056538623809814455, 0.05730300903320312, 0.05695315170288086, 0.0578771858215332, 0.05928681564331055, 0.06022966384887695, 0.05843974304199219, 0.05785161590576172, 0.05782620620727539, 0.057747455596923826, 0.05851923370361328, 0.059265087127685544, 0.058587390899658205, 0.05977203369140625, 0.0603073616027832, 0.06042316818237305, 0.058901569366455075, 0.05772560119628906, 0.057974849700927734, 0.05737081527709961, 0.05808556747436523, 0.058842559814453126, 0.05806732940673828, 0.05847040176391602, 0.058433536529541016, 0.05982003021240234, 0.05961497497558594, 0.057762046813964844, 0.05829632186889649, 0.058213409423828126, 0.05988399887084961, 0.058947521209716795, 0.059162975311279294, 0.058257183074951174, 0.05827542495727539, 0.058217536926269534, 0.05849270248413086, 0.06011286544799805, 0.05981801605224609, 0.05771456146240234, 0.057931392669677735, 0.058634750366210936, 0.0581212158203125, 0.06005027389526367, 0.057682014465332034, 0.06025017547607422, 0.060219390869140625, 0.06080233764648438, 0.059718334197998046, 0.0589087028503418, 0.059692737579345706, 0.05906668853759765, 0.05983337783813476, 0.0600483512878418, 0.06000844955444336, 0.0580423698425293, 0.059660255432128904, 0.05917516708374024, 0.05984233474731445, 0.059900127410888675, 0.057433887481689455, 0.05939116668701172, 0.06180643081665039, 0.05725404739379883, 0.05706041717529297, 0.057996414184570313, 0.057890625, 0.057536609649658205, 0.05837548828125, 0.05915094375610352, 0.057212928771972656, 0.05910236740112305, 0.05988355255126953, 0.05759468841552735, 0.05791350555419922, 0.05788191986083984, 0.0583191032409668, 0.059557918548583985, 0.05880153656005859, 0.05814764785766602, 0.05800374221801758, 0.05847836685180664, 0.058136577606201174, 0.058294273376464846, 0.057896961212158204, 0.05955788803100586, 0.059666431427001954, 0.059652095794677736, 0.05818268966674805, 0.05872534561157226, 0.05779449462890625, 0.05824723052978516, 0.05893555068969727, 0.06040127944946289, 0.06030963134765625, 0.05997158432006836, 0.060524543762207034, 0.05809561538696289, 0.058377918243408204, 0.05820652770996094, 0.059205184936523436, 0.0579813117980957, 0.05941254425048828, 
0.06057574462890625, 0.05785599899291992, 0.059535358428955076, 0.05860054397583008, 0.05891696166992187, 0.05948704147338867, 0.0596409912109375, 0.058471263885498045, 0.05827305603027344, 0.058777503967285157, 0.06001939010620117, 0.05919120025634766, 0.058383872985839844, 0.05880035018920898, 0.05950515365600586, 0.05945894241333008, 0.05911001586914062, 0.05929305648803711, 0.05965887832641602, 0.05891891098022461, 0.059756542205810545, 0.06142771148681641, 0.06150940704345703, 0.05754288101196289, 0.05749094390869141, 0.05759139251708984, 0.058456062316894535, 0.05937001419067383, 0.060704193115234374, 0.057645824432373045, 0.059549888610839846, 0.060435745239257814, 0.060207553863525394, 0.058026016235351564, 0.05771465682983398, 0.05862137603759766, 0.05853065490722656, 0.058842784881591795, 0.058812767028808596, 0.05901311874389648, 0.05931411361694336, 0.058558528900146484, 0.059684864044189455, 0.05999539184570313, 0.05835443115234375, 0.05852979278564453, 0.058673439025878904, 0.058443489074707033, 0.058638336181640625, 0.05831647872924805, 0.05848710250854492, 0.058548225402832034, 0.058630271911621096, 0.05904569625854492, 0.05900908660888672, 0.05954707336425781, 0.058990753173828125, 0.05790329742431641, 0.058522945404052736, 0.05904252624511719, 0.05918848037719727, 0.05988803100585938, 0.05773161697387695, 0.05942272186279297, 0.0586690559387207, 0.059762657165527346, 0.05804035186767578, 0.05974016189575195, 0.06011904144287109, 0.05903769683837891, 0.06121625518798828, 0.06085683059692383, 0.06049587249755859, 0.058533409118652346, 0.05939599990844727, 0.05954003143310547, 0.058638336181640625, 0.060773632049560544, 0.06013004684448242, 0.06091062545776367, 0.06097129440307617, 0.06089590454101562, 0.06056553649902344, 0.06007398223876953, 0.05990764617919922, 0.062360095977783206, 0.05784323120117187, 0.05817808151245117, 0.05784985733032227, 0.059628929138183594, 0.06069750213623047, 0.05959036636352539, 0.05935849761962891, 0.05766012954711914, 0.05786198425292969, 0.05852614212036133, 0.05885513687133789, 0.059024799346923826, 0.058597984313964846, 0.0595968017578125, 0.0592465591430664, 0.06170598220825195, 0.058675487518310546, 0.05800080108642578, 0.05759625625610352, 0.058296161651611327, 0.05830083084106445, 0.05897216033935547, 0.059322368621826174, 0.05854003143310547, 0.059514942169189455, 0.059049633026123045, 0.05963558578491211, 0.06128886413574219, 0.0600002555847168, 0.06056959915161133, 0.05925878524780273, 0.061077377319335935, 0.05935686492919922, 0.05817385482788086, 0.05872211074829101, 0.05802531051635742, 0.058853439331054684, 0.05995174407958984, 0.05955107116699219, 0.05864339065551758, 0.058748031616210936, 0.058493087768554684, 0.06055705642700195, 0.061367328643798826, 0.059905311584472654, 0.06117862319946289, 0.0599529914855957, 0.060429534912109374, 0.05932726287841797, 0.059580478668212894, 0.059055198669433595, 0.058122528076171874, 0.058775489807128906, 0.05946233749389648, 0.059361534118652345, 0.05895756912231445, 0.06046636962890625, 0.05880915069580078, 0.05887180709838867, 0.061892608642578124, 0.05910492706298828, 0.0598449592590332, 0.06273843383789063, 0.05833369445800781, 0.05846006393432617, 0.05959219360351563, 0.06095523071289063, 0.06054886245727539, 0.059420318603515626, 0.05888880157470703, 0.05766758346557617, 0.0580893440246582, 0.058488384246826175, 0.059118144989013674, 0.05936515045166016, 0.0581429443359375, 0.05770463943481445, 0.05858899307250977, 0.060147457122802735, 0.059698974609375, 0.05909532928466797, 
0.057865951538085936, 0.05854665756225586, 0.05896806335449219, 0.0586506233215332, 0.06057984161376953, 0.05929574584960937, 0.05784384155273437, 0.060157825469970706, 0.06053033447265625, 0.06131951904296875, 0.05868544006347656, 0.05920358276367187, 0.05970272064208984, 0.06063776016235352, 0.060876224517822264, 0.059530975341796875, 0.059400257110595704, 0.058288543701171876, 0.059176639556884764, 0.05929846572875976, 0.05937360000610352, 0.05997772979736328, 0.05841100692749023, 0.059498462677001956, 0.06069046401977539, 0.05960067367553711, 0.06099305725097656, 0.05904563140869141, 0.058804447174072266, 0.059312862396240236, 0.061720577239990235, 0.0598015022277832, 0.05894358444213867, 0.0607716178894043, 0.061829792022705075, 0.05975980758666992, 0.0587457275390625, 0.05931417465209961, 0.05984460830688477, 0.06097235107421875, 0.059838752746582034, 0.05959049606323242, 0.05867577743530274, 0.05933830261230469, 0.062123584747314456, 0.05770454406738281, 0.05787788772583008, 0.057912288665771486, 0.05792870330810547, 0.05816416168212891, 0.058875072479248045, 0.05803046417236328, 0.058528255462646485, 0.05876326370239258, 0.058431713104248044, 0.0584681282043457, 0.05845398330688477, 0.0585849609375, 0.058681663513183595, 0.06043782424926758, 0.06145487976074219, 0.059401569366455076, 0.05938447952270508, 0.05949030303955078, 0.05916876983642578, 0.057661247253417966, 0.05774528121948242, 0.058476512908935546, 0.058450016021728515, 0.059097343444824216, 0.05955382537841797, 0.06127391815185547, 0.06013158416748047, 0.06011484909057617, 0.057557151794433596, 0.05885526275634766, 0.059650047302246094, 0.06056502532958984, 0.05822236633300781, 0.058828929901123046, 0.06086713409423828, 0.059115520477294924, 0.05867472076416016, 0.05914803314208984, 0.05949923324584961, 0.06049587249755859, 0.059657470703125, 0.058528255462646485, 0.0603138542175293, 0.06166719818115234, 0.05917504119873047, 0.05944313430786133, 0.059482177734375, 0.059117568969726565, 0.0591739501953125, 0.05871708679199219, 0.06026038360595703, 0.06058598327636719, 0.0586608657836914, 0.05938927841186523, 0.061303455352783205, 0.06048080062866211, 0.05930057525634766, 0.05835683059692383, 0.05980767822265625, 0.06025878524780273, 0.05974879837036133, 0.061964576721191406, 0.05757500839233398, 0.05824553680419922, 0.05838438415527344, 0.05832806396484375, 0.058976734161376956, 0.05795651245117187, 0.05849331283569336, 0.05864448165893555, 0.0582628173828125, 0.05893807983398437, 0.058638336181640625, 0.059227264404296875, 0.059085086822509764, 0.057715297698974606, 0.05856051254272461, 0.05836816024780273, 0.0582979850769043, 0.05831472015380859, 0.05868489456176758, 0.05874476623535156, 0.05978607940673828, 0.06125158309936524, 0.05972598266601563, 0.06032944107055664, 0.06058975982666016, 0.05901580810546875, 0.05927475357055664, 0.05955846405029297, 0.059146240234375, 0.0594986572265625, 0.0588573112487793, 0.058264801025390625, 0.059015518188476564, 0.05890297698974609, 0.05858889770507812, 0.05777027130126953, 0.06066995239257812, 0.06056959915161133, 0.058760513305664064, 0.059034305572509764, 0.060219390869140625, 0.05941968154907227, 0.05945033645629883, 0.05873411178588867, 0.058434017181396486, 0.05901311874389648, 0.06015596771240234, 0.06005920028686523, 0.05930841445922851, 0.05921791839599609, 0.05958575820922852, 0.060410655975341794, 0.05871721649169922, 0.06092192077636719, 0.059706111907958985, 0.061104286193847654, 0.060124767303466796, 0.05929974365234375, 0.060889793395996095, 0.061150974273681644, 
0.059146305084228516, 0.058949630737304685]",tokens/s,16.935085561784174,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 
4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4908.900352,7227.768832,0.0,6849.298432,6445.09696,s,1,11.409763671875,11.409763671875,0.0,11.409763671875,11.409763671875,11.409763671875,11.409763671875,[11.409763671875],,kWh,0.00012727060596250036,1.4031577753581444e-05,4.089781049598784e-05,0.00018219999421206963,,MB,2795.634688,7580.090368,0.0,7172.25984,6823.319552,s,10,3.659079406738282,0.3659079406738281,0.0022586638860903206,0.3660846557617188,0.36860031433105467,0.36880082855224605,0.3689612399291992,"[0.36158599853515627, 0.36374530029296875, 0.36855264282226563, 
0.36434072875976564, 0.3690013427734375, 0.36476950073242187, 0.36626763916015626, 0.36590167236328125, 0.36855575561523435, 0.36635882568359374]",tokens/s,699.6295284780373,kWh,1.0731959706250413e-05,1.1835455571471207e-06,7.125333081214087e-06,1.904083834461162e-05,tokens/kWh,13444786.167855136,MB,2799.960064,7582.18752,0.0,7174.356992,6823.322112,s,10,22.737356689453122,2.2737356689453123,0.010605739310478824,2.2769017333984376,2.285530078125,2.286243310546875,2.286813896484375,"[2.2818232421875, 2.2616044921875, 2.28537158203125, 2.2813994140625, 2.2808974609375, 2.272906005859375, 2.25453369140625, 2.266579345703125, 2.265284912109375, 2.28695654296875]",tokens/s,27.70770624767609,kWh,6.630275453333297e-05,7.313228889816367e-06,4.402154116958458e-05,0.00011763752459273392,tokens/kWh,535543.4009522783,,s,630,22.73450096893311,0.03608650947449699,0.0005440227987745136,0.0360599365234375,0.03659966354370117,0.03701184482574463,0.037776075401306154,"[0.03709203338623047, 0.036034656524658204, 0.037209697723388675, 0.035833889007568356, 0.03566934585571289, 0.03578083038330078, 0.035856895446777344, 0.035621086120605466, 0.03602342224121094, 0.036013057708740234, 0.036138561248779295, 0.03601980972290039, 0.03599955368041992, 0.03596515274047852, 0.03611923217773438, 0.03609363174438476, 0.03618643188476563, 0.03596063995361328, 0.03614944076538086, 0.03641683197021484, 0.03616844940185547, 0.03617536163330078, 0.03633379364013672, 0.03605126571655273, 0.03628636932373047, 0.03619811248779297, 0.04121209716796875, 0.03624560165405273, 0.03650358581542969, 0.03624752044677734, 0.036413440704345705, 0.03710534286499023, 0.03643423843383789, 0.036036094665527346, 0.03605465698242188, 0.036045696258544924, 0.03602227020263672, 0.03609190368652344, 0.03579859161376953, 0.03606572723388672, 0.035917823791503906, 0.03592806243896484, 0.03580710220336914, 0.03592998504638672, 0.03586073684692383, 0.03582156753540039, 0.035999744415283204, 0.03575798416137695, 0.03582326507568359, 0.03583430480957031, 0.035860481262207033, 0.03798406219482422, 0.0359730224609375, 0.03593040084838867, 0.03573964691162109, 0.03564252853393555, 0.03588131332397461, 0.036995582580566407, 0.03616153717041016, 0.03616563034057617, 0.035995582580566406, 0.03667542266845703, 0.03614944076538086, 0.036913150787353514, 0.03591535949707031, 0.035668384552001955, 0.0357534065246582, 0.03563167953491211, 0.03547955322265625, 0.03520512008666992, 0.03529840087890625, 0.03516044616699219, 0.03530931091308594, 0.03499702453613281, 0.035198974609375, 0.03678822326660156, 0.03577446365356445, 0.03653836822509766, 0.035429824829101564, 0.03512982559204102, 0.03528230285644531, 0.03536764907836914, 0.035526336669921874, 0.03558636856079102, 0.03582361602783203, 0.03585228729248047, 0.03733299255371094, 0.03596060943603516, 0.03594982528686524, 0.035892192840576174, 0.03590553665161133, 0.0360274543762207, 0.036082271575927735, 0.035768062591552734, 0.03606147384643555, 0.03650387191772461, 0.03580928039550781, 0.035624801635742186, 0.035616928100585935, 0.03537638473510742, 0.03584022521972656, 0.035737438201904295, 0.03588140869140625, 0.036972862243652344, 0.03587990570068359, 0.03556844711303711, 0.035643550872802736, 0.03546931076049804, 0.035471359252929685, 0.03564652633666992, 0.03585094451904297, 0.03597049713134766, 0.0359958724975586, 0.036011871337890626, 0.03686617660522461, 0.036862049102783206, 0.03618828964233398, 0.03604931259155274, 0.0364071044921875, 0.03704393768310547, 0.036676063537597656, 0.03635740661621094, 
0.03597110366821289, 0.03577468872070313, 0.036097824096679686, 0.035576351165771486, 0.036792320251464845, 0.0359444465637207, 0.036171009063720706, 0.03598947143554688, 0.03603263854980469, 0.03600352096557617, 0.036010303497314454, 0.036012096405029295, 0.036030719757080075, 0.03603286361694336, 0.0362105598449707, 0.035870849609375, 0.036018142700195314, 0.03621481704711914, 0.03606937789916992, 0.03665827178955078, 0.036152225494384765, 0.035862529754638675, 0.036116481781005856, 0.036060672760009765, 0.03606147384643555, 0.036116481781005856, 0.036065502166748045, 0.03644416046142578, 0.03656614303588867, 0.03629555130004883, 0.03632271957397461, 0.036491073608398435, 0.036391712188720705, 0.03628646469116211, 0.036245502471923825, 0.036211902618408204, 0.03666566467285156, 0.03613478469848633, 0.03616758346557617, 0.03645718383789062, 0.0361267204284668, 0.03630284881591797, 0.036337535858154295, 0.03651164627075195, 0.036230815887451175, 0.03620512008666992, 0.036124671936035156, 0.03657065582275391, 0.036388832092285155, 0.03606988906860351, 0.03642777633666992, 0.03636553573608398, 0.03612137603759766, 0.0362762222290039, 0.03721011352539062, 0.036796001434326174, 0.03633808135986328, 0.035935520172119144, 0.03595747375488281, 0.036359424591064456, 0.036743934631347654, 0.03669343948364258, 0.03620512008666992, 0.03607548904418945, 0.03627775955200195, 0.03663299179077149, 0.03660403060913086, 0.03693008041381836, 0.03660620880126953, 0.03591987228393555, 0.03624345779418945, 0.03624342346191406, 0.037142559051513674, 0.03628188705444336, 0.03593286514282226, 0.03589836883544922, 0.03597974395751953, 0.0359466552734375, 0.03589487838745117, 0.03591635131835937, 0.035923168182373046, 0.03612956619262695, 0.03619839859008789, 0.036206592559814454, 0.03608607864379883, 0.036130207061767575, 0.03625603103637695, 0.03659964752197266, 0.03637673568725586, 0.03623731231689453, 0.03606512069702148, 0.036155200958251955, 0.03620451354980469, 0.03693606567382812, 0.03640524673461914, 0.036286495208740235, 0.036410526275634764, 0.036057918548583985, 0.03616719818115234, 0.037292160034179685, 0.03683347320556641, 0.03691535949707031, 0.036130847930908205, 0.03608329772949219, 0.036120769500732425, 0.03675689697265625, 0.03616556930541992, 0.0361767349243164, 0.036095455169677736, 0.0361313591003418, 0.03625094223022461, 0.03617657470703125, 0.036181182861328126, 0.03589321517944336, 0.03581564712524414, 0.03581811141967774, 0.03583590316772461, 0.03574179077148437, 0.0357243537902832, 0.03568931198120117, 0.03561471939086914, 0.03600998306274414, 0.03595446395874023, 0.03603068923950195, 0.036038078308105466, 0.0359931526184082, 0.03610287857055664, 0.03705670547485351, 0.03623331069946289, 0.03647283172607422, 0.037802272796630856, 0.03569868850708008, 0.035345760345458985, 0.03525823974609375, 0.03548179244995117, 0.03504803085327148, 0.035110912322998046, 0.035381248474121094, 0.035925312042236326, 0.03594720077514649, 0.03581324768066406, 0.03901225662231445, 0.03611388778686524, 0.03648179244995117, 0.03589734268188476, 0.037344417572021484, 0.036337982177734374, 0.035829761505126956, 0.035568126678466795, 0.035649471282958985, 0.03573884963989258, 0.035741985321044924, 0.035855968475341796, 0.03575091171264649, 0.03591167831420899, 0.03602022552490235, 0.03583523178100586, 0.035764190673828126, 0.035675872802734376, 0.03592607879638672, 0.03623209762573242, 0.036034561157226565, 0.0361082878112793, 0.03613491058349609, 0.03617507171630859, 0.03742166519165039, 0.037626049041748044, 
0.03642572784423828, 0.03611238479614258, 0.037064704895019535, 0.036268001556396486, 0.0362209587097168, 0.03654764938354492, 0.03633452987670899, 0.0365219841003418, 0.03622092819213867, 0.036157440185546875, 0.036239360809326174, 0.03635200119018555, 0.03618406295776367, 0.0362762222290039, 0.03632691192626953, 0.036146751403808595, 0.03649631881713867, 0.03652188873291016, 0.03625353622436524, 0.03637392044067383, 0.03645731353759766, 0.03659980773925781, 0.0367507209777832, 0.03624931335449219, 0.0363076171875, 0.0361607666015625, 0.037435615539550784, 0.03634995269775391, 0.0363680305480957, 0.03589779281616211, 0.03618201446533203, 0.036138721466064457, 0.03764048004150391, 0.036139007568359374, 0.036050079345703125, 0.03622515106201172, 0.036088542938232424, 0.03631289672851563, 0.03617302322387695, 0.03628950500488281, 0.03634175872802734, 0.03653222274780273, 0.036169887542724606, 0.03592380905151367, 0.03625881576538086, 0.03619900894165039, 0.03607913589477539, 0.035922817230224606, 0.03604003143310547, 0.035568286895751956, 0.03575404739379883, 0.036931617736816406, 0.03568947219848633, 0.03573648071289062, 0.036000961303710936, 0.03589580917358398, 0.03561094284057617, 0.03559999847412109, 0.03551884841918945, 0.035655040740966794, 0.03567270278930664, 0.03828879928588867, 0.03646752166748047, 0.036138687133789066, 0.03618560028076172, 0.03652608108520508, 0.0363587532043457, 0.0362534065246582, 0.036020511627197264, 0.036015296936035154, 0.03621561431884766, 0.03621046447753906, 0.03631875228881836, 0.037183647155761716, 0.0361416015625, 0.036116481781005856, 0.03564492797851562, 0.03540428924560547, 0.035714080810546875, 0.03597001647949219, 0.035198974609375, 0.03509622573852539, 0.03495971298217773, 0.03528470230102539, 0.03542454528808594, 0.03562905502319336, 0.035827583312988284, 0.03577471923828125, 0.03587443161010742, 0.03678345489501953, 0.036249343872070315, 0.036008544921875, 0.03557164764404297, 0.03618422317504883, 0.035918048858642575, 0.03582361602783203, 0.03632537460327148, 0.03538534545898438, 0.035236064910888674, 0.03521923065185547, 0.03552665710449219, 0.03528230285644531, 0.035232383728027346, 0.03554508972167969, 0.035590145111083986, 0.0352911376953125, 0.035073375701904295, 0.035248287200927736, 0.03529292678833008, 0.03513353729248047, 0.0351580810546875, 0.035106975555419924, 0.03498630523681641, 0.03547148895263672, 0.035248126983642575, 0.03517804718017578, 0.03528489685058594, 0.03531011199951172, 0.03631718444824219, 0.035565055847167966, 0.03506227111816406, 0.035915775299072264, 0.03558399963378906, 0.035573760986328126, 0.035676158905029294, 0.03727974319458008, 0.035844097137451174, 0.03581756973266602, 0.0359463996887207, 0.036134048461914064, 0.03593638229370117, 0.03556662368774414, 0.03569836807250976, 0.0361135368347168, 0.035857280731201174, 0.035672065734863284, 0.035483264923095705, 0.035985984802246095, 0.03581113433837891, 0.036152576446533205, 0.03613977432250977, 0.03632032012939453, 0.03621984100341797, 0.03616563034057617, 0.03624755096435547, 0.03629590225219727, 0.03614799880981445, 0.03640060806274414, 0.0363438720703125, 0.036446689605712894, 0.03656499099731445, 0.03628236770629883, 0.03671878433227539, 0.036030529022216796, 0.03604729461669922, 0.03612998580932617, 0.03595481491088867, 0.03582012939453125, 0.03568230438232422, 0.035845375061035155, 0.03566796875, 0.03585065460205078, 0.03599814224243164, 0.03595481491088867, 0.03644607925415039, 0.03610214233398437, 0.03637859344482422, 0.03622006225585937, 
0.035971969604492185, 0.036311038970947264, 0.03740467071533203, 0.036275489807128906, 0.03595484924316406, 0.03609657669067383, 0.03618201446533203, 0.03609743881225586, 0.03613763046264649, 0.0363553581237793, 0.03617244720458984, 0.036470081329345705, 0.03628851318359375, 0.036272830963134765, 0.03629260635375976, 0.03617744064331055, 0.036085376739501955, 0.03604268646240234, 0.036141983032226564, 0.03603251266479492, 0.0359279670715332, 0.035848289489746096, 0.03588710403442383, 0.03592335891723633, 0.03614371109008789, 0.035901439666748046, 0.03640524673461914, 0.03611033630371094, 0.03557580947875977, 0.03573331069946289, 0.036869598388671876, 0.035668704986572264, 0.03552665710449219, 0.03526860809326172, 0.03521900939941406, 0.035622432708740236, 0.03545382308959961, 0.035426334381103514, 0.035575614929199216, 0.03590572738647461, 0.035784702301025394, 0.03565468978881836, 0.03597369766235352, 0.03514409637451172, 0.035643390655517575, 0.03524198532104492, 0.03530752182006836, 0.036947967529296875, 0.03725107192993164, 0.03591987228393555, 0.0356228141784668, 0.0352891845703125, 0.035280895233154294, 0.03554099273681641, 0.03546112060546875, 0.035638816833496095, 0.035781089782714846, 0.035683551788330076, 0.035525409698486325, 0.03562496185302735, 0.03559404754638672, 0.035683807373046876, 0.03575267028808594, 0.035465217590332034, 0.035778560638427735, 0.035527870178222655, 0.03540636825561523, 0.03625807952880859, 0.03702515029907227, 0.035709568023681644, 0.03589120101928711, 0.03597075271606445, 0.036299072265625, 0.03598950576782227, 0.03623526382446289, 0.036150398254394533, 0.037157760620117185, 0.03619232177734375, 0.03617990493774414, 0.03615119934082031, 0.03627999877929688, 0.036159328460693356, 0.03622355270385742, 0.03634969711303711, 0.03615356826782227, 0.035899425506591795, 0.0359683837890625, 0.03596700668334961, 0.03584880065917969, 0.03602841567993164, 0.03584012985229492, 0.036114303588867185, 0.036058433532714845, 0.03581411361694336, 0.03579391860961914, 0.03573398590087891, 0.03594211196899414, 0.03590572738647461, 0.03623596954345703, 0.036016033172607424, 0.03602227020263672, 0.03557539367675781, 0.0357699203491211, 0.03674508666992188, 0.03595280075073242, 0.0356583366394043, 0.035752159118652344, 0.03550163269042969, 0.03583635330200195, 0.03589120101928711, 0.037085182189941404, 0.03614720153808594, 0.03605920028686523, 0.03570393753051758, 0.03571203231811523, 0.035858207702636716, 0.03611443328857422, 0.03596409606933594, 0.03633644866943359, 0.03580464172363281, 0.03590607833862305, 0.03593625640869141, 0.03611852645874023, 0.03612876892089844, 0.03623715209960938, 0.03638211059570313, 0.03624342346191406, 0.03606585693359375, 0.036454624176025394, 0.03620272064208984, 0.03789596939086914, 0.03758451080322266, 0.03643840026855469, 0.036240447998046876, 0.03660076904296875, 0.03630825424194336, 0.036100833892822266, 0.037154815673828126, 0.03607500839233398, 0.03621120071411133, 0.03605299377441406, 0.035845951080322264, 0.03640729522705078, 0.037711936950683596, 0.03617103958129883, 0.03623107147216797, 0.035902271270751955, 0.03606902313232422, 0.036305374145507815, 0.0363765754699707, 0.03975971221923828, 0.0364156494140625, 0.036018177032470705, 0.03605641555786133, 0.03594307327270508, 0.03616563034057617, 0.03593830490112305, 0.03585228729248047, 0.03583724975585938, 0.03596563339233398, 0.036083713531494144, 0.03598649597167969, 0.036442527770996096, 0.036065055847167966, 0.03621555328369141, 0.036364288330078126, 0.03607878494262695, 
0.03616032028198242, 0.036245502471923825, 0.036106239318847655, 0.03618611145019531, 0.036308799743652344, 0.03630688095092773]",tokens/s,27.71118666122914,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4625.73568,6587.02336,0.0,6201.278464,6094.567936,s,1,10.9061943359375,10.9061943359375,0.0,10.9061943359375,10.9061943359375,10.9061943359375,10.9061943359375,[10.9061943359375],,kWh,0.0001078134664250077,1.1885368227372451e-05,3.615697337000512e-05,0.00015585580802238527,,MB,2005.602304,7656.57088,0.0,7237.271552,6740.83328,s,10,8.10876580810547,0.8108765808105469,0.002536681049134044,0.8107609558105469,0.8140458618164063,0.8145027282714844,0.8148682214355469,"[0.8107168579101562, 0.8081912231445313, 0.8149595947265625, 0.8108050537109375, 0.8135609741210937, 0.8139443359375, 0.8076442260742187, 0.8114901123046875, 0.8100958862304688, 0.8073575439453125]",tokens/s,315.70772428043745,kWh,1.8344730342974458e-05,2.022029834320311e-06,1.2043047828876052e-05,3.240980800617082e-05,tokens/kWh,7898843.459710025,MB,2012.106752,7658.668032,0.0,7239.368704,6740.83584,s,10,47.05436669921875,4.705436669921875,0.017073674518933053,4.701096435546875,4.7311182128906255,4.734091333007813,4.736469829101562,"[4.67892236328125, 4.695111328125, 4.73045751953125, 4.71314208984375, 4.737064453125, 4.7112021484375, 4.697578125, 4.69641650390625, 4.689857421875, 4.70461474609375]",tokens/s,13.388768018642995,kWh,0.00010729437283576865,1.1836173904307607e-05,7.08672962771259e-05,0.00018999784301720212,tokens/kWh,331582.711674764,,s,630,47.05245947265626,0.07468644360739086,0.0008925641058429147,0.07452587127685548,0.07547093734741211,0.0761910270690918,0.078406254196167,"[0.07494911956787109, 0.07493405151367187, 0.07409276580810546, 0.07358675384521485, 0.07483580780029297, 0.07422383880615234, 0.07389389038085938, 0.07368089294433594, 0.07669964599609375, 0.07475132751464844, 0.07408707427978516, 0.0739266586303711, 0.07406121826171876, 0.07424674987792969, 0.07672000122070312, 0.07531887817382812, 0.07483856201171875, 0.0741212158203125, 0.07441318511962891, 0.07447026824951172, 0.0742309112548828, 0.07418109130859375, 0.07453849792480469, 0.07398214721679687, 0.07393885040283203, 0.0738306884765625, 0.07404354858398438, 0.07447180938720703, 0.0741308822631836, 0.07405343627929688, 0.07394175720214843, 0.07387545776367188, 0.07387478637695312, 0.07448438262939452, 0.0740126724243164, 0.0739205093383789, 0.07421337890625, 0.07377101135253906, 0.07389730834960938, 0.0736725082397461, 0.07367935943603515, 0.07392700958251953, 0.07421337890625, 0.07405773162841797, 0.07387865447998047, 0.07395938873291015, 0.07384767913818359, 0.07455951690673829, 0.07423999786376953, 0.07462470245361329, 0.07425875091552735, 0.07404338836669921, 0.07399219512939453, 0.07406339263916016, 0.07462754821777344, 0.07436038208007813, 0.07425244903564453, 0.07420496368408203, 0.07416377258300781, 0.07398892974853516, 0.07417664337158203, 0.07457100677490235, 0.0741373748779297, 0.07571116638183593, 0.07467641448974609, 0.07563228607177734, 0.07487932586669922, 0.07436492919921875, 0.07447551727294922, 0.07422342681884765, 0.07474944305419921, 0.07402492523193359, 0.07423868560791015, 0.07392460632324219, 0.07412505340576171, 0.07422386932373047, 0.07419462585449219, 0.07409625244140625, 0.07448175811767578, 0.07469241333007813, 0.07416912078857422, 0.07408640289306641, 0.07413251495361328, 0.07437516784667969, 0.07726953887939453, 0.07814803314208985, 0.07449091339111329, 0.07415087890625, 0.07402700805664063, 0.07412448120117188, 0.07394739532470704, 0.07430716705322266, 0.07446627044677734, 0.07432192230224609, 0.07393894195556641, 0.07394841766357421, 
0.07382086181640625, 0.07439753723144531, 0.07437948608398437, 0.07431552124023437, 0.07401686096191407, 0.07399440002441406, 0.07406159973144531, 0.07430966186523437, 0.07418240356445313, 0.07450230407714843, 0.07456534576416016, 0.07413977813720703, 0.07403359985351562, 0.07400447845458985, 0.07393894195556641, 0.07401676940917969, 0.07485234832763672, 0.07476537322998048, 0.074416259765625, 0.07469683074951172, 0.07453321838378907, 0.07484774780273437, 0.07463148498535156, 0.0752174072265625, 0.07482316589355469, 0.0755216293334961, 0.07455635070800781, 0.0748257293701172, 0.0752384033203125, 0.0746911392211914, 0.07572051239013672, 0.07830799865722657, 0.0746250228881836, 0.07469055938720703, 0.07473766326904296, 0.07418265533447266, 0.07458611297607422, 0.07455948638916016, 0.07456358337402344, 0.07535139465332032, 0.0748653106689453, 0.0746455078125, 0.07456153869628906, 0.07478681945800782, 0.07532953643798829, 0.07530496215820312, 0.07533363342285156, 0.07494246673583985, 0.0745185317993164, 0.0748257293701172, 0.075044189453125, 0.07472099304199219, 0.07461894226074219, 0.07637894439697265, 0.07504287719726563, 0.07465164947509766, 0.0743934097290039, 0.07478905487060547, 0.0752490234375, 0.07499430084228516, 0.07532134246826172, 0.07483798217773438, 0.07495203399658203, 0.07501634979248047, 0.07531983947753906, 0.07498751831054687, 0.07535311889648437, 0.07463011169433593, 0.07477452850341797, 0.0745164794921875, 0.07496089935302734, 0.07450624084472657, 0.07443647766113282, 0.0745770263671875, 0.07462220764160156, 0.07475174713134766, 0.07521871948242187, 0.07873149108886719, 0.07525977325439454, 0.07542591857910157, 0.07487641906738281, 0.07876454162597656, 0.07533347320556641, 0.0748542709350586, 0.07484441375732422, 0.07471311950683594, 0.07470489501953125, 0.07509772491455079, 0.07452812957763671, 0.07466671752929688, 0.07549775695800781, 0.07484150695800781, 0.07500656127929688, 0.07573094177246094, 0.0747540512084961, 0.07498137664794922, 0.07461478424072265, 0.07459398651123048, 0.07461714935302734, 0.0752332763671875, 0.07619379425048828, 0.07508172607421874, 0.07446109008789062, 0.07461468505859375, 0.07442435455322266, 0.074363037109375, 0.07401225280761718, 0.07399468994140625, 0.07465980529785156, 0.07439155578613281, 0.07412326049804688, 0.07398604583740234, 0.07476223754882813, 0.07518611145019531, 0.07468204498291016, 0.07431155395507813, 0.07425689697265625, 0.07431168365478516, 0.07393651580810547, 0.07462335968017578, 0.07414083099365235, 0.07419376373291016, 0.07443456268310547, 0.0743034896850586, 0.07390617370605469, 0.07388108825683594, 0.07440640258789062, 0.07429670715332032, 0.0741270751953125, 0.07420188903808594, 0.07424345397949218, 0.07449462127685547, 0.07433634948730469, 0.07434444427490235, 0.07448371124267578, 0.07507295989990234, 0.07537107086181641, 0.07517183685302735, 0.07821926116943359, 0.07588796997070313, 0.07565174102783204, 0.07516365051269532, 0.0748090591430664, 0.07525129699707031, 0.07538349151611329, 0.07496246337890625, 0.07485692596435548, 0.07496685028076172, 0.07860572814941406, 0.07501420593261719, 0.07444121551513672, 0.07441840362548828, 0.0745769271850586, 0.07491887664794922, 0.07661772918701172, 0.07488307189941407, 0.07734476470947266, 0.0756236801147461, 0.07513302612304687, 0.07570432281494141, 0.07512700653076172, 0.07499616241455079, 0.07550169372558593, 0.07479462432861328, 0.07507174682617188, 0.0752168960571289, 0.07564041900634766, 0.07535411071777344, 0.07608975982666015, 0.07570022583007813, 
0.07551773071289063, 0.07504508972167968, 0.07550892639160156, 0.07564192199707032, 0.0748848648071289, 0.0750775375366211, 0.0750551986694336, 0.075399169921875, 0.07523532867431641, 0.07517731475830078, 0.07489379119873046, 0.07480726623535157, 0.0752232666015625, 0.07506124877929687, 0.07581696319580078, 0.07515666961669921, 0.07484607696533203, 0.07508201599121093, 0.07509468841552734, 0.07501414489746094, 0.07524147033691406, 0.07525334167480469, 0.07561872100830078, 0.07464927673339844, 0.0748055648803711, 0.07450828552246094, 0.07520365142822266, 0.07527235412597656, 0.07602051544189453, 0.07590707397460937, 0.07524761962890625, 0.07626956939697266, 0.0819240951538086, 0.07448713684082031, 0.07494892883300781, 0.074111328125, 0.07397376251220703, 0.07401631927490235, 0.07441603088378906, 0.07439177703857422, 0.07441375732421875, 0.0746681900024414, 0.07468035125732422, 0.07470944213867188, 0.0745467529296875, 0.07421558380126952, 0.07438365173339843, 0.0739975357055664, 0.0741622085571289, 0.07514844512939453, 0.07767667388916015, 0.07666146850585938, 0.07585340881347656, 0.07557981109619141, 0.07532953643798829, 0.07454914855957032, 0.074459228515625, 0.0741640625, 0.07455145263671875, 0.07434223937988281, 0.07505321502685547, 0.0765665283203125, 0.07466300964355468, 0.07455795288085937, 0.07442626953125, 0.07483853149414063, 0.07449190521240234, 0.07403110504150391, 0.07509318542480468, 0.07468524932861328, 0.07436697387695312, 0.07414169311523437, 0.07475199890136719, 0.07461180877685547, 0.07462111663818359, 0.07842479705810547, 0.07546880340576172, 0.07469065856933593, 0.07399005126953125, 0.07384063720703125, 0.07426012420654297, 0.07452706909179688, 0.0741561279296875, 0.07395292663574218, 0.0737825927734375, 0.0757804183959961, 0.07483251190185547, 0.07412044525146484, 0.07532617950439453, 0.07718498992919921, 0.07519033813476562, 0.07439155578613281, 0.07426457977294922, 0.0740126724243164, 0.07452467346191406, 0.07626316833496094, 0.07489561462402344, 0.07431340789794921, 0.07425667572021484, 0.07413558197021484, 0.07493001556396485, 0.07409270477294921, 0.0739573745727539, 0.07426048278808593, 0.07453286743164063, 0.07397942352294921, 0.0739110107421875, 0.0744035873413086, 0.07506237030029297, 0.07417718505859375, 0.07411122894287109, 0.07379052734375, 0.07474380493164062, 0.07411417388916015, 0.07419971466064453, 0.07375279998779297, 0.07437107086181641, 0.0749854736328125, 0.07464543914794922, 0.0748559341430664, 0.0742569580078125, 0.07386319732666016, 0.07403619384765625, 0.07388374328613281, 0.0742548828125, 0.07412159729003906, 0.07400176239013671, 0.07416393280029297, 0.07407302093505859, 0.07412499237060546, 0.07456800079345703, 0.07474380493164062, 0.073959228515625, 0.07382806396484375, 0.07397980499267579, 0.07410745239257813, 0.07375161743164063, 0.07385183715820312, 0.07374953460693359, 0.07387776184082032, 0.07367958068847656, 0.07467369842529296, 0.073998046875, 0.07418752288818359, 0.07839539337158204, 0.07484998321533202, 0.07468268585205078, 0.07453081512451172, 0.07508377838134765, 0.07464041900634766, 0.07443923187255859, 0.07528447723388672, 0.07477235412597656, 0.07432582092285156, 0.07417724609375, 0.07444409942626953, 0.07428166198730468, 0.0751103973388672, 0.07521075439453125, 0.07454220581054688, 0.07479730987548829, 0.07461746978759766, 0.0743400650024414, 0.07430931091308594, 0.07602031707763672, 0.07654195404052734, 0.07593145751953125, 0.07469894409179688, 0.07438092803955078, 0.07420966339111328, 0.07448774719238281, 0.07478838348388672, 
0.07486927795410156, 0.07618764495849609, 0.07503052520751953, 0.07540121459960937, 0.07497913360595704, 0.07495811462402344, 0.07511113739013672, 0.07470508575439454, 0.07504662322998047, 0.07516598510742188, 0.07527174377441406, 0.07501862335205078, 0.07482374572753907, 0.07477043151855468, 0.07484738922119141, 0.0747221450805664, 0.07456678771972657, 0.07667391967773438, 0.07517084503173828, 0.07432441711425782, 0.07431024169921875, 0.07465567779541016, 0.07424409484863281, 0.0740384292602539, 0.07377117156982421, 0.07511519622802734, 0.07406182098388672, 0.07385292816162109, 0.07499161529541015, 0.07529436492919922, 0.07721321868896484, 0.07591996765136719, 0.07417059326171875, 0.07430707550048828, 0.07425689697265625, 0.07670524597167969, 0.07501878356933593, 0.07418675231933594, 0.07402700805664063, 0.07389977264404297, 0.07384703826904297, 0.07410435485839843, 0.0747584991455078, 0.07430361938476562, 0.07399324798583984, 0.07466902160644531, 0.07395257568359374, 0.07443436431884766, 0.07438236999511719, 0.07416754913330079, 0.07382918548583985, 0.07368681335449219, 0.07357193756103515, 0.07368694305419922, 0.07373465728759766, 0.07452175903320313, 0.07391696166992187, 0.07378361511230469, 0.07391232299804687, 0.07462006378173829, 0.07416099548339844, 0.07405942535400391, 0.07389628601074219, 0.07418265533447266, 0.07437868499755859, 0.07407788848876953, 0.07500800323486329, 0.07443222045898437, 0.07400640106201171, 0.07389225769042969, 0.07384585571289062, 0.08005289459228515, 0.07466828918457032, 0.07457113647460938, 0.07376937866210938, 0.07383433532714843, 0.07423628997802735, 0.07425023651123047, 0.07380374145507812, 0.07393488311767578, 0.07378313446044922, 0.07383875274658203, 0.07364765167236328, 0.07360150146484375, 0.07378943634033203, 0.07710720062255859, 0.07474172973632813, 0.07412467193603516, 0.07408467102050781, 0.07391267395019531, 0.07454310607910156, 0.07459430694580078, 0.07428278350830078, 0.0740374755859375, 0.07387529754638672, 0.07363705444335937, 0.07374253082275391, 0.0737303009033203, 0.07415452575683594, 0.07402886199951172, 0.07398153686523437, 0.07393545532226563, 0.07393679809570312, 0.07397721862792969, 0.07486115264892577, 0.07831974029541015, 0.07559756469726563, 0.07499187469482421, 0.07418265533447266, 0.07386016082763672, 0.07492908477783203, 0.07450828552246094, 0.07446527862548828, 0.07576802825927734, 0.07496611022949219, 0.07659712219238281, 0.07426537322998047, 0.07431581115722656, 0.07529676818847657, 0.0741514892578125, 0.0740929946899414, 0.07384204864501953, 0.07381391906738281, 0.07380614471435547, 0.07378521728515625, 0.07380802917480468, 0.07441036987304687, 0.07385890960693359, 0.07378502655029297, 0.07498400115966797, 0.07410208129882813, 0.07408096313476563, 0.07367884826660157, 0.07382630157470703, 0.07499091339111329, 0.0741731185913086, 0.07401385498046875, 0.07378125, 0.07549014282226563, 0.07435673522949218, 0.07456563568115235, 0.0747287368774414, 0.07835721588134766, 0.07540531158447265, 0.07477772521972656, 0.07464640045166016, 0.07454041290283203, 0.07418345642089844, 0.07841069030761719, 0.07454300689697266, 0.07389286041259766, 0.07399833679199219, 0.07435660552978515, 0.07422783660888672, 0.07484620666503906, 0.07497132873535156, 0.07533958435058594, 0.07492546844482421, 0.07450380706787109, 0.07410582733154297, 0.07427216339111328, 0.07499225616455078, 0.07494857788085937, 0.07496089935302734, 0.0753743667602539, 0.07458956909179687, 0.07409465789794922, 0.07410313415527343, 0.07407148742675781, 0.07406259155273437, 
0.07411062622070312, 0.07436144256591797, 0.07401010894775391, 0.0741401596069336, 0.07439155578613281, 0.07452191925048827, 0.07499641418457031, 0.074608642578125, 0.07430067443847656, 0.07417922973632812, 0.07465708923339844, 0.07539600372314453, 0.07493782043457031, 0.07512710571289062, 0.07461692810058594, 0.07474380493164062, 0.07452262115478515, 0.07518844604492188, 0.07514908599853516, 0.07454048156738281, 0.07491436767578125, 0.07473088073730469]",tokens/s,13.389310719583404,,, 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,11127.222272,12225.282048,0.0,11846.811648,11814.785024,s,1,14.2289521484375,14.2289521484375,0.0,14.2289521484375,14.2289521484375,14.2289521484375,14.2289521484375,[14.2289521484375],,kWh,0.0002074531606000164,2.287619585885001e-05,6.829616574800107e-05,0.0002986255222068675,,MB,2072.178688,13158.514688,0.0,12750.68416,12641.86368,s,10,11.69583349609375,1.1695833496093748,0.0011074682986908374,1.1700194702148439,1.1705676147460937,1.1707565246582032,1.1709076525878908,"[1.1671827392578125, 1.1681357421875, 1.169578125, 1.170525634765625, 1.17003515625, 1.1700037841796875, 1.1703682861328124, 1.1689547119140624, 1.1709454345703125, 1.1701038818359375]",tokens/s,218.88136496257457,kWh,3.419648953291509e-05,3.7690069015955685e-06,2.2736184855599863e-05,6.070168129011051e-05,tokens/kWh,4217346.118907375,MB,2076.442624,13452.115968,0.0,13044.28544,12933.698048,s,10,50.42909423828126,5.0429094238281245,0.010713131044623299,5.045888671875,5.05390078125,5.0548341796875,5.0555808984374995,"[5.02344580078125, 5.03154052734375, 5.0300498046875, 5.03985888671875, 5.04284716796875, 5.055767578125, 5.04893017578125, 5.051208984375, 5.053693359375, 5.051751953125]",tokens/s,12.492788330149315,kWh,0.00014738280290417,1.6259827501932307e-05,9.79809950513999e-05,0.0002616236254575022,tokens/kWh,240803.94073674222,,s,630,50.42495937347408,0.08003961805313353,0.0012047015714267615,0.08003118515014648,0.08085504379272461,0.08119948577880859,0.08279828002929687,"[0.08202239990234375, 0.07817011260986328, 0.07856742095947265, 0.07859164428710938, 0.07911459350585938, 0.07797328186035156, 0.07938633728027343, 0.07966156768798828, 0.07951071929931641, 0.07934611511230469, 0.07875580596923829, 0.08002710723876953, 0.08064275360107422, 0.0799450912475586, 0.07833299255371094, 0.0786655044555664, 0.07802790069580078, 0.07945104217529297, 0.07972589111328125, 0.07980902099609374, 0.07916969299316406, 0.07876345825195312, 0.07837369537353515, 0.07974671936035156, 0.08063785552978515, 0.08017228698730469, 0.07948745727539062, 0.07952790069580078, 0.07863760375976563, 0.07819459533691406, 0.07928566741943359, 0.079727294921875, 0.08014546966552734, 0.08003065490722656, 0.07963648223876953, 0.07846633911132812, 0.07965360260009766, 0.08066777801513672, 0.08011660766601562, 0.08049606323242188, 0.07969235229492187, 0.07981791687011719, 
0.07998323059082031, 0.08002690887451172, 0.07951862335205079, 0.08059913635253907, 0.08027088165283203, 0.08024668884277343, 0.07993936157226562, 0.08069395446777344, 0.08069107055664063, 0.08032585906982422, 0.08090716552734376, 0.07994687652587891, 0.08025138854980468, 0.08074240112304687, 0.07999942779541015, 0.07999686431884766, 0.08049049377441406, 0.08077705383300782, 0.08001363372802735, 0.08046339416503906, 0.08096578979492187, 0.0835343017578125, 0.07810419464111328, 0.07861702728271484, 0.07875993347167969, 0.07910320281982422, 0.07794563293457031, 0.07910720062255859, 0.0795841293334961, 0.07844659423828125, 0.07941075134277344, 0.07931241607666016, 0.08012470245361328, 0.08071308898925782, 0.07985024261474609, 0.07810867309570313, 0.07891295623779297, 0.07934611511230469, 0.07939897918701172, 0.07895174407958984, 0.07971241760253907, 0.07925743865966797, 0.07929933166503907, 0.07827251434326171, 0.07999488067626953, 0.08039833831787109, 0.08075654602050782, 0.07964649963378906, 0.07949763488769532, 0.0801434555053711, 0.07950224304199219, 0.08066819000244141, 0.07974671936035156, 0.07939974212646485, 0.08008704376220703, 0.07939891052246094, 0.07908716583251953, 0.08019503784179688, 0.07970719909667968, 0.08031632232666015, 0.07902758026123047, 0.08018598175048829, 0.07970611572265625, 0.08047135925292968, 0.07975580596923829, 0.07994793701171875, 0.08039218902587891, 0.08016671752929687, 0.08014867401123046, 0.08084870147705078, 0.08029811096191407, 0.08087353515625, 0.08011097717285157, 0.08075328063964844, 0.08015475463867187, 0.08049043273925781, 0.07981644439697265, 0.08087718200683594, 0.08140652465820312, 0.07990681457519531, 0.08044963073730468, 0.08093686676025391, 0.08105101013183594, 0.08097036743164063, 0.08178422546386718, 0.0778737564086914, 0.07885311889648437, 0.07947161865234376, 0.0794439697265625, 0.07947203063964844, 0.07845500946044921, 0.07841011047363282, 0.07793379211425781, 0.07976834869384766, 0.07905484771728516, 0.07920130920410157, 0.08019023895263672, 0.07980665588378906, 0.07923302459716797, 0.08001126098632813, 0.07951503753662109, 0.07991356658935547, 0.07924230194091797, 0.07892626953125, 0.0789184341430664, 0.0800191650390625, 0.07949465942382812, 0.07904086303710937, 0.08089395141601563, 0.08018748474121094, 0.08011170959472656, 0.08039421081542969, 0.07999839782714843, 0.07934595489501953, 0.07966544342041015, 0.0795990753173828, 0.07917158508300781, 0.07998928070068359, 0.07979622650146484, 0.08026521301269532, 0.07939891052246094, 0.07992652893066406, 0.08001187133789063, 0.08014454650878906, 0.08001126098632813, 0.0796033935546875, 0.08004640197753907, 0.07982284545898438, 0.0788359375, 0.07995321655273438, 0.08008096313476562, 0.0805823974609375, 0.0799566421508789, 0.07983513641357422, 0.08144204711914063, 0.08071033477783203, 0.08030332946777344, 0.0803674545288086, 0.0819947509765625, 0.0795832290649414, 0.08040447998046875, 0.08035123443603516, 0.0800747528076172, 0.08039833831787109, 0.08110899353027344, 0.08029798126220702, 0.08095334625244141, 0.0828392333984375, 0.07806198120117187, 0.07916134643554687, 0.07862214660644531, 0.07906339263916015, 0.07891990661621094, 0.07980441284179687, 0.07903132629394531, 0.07842095947265625, 0.07847917175292969, 0.0784051513671875, 0.07905709075927735, 0.08156195068359375, 0.0805827865600586, 0.07967948913574219, 0.07941840362548828, 0.07950640106201172, 0.07891792297363281, 0.07928988647460937, 0.08475667572021485, 0.08009728240966797, 0.07883958435058594, 0.07980802917480469, 
0.0797085418701172, 0.08099667358398438, 0.08063516998291016, 0.08024070739746093, 0.08033110046386718, 0.0795726089477539, 0.0801596450805664, 0.08076390075683594, 0.07972329711914063, 0.07857913970947265, 0.07829503631591797, 0.0798326416015625, 0.08000128173828125, 0.08059161376953125, 0.08047545623779297, 0.08052336120605469, 0.07986646270751953, 0.07891725158691407, 0.08057231903076172, 0.07993532562255859, 0.07992784118652344, 0.07938278198242188, 0.07981858825683594, 0.0806379165649414, 0.08003734588623047, 0.08007942199707031, 0.08065634918212891, 0.08014771270751953, 0.08071183776855469, 0.08021651458740234, 0.08061971282958984, 0.08085298919677734, 0.08048639678955079, 0.07980643463134765, 0.07986179351806641, 0.0805514907836914, 0.08051248168945313, 0.08056288146972657, 0.08138780975341797, 0.08117657470703125, 0.08308306884765625, 0.078017822265625, 0.0793375015258789, 0.07864611053466797, 0.07843014526367187, 0.07839110565185547, 0.07856768035888671, 0.07947596740722657, 0.07975299072265625, 0.0790599365234375, 0.07823564910888672, 0.08095318603515625, 0.08110899353027344, 0.07955996704101563, 0.07958617401123047, 0.07955865478515625, 0.07912416076660156, 0.07958076477050781, 0.07961177825927734, 0.0796592025756836, 0.08012457275390625, 0.07991670227050782, 0.07975279998779297, 0.08049689483642578, 0.08065484619140625, 0.080500732421875, 0.07977983856201172, 0.07979779052734375, 0.07927375793457031, 0.08083936309814453, 0.08047561645507813, 0.08018179321289062, 0.08013321685791015, 0.08013673400878907, 0.07979865264892579, 0.08054579162597657, 0.0803616943359375, 0.08037763214111328, 0.08008620452880859, 0.07965078735351562, 0.07967852783203125, 0.08035206604003907, 0.07899849700927734, 0.07948697662353515, 0.08041661071777344, 0.08098627471923828, 0.08059001922607421, 0.08056915283203125, 0.08063542175292969, 0.08012643432617188, 0.08127446746826172, 0.08047388458251953, 0.08060582733154296, 0.07966925048828125, 0.08074211120605469, 0.0802523193359375, 0.08067881774902344, 0.0807147216796875, 0.08044748687744141, 0.08080537414550781, 0.08026950073242188, 0.08128569793701172, 0.08078025817871094, 0.08260041809082032, 0.07780124664306641, 0.078974365234375, 0.07772457885742187, 0.07980841827392578, 0.07981228637695313, 0.07924972534179688, 0.07912844848632812, 0.079665283203125, 0.07954150390625, 0.07949298858642578, 0.07997325134277344, 0.08013385772705078, 0.07919235229492187, 0.07950950622558593, 0.07904374694824219, 0.07904547119140624, 0.08030617523193359, 0.07933132934570312, 0.0795948486328125, 0.07898563385009766, 0.08004633331298829, 0.07961395263671875, 0.08034480285644531, 0.08109468841552735, 0.08066687774658203, 0.07996211242675781, 0.07929241943359375, 0.07982284545898438, 0.07984754943847656, 0.08003961944580078, 0.08027369689941406, 0.07988419342041016, 0.07969586944580079, 0.07988224029541016, 0.08014768218994141, 0.08029878234863282, 0.0798883819580078, 0.08016690826416016, 0.07977558135986328, 0.10099523162841798, 0.0751094741821289, 0.07972054290771484, 0.07915174102783203, 0.07932947540283203, 0.08023654174804687, 0.07990886688232422, 0.08022425842285157, 0.08081785583496094, 0.08087789154052734, 0.08048844909667968, 0.08153702545166015, 0.08276742553710938, 0.08016326141357422, 0.07964672088623047, 0.08062361907958984, 0.08073011016845703, 0.08084889221191406, 0.08042716979980469, 0.07946943664550782, 0.08045868682861328, 0.08133350372314453, 0.08083535766601563, 0.08281088256835938, 0.07819468688964844, 0.0787837142944336, 0.07909638214111328, 
0.0784095687866211, 0.07843059539794922, 0.07951181030273438, 0.07959117126464844, 0.07859398651123047, 0.07862892913818359, 0.07986380767822265, 0.08073216247558594, 0.07984703826904296, 0.07969769287109375, 0.08000163269042969, 0.07982694244384765, 0.07962828826904297, 0.07913200378417969, 0.08030646514892578, 0.07916582489013672, 0.07974515533447266, 0.07991801452636718, 0.08004422760009766, 0.08020995330810547, 0.08005705261230468, 0.08031215667724609, 0.07999644470214844, 0.08046656036376953, 0.07968153381347656, 0.07918182373046875, 0.08052735900878906, 0.07966310119628907, 0.08011145782470704, 0.08012748718261718, 0.08035574340820313, 0.08065849304199219, 0.08064742279052735, 0.08028575897216797, 0.08058354949951171, 0.08019558715820313, 0.0801607666015625, 0.08009932708740235, 0.0806645736694336, 0.07974297332763672, 0.08039218902587891, 0.08077926635742187, 0.08033235168457031, 0.08140230560302734, 0.08115945434570312, 0.08065020751953125, 0.08051360321044922, 0.08078099060058594, 0.08051900482177735, 0.08082669067382812, 0.08073177337646484, 0.08044617462158203, 0.08070105743408203, 0.08081369781494141, 0.08031308746337891, 0.08110284423828125, 0.08118681335449218, 0.08131378936767578, 0.08087776184082031, 0.08203343963623047, 0.0788705291748047, 0.07800211334228516, 0.0788212127685547, 0.07983235168457031, 0.07948544311523438, 0.07987654113769531, 0.07957504272460937, 0.07935737609863282, 0.07880556488037109, 0.07954431915283203, 0.08023177337646484, 0.079917724609375, 0.08033916473388672, 0.07889282989501953, 0.07928006744384766, 0.07998265838623046, 0.07980960083007813, 0.07996102142333984, 0.07972892761230468, 0.07998191833496093, 0.07984371185302734, 0.08030207824707031, 0.08003379058837891, 0.08021526336669922, 0.08027011108398438, 0.07981465911865235, 0.0796874237060547, 0.07962239837646484, 0.08001760101318359, 0.08017708587646484, 0.08005353546142578, 0.08002953338623046, 0.07935574340820313, 0.07980124664306641, 0.08065843200683594, 0.0809184341430664, 0.08074588775634765, 0.08022905731201171, 0.08016281890869141, 0.08080786895751953, 0.08060912322998047, 0.08020764923095704, 0.08062406158447266, 0.08066643524169922, 0.08069929504394531, 0.08029827117919922, 0.08076902770996094, 0.08056124877929688, 0.08175504302978516, 0.0803430404663086, 0.08018915557861328, 0.08071395111083984, 0.08028575897216797, 0.08060041809082032, 0.080623779296875, 0.08120985412597656, 0.08060256195068359, 0.08084524536132813, 0.0807508773803711, 0.08079872131347657, 0.08210313415527344, 0.0803799057006836, 0.08197731018066406, 0.07891353607177734, 0.07926681518554687, 0.07878265380859376, 0.079927490234375, 0.07981938934326172, 0.08007884979248046, 0.07963986968994141, 0.0802863998413086, 0.07934944152832031, 0.07852003479003906, 0.07901238250732422, 0.08063616180419922, 0.07974278259277344, 0.0799817886352539, 0.07999097442626953, 0.08056278228759765, 0.08002329254150391, 0.07961420440673828, 0.08006246185302734, 0.07993753814697266, 0.08011161804199218, 0.07995552062988281, 0.07961030578613282, 0.08038380432128907, 0.08015248107910156, 0.080193603515625, 0.07987599945068359, 0.08072978973388673, 0.0800895004272461, 0.0796810531616211, 0.07945286560058594, 0.080552001953125, 0.08072185516357422, 0.08068077087402344, 0.07938019561767579, 0.08030252838134766, 0.0801095962524414, 0.08088166046142578, 0.08081584167480468, 0.08126287841796875, 0.08030617523193359, 0.0798023681640625, 0.08090966033935547, 0.08051939392089844, 0.0799543685913086, 0.08028739166259766, 0.08052713775634765, 
0.0801698226928711, 0.08027721405029296, 0.07998992156982422, 0.08052374267578125, 0.08137561798095704, 0.08071987152099609, 0.0801812515258789, 0.0808157730102539, 0.08049603271484375, 0.08115039825439453, 0.08061593627929688, 0.08083570861816407, 0.08094156646728516, 0.08095747375488281, 0.08082262420654297, 0.08344767761230469, 0.07886227416992188, 0.07932319641113281, 0.07937359619140626, 0.07923715209960938, 0.07954736328125, 0.07995597076416015, 0.07898524475097657, 0.07883158111572265, 0.07951360321044922, 0.07836662292480469, 0.0800317153930664, 0.08160883331298828, 0.08018534088134766, 0.0795340805053711, 0.07982899475097656, 0.07962009429931641, 0.07976668548583984, 0.079595458984375, 0.07957097625732422, 0.07942438507080078, 0.08001126098632813, 0.07960745239257813, 0.07999727630615235, 0.08083468627929688, 0.08038349151611328, 0.08024921417236328, 0.07963648223876953, 0.08050688171386719, 0.08024269104003906, 0.08002559661865234, 0.08040761566162109, 0.07979084777832031, 0.07981276702880859, 0.07929859161376954, 0.07993068695068359, 0.08107283020019532, 0.08031231689453125, 0.08054579162597657, 0.080574462890625, 0.08040243530273437, 0.08026290893554687, 0.0799889907836914, 0.08022332763671874, 0.07904045104980469, 0.08054064178466797, 0.0801607666015625, 0.08037907409667969, 0.08105197143554688, 0.0807060775756836, 0.08126998138427734, 0.08064911651611328, 0.0808479995727539, 0.08123056030273437, 0.08054988861083984, 0.08071920013427734, 0.08062601470947266, 0.08052767944335938, 0.08094310760498047, 0.08080121612548828, 0.08081660461425781, 0.08070767974853515, 0.08101395416259766]",tokens/s,12.4938127432862,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context 
result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.747328,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,3548.95872,4490.985472,0.0,4112.515072,3976.487424,s,1,10.02813671875,10.02813671875,0.0,10.02813671875,10.02813671875,10.02813671875,10.02813671875,[10.02813671875],,kWh,8.055114220416802e-05,8.878118307045344e-06,2.702307717399998e-05,0.00011645233768521334,,MB,1479.049216,4690.214912,0.0,4282.384384,4102.199808,s,10,3.114191009521484,0.3114191009521484,0.0034445911652642505,0.31207496643066407,0.3150069061279297,0.31512971954345703,0.3152279702758789,"[0.3030219421386719, 0.3110392150878906, 0.30835064697265624, 0.31066754150390624, 0.312292724609375, 0.31525253295898437, 0.3149796142578125, 0.3143934326171875, 0.31233615112304686, 0.3118572082519531]",tokens/s,822.0433467866702,kWh,8.933777769318192e-06,9.85237133470348e-07,5.937311147151524e-06,1.5856326049940063e-05,tokens/kWh,16144975.777725488,MB,1483.063296,4698.60352,0.0,4290.772992,4102.202368,s,10,20.3044150390625,2.0304415039062502,0.010348142657225981,2.028146728515625,2.040732727050781,2.0472989807128905,2.052551983642578,"[2.036552001953125, 2.0176671142578124, 2.0279625244140624, 2.017232421875, 2.0392735595703124, 2.053865234375, 2.0263463134765627, 2.03277294921875, 2.0283309326171874, 2.0244119873046875]",tokens/s,31.027734548765824,kWh,5.962421416026515e-05,6.576492029975314e-06,3.9472558514048464e-05,0.00010567326470428893,tokens/kWh,596177.2845411394,,s,630,20.301882244110097,0.032225209911285885,0.0005863946240674489,0.03213471984863281,0.032721768951416014,0.03309802722930908,0.03427442245483398,"[0.03317695999145508, 0.03250255966186524, 0.032330623626708986, 0.032279518127441405, 0.032067359924316405, 0.03225171279907227, 0.03252633666992188, 0.03202703857421875, 0.03313151931762695, 0.03214179229736328, 0.0319780158996582, 0.03199699211120605, 0.032199615478515624, 0.03259801483154297, 0.03223756790161133, 0.032925537109375, 0.03217158508300781, 0.03228927993774414, 0.033048736572265626, 0.03235763168334961, 0.03208822250366211, 0.03260662460327148, 0.032441726684570314, 0.03621468734741211, 0.032592159271240234, 0.0321921272277832, 0.0324027214050293, 0.03225360107421875, 0.03210019302368164, 0.03216534423828125, 0.03230134582519531, 0.03206329727172851, 0.032424545288085936, 0.03211859130859375, 0.03212716674804687, 0.03237580871582031, 0.03257855987548828, 0.03234815979003906, 0.032323585510253904, 0.03328799819946289, 0.03234016036987305, 0.032159648895263675, 0.03292752075195313, 0.03248569488525391, 
0.031916032791137694, 0.032182174682617186, 0.03212271881103516, 0.03231769561767578, 0.0321497917175293, 0.03229872131347656, 0.03192831993103027, 0.031937824249267575, 0.032021217346191407, 0.03198975944519043, 0.031905792236328126, 0.03177267265319824, 0.03179929542541504, 0.03176217651367187, 0.03161523246765137, 0.03161868858337402, 0.031871360778808595, 0.03211264038085938, 0.03184230422973633, 0.033083393096923826, 0.032188255310058596, 0.03227481460571289, 0.03212860870361328, 0.032610462188720706, 0.03301174545288086, 0.032247135162353516, 0.03254732894897461, 0.03219039916992188, 0.03210079956054687, 0.031826719284057614, 0.03183865547180176, 0.031928895950317386, 0.0320184326171875, 0.03180953598022461, 0.031841600418090824, 0.031977888107299804, 0.03180175971984863, 0.0319071044921875, 0.03185103988647461, 0.0318253116607666, 0.0316463680267334, 0.03184025573730469, 0.031667232513427734, 0.031836544036865234, 0.03175075149536133, 0.03229254531860352, 0.03186086463928223, 0.03169468879699707, 0.03265980911254883, 0.031947839736938474, 0.03209724807739258, 0.03240745544433594, 0.032767166137695314, 0.03215039825439453, 0.03199590492248535, 0.03222528076171875, 0.031784959793090824, 0.032092159271240234, 0.03171711921691894, 0.03159612846374512, 0.031687519073486325, 0.03190303993225098, 0.03195459175109863, 0.031711679458618164, 0.03190361595153809, 0.03174774360656738, 0.0318874568939209, 0.03191801643371582, 0.03185494422912598, 0.032257728576660157, 0.032414112091064456, 0.03191654396057129, 0.032161537170410155, 0.03175420761108398, 0.03160902404785156, 0.03169020843505859, 0.03210841751098633, 0.03221372985839844, 0.03160671997070313, 0.03258134460449219, 0.03189583969116211, 0.03161497688293457, 0.032796607971191404, 0.032150592803955075, 0.031976512908935543, 0.03169593620300293, 0.03181865692138672, 0.03199935913085938, 0.03214390563964844, 0.03202899169921875, 0.03187494468688965, 0.031856191635131835, 0.03204508972167969, 0.031860960006713866, 0.03180953598022461, 0.03188121604919433, 0.031889408111572266, 0.031542560577392575, 0.031674879074096676, 0.03246716690063477, 0.03419161605834961, 0.03310316848754883, 0.0320250244140625, 0.03207372665405273, 0.0318525447845459, 0.03250697708129883, 0.032138145446777344, 0.031980607986450194, 0.0319081916809082, 0.032291038513183594, 0.03275107192993164, 0.03246992111206055, 0.0325459213256836, 0.03235017776489258, 0.032396129608154293, 0.03224787139892578, 0.03256934356689453, 0.03249382400512695, 0.032204734802246095, 0.03236355209350586, 0.032295902252197264, 0.03251385498046875, 0.03258531188964844, 0.032188961029052734, 0.032122112274169924, 0.03184614372253418, 0.03185670471191406, 0.03185724830627441, 0.031959264755249024, 0.03237811279296875, 0.03203964614868164, 0.032272289276123044, 0.03297654342651367, 0.03232611083984375, 0.03235631942749023, 0.032032798767089844, 0.03255699157714844, 0.03190496063232422, 0.031948959350585934, 0.03180604743957519, 0.0318691520690918, 0.0322784309387207, 0.03182537651062012, 0.032145503997802735, 0.03181817626953125, 0.03330774307250976, 0.03224563217163086, 0.032094112396240236, 0.03207379150390625, 0.032043041229248045, 0.03227238464355469, 0.03204095840454101, 0.03180544090270996, 0.032643070220947264, 0.031897472381591796, 0.03164521598815918, 0.03195555114746094, 0.032333824157714845, 0.03190169525146484, 0.03178291130065918, 0.03196313667297363, 0.031861055374145505, 0.031949888229370116, 0.03242015838623047, 0.03183238410949707, 0.031854591369628905, 0.03181363105773926, 
0.03200115203857422, 0.03187187194824219, 0.032048255920410156, 0.032256160736083984, 0.032327392578125, 0.03210079956054687, 0.031742176055908206, 0.03218262481689453, 0.03209609603881836, 0.032133281707763674, 0.03208726501464844, 0.03187382316589355, 0.03224092864990234, 0.032217823028564456, 0.03203481674194336, 0.03296460723876953, 0.03193196868896484, 0.03262675094604492, 0.03158409690856934, 0.0317936954498291, 0.031890623092651366, 0.03195167922973633, 0.03215359878540039, 0.03174611282348633, 0.031593439102172854, 0.031976192474365235, 0.03170844841003418, 0.03211974334716797, 0.031705408096313475, 0.031758016586303714, 0.031731712341308595, 0.03189145660400391, 0.03175779151916504, 0.031838336944580076, 0.03175827217102051, 0.03170556831359863, 0.03201228713989258, 0.03190508842468262, 0.03187750434875488, 0.032073886871337894, 0.03197148895263672, 0.034414974212646485, 0.03209961700439453, 0.03227068710327148, 0.03181401634216308, 0.0318338565826416, 0.03185411262512207, 0.03180150413513184, 0.03227036666870117, 0.031742496490478514, 0.031747903823852536, 0.03166819190979004, 0.03238739013671875, 0.03210614395141602, 0.03187110328674316, 0.03195712089538574, 0.03176243209838867, 0.031854591369628905, 0.031768575668334964, 0.0318351993560791, 0.032134078979492185, 0.0321923828125, 0.03233599853515625, 0.03225600051879883, 0.03209830474853516, 0.03216793441772461, 0.03232915115356445, 0.03281379318237305, 0.032597854614257814, 0.03335567855834961, 0.03323839950561523, 0.032166622161865235, 0.032026622772216795, 0.03196723175048828, 0.03250128173828125, 0.032129566192626954, 0.0325645751953125, 0.03210419082641602, 0.03224838256835937, 0.03264131164550781, 0.0324587516784668, 0.03215510559082031, 0.032584449768066404, 0.033021728515625, 0.03223324966430664, 0.03540124893188477, 0.03249737548828125, 0.032144191741943356, 0.032362655639648436, 0.032309249877929686, 0.03208758544921875, 0.03214134216308594, 0.03236294555664063, 0.03218022537231445, 0.03259756851196289, 0.032364990234375, 0.03269836807250977, 0.032745471954345705, 0.03293183898925781, 0.03223961639404297, 0.032059391021728514, 0.032960479736328124, 0.03245043182373047, 0.03309174346923828, 0.03318374252319336, 0.032395263671875, 0.03222492980957031, 0.032296768188476564, 0.032182304382324216, 0.03233804702758789, 0.03209616088867188, 0.03226451110839844, 0.032239070892333986, 0.03210710525512695, 0.032091232299804685, 0.03220377731323242, 0.03222700881958008, 0.032393535614013674, 0.03259580612182617, 0.0326262092590332, 0.03230502319335937, 0.03286851119995117, 0.0331060791015625, 0.03197996711730957, 0.03200204849243164, 0.03187507247924805, 0.032552383422851563, 0.031911808013916014, 0.04034431838989258, 0.0338328971862793, 0.0324048957824707, 0.03284643173217774, 0.03221913528442383, 0.03217203140258789, 0.03241510391235351, 0.03258000183105469, 0.03238729476928711, 0.03256707382202149, 0.03251228713989258, 0.03221907043457031, 0.03234729766845703, 0.032365409851074216, 0.0323743667602539, 0.032362911224365236, 0.03229695892333984, 0.032401504516601565, 0.03368540954589844, 0.03235747146606445, 0.03236547088623047, 0.03213459014892578, 0.03344870376586914, 0.03210425567626953, 0.03297654342651367, 0.032465248107910155, 0.03231916809082031, 0.03246432113647461, 0.032205696105957034, 0.03266672134399414, 0.03229776000976563, 0.03250790405273438, 0.03237081527709961, 0.03205120086669922, 0.03256524658203125, 0.032194561004638675, 0.03249942398071289, 0.034259201049804684, 0.03293135833740234, 0.03337033462524414, 
0.03235430526733398, 0.03192118453979492, 0.03196217536926269, 0.03239926528930664, 0.03174947166442871, 0.033391265869140624, 0.03174399948120117, 0.03178895950317383, 0.031856735229492186, 0.03196284866333008, 0.03271913528442383, 0.032231422424316404, 0.03199795150756836, 0.03195011138916016, 0.03180364799499512, 0.03230767822265625, 0.03194684791564941, 0.03189545631408691, 0.031741151809692385, 0.031912736892700196, 0.03177824020385742, 0.032134849548339846, 0.03190787124633789, 0.032045921325683596, 0.032004096984863284, 0.031997055053710935, 0.03233804702758789, 0.032275264739990234, 0.032008129119873045, 0.031882591247558593, 0.0323809928894043, 0.031871807098388674, 0.0319465274810791, 0.03239833450317383, 0.03280003356933594, 0.03298070526123047, 0.034484222412109376, 0.032355712890625, 0.03177078437805176, 0.03199231910705566, 0.03176806449890137, 0.032160224914550783, 0.031926271438598636, 0.03179110336303711, 0.03200204849243164, 0.03290521621704102, 0.03264851379394531, 0.03182076835632324, 0.031844032287597655, 0.031817760467529294, 0.03185663986206055, 0.03197686386108398, 0.03219055938720703, 0.032233951568603515, 0.032001121520996094, 0.032152511596679687, 0.03177638435363769, 0.03188307189941406, 0.032012863159179686, 0.03195289611816406, 0.03244236755371094, 0.032556671142578125, 0.033536094665527344, 0.03229673767089844, 0.03229708862304687, 0.03216505432128906, 0.03214815902709961, 0.03254185485839844, 0.03211363220214844, 0.03217510223388672, 0.032294975280761716, 0.03239724731445313, 0.032254974365234376, 0.03211027145385742, 0.032260414123535155, 0.032259265899658204, 0.03230998229980469, 0.03237283325195312, 0.03232489776611328, 0.032293472290039066, 0.03215776062011719, 0.03182143974304199, 0.031816127777099606, 0.03165763282775879, 0.03164505577087402, 0.031492576599121094, 0.03264665603637695, 0.031648128509521485, 0.031634464263916015, 0.033498592376708984, 0.031782432556152346, 0.03178303909301758, 0.03247561645507813, 0.032287967681884765, 0.03192697525024414, 0.03233171081542969, 0.03217808151245117, 0.03219276809692383, 0.03222323226928711, 0.031989664077758787, 0.031924320220947267, 0.032296382904052734, 0.03236726379394531, 0.03247504043579102, 0.03244963073730469, 0.03223030471801758, 0.032200702667236326, 0.032086017608642575, 0.03202592086791992, 0.03206009674072265, 0.032199905395507815, 0.03239811325073242, 0.032176128387451174, 0.03227974319458008, 0.03242681503295899, 0.032217086791992186, 0.032036865234375, 0.032573440551757815, 0.03307724761962891, 0.03208396911621094, 0.03301769638061523, 0.03226959991455078, 0.032219711303710936, 0.03266387176513672, 0.03340697479248047, 0.03315657424926758, 0.03227088165283203, 0.03199795150756836, 0.03213721466064453, 0.032268032073974606, 0.03200864028930664, 0.031815103530883786, 0.031842336654663086, 0.03171504020690918, 0.03163030433654785, 0.03177807998657227, 0.03176489639282227, 0.03359052658081055, 0.03338927841186524, 0.0348012809753418, 0.03322675323486328, 0.03290969467163086, 0.03198975944519043, 0.03168460845947266, 0.03172966384887695, 0.032284671783447266, 0.03181270408630371, 0.03163843154907227, 0.03155558395385742, 0.031669727325439455, 0.03161961555480957, 0.03207372665405273, 0.03191967964172363, 0.03214748764038086, 0.032543136596679685, 0.03221452713012695, 0.03208000183105469, 0.03215603256225586, 0.032083358764648434, 0.031897727966308596, 0.031889888763427736, 0.032376895904541014, 0.032122814178466796, 0.032745471954345705, 0.03179929542541504, 0.03171468734741211, 
0.031840288162231445, 0.03216239929199219, 0.032527584075927735, 0.03213593673706055, 0.03271273422241211, 0.03215955352783203, 0.03285932922363281, 0.03210982513427734, 0.031981311798095706, 0.03265740966796875, 0.032061279296875, 0.03185193634033203, 0.03182284736633301, 0.03192940711975098, 0.03203750228881836, 0.03211420822143555, 0.03230569458007813, 0.031678464889526366, 0.03188649559020996, 0.03241660690307617, 0.0325153923034668, 0.032244415283203126, 0.033050495147705077, 0.03215577697753906, 0.031876575469970705, 0.03210089492797852, 0.031794240951538086, 0.0319804801940918, 0.033145984649658206, 0.032107391357421876, 0.032543041229248046, 0.03198755264282226, 0.032165569305419923, 0.03186233520507813, 0.03170569610595703, 0.03162931251525879, 0.031568159103393556, 0.03217567825317383, 0.03174991989135742, 0.03175590324401856, 0.03186764717102051, 0.03197689628601074, 0.03190822410583496, 0.03178463935852051, 0.03178771209716797, 0.03176540756225586, 0.0318940486907959, 0.031940895080566405, 0.03187420845031738, 0.031929471969604495, 0.03184406471252441, 0.03225001525878906, 0.03227571105957031, 0.03224825668334961, 0.032258304595947265, 0.032166942596435544, 0.032279518127441405, 0.032143360137939454, 0.0321976318359375, 0.03300396728515625, 0.032195137023925784, 0.032046207427978514, 0.03208262252807617, 0.0325032958984375, 0.03200921630859375, 0.0342806396484375, 0.03351366424560547, 0.032511905670166014, 0.03291587066650391, 0.03236249542236328, 0.03216313552856445, 0.032184894561767576, 0.03207952117919922, 0.031895519256591794, 0.03205916976928711, 0.03208879852294922, 0.03183318328857422, 0.031675296783447264, 0.0318791675567627, 0.03170918464660644, 0.031901792526245115, 0.03171120071411133, 0.03180867195129394, 0.03200284957885742, 0.032000286102294925]",tokens/s,31.031605465191426,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,5169.057792,5444.07552,0.0,5058.330624,5057.441792,s,1,10.582130859375,10.582130859375,0.0,10.582130859375,10.582130859375,10.582130859375,10.582130859375,[10.582130859375],,kWh,9.816520338332187e-05,1.0821117038309991e-05,3.342780452000871e-05,0.00014241412494164058,,MB,1723.899904,5630.722048,0.0,5215.617024,5189.834752,s,10,4.7688110656738285,0.47688110656738286,0.0037668501307190592,0.4785899200439453,0.47943124084472655,0.4796458724975586,0.4798175778198242,"[0.46651693725585935, 0.4755134582519531, 0.47505496215820314, 0.4787362365722656, 0.47761624145507814, 0.47876663208007814, 0.4798605041503906, 0.4789189453125, 0.478443603515625, 
0.479383544921875]",tokens/s,536.8214351008838,kWh,1.3711206603976267e-05,1.5121170284231307e-06,9.070752206091517e-06,2.4294075838490918e-05,tokens/kWh,10537548.40076691,MB,1731.66592,5651.693568,0.0,5234.491392,5189.837312,s,10,28.756184326171873,2.8756184326171876,0.00655877971505943,2.8755748291015624,2.88197275390625,2.8843220703125003,2.8862015234375002,"[2.8758603515625, 2.873604736328125, 2.860920166015625, 2.871558837890625, 2.876234130859375, 2.873345458984375, 2.875289306640625, 2.881249267578125, 2.88145068359375, 2.88667138671875]",tokens/s,21.908330842997756,kWh,8.873658219310511e-05,9.786212936828019e-06,5.830952392030684e-05,0.00015683231905023998,tokens/kWh,401702.91672992765,,s,630,28.75398020935058,0.045641238427540615,0.0005420137993262267,0.04552609634399414,0.045938948440551755,0.046272666168212885,0.048391723403930664,"[0.04697417449951172, 0.04593878555297851, 0.045402816772460934, 0.04553932952880859, 0.04568796920776367, 0.04556272125244141, 0.04567552185058594, 0.04561407852172852, 0.04579344177246094, 0.045768543243408205, 0.04578857421875, 0.045699329376220704, 0.046029151916503905, 0.04603894424438477, 0.04570521545410156, 0.04546915054321289, 0.0455332145690918, 0.045593151092529295, 0.04545539093017578, 0.04597145462036133, 0.04575372695922852, 0.04566284942626953, 0.04541392135620117, 0.04532681655883789, 0.04531961441040039, 0.045289920806884765, 0.045187198638916015, 0.045307136535644534, 0.04536396789550781, 0.0454205436706543, 0.04539756774902344, 0.04544169616699219, 0.04570294570922852, 0.045400062561035154, 0.04534476852416992, 0.04526092910766601, 0.045459327697753904, 0.045378944396972654, 0.045507198333740236, 0.04661772918701172, 0.04580364990234375, 0.04541516876220703, 0.04609555053710938, 0.04565625762939453, 0.04559321594238281, 0.04563148880004883, 0.045570049285888675, 0.04574591827392578, 0.04545561599731445, 0.04542259216308594, 0.045451263427734374, 0.045515838623046874, 0.04547475051879883, 0.04537724685668945, 0.04537593460083008, 0.04669424057006836, 0.045725696563720705, 0.04545024108886719, 0.04736636734008789, 0.045644577026367185, 0.0454791374206543, 0.04552329635620117, 0.045382080078125, 0.04578268814086914, 0.045416961669921874, 0.04539769744873047, 0.047014209747314455, 0.04634828948974609, 0.045508384704589844, 0.04543065643310547, 0.04543020629882812, 0.04556687927246094, 0.045553665161132816, 0.045346431732177735, 0.045602912902832034, 0.0460882568359375, 0.048181472778320314, 0.045930496215820314, 0.04585827255249023, 0.04585526275634766, 0.045682689666748044, 0.045506462097167966, 0.045489505767822264, 0.04613811111450195, 0.0456371841430664, 0.04553964614868164, 0.045504638671875, 0.04555801773071289, 0.045522846221923825, 0.04579939270019531, 0.04574428939819336, 0.04561891174316406, 0.045708831787109376, 0.04555171203613281, 0.04570969772338867, 0.045725696563720705, 0.045725696563720705, 0.04566806411743164, 0.045666465759277346, 0.04549030303955078, 0.04564377593994141, 0.04554771041870117, 0.04547155380249023, 0.04539494323730469, 0.04531916809082031, 0.04542464065551758, 0.04529961776733398, 0.04532976150512695, 0.045406974792480466, 0.04535862350463867, 0.04545775985717773, 0.04545241546630859, 0.045306270599365234, 0.04546416091918945, 0.045271041870117185, 0.04532633590698242, 0.04549427032470703, 0.0454218864440918, 0.04528758239746094, 0.045267486572265626, 0.04530380630493164, 0.04533407974243164, 0.04549062347412109, 0.0455107192993164, 0.04524950408935547, 0.04525068664550781, 0.04572159957885742, 
0.0458939208984375, 0.04530771255493164, 0.04523110580444336, 0.04570000076293945, 0.04513382339477539, 0.04526694488525391, 0.045195518493652345, 0.045272830963134766, 0.04520281600952149, 0.04526655960083008, 0.04542499160766601, 0.04519987106323242, 0.04530521774291992, 0.045290271759033204, 0.04532796859741211, 0.0452775993347168, 0.0451459846496582, 0.04501926422119141, 0.04506166458129883, 0.045187808990478515, 0.0452174072265625, 0.04525299072265625, 0.04514982223510742, 0.04531123352050781, 0.04540505599975586, 0.04515398406982422, 0.04515871810913086, 0.04519724655151367, 0.045348926544189455, 0.045445121765136716, 0.04559228897094727, 0.045537567138671874, 0.04528742218017578, 0.04543008041381836, 0.04525673675537109, 0.04544988632202149, 0.0453939208984375, 0.04527481460571289, 0.045463871002197266, 0.04569689559936523, 0.045324256896972656, 0.045383838653564455, 0.04540620803833008, 0.04540726470947266, 0.04543356704711914, 0.04552524948120117, 0.04562895965576172, 0.045948768615722654, 0.045746910095214845, 0.04574780654907227, 0.04562716674804688, 0.04553740692138672, 0.045652576446533207, 0.04553299331665039, 0.045496288299560546, 0.04552092742919922, 0.04547379302978516, 0.04557401657104492, 0.045486175537109375, 0.045572128295898434, 0.04566220855712891, 0.04555980682373047, 0.046431774139404296, 0.045625473022460936, 0.04553555297851562, 0.04576464080810547, 0.04560006332397461, 0.045470401763916014, 0.04539910507202148, 0.045480480194091795, 0.04543529510498047, 0.045434879302978515, 0.04536134338378906, 0.04549609756469727, 0.04518431854248047, 0.04522406387329102, 0.045146720886230465, 0.04524582290649414, 0.04522643280029297, 0.04523955154418945, 0.04541740798950195, 0.045409599304199216, 0.045480350494384765, 0.045449504852294924, 0.04542464065551758, 0.04525260925292969, 0.04540620803833008, 0.04552499389648437, 0.0455404167175293, 0.04536620712280273, 0.04519935989379883, 0.045293857574462894, 0.045430496215820314, 0.04526079940795898, 0.04540825653076172, 0.04543222427368164, 0.0452918701171875, 0.04629119873046875, 0.04541775894165039, 0.04533731079101563, 0.046508033752441405, 0.0452935676574707, 0.045416255950927735, 0.04608857727050781, 0.04528108978271484, 0.04533987045288086, 0.045390625, 0.0454205436706543, 0.04530998229980469, 0.04536931228637695, 0.0455081901550293, 0.04548233413696289, 0.04554963302612305, 0.04549427032470703, 0.04859084701538086, 0.04594483184814453, 0.04551679992675781, 0.04577280044555664, 0.045469440460205075, 0.04542489624023437, 0.048105472564697264, 0.04558438491821289, 0.04543078231811523, 0.04543904113769531, 0.04567238235473633, 0.04585881423950195, 0.045733985900878904, 0.04756876754760742, 0.046250015258789065, 0.045489185333251955, 0.045359840393066404, 0.045328289031982424, 0.04531849670410156, 0.0454425277709961, 0.04516940689086914, 0.04550611114501953, 0.04539324951171875, 0.049543838500976566, 0.04558051300048828, 0.04548198318481445, 0.04525859069824219, 0.04532758331298828, 0.04531203079223633, 0.045388702392578126, 0.04554729461669922, 0.045895454406738284, 0.04594041442871094, 0.045556480407714844, 0.04537139129638672, 0.04555107116699219, 0.04544739151000977, 0.04548998260498047, 0.045512577056884766, 0.045365886688232424, 0.04543078231811523, 0.04537580871582031, 0.045736736297607425, 0.045892513275146485, 0.04559491348266602, 0.04556332778930664, 0.04545516967773437, 0.04537513732910156, 0.04543366241455078, 0.045364990234375, 0.045363166809082034, 0.0453798713684082, 0.04539187240600586, 0.04536953735351563, 
0.04573574447631836, 0.045676544189453126, 0.04553932952880859, 0.04562681579589844, 0.045496990203857425, 0.04568463897705078, 0.04561676788330078, 0.04563801574707031, 0.04577059173583985, 0.045779296875, 0.04572153472900391, 0.04566527938842774, 0.04565900802612305, 0.04570735931396484, 0.04572735977172852, 0.04571369552612305, 0.04566835021972656, 0.04556403350830078, 0.045653118133544925, 0.045652416229248045, 0.045978591918945315, 0.045679710388183595, 0.045448287963867184, 0.04586886215209961, 0.04554956817626953, 0.04547590255737305, 0.04560070419311523, 0.04561715316772461, 0.04557209777832031, 0.045568000793457034, 0.0454964485168457, 0.045442943572998044, 0.045644927978515625, 0.0455401611328125, 0.04538998413085937, 0.045848320007324216, 0.045631649017333985, 0.04582627105712891, 0.045612735748291014, 0.04552508926391602, 0.04545872116088867, 0.04560355377197266, 0.045436927795410156, 0.04568064117431641, 0.04561510467529297, 0.045658111572265625, 0.045676544189453126, 0.04547315216064453, 0.04548467254638672, 0.04555980682373047, 0.045692928314208986, 0.04567609786987305, 0.045443328857421875, 0.04554975891113281, 0.04544102478027344, 0.045590526580810545, 0.045598751068115236, 0.04558432006835938, 0.04556803131103516, 0.04585609436035156, 0.04561350250244141, 0.04551702499389648, 0.045401630401611326, 0.045593055725097656, 0.04543411254882813, 0.04550937652587891, 0.04546355056762695, 0.04611699295043945, 0.0457050895690918, 0.04568678283691406, 0.04537548828125, 0.04549836730957031, 0.04549427032470703, 0.04558848190307617, 0.045599937438964844, 0.04559167861938476, 0.04551561737060547, 0.0457163200378418, 0.04589110565185547, 0.04561558532714844, 0.04568678283691406, 0.046005630493164064, 0.04553817749023437, 0.0464119987487793, 0.04596736145019531, 0.04573183822631836, 0.04558975982666016, 0.045809761047363284, 0.04551289749145508, 0.04546326446533203, 0.04582486343383789, 0.04553868865966797, 0.046106910705566405, 0.04570342254638672, 0.04539580917358398, 0.045343006134033206, 0.04526476669311524, 0.04529971313476563, 0.04535087966918945, 0.045271072387695316, 0.04583248138427734, 0.0456025276184082, 0.045453311920166016, 0.04552694320678711, 0.045510753631591794, 0.04551068878173828, 0.04543280029296875, 0.045413856506347654, 0.04538422393798828, 0.04542668914794922, 0.04539318466186523, 0.04542127990722656, 0.04549427032470703, 0.045303329467773434, 0.045476001739501955, 0.046039360046386715, 0.04558643341064453, 0.045733665466308596, 0.04522780990600586, 0.04529097747802734, 0.04651222229003906, 0.04529369735717773, 0.04522470474243164, 0.045516128540039065, 0.04619244766235352, 0.045929313659667965, 0.04535897445678711, 0.0454486083984375, 0.045572254180908205, 0.04574627304077149, 0.0455316162109375, 0.045729694366455076, 0.045643871307373046, 0.04561097717285156, 0.04579481506347656, 0.045554206848144534, 0.04573593521118164, 0.04587833786010742, 0.04574825668334961, 0.0455648307800293, 0.04773065567016602, 0.04559836959838867, 0.04563596725463867, 0.04555996704101563, 0.04565795135498047, 0.045629119873046874, 0.046617919921875, 0.047863872528076175, 0.04598172760009766, 0.04561708831787109, 0.04581788635253906, 0.045332672119140625, 0.0453639030456543, 0.04542444610595703, 0.04544092941284179, 0.04534281539916992, 0.048389247894287106, 0.04656531143188476, 0.04560153579711914, 0.04553561782836914, 0.045435840606689454, 0.04549311828613281, 0.04540230560302734, 0.0453480339050293, 0.04533078384399414, 0.04539430236816406, 0.0454343376159668, 0.045440959930419925, 
0.045411041259765625, 0.04538297653198242, 0.045676544189453126, 0.045476318359375, 0.0456778564453125, 0.0455052490234375, 0.045950687408447266, 0.045834209442138674, 0.046056961059570314, 0.04611548614501953, 0.04570703887939453, 0.045697406768798826, 0.04561920166015625, 0.04560281753540039, 0.045485729217529296, 0.04551919937133789, 0.04576051330566406, 0.045477279663085936, 0.045613471984863284, 0.04551699066162109, 0.04548198318481445, 0.04562278366088867, 0.045562305450439454, 0.04548838424682617, 0.04571635055541992, 0.04559523010253906, 0.04571360015869141, 0.04576476669311524, 0.0458076171875, 0.04566220855712891, 0.04623155212402344, 0.045556865692138675, 0.04532035064697266, 0.04581980895996094, 0.04603574371337891, 0.04571753692626953, 0.04571980667114258, 0.04575603103637695, 0.045756542205810546, 0.045735809326171876, 0.04569887924194336, 0.04607411193847656, 0.04588617706298828, 0.045698238372802735, 0.045518848419189455, 0.04548691177368164, 0.04587055969238281, 0.045391456604003906, 0.04552956771850586, 0.045488800048828125, 0.04535276794433594, 0.04563353729248047, 0.045362560272216794, 0.04550665664672852, 0.04583059310913086, 0.04564524841308594, 0.045631614685058594, 0.04557872009277344, 0.04533273696899414, 0.04561449432373047, 0.04545372772216797, 0.04885913467407227, 0.04585836791992187, 0.04569251251220703, 0.04541731262207031, 0.045674495697021485, 0.04555571365356445, 0.04554662322998047, 0.04572687911987305, 0.045383392333984376, 0.045541374206542966, 0.047198368072509767, 0.04654473495483399, 0.045731231689453124, 0.0455972785949707, 0.045666305541992185, 0.045749473571777347, 0.04549507141113281, 0.04526694488525391, 0.045544864654541016, 0.045338848114013675, 0.04549260711669922, 0.045707263946533204, 0.04543510437011719, 0.049228607177734376, 0.04561103820800781, 0.04566316986083984, 0.04557120132446289, 0.04560985565185547, 0.04558403015136719, 0.045759872436523436, 0.04549718475341797, 0.04560025787353516, 0.045875648498535156, 0.04561315155029297, 0.04554556655883789, 0.04542844772338867, 0.04541779327392578, 0.04577702331542969, 0.045466239929199216, 0.045625568389892575, 0.045590431213378906, 0.04550051116943359, 0.0453570556640625, 0.046457439422607424, 0.04599603271484375, 0.045534881591796875, 0.045691230773925784, 0.04562944030761719, 0.04565536117553711, 0.045402816772460934, 0.045369342803955076, 0.04595711898803711, 0.04529151916503906, 0.045412353515625, 0.04524851226806641, 0.04537548828125, 0.04550041580200195, 0.04730227279663086, 0.04751971054077148, 0.04555817413330078, 0.04542668914794922, 0.04545692825317383, 0.04568316650390625, 0.04537062454223633, 0.04533119964599609, 0.04622463989257813, 0.04535286331176758, 0.04546441650390625, 0.045445121765136716, 0.045475841522216794, 0.04555952072143555, 0.045945121765136716, 0.04544428634643555, 0.04573033523559571, 0.04537168121337891, 0.045487968444824216, 0.045424129486083986, 0.045416606903076175, 0.046489151000976565, 0.045339775085449216, 0.04924192047119141, 0.04556796646118164, 0.04535718536376953, 0.04596700668334961, 0.04577423858642578, 0.045425502777099606, 0.04543209457397461, 0.04535103988647461, 0.04533478546142578, 0.04532998275756836, 0.045313953399658206, 0.04537238311767578, 0.045606815338134765, 0.04564937591552734, 0.04550300979614258, 0.045881343841552735, 0.04532633590698242, 0.04549631881713867, 0.04529148864746094, 0.05094198226928711, 0.04557619094848633, 0.04553932952880859, 0.04623052978515625, 0.04839273452758789, 0.04547222518920899, 
0.04573734283447266]",tokens/s,21.9100102112169,,, 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,889.155584,638.517248,0.0,260.046848,253.883392,s,1,7.8444189453125,7.8444189453125,0.0,7.8444189453125,7.8444189453125,7.8444189453125,7.8444189453125,[7.8444189453125],,kWh,2.212889393751235e-05,2.432461837708945e-06,7.532228248008366e-06,3.209358402322966e-05,,MB,1186.328576,743.374848,0.0,335.54432,313.01632,s,13,0.19102784061431888,0.014694449278024527,7.09708829283819e-05,0.014706656455993653,0.014779667282104492,0.014788204765319824,0.01479198314666748,"[0.014605024337768554, 0.014754176139831543, 0.014706656455993653, 0.014738335609436035, 0.014622112274169922, 0.014633824348449707, 0.014693984031677246, 0.014758111953735352, 0.014735136032104492, 0.014792927742004395, 0.014785056114196777, 0.014570079803466796, 0.014632415771484376]",tokens/s,17421.544363887573,kWh,4.353322681321532e-07,4.800946855380307e-08,2.878500398510174e-07,7.711917765369738e-07,tokens/kWh,331953747.15944785,MB,1220.120576,768.540672,0.0,360.710144,313.805312,s,13,10.196140502929687,0.7843185002253606,0.005833425561564566,0.7838369140625,0.7906074829101563,0.7941986938476563,0.7972828051757812,"[0.7829224853515625, 0.7863546142578125, 0.7857490234375, 0.7838369140625, 0.7803174438476562, 0.7789570922851563, 0.7865230102539063, 0.7847035522460938, 0.7980538330078125, 0.7806293334960938, 0.7916286010742187, 0.7730145263671875, 0.7834500732421875]",tokens/s,80.32451100145924,kWh,2.23064743530215e-05,2.4600092574525966e-06,8.558268148147221e-06,3.332475175862131e-05,tokens/kWh,1890486.700585895,,s,819,10.188614595413219,0.012440310861310389,0.0002490865052194593,0.012426912307739258,0.012600223541259765,0.01267319393157959,0.01329150056838989,"[0.012099583625793458, 0.01241494369506836, 0.012588512420654297, 0.01240112018585205, 0.012310943603515624, 0.012381152153015136, 0.012483424186706542, 0.012439423561096192, 0.012439807891845704, 0.012334848403930664, 0.012407936096191407, 0.012344096183776855, 0.012552288055419922, 0.01252780818939209, 0.012666751861572265, 0.012570240020751954, 0.012529984474182129, 0.012507200241088867, 0.012444640159606933, 0.012498208045959473, 0.012359423637390137, 0.01234652805328369, 0.01237276840209961, 0.01233340835571289, 0.012414688110351563, 0.012406720161437988, 0.01236780834197998, 0.01242732810974121, 0.012629088401794433, 0.012823455810546875, 0.012461855888366699, 0.012508416175842285, 0.012266464233398437, 0.012304384231567383, 0.012249088287353516, 0.012166912078857423, 0.012212479591369629, 0.012175552368164063, 0.012170495986938477, 0.012187711715698242, 0.01208198356628418, 0.012168831825256347, 0.012191807746887207, 0.01236787223815918, 0.012310432434082032, 0.012431679725646972, 0.012441375732421874, 0.012605440139770508, 0.012555392265319824, 0.012467071533203125, 0.012541312217712403, 0.012507519721984862, 
0.012536064147949218, 0.012517375946044922, 0.012529791831970215, 0.012523391723632812, 0.012505248069763183, 0.01250499153137207, 0.012520928382873536, 0.012511712074279785, 0.012472319602966308, 0.012472064018249511, 0.012431679725646972, 0.012390399932861328, 0.012529855728149413, 0.012481951713562011, 0.012566944122314454, 0.012478816032409667, 0.012492032051086425, 0.01244156837463379, 0.012364224433898926, 0.012445695877075195, 0.012496735572814941, 0.012365983963012696, 0.01239033603668213, 0.01242527961730957, 0.012502240180969239, 0.012512096405029297, 0.012527551651000977, 0.012429311752319335, 0.0123406400680542, 0.012409440040588379, 0.012367327690124512, 0.012310111999511719, 0.01241817569732666, 0.012514495849609375, 0.01259993553161621, 0.012489855766296387, 0.012544672012329101, 0.012456159591674804, 0.012489024162292481, 0.01248633575439453, 0.012490976333618163, 0.012444576263427735, 0.012442463874816895, 0.012496928215026855, 0.012484543800354004, 0.012482687950134278, 0.012469792366027832, 0.012468511581420899, 0.012533535957336426, 0.012495360374450683, 0.012520383834838868, 0.01256105613708496, 0.01261184024810791, 0.01245798397064209, 0.012478464126586914, 0.012468544006347657, 0.01244745635986328, 0.01242732810974121, 0.012455840110778809, 0.012381855964660644, 0.01240665626525879, 0.012422847747802734, 0.012542752265930176, 0.012470272064208985, 0.012445695877075195, 0.01244159984588623, 0.012498559951782227, 0.012544384002685547, 0.01249075222015381, 0.012515328407287597, 0.01247606372833252, 0.012673279762268067, 0.012562527656555175, 0.012398400306701661, 0.012230976104736328, 0.012460703849792481, 0.012439711570739746, 0.012395359992980958, 0.012407296180725098, 0.012433952331542968, 0.012308320045471192, 0.012740768432617188, 0.01238422393798828, 0.012326656341552734, 0.012394623756408691, 0.01235472011566162, 0.0123951997756958, 0.012480799674987792, 0.012590399742126465, 0.012722880363464355, 0.012565759658813477, 0.012407551765441895, 0.012391872406005859, 0.012313376426696778, 0.012182751655578613, 0.012098112106323243, 0.012120063781738282, 0.012134400367736817, 0.012152735710144042, 0.012134783744812012, 0.012247808456420898, 0.01237110424041748, 0.012313535690307617, 0.012350208282470703, 0.012406911849975586, 0.012391712188720703, 0.012362048149108887, 0.012583328247070312, 0.01244979190826416, 0.012582912445068359, 0.012668095588684081, 0.012554080009460448, 0.012534432411193848, 0.012550144195556641, 0.01254646396636963, 0.012550016403198243, 0.01254415988922119, 0.012543999671936035, 0.012529279708862305, 0.012517631530761719, 0.012543359756469726, 0.012670751571655273, 0.012612447738647461, 0.012627903938293456, 0.012521087646484375, 0.012552831649780273, 0.012588479995727538, 0.012580479621887207, 0.012519519805908203, 0.012563103675842286, 0.012543999671936035, 0.012580320358276368, 0.012728863716125489, 0.01263526439666748, 0.01262281608581543, 0.012574432373046875, 0.012531423568725586, 0.012290047645568849, 0.012462176322937012, 0.012463135719299316, 0.012468544006347657, 0.012445695877075195, 0.012425791740417481, 0.01245798397064209, 0.012533344268798829, 0.012491456031799316, 0.012449248313903809, 0.012468480110168456, 0.012505311965942383, 0.012500736236572265, 0.012574111938476563, 0.012548288345336915, 0.012580927848815917, 0.012502816200256348, 0.012482879638671875, 0.012454400062561035, 0.012465951919555664, 0.012504704475402832, 0.012545760154724122, 0.012546815872192384, 0.012511136054992676, 0.01246003246307373, 
0.012453696250915528, 0.012533599853515626, 0.012517727851867676, 0.012415328025817872, 0.012461536407470703, 0.012581055641174316, 0.012601696014404296, 0.012569503784179687, 0.012527520179748536, 0.012497759819030762, 0.012456128120422363, 0.012453408241271972, 0.01233948802947998, 0.012423168182373047, 0.012456959724426269, 0.012368895530700684, 0.012313599586486817, 0.012312864303588867, 0.012772064208984375, 0.01235968017578125, 0.012310367584228515, 0.01225772762298584, 0.012312288284301759, 0.01225715160369873, 0.012304544448852539, 0.01226956844329834, 0.012247039794921874, 0.012254816055297851, 0.012303775787353515, 0.012583904266357422, 0.01236137580871582, 0.012314656257629394, 0.012402688026428223, 0.012337471961975098, 0.01230355167388916, 0.012245823860168457, 0.012369248390197754, 0.012298912048339843, 0.012154879570007325, 0.012371456146240235, 0.012446208000183106, 0.012318943977355957, 0.012351455688476563, 0.012325984001159668, 0.012347871780395508, 0.012255231857299804, 0.012279104232788086, 0.012281824111938476, 0.01230742359161377, 0.012263327598571778, 0.012251456260681153, 0.01233897590637207, 0.012322112083435059, 0.01243616008758545, 0.012281344413757325, 0.012274080276489258, 0.012236991882324218, 0.012179360389709473, 0.012253184318542481, 0.012269760131835937, 0.012254591941833495, 0.01227235221862793, 0.012291808128356934, 0.012300288200378418, 0.012324864387512208, 0.012312576293945313, 0.012230815887451172, 0.012326751708984375, 0.01255123233795166, 0.012458304405212402, 0.012412832260131837, 0.012317407608032226, 0.01236086368560791, 0.012335136413574218, 0.012321599960327148, 0.012401887893676758, 0.01236780834197998, 0.012306943893432617, 0.012269920349121093, 0.012339263916015626, 0.0124203519821167, 0.012366527557373047, 0.012378175735473633, 0.012380000114440917, 0.01230038356781006, 0.012783904075622559, 0.013473504066467286, 0.012815520286560059, 0.012485471725463868, 0.012489855766296387, 0.012450688362121582, 0.012441472053527832, 0.012368127822875977, 0.012377504348754884, 0.012380640029907226, 0.0123919038772583, 0.012488608360290527, 0.012423808097839356, 0.012396063804626465, 0.012465760231018067, 0.012370816230773926, 0.012053279876708985, 0.012380160331726075, 0.012476544380187988, 0.012263296127319336, 0.012367712020874024, 0.012306591987609864, 0.012342464447021485, 0.012546879768371582, 0.012445695877075195, 0.012391712188720703, 0.01235632038116455, 0.012451711654663087, 0.012365951538085937, 0.012396063804626465, 0.012357407569885254, 0.012417728424072266, 0.012414560317993165, 0.012222080230712891, 0.01216716766357422, 0.01214684772491455, 0.012087936401367188, 0.012084608078002929, 0.012038687705993652, 0.012152928352355957, 0.012201984405517579, 0.012240799903869629, 0.012249183654785157, 0.012352800369262696, 0.012343199729919433, 0.012421600341796875, 0.012239199638366699, 0.01225113582611084, 0.012237024307250977, 0.012164959907531738, 0.012828607559204102, 0.01220201587677002, 0.013653984069824219, 0.012327232360839845, 0.012299936294555664, 0.01226137638092041, 0.012282208442687988, 0.012515007972717285, 0.01236950397491455, 0.012324352264404297, 0.012329888343811036, 0.012511232376098632, 0.012644319534301758, 0.012396575927734375, 0.012443167686462402, 0.012503520011901855, 0.012470272064208985, 0.012422528266906738, 0.012432000160217286, 0.012402688026428223, 0.012361023902893066, 0.012348095893859863, 0.012371968269348145, 0.012283231735229492, 0.012260000228881835, 0.012353535652160644, 0.012332159996032715, 
0.012218496322631837, 0.012311200141906738, 0.012294207572937012, 0.01248249626159668, 0.012377984046936035, 0.012375328063964844, 0.012345600128173827, 0.012415264129638672, 0.012390560150146484, 0.012357664108276368, 0.012365951538085937, 0.012346559524536133, 0.012417856216430665, 0.012467424392700195, 0.012437727928161621, 0.012438079833984376, 0.012486592292785644, 0.012488127708435059, 0.012420991897583007, 0.012487423896789552, 0.012530943870544434, 0.012603551864624023, 0.012525376319885253, 0.012520319938659668, 0.012822431564331055, 0.013252927780151367, 0.012793536186218262, 0.012637663841247558, 0.012466719627380371, 0.01246617603302002, 0.012523520469665527, 0.01253923225402832, 0.012450400352478028, 0.012503104209899902, 0.01255628776550293, 0.012471327781677246, 0.012603424072265624, 0.012560704231262206, 0.012581279754638672, 0.012495136260986328, 0.012515487670898438, 0.01254537582397461, 0.012515775680541992, 0.012480511665344238, 0.012382207870483398, 0.01248025608062744, 0.01241097640991211, 0.012423328399658203, 0.012414976119995117, 0.01237603187561035, 0.01243881607055664, 0.012401408195495605, 0.012461695671081543, 0.012367263793945312, 0.012380864143371582, 0.012371935844421388, 0.01242131233215332, 0.012343423843383789, 0.01236355209350586, 0.012341471672058105, 0.012365823745727538, 0.012381600379943849, 0.012372384071350098, 0.012400832176208496, 0.012638208389282226, 0.012328991889953613, 0.012606975555419921, 0.012609600067138672, 0.01240278434753418, 0.012382559776306153, 0.012357439994812012, 0.012363743782043458, 0.0124169921875, 0.012320799827575684, 0.012371552467346192, 0.012364640235900878, 0.012378047943115235, 0.012427103996276856, 0.012412927627563476, 0.012445311546325683, 0.012478848457336426, 0.012472448348999023, 0.01239641571044922, 0.012421119689941406, 0.012338848114013671, 0.012452192306518554, 0.012620927810668946, 0.012510080337524414, 0.012546079635620117, 0.012572640419006348, 0.012558303833007812, 0.012630047798156739, 0.012588640213012696, 0.012615903854370118, 0.012601375579833984, 0.012597408294677735, 0.012550144195556641, 0.012527071952819824, 0.012458527565002441, 0.012556223869323731, 0.012648480415344238, 0.012507200241088867, 0.012504416465759277, 0.012509568214416504, 0.012523743629455566, 0.01256601619720459, 0.012554656028747559, 0.012549983978271484, 0.012593279838562012, 0.012567999839782715, 0.012499456405639648, 0.01245631980895996, 0.012452768325805665, 0.012350272178649903, 0.012297344207763673, 0.012249919891357422, 0.012216511726379395, 0.012270976066589355, 0.012331456184387206, 0.0122772159576416, 0.01239311981201172, 0.012245408058166504, 0.01236342430114746, 0.012362815856933594, 0.012325344085693359, 0.012282431602478027, 0.012263168334960937, 0.012308287620544433, 0.01205452823638916, 0.012818431854248047, 0.012981792449951172, 0.0124235200881958, 0.013357343673706055, 0.012404640197753907, 0.012535840034484864, 0.012285856246948243, 0.012298015594482422, 0.01233737564086914, 0.012283488273620606, 0.012321184158325196, 0.012408415794372558, 0.012347807884216308, 0.012522879600524903, 0.012556927680969238, 0.01256828784942627, 0.012576543807983398, 0.013664768218994141, 0.012488703727722168, 0.012550399780273438, 0.012438431739807129, 0.01252233600616455, 0.0126113920211792, 0.012936767578125, 0.01260319995880127, 0.012589887619018554, 0.012640255928039551, 0.012595199584960937, 0.012571776390075683, 0.012598112106323242, 0.012705535888671874, 0.012550175666809082, 0.012551967620849609, 0.012612256050109863, 
0.012598400115966796, 0.01304851245880127, 0.01276956844329834, 0.015601216316223144, 0.012673184394836426, 0.012630016326904296, 0.012533760070800782, 0.012572671890258789, 0.012576864242553712, 0.012586175918579101, 0.012524255752563477, 0.01268496036529541, 0.012529824256896973, 0.012558527946472168, 0.012465439796447753, 0.014080639839172363, 0.012463583946228027, 0.012462719917297364, 0.012570624351501464, 0.013017087936401368, 0.012619935989379882, 0.012543840408325195, 0.012543328285217285, 0.012552448272705078, 0.012547904014587403, 0.01251798439025879, 0.012408191680908204, 0.012456576347351074, 0.012217984199523926, 0.012511103630065919, 0.012423551559448243, 0.012394687652587891, 0.01236950397491455, 0.012406559944152832, 0.012351776123046875, 0.012304672241210938, 0.012322431564331054, 0.012355072021484375, 0.012395392417907715, 0.012375136375427247, 0.012358559608459472, 0.012378111839294433, 0.012354623794555665, 0.012394975662231446, 0.012495327949523925, 0.012518719673156738, 0.012509856224060058, 0.012531776428222657, 0.012476384162902832, 0.012389504432678222, 0.012379008293151856, 0.012408543586730957, 0.012398112297058105, 0.012400383949279786, 0.012431808471679687, 0.012454463958740234, 0.0124235200881958, 0.012523455619812012, 0.012326080322265625, 0.012372096061706543, 0.012310943603515624, 0.012464127540588378, 0.012969344139099122, 0.012442399978637695, 0.012386143684387207, 0.012390144348144531, 0.012336607933044433, 0.012727264404296876, 0.012269375801086425, 0.01232588768005371, 0.01232307243347168, 0.012269632339477539, 0.012282560348510742, 0.012214271545410157, 0.012426912307739258, 0.012257311820983887, 0.012244895935058593, 0.012198016166687012, 0.012229056358337402, 0.012285568237304688, 0.0122390718460083, 0.012298111915588378, 0.012322943687438965, 0.01251142406463623, 0.012365632057189942, 0.012268927574157715, 0.012226495742797851, 0.0125283203125, 0.012339200019836426, 0.012298527717590333, 0.01233033561706543, 0.012116000175476075, 0.012436927795410157, 0.012896224021911621, 0.01255894374847412, 0.012484767913818359, 0.012489855766296387, 0.012415712356567382, 0.012634112358093261, 0.012416959762573242, 0.012345727920532226, 0.012392224311828614, 0.012455455780029297, 0.012460543632507324, 0.012533632278442383, 0.012511296272277832, 0.012629055976867675, 0.012495743751525878, 0.01244979190826416, 0.01254745578765869, 0.012431200027465821, 0.01245263957977295, 0.012456992149353027, 0.012579360008239747, 0.012689824104309083, 0.012680959701538087, 0.012544287681579589, 0.012683391571044922, 0.013142111778259278, 0.012650176048278809, 0.012500896453857421, 0.01243769645690918, 0.012602751731872558, 0.012573087692260742, 0.012437727928161621, 0.012486656188964844, 0.012488703727722168, 0.012421152114868165, 0.012443327903747558, 0.01245417594909668, 0.01257695960998535, 0.012599167823791503, 0.013299967765808106, 0.015553248405456544, 0.012569664001464843, 0.01242080020904541, 0.012658528327941895, 0.012370559692382812, 0.012427007675170899, 0.012522687911987304, 0.013081503868103026, 0.012642208099365235, 0.012537471771240235, 0.012498559951782227, 0.012466943740844727, 0.012349439620971679, 0.01221225643157959, 0.012250271797180176, 0.012286687850952148, 0.012257439613342285, 0.012265439987182616, 0.012224032402038574, 0.012267968177795411, 0.012322815895080566, 0.011807328224182128, 0.012056575775146485, 0.012158880233764649, 0.01208944034576416, 0.012038175582885741, 0.012033568382263184, 0.012052767753601075, 0.01198095989227295, 
0.012015616416931153, 0.011972415924072265, 0.01202195167541504, 0.012025088310241698, 0.01201638412475586, 0.011916799545288086, 0.011959103584289551, 0.011986623764038086, 0.011965567588806153, 0.01189151954650879, 0.011905088424682617, 0.012107904434204102, 0.012148608207702636, 0.01248256015777588, 0.013426719665527343, 0.012582880020141601, 0.012345343589782716, 0.01233897590637207, 0.012308608055114746, 0.012224703788757325, 0.012258272171020507, 0.012302847862243652, 0.012316608428955078, 0.012290080070495606, 0.012313055992126464, 0.012319968223571778, 0.012339008331298829, 0.0126430082321167, 0.0123722562789917, 0.012371968269348145, 0.012345151901245117, 0.012380352020263672, 0.012308095932006836, 0.012294431686401368, 0.012263360023498536, 0.012241056442260742, 0.012275712013244629, 0.012231712341308593, 0.01233619213104248, 0.012351391792297363, 0.012391712188720703, 0.01243996810913086, 0.012444191932678223, 0.012523296356201172, 0.012445919990539551, 0.012383168220520019, 0.012364255905151366, 0.012318464279174804, 0.01228867244720459, 0.012906047821044922, 0.01235308837890625, 0.012290783882141113, 0.012251263618469238, 0.012298239707946777, 0.012337151527404786, 0.012058464050292969, 0.012327072143554687, 0.012275967597961426, 0.012221535682678223, 0.012277631759643555, 0.012233535766601562, 0.012230015754699708, 0.012216256141662598, 0.01208569622039795, 0.012086784362792969, 0.01212492847442627, 0.012111104011535645, 0.012045023918151855, 0.012124159812927245, 0.01225119972229004, 0.012345343589782716, 0.012387424468994141, 0.01243836784362793, 0.01243068790435791, 0.012491104125976562, 0.012435839653015137, 0.012418111801147462, 0.012391231536865234, 0.012374079704284668, 0.012371744155883789, 0.012331551551818847, 0.012381024360656738, 0.01241097640991211, 0.012429120063781739, 0.012401599884033204, 0.012404735565185548, 0.012406111717224121, 0.012396927833557128, 0.012396832466125489, 0.012496031761169434, 0.012471136093139648, 0.012529664039611817, 0.01247436809539795, 0.012496031761169434, 0.012559200286865234, 0.012515711784362792, 0.012472000122070313, 0.012423104286193847, 0.012615232467651368, 0.012632224082946777, 0.012581151962280273, 0.012650272369384766, 0.0128307523727417, 0.012777664184570313, 0.012703583717346192, 0.012666399955749513, 0.012577407836914063, 0.012559712409973145, 0.012547776222229004, 0.012548480033874512, 0.012513888359069825, 0.012597151756286621, 0.012537407875061034, 0.012571040153503419, 0.012529600143432617, 0.012518752098083495, 0.012542143821716308, 0.01258351993560791]",tokens/s,80.38384339012136,,, 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,4260.343808,4725.866496,0.0,4347.396096,4328.833024,s,1,10.356025390625,10.356025390625,0.0,10.356025390625,10.356025390625,10.356025390625,10.356025390625,[10.356025390625],,kWh,9.157256575830767e-05,1.0093641610077022e-05,3.0194468599975544e-05,0.00013186067596836025,,MB,1385.377792,5220.794368,0.0,4812.96384,4756.928512,s,10,3.487279876708985,0.3487279876708984,0.0018193859702139725,0.3491572570800781,0.3502679779052734,0.3507423263549805,0.3511218051147461,"[0.3448252868652344, 0.348823486328125, 0.34959466552734375, 0.34949102783203123, 0.3461040649414063, 0.3501625671386719, 0.3486292724609375, 0.3485438537597656, 0.34988897705078126, 0.3512166748046875]",tokens/s,734.0965137607261,kWh,1.0162405058045626e-05,1.1207336724002644e-06,6.739708456896455e-06,1.8022847187342344e-05,tokens/kWh,14204193.007850157,MB,1385.377792,5325.651968,0.0,4917.82144,4876.091904,s,10,20.489777587890625,2.0489777587890625,0.008619845570690534,2.0503623046875,2.05667080078125,2.0608374755859376,2.0641708154296876,"[2.03930029296875, 2.045240234375, 2.053543212890625, 2.049490478515625, 2.0404039306640627, 2.055744873046875, 2.05490625, 2.051234130859375, 2.065004150390625, 2.0349100341796875]",tokens/s,30.74703945895086,kWh,5.958664198445075e-05,6.572193137807543e-06,3.9495856309304044e-05,0.00010565469143156238,tokens/kWh,596282.0878693127,,s,630,20.486324972152726,0.03251797614627414,0.0007612176222767933,0.03237366485595703,0.03315205459594727,0.03342379589080811,0.03613603931427002,"[0.035639007568359374, 0.03324729537963867, 0.03235619354248047, 0.0322116813659668, 0.031927967071533205, 0.03177846336364746, 0.03207443237304688, 0.038157535552978517, 0.03178780746459961, 0.032153472900390626, 0.03230047988891602, 0.03191263961791992, 0.031971168518066403, 0.03236470413208008, 0.032218753814697264, 0.032127361297607425, 0.03240140914916992, 0.03218227386474609, 0.03200579071044922, 0.03203855895996094, 0.03194540786743164, 0.032053249359130856, 0.032290241241455075, 0.03203129577636719, 0.031835487365722656, 0.03188751983642578, 0.03196678352355957, 0.031987808227539063, 0.03245142364501953, 0.032950271606445314, 0.032727039337158204, 0.03289907073974609, 0.032745216369628904, 0.032471294403076174, 0.03255839920043945, 0.03265811157226563, 0.03229062271118164, 0.03224588775634766, 0.031983680725097656, 0.031940160751342775, 0.03215385437011719, 0.03196323204040527, 0.0320096321105957, 0.03179737663269043, 0.03211443328857422, 0.03211663818359375, 0.03205974578857422, 0.0320181770324707, 0.031938655853271485, 0.031979936599731446, 0.031891008377075196, 0.0320103988647461, 0.03204092788696289, 0.032134944915771485, 0.032299488067626954, 0.03216569519042969, 0.03223961639404297, 0.032168510437011716, 0.03228579330444336, 0.03259894561767578, 0.03264681625366211, 0.03253692626953125, 0.033017856597900394, 0.035425792694091796, 0.03315126419067383, 0.032007518768310546, 0.031842527389526365, 0.03201500701904297, 0.031959039688110355, 0.0318791675567627, 0.03249894332885742, 0.03259878540039062, 0.03222732925415039, 0.03187302398681641, 0.03175820732116699, 0.03207385635375976, 0.03207916641235352, 0.03220755386352539, 0.031894559860229495, 0.03235939025878906, 0.03206134414672852, 0.03217622375488281, 0.03223952102661133, 0.03214140701293945, 0.03227347183227539, 0.03219488143920898, 0.03218691253662109, 0.03237836837768555, 0.03206310272216797, 0.032183040618896486, 0.03219068908691406, 0.03306905746459961, 0.03334348678588867, 
0.03278643035888672, 0.03265945434570312, 0.032716384887695314, 0.03251446533203125, 0.03231692886352539, 0.032395713806152346, 0.032114273071289064, 0.032653568267822265, 0.0324876480102539, 0.032493057250976565, 0.03252191925048828, 0.032672542572021485, 0.03240127944946289, 0.032382495880126955, 0.03235084915161133, 0.03223484802246094, 0.03234064102172852, 0.03281919860839844, 0.03272268676757813, 0.03274361419677734, 0.03265478515625, 0.03228860855102539, 0.03286505508422852, 0.03277113723754883, 0.03234624099731445, 0.032176959991455076, 0.03237459182739258, 0.03283779144287109, 0.03331689453125, 0.03285385513305664, 0.032729248046875, 0.03273519897460937, 0.03229699325561523, 0.036216320037841795, 0.03335014343261719, 0.032449569702148434, 0.03188607978820801, 0.03203299331665039, 0.03206553649902344, 0.0321638412475586, 0.03231462478637695, 0.0322790412902832, 0.032076030731201174, 0.032081504821777344, 0.03213059234619141, 0.034144382476806644, 0.03181849670410156, 0.03162521553039551, 0.031813503265380856, 0.03175436782836914, 0.03195199966430664, 0.032059391021728514, 0.03244940948486328, 0.032314910888671874, 0.03220323181152344, 0.032518207550048826, 0.032355422973632815, 0.03227267074584961, 0.03243040084838867, 0.0326003189086914, 0.03226947021484375, 0.03257987213134766, 0.03322118377685547, 0.033147998809814457, 0.033108318328857425, 0.03282796859741211, 0.032640033721923825, 0.032588768005371097, 0.032571392059326174, 0.03272000122070313, 0.032647998809814456, 0.032760929107666016, 0.03252691268920899, 0.032745376586914066, 0.03246950531005859, 0.032217086791992186, 0.032408737182617185, 0.03241363143920899, 0.03241257476806641, 0.03234521484375, 0.03267264175415039, 0.032806495666503906, 0.032814849853515626, 0.032473758697509764, 0.03246633529663086, 0.032422496795654294, 0.03221651077270508, 0.03238355255126953, 0.033067008972167966, 0.03248537445068359, 0.03236422348022461, 0.032701759338378905, 0.032608543395996094, 0.0327154541015625, 0.03258780670166016, 0.03640019226074219, 0.03617670440673828, 0.03307014465332031, 0.032084926605224606, 0.031891359329223636, 0.03199337577819824, 0.03230352020263672, 0.032064865112304684, 0.032035648345947264, 0.032059200286865236, 0.03207187271118164, 0.03202252960205078, 0.032086017608642575, 0.032378814697265626, 0.03218406295776367, 0.03213036727905273, 0.03215631866455078, 0.03211043167114258, 0.03228313446044922, 0.03362748718261719, 0.03225872039794922, 0.031915456771850585, 0.03183059120178223, 0.03186892890930176, 0.03211801528930664, 0.032494335174560546, 0.03249478530883789, 0.032358623504638674, 0.03208195114135742, 0.03270265579223633, 0.033196414947509766, 0.033159168243408206, 0.032916576385498046, 0.03274844741821289, 0.032651329040527345, 0.03278841781616211, 0.032356128692626954, 0.03229718399047852, 0.03240755081176758, 0.03256057739257812, 0.032459327697753906, 0.03224496078491211, 0.03219475173950195, 0.032292865753173826, 0.03252694320678711, 0.032286113739013675, 0.03212758255004883, 0.03240719985961914, 0.03268262481689453, 0.03280831909179688, 0.032513729095458986, 0.03250038528442383, 0.03233116912841797, 0.03254742431640625, 0.03260009765625, 0.03280892944335938, 0.03297894287109375, 0.03275161743164062, 0.03304009628295899, 0.032790817260742185, 0.03301990509033203, 0.03304652786254883, 0.03303628921508789, 0.03317929458618164, 0.036000831604003906, 0.03352876663208008, 0.032307201385498044, 0.031748096466064454, 0.031510528564453126, 0.03144655990600586, 0.03179772758483887, 0.03117465591430664, 
0.03122790336608887, 0.032984447479248044, 0.03163587188720703, 0.03195721626281738, 0.03164329528808594, 0.031582143783569334, 0.03153686332702637, 0.032276798248291015, 0.03213750457763672, 0.03157923126220703, 0.03129644775390625, 0.031760448455810546, 0.031336448669433595, 0.031862367630004884, 0.03181814384460449, 0.031987712860107424, 0.03219971084594726, 0.03261539077758789, 0.03251609420776367, 0.03260822296142578, 0.03261648178100586, 0.032952320098876955, 0.03275980758666992, 0.032702049255371096, 0.033231231689453126, 0.03488771057128906, 0.03258163070678711, 0.03257753753662109, 0.03253609466552734, 0.032317920684814455, 0.03211209487915039, 0.032112255096435546, 0.03261942291259766, 0.032712703704833986, 0.0324956169128418, 0.03257872009277344, 0.03242448043823242, 0.03235667037963867, 0.03214745712280274, 0.03205734252929687, 0.03197664070129395, 0.03206979370117188, 0.03225462341308594, 0.032086017608642575, 0.03261644744873047, 0.032546817779541014, 0.033538047790527346, 0.03271254348754883, 0.0328287353515625, 0.03276476669311523, 0.03270041656494141, 0.03247228622436524, 0.03309648132324219, 0.03280691146850586, 0.03278643035888672, 0.03558729553222656, 0.03322959899902344, 0.032276481628417966, 0.032389118194580076, 0.03228057479858398, 0.032091392517089846, 0.031989952087402344, 0.03200467300415039, 0.032389118194580076, 0.03226217651367187, 0.03208182525634766, 0.03210655975341797, 0.03250790405273438, 0.03230515289306641, 0.03210627365112305, 0.03242825698852539, 0.032421886444091795, 0.03213840103149414, 0.032282623291015625, 0.03243708801269531, 0.0331960334777832, 0.0320654411315918, 0.03222127914428711, 0.03213721466064453, 0.03237273788452148, 0.03253247833251953, 0.032847713470458985, 0.032379039764404295, 0.03313449478149414, 0.033359329223632814, 0.03326828765869141, 0.03303430557250977, 0.03283955383300781, 0.032718177795410155, 0.03332995223999023, 0.03215145492553711, 0.03215574264526367, 0.03267776107788086, 0.03277987289428711, 0.032562942504882814, 0.032344863891601565, 0.03238092803955078, 0.03248892974853516, 0.03265337753295899, 0.032647647857666016, 0.03263078308105469, 0.03280441665649414, 0.032544670104980467, 0.03239369583129883, 0.03253868865966797, 0.032689407348632814, 0.03297267150878906, 0.03238307189941406, 0.03244521713256836, 0.0322979850769043, 0.032484352111816404, 0.03284377670288086, 0.03269126510620117, 0.032938270568847655, 0.03301161575317383, 0.033407390594482424, 0.033417247772216795, 0.03325164794921875, 0.03603647994995117, 0.033675392150878905, 0.03233078384399414, 0.0320296630859375, 0.03201827239990234, 0.03215980911254883, 0.0320533447265625, 0.032003265380859375, 0.032578369140625, 0.03212492752075195, 0.03206470489501953, 0.03218719863891602, 0.03214950561523437, 0.03206553649902344, 0.032196094512939456, 0.032133632659912106, 0.0318831672668457, 0.032153182983398435, 0.03222169494628906, 0.03230825424194336, 0.03218940734863281, 0.03252019119262695, 0.032108062744140624, 0.03200048065185547, 0.03249094390869141, 0.03220912170410156, 0.03221286392211914, 0.03247296142578125, 0.033043006896972656, 0.03310163116455078, 0.03328841781616211, 0.034219551086425784, 0.03278211212158203, 0.03285676956176758, 0.03281100845336914, 0.03689471817016601, 0.03159859275817871, 0.03201836776733399, 0.032237632751464844, 0.032538623809814454, 0.03201958465576172, 0.03214134216308594, 0.03227939224243164, 0.03226828765869141, 0.03219660949707031, 0.032464897155761716, 0.03248332977294922, 0.032470497131347656, 0.0324101448059082, 
0.03241104125976563, 0.03248393630981445, 0.032158912658691405, 0.0325715217590332, 0.03323875045776367, 0.032532543182373044, 0.032838558197021486, 0.0329543685913086, 0.03269222259521484, 0.03298476791381836, 0.033280223846435544, 0.03324518585205078, 0.033285343170166015, 0.03314499282836914, 0.03595043182373047, 0.033975105285644534, 0.03211673736572265, 0.03188121604919433, 0.031885120391845705, 0.03181366348266602, 0.031919872283935544, 0.03194083213806152, 0.03191007995605469, 0.03197068786621094, 0.032258689880371096, 0.03245619201660156, 0.032136959075927736, 0.032025344848632814, 0.032008190155029294, 0.03201638412475586, 0.03205734252929687, 0.032056671142578125, 0.03242598342895508, 0.032258655548095705, 0.033429153442382814, 0.03236495971679688, 0.03224166488647461, 0.03215359878540039, 0.03224099349975586, 0.032199329376220706, 0.03246457672119141, 0.032385345458984374, 0.0332410888671875, 0.03327721786499024, 0.034013919830322266, 0.03264716720581055, 0.032620449066162106, 0.03257676696777344, 0.03237974548339844, 0.03216588973999023, 0.03238489532470703, 0.03193187141418457, 0.03222390365600586, 0.03212895965576172, 0.03298720169067383, 0.03287859344482422, 0.03230681610107422, 0.032319774627685545, 0.03237827301025391, 0.03240620803833008, 0.03277004623413086, 0.032868350982666016, 0.03258752059936523, 0.03271500778198242, 0.03221820831298828, 0.032310176849365234, 0.032806495666503906, 0.032717216491699216, 0.03235977554321289, 0.03237542343139648, 0.032653343200683596, 0.03313663864135742, 0.032927745819091796, 0.03323904037475586, 0.033529857635498046, 0.03327164840698242, 0.03298073577880859, 0.03645027160644531, 0.0337955207824707, 0.032489601135253905, 0.03205286407470703, 0.03594118499755859, 0.037631999969482424, 0.03216793441772461, 0.033726463317871096, 0.033009536743164064, 0.03182985687255859, 0.031929664611816407, 0.03188412857055664, 0.03199193572998047, 0.03199180793762207, 0.03234739303588867, 0.0321984977722168, 0.032046142578125, 0.03237257766723633, 0.032304161071777346, 0.03211363220214844, 0.03212076950073242, 0.03345187377929688, 0.03211862564086914, 0.032371070861816405, 0.03244646453857422, 0.03209817504882812, 0.03226022338867188, 0.03344384002685547, 0.03246694564819336, 0.03518463897705078, 0.03279443359375, 0.032869792938232424, 0.03311286544799805, 0.03338649749755859, 0.032950271606445314, 0.03263033676147461, 0.03286675262451172, 0.03241984176635742, 0.032339744567871094, 0.032400638580322265, 0.03249151992797852, 0.03252080154418945, 0.03222566223144531, 0.03215151977539062, 0.03223759841918945, 0.03264716720581055, 0.03210841751098633, 0.03207539367675781, 0.0325923843383789, 0.032307201385498044, 0.032059391021728514, 0.03211673736572265, 0.03227033615112305, 0.0324073600769043, 0.03273747253417969, 0.032580894470214845, 0.03304726409912109, 0.03306480026245117, 0.03275788879394531, 0.03262262344360352, 0.0329273910522461, 0.03348515319824219, 0.03319919967651367, 0.035547359466552735, 0.03317132949829102, 0.032169952392578124, 0.0315221118927002, 0.0316376953125, 0.03196345520019531, 0.031922527313232425, 0.0319069766998291, 0.0316711368560791, 0.03173990440368652, 0.031639520645141604, 0.0315248966217041, 0.032161792755126956, 0.031906911849975586, 0.031734687805175785, 0.03183350372314453, 0.031590688705444334, 0.031833728790283206, 0.031697343826293946, 0.03163158416748047, 0.031862815856933596, 0.031882400512695315, 0.031881664276123045, 0.03183568000793457, 0.032314239501953126, 0.03250358581542969, 0.03237887954711914, 
0.03252374267578125, 0.032979713439941404, 0.03331043243408203, 0.03328028869628906, 0.03300556945800781, 0.03278438568115234, 0.03269830322265625, 0.03214956665039063, 0.03203420639038086, 0.032610912322998044, 0.0324128303527832, 0.03209833526611328, 0.03172435188293457, 0.03209577560424805, 0.0321929931640625, 0.032221023559570315, 0.03263283157348633, 0.032438209533691406, 0.03223984146118164, 0.032282623291015625, 0.03219180679321289, 0.03225017547607422, 0.032180160522460935, 0.032311744689941406, 0.032200702667236326, 0.03219993591308594, 0.03231615829467773, 0.03236044692993164, 0.03227558517456055, 0.0324754867553711, 0.03280131149291992, 0.03289424133300781, 0.032798561096191406, 0.032682880401611325, 0.032785472869873045, 0.03274233627319336]",tokens/s,30.75222134064387,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,7115.18208,7715.291136,0.0,7329.54624,7078.776832,s,1,11.6170458984375,11.6170458984375,0.0,11.6170458984375,11.6170458984375,11.6170458984375,11.6170458984375,[11.6170458984375],,kWh,0.0001249525286625006,1.3774687313256607e-05,4.2240311569990974e-05,0.0001809675275457482,,MB,2954.862592,8071.806976,0.0,7656.701952,7404.957696,s,10,6.938225646972657,0.6938225646972657,0.0015122897761256362,0.6935894775390625,0.6957393859863281,0.6962917510986328,0.6967336431884765,"[0.691801025390625, 0.692944091796875, 0.692273681640625, 0.6932676391601562, 0.69400439453125, 0.69257470703125, 0.694988037109375, 0.6939113159179687, 0.6956166381835938, 0.6968441162109374]",tokens/s,368.9704155293652,kWh,2.03102591611125e-05,2.2398703662126e-06,1.3427047778665398e-05,3.59771773059905e-05,tokens/kWh,7115622.157421835,MB,2967.003136,8323.465216,0.0,7908.360192,7623.554048,s,10,31.182210205078125,3.118221020507813,0.005556037359009233,3.118703857421875,3.1261424316406248,3.1262538330078127,3.126342954101563,"[3.10967724609375, 3.11135107421875, 3.113287109375, 3.119398681640625, 3.120428955078125, 3.115010986328125, 3.122564208984375, 3.12611767578125, 3.118009033203125, 3.126365234375]",tokens/s,20.203827626606223,kWh,9.129777060347223e-05,1.0070587978039294e-05,6.08066782749365e-05,0.00016217503685644804,tokens/kWh,388469.15789984073,,s,630,31.170880546569816,0.04947758816915844,0.0006741886052991562,0.04938678359985352,0.05019880828857422,0.05038684368133545,0.052615010948181155,"[0.05163167953491211, 0.04957868957519531, 0.048640159606933596, 0.048691200256347655, 0.04911513519287109, 0.04857855987548828, 0.04861529541015625, 0.048459903717041015, 0.04939324951171875, 0.04919132614135742, 0.04857241439819336, 0.049079936981201173, 0.04887180709838867, 0.04874966430664063, 0.04896451187133789, 0.04945004653930664, 0.04953318405151367, 0.04905039978027344, 0.04917459106445313, 0.05009823989868164, 0.049636577606201174, 0.04914438247680664, 0.04913971328735352, 0.049202430725097654, 0.04906060791015625, 0.04885459136962891, 0.04908835220336914, 
0.049481342315673825, 0.04889616012573242, 0.04918355178833008, 0.048764926910400394, 0.04921859359741211, 0.049253150939941405, 0.0497476806640625, 0.04927331161499023, 0.049353984832763674, 0.04920415878295899, 0.049514305114746096, 0.049968193054199216, 0.05001023864746094, 0.049627105712890626, 0.04934748840332031, 0.04972563171386719, 0.04981856155395508, 0.049294143676757815, 0.04941823959350586, 0.04897568130493164, 0.04938143920898438, 0.04898009490966797, 0.04928492736816406, 0.049246143341064454, 0.049651134490966795, 0.049879871368408206, 0.04935270309448242, 0.04909900665283203, 0.0494570541381836, 0.049673152923583985, 0.050303966522216796, 0.05030470275878906, 0.049777984619140625, 0.049754207611083984, 0.04931817626953125, 0.04950278472900391, 0.05320649719238281, 0.04929203033447266, 0.049009761810302734, 0.0486707534790039, 0.048860031127929686, 0.04849798583984375, 0.0485731201171875, 0.048739521026611325, 0.048441184997558596, 0.048856033325195315, 0.04871343994140625, 0.04886540985107422, 0.04901091384887695, 0.04934649658203125, 0.04931100845336914, 0.04893894577026367, 0.0490681266784668, 0.0485362548828125, 0.04996710586547851, 0.05049456024169922, 0.05000899124145508, 0.0497883529663086, 0.04934624099731445, 0.04907228851318359, 0.04900527954101563, 0.048986366271972656, 0.0494466552734375, 0.049251873016357424, 0.04883504104614258, 0.04885299301147461, 0.049084095001220705, 0.049076351165771484, 0.04944095993041992, 0.04898799896240234, 0.04920492935180664, 0.04913638305664063, 0.0492457275390625, 0.04906003189086914, 0.04995244979858399, 0.04990598297119141, 0.05004470443725586, 0.04985385513305664, 0.04952300643920898, 0.049291519165039065, 0.04889420700073242, 0.04926828765869141, 0.04931628799438476, 0.049302688598632814, 0.048908702850341795, 0.048897663116455076, 0.04934534454345703, 0.05000396728515625, 0.04938137435913086, 0.049704959869384766, 0.049708545684814455, 0.04894976043701172, 0.049411678314208986, 0.049644031524658204, 0.050175777435302736, 0.0504136962890625, 0.049809406280517575, 0.050006046295166015, 0.050450401306152345, 0.053405696868896485, 0.049737728118896485, 0.04868710327148437, 0.04839996719360352, 0.048803390502929686, 0.04842355346679687, 0.0479213752746582, 0.048639678955078126, 0.0488389778137207, 0.04888764953613281, 0.04878508758544922, 0.048400672912597656, 0.048640159606933596, 0.04860947036743164, 0.04936223983764648, 0.04905744171142578, 0.048925567626953125, 0.04868096160888672, 0.04909260940551758, 0.05075987243652344, 0.05000995254516601, 0.04961481475830078, 0.049395809173583986, 0.049491935729980466, 0.049001953125, 0.04912201690673828, 0.04907126235961914, 0.048519168853759766, 0.049014785766601565, 0.04901932907104492, 0.049041568756103514, 0.048800895690917966, 0.04952761459350586, 0.04925571060180664, 0.049386177062988285, 0.04917462539672852, 0.04944051361083984, 0.04974208068847656, 0.05021286392211914, 0.04978278350830078, 0.04984204864501953, 0.0494961929321289, 0.05038284683227539, 0.04961280059814453, 0.049635326385498044, 0.05040127944946289, 0.04918844985961914, 0.04994867324829102, 0.0495557746887207, 0.04933846282958984, 0.04910079956054687, 0.049299617767333985, 0.049944095611572266, 0.049243904113769534, 0.04948640060424805, 0.04904755020141602, 0.04992310333251953, 0.050242111206054686, 0.04972175979614258, 0.04977356719970703, 0.050062335968017575, 0.04993228912353516, 0.05029417419433594, 0.05264015960693359, 0.049207679748535155, 0.04872297668457031, 0.04899900817871094, 0.048922943115234374, 
0.04866847991943359, 0.04867878341674805, 0.0493034553527832, 0.04925676727294922, 0.04925523376464844, 0.04909734344482422, 0.048740703582763674, 0.04929228973388672, 0.04936377716064453, 0.04884643173217774, 0.04882668685913086, 0.04924649429321289, 0.049215518951416015, 0.049401023864746096, 0.050275104522705075, 0.05139023971557617, 0.04918876647949219, 0.04938739013671875, 0.04927123260498047, 0.049124671936035154, 0.0489903678894043, 0.049154495239257814, 0.04984870529174805, 0.04945248031616211, 0.04920348739624023, 0.0491063346862793, 0.04909321594238281, 0.04944489669799805, 0.04925027084350586, 0.04899430465698242, 0.04948579025268555, 0.049371166229248045, 0.04916169738769531, 0.04984707260131836, 0.05016739273071289, 0.049798656463623046, 0.049781406402587894, 0.04964966583251953, 0.04926668930053711, 0.04936854553222656, 0.049775135040283205, 0.04974537658691406, 0.0497520637512207, 0.04954780960083008, 0.050219295501708984, 0.0496385269165039, 0.05027676773071289, 0.04944851303100586, 0.04914416122436523, 0.04925263977050781, 0.04962310409545898, 0.04962911987304688, 0.049590145111083984, 0.049966304779052735, 0.049604606628417966, 0.050532543182373046, 0.05000470352172852, 0.04971241760253906, 0.05255344009399414, 0.049964641571044924, 0.04877936172485352, 0.04896419143676758, 0.04906796646118164, 0.048799007415771485, 0.04878015899658203, 0.04882140731811523, 0.04907289505004883, 0.048877376556396485, 0.048998497009277345, 0.049113056182861325, 0.04963369750976562, 0.049354465484619144, 0.049105121612548826, 0.0488056640625, 0.04948579025268555, 0.04907753753662109, 0.049614849090576174, 0.05005753707885742, 0.04994678497314453, 0.049398048400878906, 0.04944879913330078, 0.05006905746459961, 0.04977542495727539, 0.04896646499633789, 0.04878969573974609, 0.049105697631835935, 0.049051647186279294, 0.04912518310546875, 0.04921567916870117, 0.04990771102905273, 0.04931692886352539, 0.04995923233032227, 0.049260673522949217, 0.04948403167724609, 0.049258785247802736, 0.05023683166503906, 0.04987263870239258, 0.04968531036376953, 0.04956396865844727, 0.049686206817626956, 0.05026816177368164, 0.049714687347412106, 0.0494633903503418, 0.049305313110351565, 0.04903769683837891, 0.04887811279296875, 0.049610336303710936, 0.04983417510986328, 0.04894515228271484, 0.04953699111938477, 0.049653793334960936, 0.04953196716308594, 0.049334495544433594, 0.04985689544677734, 0.04963494491577149, 0.0496401596069336, 0.050329246520996095, 0.050235103607177735, 0.050040863037109376, 0.05048294448852539, 0.04992291259765625, 0.05212911987304687, 0.04960220718383789, 0.04905472183227539, 0.04847977447509766, 0.048574943542480466, 0.048428543090820314, 0.048705406188964843, 0.0490332145690918, 0.04868767929077149, 0.04873017501831055, 0.04915609741210938, 0.04894121551513672, 0.04879897689819336, 0.049252960205078126, 0.04904864120483399, 0.04915705490112305, 0.04901635360717774, 0.049133247375488284, 0.05007839965820313, 0.04949398422241211, 0.04978934478759765, 0.04980912017822266, 0.04925987243652344, 0.049173152923583985, 0.049137279510498046, 0.049320289611816406, 0.0500382080078125, 0.049746528625488284, 0.04904959869384766, 0.04879788970947266, 0.049140830993652344, 0.04902601623535156, 0.04947123336791992, 0.049470592498779296, 0.04901363372802734, 0.0495136947631836, 0.049592864990234374, 0.04926489639282226, 0.049650943756103516, 0.04957260894775391, 0.050092033386230465, 0.049498111724853515, 0.049522689819335934, 0.04923993682861328, 0.04933871841430664, 0.04972316741943359, 
0.04965990447998047, 0.049401695251464844, 0.049430686950683596, 0.049408000946044923, 0.049315391540527345, 0.04977827072143555, 0.049716064453125, 0.049530879974365234, 0.04974182510375977, 0.05022054290771484, 0.04962787246704101, 0.04974160003662109, 0.050522113800048826, 0.050282497406005856, 0.0497336311340332, 0.049536094665527344, 0.04939059066772461, 0.05294291305541992, 0.05006502532958984, 0.048429374694824216, 0.04868864059448242, 0.048986625671386716, 0.048621856689453125, 0.04858022308349609, 0.04857385635375976, 0.04879759979248047, 0.050027198791503906, 0.04982588958740235, 0.04971494293212891, 0.04881830215454101, 0.04924755096435547, 0.04884540939331055, 0.04932361602783203, 0.04889478302001953, 0.04930326461791992, 0.049616161346435546, 0.05138687896728516, 0.05088902282714844, 0.050006271362304684, 0.04946230316162109, 0.04944550323486328, 0.0489595832824707, 0.049026622772216796, 0.04894483184814453, 0.048677055358886716, 0.049328929901123045, 0.049710399627685545, 0.049140289306640626, 0.04921456146240234, 0.049570785522460935, 0.04944060897827148, 0.049152000427246094, 0.04956288146972656, 0.04908313751220703, 0.04966835021972656, 0.05012179183959961, 0.05042041778564453, 0.04959008026123047, 0.05030335998535156, 0.05010835266113281, 0.049632896423339845, 0.04931577682495117, 0.049563072204589845, 0.04930598449707031, 0.04877363204956055, 0.0495300178527832, 0.04968329620361328, 0.04946710586547851, 0.04911743927001953, 0.04973379135131836, 0.049237857818603514, 0.04962684631347656, 0.04960211181640625, 0.05019724655151367, 0.0502743034362793, 0.04981350326538086, 0.05001638412475586, 0.04988300704956055, 0.05034214401245117, 0.04981126403808594, 0.05313536071777344, 0.0499323844909668, 0.0495136947631836, 0.04890687942504883, 0.0488551025390625, 0.04864131164550781, 0.04881891250610351, 0.04900864028930664, 0.050231201171875, 0.048955486297607424, 0.0489114875793457, 0.04885184097290039, 0.04885094451904297, 0.04913100814819336, 0.049373695373535156, 0.05044575881958008, 0.0492242546081543, 0.04927897644042969, 0.05003424072265625, 0.050375102996826175, 0.05010432052612305, 0.049748096466064456, 0.05015334320068359, 0.04960870361328125, 0.0491638069152832, 0.049092063903808596, 0.0488040657043457, 0.049599262237548826, 0.04935475158691406, 0.04954908752441406, 0.04920137786865234, 0.049318977355957035, 0.048774078369140626, 0.04915017700195313, 0.05011177444458008, 0.049837566375732424, 0.049339393615722656, 0.050135040283203126, 0.0498383674621582, 0.0504378547668457, 0.049998046875, 0.04984604644775391, 0.04978470230102539, 0.049696510314941406, 0.04949983978271484, 0.049651966094970704, 0.04950265502929688, 0.04926572799682617, 0.049193920135498045, 0.05011251068115234, 0.04983526229858398, 0.04946201705932617, 0.04931180953979492, 0.04990560150146484, 0.04955750274658203, 0.0497786865234375, 0.04945116806030273, 0.05036825561523438, 0.04977840042114258, 0.050411903381347656, 0.04983820724487305, 0.04952051162719726, 0.04936265563964844, 0.05287308883666992, 0.049579326629638674, 0.04840492630004883, 0.048675071716308596, 0.04857827377319336, 0.0486607666015625, 0.04842700958251953, 0.04914790344238281, 0.04912947082519531, 0.04892649459838867, 0.049194881439208984, 0.04918102264404297, 0.048783233642578125, 0.04878553771972656, 0.049121280670166016, 0.04954476928710937, 0.04881657409667969, 0.04948191833496094, 0.050114368438720705, 0.050329185485839846, 0.05011705780029297, 0.04959603118896484, 0.04933257675170898, 0.049242111206054685, 0.0487501449584961, 
0.04867283248901367, 0.04935494232177735, 0.049307361602783206, 0.04901884841918945, 0.049737953186035154, 0.04941632080078125, 0.04896521759033203, 0.04926726531982422, 0.050049022674560545, 0.04923392105102539, 0.0494202880859375, 0.049401695251464844, 0.05026652908325195, 0.04967158508300781, 0.05041392135620117, 0.049747936248779295, 0.0496473274230957, 0.04948614501953125, 0.0493460807800293, 0.04916851043701172, 0.04997359848022461, 0.04962639999389649, 0.04911587142944336, 0.04939753723144531, 0.04924160003662109, 0.04962691116333008, 0.04934137725830078, 0.04967164611816406, 0.049712799072265626, 0.04955670547485352, 0.04973916625976563, 0.05017808151245117, 0.050003456115722655, 0.05042879867553711, 0.050299808502197264, 0.0497325439453125, 0.04954111862182617, 0.04925795364379883, 0.053991424560546876, 0.04981350326538086, 0.05034931182861328, 0.04864863967895508, 0.04856659317016602, 0.048624961853027344, 0.04864070510864258, 0.04849049758911133, 0.048661983489990235, 0.04930204772949219, 0.0490885124206543, 0.0489835205078125, 0.04926518249511719, 0.0490332145690918, 0.04918476867675781, 0.04908236694335937, 0.048934112548828124, 0.04941046524047851, 0.04989129638671875, 0.050614688873291014, 0.050355838775634765, 0.05000844955444336, 0.04955279922485351, 0.05139926528930664, 0.04849663925170898, 0.04858809661865234, 0.04895564651489258, 0.048836193084716796, 0.048649055480957035, 0.049893665313720706, 0.049411552429199215, 0.049473793029785156, 0.04911734390258789, 0.049454559326171876, 0.049142143249511716, 0.04931929779052734, 0.05004867172241211, 0.04952572631835937, 0.0500015983581543, 0.05000540924072266, 0.04974275207519531, 0.05039011383056641, 0.050275230407714845, 0.04975769424438477, 0.051087871551513675, 0.04899212646484375, 0.04914985656738281, 0.048989822387695316, 0.04995852661132812, 0.04971139144897461, 0.04935299301147461, 0.049258975982666015, 0.04918265533447266, 0.04978428649902344, 0.04928278350830078, 0.0496506233215332, 0.0502982063293457, 0.04995945739746094, 0.0502658576965332, 0.05032371139526367, 0.05034364700317383, 0.05049782562255859, 0.05011260986328125]",tokens/s,20.21117109793448,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4020.248576,4435.345408,0.0,4051.697664,3779.100672,s,1,9.717603515625,9.717603515625,0.0,9.717603515625,9.717603515625,9.717603515625,9.717603515625,[9.717603515625],,kWh,7.369995470833904e-05,8.122436377960391e-06,2.4535297405980927e-05,0.00010635768849228036,,MB,1998.184448,4573.75744,0.0,4158.652416,4007.960576,s,10,3.3254024963378908,0.3325402496337891,0.0041422438547234364,0.3341753387451172,0.3354868408203125,0.33548914794921875,0.33549099365234375,"[0.32105389404296875, 0.33255642700195315, 0.3301783447265625, 0.33246353149414065, 0.3339721374511719, 0.3349423828125, 0.335491455078125, 0.335486328125, 0.3343785400390625, 
0.33487945556640625]",tokens/s,769.8316227341525,kWh,9.542196640590952e-06,1.0523283282664413e-06,6.364745235161854e-06,1.695927020401925e-05,tokens/kWh,15094989.166416459,MB,2008.768512,4657.64352,0.0,4263.510016,4124.249088,s,10,19.155225708007812,1.9155225708007813,0.003654266585016025,1.9142969970703123,1.917348193359375,1.9214446655273438,1.9247218432617188,"[1.9164378662109376, 1.913582763671875, 1.9134342041015624, 1.9255411376953124, 1.9132388916015626, 1.9117025146484374, 1.9163616943359374, 1.9163326416015625, 1.9144207763671874, 1.9141732177734374]",tokens/s,32.889197423376196,kWh,5.3407206017739996e-05,5.890728476690482e-06,3.524867694443644e-05,9.45466114388669e-05,tokens/kWh,666338.0002860844,,s,630,19.151747722625736,0.030399599559723385,0.0007076733891106615,0.030278111457824705,0.030828188705444334,0.03126022701263428,0.034402718887329105,"[0.03442291259765625, 0.031465152740478515, 0.030245183944702148, 0.029829120635986327, 0.029714336395263673, 0.030287967681884766, 0.03037001609802246, 0.030115264892578125, 0.029833568572998046, 0.02993561553955078, 0.0303023681640625, 0.02984844779968262, 0.029709280014038084, 0.030451711654663087, 0.03023276710510254, 0.02997228813171387, 0.029638559341430663, 0.03125020790100098, 0.03042950439453125, 0.030479679107666014, 0.030123008728027343, 0.030002880096435546, 0.029646848678588866, 0.030267391204833984, 0.03040880012512207, 0.03032771110534668, 0.030212703704833983, 0.030529951095581053, 0.030129215240478516, 0.030115007400512695, 0.030592031478881836, 0.030620384216308593, 0.030877471923828125, 0.031053472518920898, 0.030606943130493162, 0.030550880432128905, 0.030345439910888672, 0.030656415939331053, 0.030772735595703125, 0.030008127212524414, 0.030039743423461916, 0.02989030456542969, 0.03014271926879883, 0.030521343231201172, 0.030468320846557616, 0.030671968460083007, 0.030544576644897462, 0.03051247978210449, 0.030491296768188476, 0.03077324867248535, 0.031143903732299805, 0.03071798324584961, 0.030410751342773438, 0.03054591941833496, 0.030306304931640625, 0.030620800018310548, 0.03023161506652832, 0.030228063583374022, 0.030490848541259767, 0.030382080078125, 0.030582752227783203, 0.030117919921875, 0.02991257667541504, 0.034097152709960936, 0.0316906566619873, 0.030318304061889647, 0.030122560501098634, 0.03034707260131836, 0.030054399490356445, 0.02978611183166504, 0.030103168487548827, 0.030327039718627928, 0.03015692710876465, 0.02958348846435547, 0.029652799606323242, 0.029499456405639647, 0.03125420761108398, 0.030009855270385744, 0.02990470314025879, 0.030162239074707033, 0.030109567642211912, 0.030370624542236328, 0.03019980812072754, 0.03070751953125, 0.030021663665771484, 0.02949273681640625, 0.030167680740356445, 0.030763040542602538, 0.030511104583740234, 0.030038015365600586, 0.02979430389404297, 0.030134271621704102, 0.030422815322875975, 0.030519519805908203, 0.031080415725708008, 0.03071798324584961, 0.03062339210510254, 0.03047443199157715, 0.030730016708374025, 0.030349695205688476, 0.030145631790161134, 0.02989148712158203, 0.029900800704956054, 0.030355136871337892, 0.030500991821289063, 0.03044166374206543, 0.03016089630126953, 0.029833215713500977, 0.030015487670898438, 0.030470144271850585, 0.030447616577148437, 0.030439231872558595, 0.03020614433288574, 0.030320255279541016, 0.03049510383605957, 0.030228479385375977, 0.030414751052856445, 0.030149824142456056, 0.029999168395996093, 0.029789024353027344, 0.03063199996948242, 0.031827903747558596, 0.030253055572509766, 0.030693376541137695, 
0.030713855743408205, 0.030629024505615235, 0.034670398712158206, 0.031703359603881834, 0.030473344802856444, 0.03017526435852051, 0.03063465690612793, 0.030134271621704102, 0.030294015884399415, 0.030113792419433592, 0.02983526420593262, 0.030068288803100585, 0.03039481544494629, 0.029978239059448242, 0.02959779167175293, 0.03010348892211914, 0.03022662353515625, 0.0303591365814209, 0.029913055419921876, 0.02981875228881836, 0.030046880722045897, 0.03028384017944336, 0.029820703506469728, 0.030056575775146484, 0.030328927993774416, 0.029865983963012696, 0.029999103546142578, 0.030170495986938477, 0.0300263671875, 0.029973791122436522, 0.03033932876586914, 0.030325216293334963, 0.030801919937133788, 0.03082035255432129, 0.031080480575561523, 0.030988256454467775, 0.030636032104492186, 0.03030348777770996, 0.030309215545654297, 0.0305314884185791, 0.030199840545654298, 0.030544960021972656, 0.03026118469238281, 0.030259647369384766, 0.030205856323242186, 0.03050262451171875, 0.030041215896606445, 0.030219423294067384, 0.030156991958618165, 0.03049078369140625, 0.02989084815979004, 0.030062591552734375, 0.030895488739013672, 0.030330944061279296, 0.029772607803344727, 0.029927391052246094, 0.03073206329345703, 0.03067910385131836, 0.030310335159301757, 0.030318592071533205, 0.03016422462463379, 0.030051071166992186, 0.030515199661254884, 0.03070518493652344, 0.030619775772094727, 0.03435327911376953, 0.03184156799316406, 0.03050569534301758, 0.03017420768737793, 0.030036191940307617, 0.030042272567749023, 0.03085523223876953, 0.031555456161499025, 0.0296345272064209, 0.029934303283691406, 0.030418399810791016, 0.030232576370239257, 0.029764127731323243, 0.030076927185058593, 0.030461919784545897, 0.030111967086791994, 0.030193471908569337, 0.03037385559082031, 0.030009376525878907, 0.02997763252258301, 0.030183616638183593, 0.029974559783935546, 0.029980512619018556, 0.030282655715942384, 0.02999295997619629, 0.02992665672302246, 0.030181983947753906, 0.030058656692504883, 0.032952320098876955, 0.030420032501220703, 0.030722496032714843, 0.030861824035644532, 0.03129548835754394, 0.03384931182861328, 0.030400543212890624, 0.030344608306884766, 0.030745439529418946, 0.030693119049072265, 0.030481887817382813, 0.029953920364379882, 0.029872800827026365, 0.030181407928466797, 0.032492897033691404, 0.030745216369628906, 0.030468095779418947, 0.03016294479370117, 0.03023593521118164, 0.03058252716064453, 0.03045680046081543, 0.030719648361206053, 0.030634336471557617, 0.030330495834350588, 0.030232959747314454, 0.030504959106445313, 0.030203903198242187, 0.030164575576782225, 0.03003843116760254, 0.030414848327636718, 0.030611455917358397, 0.03056435203552246, 0.03045894432067871, 0.030731199264526367, 0.03057155227661133, 0.034441856384277346, 0.03176780891418457, 0.031062591552734376, 0.030026016235351564, 0.029962240219116212, 0.03030201530456543, 0.030285247802734373, 0.03006121635437012, 0.02979439926147461, 0.030101503372192383, 0.030152191162109376, 0.030113567352294923, 0.029867008209228517, 0.030129888534545898, 0.03019161605834961, 0.0304333438873291, 0.029752351760864257, 0.029702592849731445, 0.030138336181640624, 0.03041535949707031, 0.029771839141845703, 0.029743040084838867, 0.03034886360168457, 0.03009756851196289, 0.030299808502197264, 0.03000998306274414, 0.029728799819946288, 0.029709888458251954, 0.030654336929321287, 0.030571039199829102, 0.03076652717590332, 0.03133200073242187, 0.03110188865661621, 0.030781408309936524, 0.030443519592285157, 0.03052694320678711, 
0.030247264862060547, 0.03041279983520508, 0.030635616302490235, 0.030747167587280272, 0.030219423294067384, 0.030499839782714845, 0.030125375747680663, 0.029927167892456055, 0.029970432281494142, 0.030208864212036134, 0.02997452735900879, 0.02987808036804199, 0.03063417625427246, 0.030644256591796874, 0.030541055679321288, 0.03036796760559082, 0.030242496490478516, 0.030095264434814452, 0.030170015335083008, 0.030682144165039064, 0.030522335052490236, 0.030488576889038086, 0.029909120559692384, 0.029894527435302735, 0.030232416152954102, 0.030361759185791017, 0.030676992416381835, 0.034423328399658205, 0.03205235290527344, 0.03057052803039551, 0.030208223342895506, 0.029928064346313475, 0.03021932792663574, 0.030000064849853517, 0.029967552185058595, 0.030204736709594726, 0.03001103973388672, 0.029944416046142577, 0.030119392395019533, 0.030201728820800782, 0.02979471969604492, 0.030068416595458985, 0.030290239334106444, 0.02980415916442871, 0.029990432739257812, 0.030257152557373046, 0.030161760330200196, 0.029743104934692382, 0.029886463165283202, 0.030195104598999024, 0.03029596710205078, 0.02998940849304199, 0.029882080078125, 0.030314943313598634, 0.03025904083251953, 0.030062368392944336, 0.030498367309570312, 0.03092460823059082, 0.03126515197753906, 0.031043392181396484, 0.030808895111083985, 0.030727167129516602, 0.030321727752685546, 0.030151968002319337, 0.030609760284423828, 0.030013248443603514, 0.029791744232177734, 0.030280160903930663, 0.030319135665893556, 0.030074880599975585, 0.030299808502197264, 0.030468223571777343, 0.030374111175537108, 0.030709983825683594, 0.03023459243774414, 0.030336320877075194, 0.030361536026000977, 0.029870080947875976, 0.029856319427490233, 0.030341119766235353, 0.030155807495117187, 0.02981987190246582, 0.02982111930847168, 0.03072719955444336, 0.030820383071899413, 0.030098304748535157, 0.030580608367919922, 0.03011737632751465, 0.030226911544799805, 0.030262943267822265, 0.034188095092773436, 0.03195289611816406, 0.03058291244506836, 0.03022425651550293, 0.02993356704711914, 0.03016499137878418, 0.030275583267211914, 0.029629535675048828, 0.030067007064819337, 0.030328639984130858, 0.03012380790710449, 0.029903455734252928, 0.029832799911499022, 0.030284608840942383, 0.030291168212890626, 0.02966534423828125, 0.03221686553955078, 0.0304201602935791, 0.029902591705322265, 0.030255104064941408, 0.03028976058959961, 0.029825183868408205, 0.02982707214355469, 0.030418943405151368, 0.03015033531188965, 0.029779712677001954, 0.030058048248291017, 0.030196735382080078, 0.030087167739868165, 0.029886432647705078, 0.03068880081176758, 0.03090073585510254, 0.030887168884277345, 0.031030015945434572, 0.030743871688842773, 0.030655168533325197, 0.030623296737670898, 0.030807519912719728, 0.030247903823852538, 0.03018332862854004, 0.030238815307617187, 0.030345216751098632, 0.030468095779418947, 0.03027280044555664, 0.030597759246826173, 0.030529632568359374, 0.030725568771362306, 0.030278207778930664, 0.030392288208007812, 0.03045721626281738, 0.03063875198364258, 0.030304256439208983, 0.030566368103027344, 0.030486207962036133, 0.03051350402832031, 0.03015065574645996, 0.0299965763092041, 0.029874719619750977, 0.029911327362060546, 0.03037353515625, 0.03020595169067383, 0.030476287841796876, 0.030717952728271485, 0.035139167785644534, 0.0320024642944336, 0.030470144271850585, 0.030186912536621095, 0.0301549129486084, 0.03039072036743164, 0.029887840270996092, 0.02975017547607422, 0.03025596809387207, 0.030206880569458007, 0.029995008468627928, 
0.03024287986755371, 0.02986182403564453, 0.029401311874389647, 0.03018115234375, 0.030093408584594725, 0.029898815155029297, 0.02958835220336914, 0.03027801513671875, 0.03097660827636719, 0.029863008499145506, 0.030260128021240236, 0.03003824043273926, 0.029671199798583986, 0.030281696319580078, 0.030394016265869142, 0.02989286422729492, 0.029735040664672852, 0.0301977596282959, 0.030663936614990235, 0.030800640106201174, 0.03070988845825195, 0.030643648147583007, 0.03064431953430176, 0.03081865692138672, 0.030512832641601564, 0.03057651138305664, 0.03056483268737793, 0.030437343597412108, 0.030531583786010744, 0.030371423721313476, 0.03045827293395996, 0.03021536064147949, 0.029913183212280273, 0.029823711395263672, 0.030062591552734375, 0.030066688537597655, 0.030244064331054688, 0.030062751770019533, 0.02982566452026367, 0.029935392379760742, 0.03086073684692383, 0.030825183868408202, 0.03074006462097168, 0.030501344680786132, 0.03040870475769043, 0.030288000106811524, 0.030443391799926757, 0.03036185646057129, 0.03024460792541504, 0.031066112518310547, 0.03189967918395996, 0.031165664672851562, 0.034632545471191406, 0.03362223815917969, 0.030539552688598634, 0.029784063339233398, 0.029634559631347656, 0.03035955238342285, 0.030321983337402342, 0.02988313674926758, 0.029736000061035155, 0.030349472045898437, 0.030309247970581054, 0.029904735565185546, 0.02979840087890625, 0.03018275260925293, 0.030171808242797853, 0.030043840408325195, 0.029788703918457032, 0.030381856918334962, 0.029906944274902345, 0.02980659294128418, 0.030457471847534178, 0.030169376373291014, 0.029650367736816407, 0.02986636734008789, 0.030406944274902342, 0.030201471328735352, 0.02979875183105469, 0.03011382484436035, 0.030279424667358397, 0.03015910339355469, 0.0301977596282959, 0.031182079315185546, 0.0316136646270752, 0.031247999191284178, 0.030859296798706054, 0.03047667121887207, 0.030447200775146486, 0.030420480728149416, 0.030152639389038085, 0.030476383209228516, 0.030617759704589843, 0.03071664047241211, 0.030252191543579103, 0.030458911895751953, 0.030197568893432617, 0.030287200927734376, 0.030360223770141602, 0.030266624450683594, 0.030124799728393555, 0.029851839065551757, 0.029967647552490234, 0.030108192443847655, 0.030162111282348632, 0.029893440246582033, 0.03026028823852539, 0.030374847412109374, 0.03042291259765625, 0.030243072509765625, 0.02981808090209961, 0.029886335372924806, 0.030655263900756836, 0.031033344268798828, 0.030811840057373047, 0.034920448303222655, 0.03200985717773437, 0.030865888595581054, 0.030275487899780275, 0.029765344619750975, 0.029892896652221678, 0.030471231460571287, 0.030284736633300783, 0.029961280822753907, 0.029729375839233397, 0.029900896072387696, 0.030380287170410157, 0.029863391876220703, 0.029931552886962892, 0.030188032150268555, 0.03015507125854492, 0.029922527313232423, 0.03004086494445801, 0.030391199111938477, 0.03012428855895996, 0.02967737579345703, 0.02989743995666504, 0.030629087448120117, 0.030522144317626954, 0.030733631134033202, 0.030396543502807617, 0.029892896652221678, 0.029903295516967774, 0.030603103637695313, 0.03061724853515625, 0.03056265640258789, 0.03095961570739746, 0.030988288879394532, 0.03139788818359375, 0.03101103973388672, 0.03069014358520508, 0.030481216430664062, 0.030764863967895507, 0.03046227264404297, 0.030191776275634765, 0.029787071228027345, 0.030217056274414063, 0.030695295333862303, 0.03040096092224121, 0.029935359954833984, 0.029869760513305664, 0.03018921661376953, 0.030034591674804687, 0.029814783096313476, 
0.03017462348937988, 0.03048896026611328, 0.030472415924072266, 0.030248960494995116, 0.030090591430664063, 0.02981065559387207, 0.029940160751342773, 0.030929088592529297, 0.030728416442871095, 0.030278528213500976, 0.030371936798095703, 0.03001228713989258, 0.02993484878540039, 0.029985151290893554]",tokens/s,32.895170149705066,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,2169.253888,2486.10816,0.0,2107.63776,1984.899072,s,1,8.8144833984375,8.8144833984375,0.0,8.8144833984375,8.8144833984375,8.8144833984375,8.8144833984375,[8.8144833984375],,kWh,5.424428525838417e-05,5.9763018791294576e-06,1.7273902708003774e-05,7.749448984551741e-05,,MB,2199.273472,2823.749632,0.0,2415.919104,2256.887808,s,10,1.735695556640625,0.17356955566406249,0.0004596212890163055,0.17345442962646485,0.17400710449218748,0.17439400177001954,0.17470351959228517,"[0.17478089904785157, 0.17301289367675782, 0.17341500854492187, 0.17358131408691407, 0.17349385070800782, 0.17329776000976563, 0.17332313537597657, 0.17392112731933593, 0.1735089874267578, 0.17336058044433594]",tokens/s,1474.9130342620604,kWh,5.1528663482451505e-06,5.678961177005749e-07,3.42743938912333e-06,9.148201855069055e-06,tokens/kWh,27983641.3817377,MB,2214.412288,2865.692672,0.0,2457.862144,2339.93984,s,10,19.12052990722656,1.9120529907226562,0.012359207432674373,1.9121944580078125,1.9239779296875001,1.9306148803710939,1.9359244409179688,"[1.908995361328125, 1.9225030517578126, 1.9153935546875, 1.8950416259765626, 1.9156470947265625, 1.92163134765625, 1.905275146484375, 1.9004888916015625, 1.898302001953125, 1.9372518310546876]",tokens/s,32.94887762299375,kWh,5.549004872341853e-05,6.120737917687838e-06,3.159161640387469e-05,9.320240304498108e-05,tokens/kWh,675948.2367594655,,s,630,19.117521875381463,0.030345272818065826,0.0005584700915796066,0.030308752059936522,0.03076296901702881,0.031010713958740232,0.0323111268234253,"[0.03106768035888672, 0.03059779167175293, 0.030469600677490234, 0.030671743392944335, 0.030220287322998047, 0.03019699287414551, 0.0302476806640625, 0.030318143844604493, 0.03017568016052246, 0.030491968154907227, 0.030253471374511717, 0.030986528396606445, 0.031847776412963866, 0.030677663803100587, 0.030050304412841795, 0.030253055572509766, 0.03034316825866699, 0.030317983627319335, 0.030644800186157225, 0.030861343383789062, 0.029888063430786132, 0.02978019142150879, 0.029738367080688475, 0.029812639236450195, 0.029677824020385744, 0.029803007125854493, 0.029860031127929686, 0.029831167221069335, 0.029740032196044923, 0.030026752471923827, 0.029859935760498047, 0.029895967483520507, 0.029973119735717774, 0.030443519592285157, 0.030228479385375977, 0.030320640563964843, 0.030607295989990235, 0.030521408081054687, 0.030511104583740234, 0.030736383438110353, 0.03043961524963379, 0.030275392532348632, 0.029995008468627928, 0.030060543060302734, 0.030054399490356445, 0.030417951583862304, 0.030307104110717773, 0.030138080596923827, 0.030042591094970702, 0.02997248077392578, 0.029957279205322266, 0.029903711318969725, 0.02999465560913086, 0.030027519226074217, 0.030222944259643555, 0.03068832015991211, 0.03089299201965332, 0.030279680252075194, 0.030504127502441407, 0.030594976425170898, 0.0308822078704834, 0.030503423690795898, 0.03059872055053711, 0.03075071907043457, 0.030143840789794922, 0.030057119369506835, 0.030042112350463866, 0.030015487670898438, 0.030063871383666993, 0.030118528366088866, 0.03034124755859375, 0.030256160736083983, 0.030419008255004883, 0.03050092887878418, 0.030472671508789063, 0.030411136627197265, 0.03057459259033203, 0.03042505645751953, 0.030414880752563475, 0.03085945510864258, 0.030623327255249022, 0.030361888885498046, 0.030528831481933593, 0.030496608734130858, 0.030992864608764648, 0.03054240036010742, 0.03065011215209961, 0.030727167129516602, 0.030485504150390624, 0.030267391204833984, 
0.030320640563964843, 0.030344383239746094, 0.03066963195800781, 0.031498239517211916, 0.030906368255615234, 0.03066044807434082, 0.03067510414123535, 0.03058633613586426, 0.030691392898559572, 0.03059350395202637, 0.030734336853027344, 0.03069523239135742, 0.030820032119750977, 0.03051366424560547, 0.030332704544067383, 0.030240991592407226, 0.03039023971557617, 0.030498815536499024, 0.030380064010620118, 0.03029529571533203, 0.030381919860839844, 0.030384767532348634, 0.030316831588745118, 0.030027776718139648, 0.029892416000366212, 0.03024505615234375, 0.03014409637451172, 0.030536096572875978, 0.03146249580383301, 0.030487455368041993, 0.0309403190612793, 0.03069219207763672, 0.030762943267822265, 0.03068934440612793, 0.030760480880737306, 0.031062623977661134, 0.030873760223388672, 0.03057868766784668, 0.030142112731933592, 0.030048351287841796, 0.02993984031677246, 0.030175008773803712, 0.029862495422363283, 0.032608192443847654, 0.03075926399230957, 0.030394367218017578, 0.03040185546875, 0.03061199951171875, 0.030374048233032226, 0.03123200035095215, 0.030545120239257813, 0.030532384872436525, 0.030379295349121094, 0.030554847717285158, 0.03032268714904785, 0.03119033622741699, 0.030450368881225588, 0.030631231307983398, 0.030009056091308595, 0.030264287948608398, 0.030267711639404296, 0.03027452850341797, 0.02997324752807617, 0.030122976303100586, 0.029897760391235352, 0.030027423858642578, 0.029873855590820314, 0.030009727478027343, 0.029802240371704102, 0.030245376586914063, 0.03029372787475586, 0.030582592010498046, 0.03034886360168457, 0.030623935699462892, 0.030394432067871093, 0.030662912368774414, 0.030484735488891603, 0.03056844711303711, 0.0305328311920166, 0.030532543182373046, 0.03037593650817871, 0.030270719528198244, 0.03019388771057129, 0.030943775177001955, 0.03278643035888672, 0.03042918395996094, 0.030047712326049806, 0.030093856811523437, 0.029995008468627928, 0.02996019172668457, 0.03012601661682129, 0.030238784790039063, 0.03051907157897949, 0.02989619255065918, 0.030005983352661133, 0.03040460777282715, 0.030320032119750977, 0.03021676826477051, 0.029904224395751952, 0.03020364761352539, 0.030183679580688478, 0.03019980812072754, 0.030353408813476562, 0.030274879455566405, 0.030382783889770507, 0.03029167938232422, 0.0304169921875, 0.030231935501098633, 0.030270015716552735, 0.030513408660888672, 0.030234624862670898, 0.029981727600097655, 0.03005539131164551, 0.02999295997619629, 0.029908992767333983, 0.029727968215942382, 0.029864736557006836, 0.02970419120788574, 0.029739007949829102, 0.02968780708312988, 0.029800447463989257, 0.029648895263671874, 0.029968128204345704, 0.030216575622558594, 0.03022220802307129, 0.029833215713500977, 0.029995008468627928, 0.029784416198730467, 0.02976028823852539, 0.029627264022827147, 0.03001753616333008, 0.02980803108215332, 0.029860448837280274, 0.032159744262695314, 0.03015235137939453, 0.029860191345214844, 0.02991641616821289, 0.02970086479187012, 0.029814783096313476, 0.029673471450805664, 0.029899904251098633, 0.029672128677368164, 0.02980473518371582, 0.02976153564453125, 0.029847551345825195, 0.02972572708129883, 0.029754335403442383, 0.02975334358215332, 0.029701696395874024, 0.029845951080322265, 0.029822240829467772, 0.030121824264526368, 0.029835487365722658, 0.031031967163085938, 0.030066688537597655, 0.030265344619750976, 0.03012403106689453, 0.030043487548828126, 0.03162297630310058, 0.030636896133422853, 0.030930944442749023, 0.030428384780883787, 0.030752832412719727, 0.030445247650146483, 
0.030435359954833985, 0.03039983940124512, 0.030259647369384766, 0.029974720001220704, 0.02989641571044922, 0.029966848373413086, 0.03059052848815918, 0.03016534423828125, 0.03004662322998047, 0.029908767700195314, 0.030182783126831054, 0.030543487548828126, 0.03022105598449707, 0.03012019157409668, 0.030250240325927734, 0.029940479278564452, 0.029786336898803712, 0.029853471755981444, 0.029834272384643555, 0.029946847915649413, 0.02995199966430664, 0.030251007080078125, 0.030705663681030275, 0.031197183609008788, 0.03059507179260254, 0.0306212158203125, 0.030625919342041015, 0.030832799911499023, 0.03059712028503418, 0.030763200759887695, 0.030435327529907227, 0.030352575302124023, 0.03010767936706543, 0.03019990348815918, 0.03033888053894043, 0.030558719635009765, 0.030306175231933595, 0.030213888168334962, 0.02987718391418457, 0.03011974334716797, 0.029843456268310548, 0.02979635238647461, 0.02972604751586914, 0.029976800918579103, 0.029905344009399416, 0.030466047286987305, 0.03055411148071289, 0.03071107292175293, 0.030456159591674806, 0.0305579833984375, 0.03444591903686523, 0.032032703399658205, 0.0308155517578125, 0.030655231475830078, 0.03044112014770508, 0.030906719207763673, 0.031497440338134765, 0.030011999130249024, 0.0300382080078125, 0.030248224258422853, 0.03010633659362793, 0.030722015380859374, 0.030260063171386718, 0.030464000701904297, 0.03061555290222168, 0.03055820846557617, 0.03196457672119141, 0.0316135368347168, 0.03049616050720215, 0.030408992767333984, 0.0306998405456543, 0.030414527893066406, 0.0305996150970459, 0.030336896896362306, 0.030552064895629883, 0.030111743927001954, 0.030489919662475586, 0.03070636749267578, 0.030676992416381835, 0.03049283218383789, 0.030613344192504884, 0.030533632278442382, 0.03100057601928711, 0.03186796760559082, 0.03174905586242676, 0.030958911895751954, 0.030806432723999022, 0.030691616058349608, 0.030838783264160157, 0.030752767562866212, 0.030732160568237306, 0.030756992340087892, 0.030894079208374024, 0.030570240020751954, 0.030877952575683595, 0.030617536544799803, 0.030761024475097657, 0.030496320724487304, 0.030726591110229493, 0.030439424514770507, 0.030543071746826172, 0.030262048721313477, 0.030295839309692384, 0.030247135162353514, 0.03019366455078125, 0.029879840850830078, 0.02992323112487793, 0.029930047988891602, 0.029910943984985353, 0.030000288009643553, 0.02992633628845215, 0.029890560150146486, 0.03010313606262207, 0.03064054489135742, 0.03077120018005371, 0.030693056106567383, 0.030335296630859376, 0.029879776000976563, 0.029950496673583984, 0.029718175888061523, 0.029749824523925782, 0.02995792007446289, 0.029848960876464842, 0.029847232818603516, 0.030042816162109375, 0.030727968215942383, 0.030746912002563475, 0.029821887969970703, 0.029843807220458984, 0.029786239624023436, 0.02982147216796875, 0.030070783615112305, 0.03003392028808594, 0.030208000183105467, 0.030346912384033205, 0.030402912139892577, 0.030183744430541993, 0.030352319717407227, 0.030276351928710938, 0.030474239349365235, 0.030218143463134766, 0.030054496765136718, 0.030040063858032227, 0.03009721565246582, 0.0299520320892334, 0.03015670394897461, 0.029883808135986328, 0.02981340789794922, 0.02978220748901367, 0.030006624221801757, 0.029798208236694337, 0.029837919235229493, 0.029843711853027345, 0.029802303314208984, 0.0301296329498291, 0.030621919631958008, 0.030607872009277344, 0.03064784049987793, 0.03028630447387695, 0.03064793586730957, 0.030277599334716798, 0.030910720825195314, 0.03055836868286133, 0.03069715118408203, 
0.03041926383972168, 0.030670848846435547, 0.030111743927001954, 0.03031590461730957, 0.030073471069335937, 0.03049180793762207, 0.03035251235961914, 0.030412511825561525, 0.030164512634277343, 0.029987295150756835, 0.029863935470581054, 0.030131744384765624, 0.030226943969726562, 0.03019977569580078, 0.030029823303222656, 0.03063590431213379, 0.03032048034667969, 0.03049497604370117, 0.030435039520263673, 0.030362239837646486, 0.03037343978881836, 0.03053171157836914, 0.030480384826660156, 0.030975648880004883, 0.03029007911682129, 0.03014179229736328, 0.030372447967529297, 0.030071008682250978, 0.029945695877075195, 0.029693952560424806, 0.02978540802001953, 0.02996281623840332, 0.02982310485839844, 0.02998886489868164, 0.030762624740600587, 0.03055859184265137, 0.030826496124267577, 0.030538911819458007, 0.030468896865844728, 0.030362848281860352, 0.030353279113769532, 0.03010972785949707, 0.0301331844329834, 0.030021247863769532, 0.030275039672851563, 0.030425504684448244, 0.030414655685424806, 0.030228992462158204, 0.030093503952026368, 0.029997055053710937, 0.030068031311035158, 0.029885120391845703, 0.03023052787780762, 0.03013612747192383, 0.030378175735473634, 0.030212448120117186, 0.029867679595947265, 0.029724159240722657, 0.02988697624206543, 0.02978825569152832, 0.029822879791259766, 0.029642847061157225, 0.029894432067871093, 0.029740352630615235, 0.030024192810058595, 0.02983897590637207, 0.030065343856811522, 0.030312448501586913, 0.03058073616027832, 0.030453760147094725, 0.03051091194152832, 0.030334911346435546, 0.030556415557861327, 0.030555871963500975, 0.0302893123626709, 0.030086015701293944, 0.030310400009155275, 0.029911039352416992, 0.030156736373901368, 0.030316608428955078, 0.030162784576416017, 0.02997817611694336, 0.029960735321044922, 0.029847232818603516, 0.03036115264892578, 0.02970707130432129, 0.030621728897094726, 0.030691295623779296, 0.030470176696777342, 0.030401920318603514, 0.030310848236083984, 0.03041708755493164, 0.03019878387451172, 0.030218591690063478, 0.029910783767700195, 0.030446495056152344, 0.030090944290161133, 0.030380319595336915, 0.0300152645111084, 0.030101760864257813, 0.029839359283447265, 0.02990825653076172, 0.029747840881347656, 0.02981692886352539, 0.029796480178833008, 0.029849472045898436, 0.03101900863647461, 0.03278438568115234, 0.030514495849609375, 0.030436031341552733, 0.030320287704467774, 0.030415199279785157, 0.03026915168762207, 0.030365983963012697, 0.030469888687133788, 0.030376192092895507, 0.030240608215332032, 0.030111072540283203, 0.029743936538696288, 0.029726720809936522, 0.02976742362976074, 0.030028032302856444, 0.02996767997741699, 0.02991379165649414, 0.02979216003417969, 0.029716575622558594, 0.029483007431030273, 0.029832576751708983, 0.029604576110839845, 0.030408607482910157, 0.030457183837890624, 0.031844383239746095, 0.030589599609375, 0.03022972869873047, 0.029993728637695314, 0.02975334358215332, 0.029645952224731445, 0.02979724884033203, 0.02969772720336914, 0.029808351516723633, 0.029803104400634765, 0.02959564781188965, 0.029681312561035157, 0.029628768920898437, 0.02958336067199707, 0.029883615493774413, 0.02978416061401367, 0.02990764808654785, 0.029797439575195313, 0.030674943923950194, 0.030463199615478515, 0.030424863815307616, 0.030471168518066406, 0.030374080657958984, 0.03057833671569824, 0.030378143310546876, 0.030822399139404297, 0.030971904754638672, 0.030719999313354493, 0.030455104827880858, 0.030644927978515625, 0.03066009521484375, 0.030629568099975586, 0.030493024826049805, 
0.030591455459594727, 0.030482431411743165, 0.030619647979736327, 0.030518976211547852, 0.030867776870727538, 0.03054182434082031, 0.03068441581726074, 0.03053366470336914, 0.03064291191101074, 0.03142860794067383, 0.030697471618652345, 0.030176576614379884, 0.030406879425048827, 0.030344863891601563, 0.030558271408081053, 0.030513919830322266, 0.03047603225708008, 0.03054172706604004, 0.030609760284423828, 0.030547967910766603, 0.030687231063842774, 0.03097804832458496, 0.030519296646118164, 0.035633056640625, 0.03513935852050781, 0.031072576522827147, 0.03138355255126953, 0.03037151908874512, 0.030582719802856446, 0.030427520751953124, 0.030449663162231445, 0.030184736251831056, 0.030114143371582032, 0.029906784057617188, 0.029671968460083006, 0.02982707214355469, 0.029750368118286134, 0.0312957763671875, 0.031915807723999025, 0.030251871109008788, 0.03013983917236328, 0.03040937614440918, 0.030409664154052735, 0.030264223098754883, 0.03048886489868164, 0.030806848526000977, 0.03237295913696289, 0.031296255111694336]",tokens/s,32.9540619389211,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 57, in launch raise RuntimeError(f""Isolated process exited with non-zero code {isolated_process.exitcode}"") RuntimeError: Isolated process exited with non-zero code -9 " 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1301.409792,1085.210624,0.0,706.740224,675.13344,s,1,8.295859375,8.295859375,0.0,8.295859375,8.295859375,8.295859375,8.295859375,[8.295859375],,kWh,3.7229761741673427e-05,4.099526151185418e-06,1.2231954230029762e-05,5.3561242122888603e-05,,MB,1362.333696,1403.977728,0.0,996.1472,942.731264,s,10,0.34586979675292967,0.034586979675292964,0.00027697715567381,0.03452019309997559,0.03478697814941406,0.035076289367675775,0.03530773834228515,"[0.0353656005859375, 0.03451801681518555, 0.03472268676757812, 0.034364673614501955, 0.034372161865234375, 0.034527904510498045, 0.03444595336914062, 0.03446873474121094, 0.03456169509887695, 0.034522369384765626]",tokens/s,7401.629237457595,kWh,1.0847740577603184e-06,1.1963046159972966e-07,7.20151340267668e-07,1.9245558596277156e-06,tokens/kWh,133017703.13360529,MB,1391.06304,1412.366336,0.0,1004.535808,942.733824,s,10,15.406380859375,1.5406380859375,0.00800279734248737,1.5395208740234376,1.5534908325195311,1.5542131286621095,1.554790965576172,"[1.546219482421875, 1.53852490234375, 1.553330322265625, 1.5330682373046876, 1.540516845703125, 1.533992919921875, 1.541059814453125, 1.5330107421875, 1.5549354248046876, 1.53172216796875]",tokens/s,40.89214759458813,kWh,4.483736391431977e-05,4.9451845287065935e-06,1.852830850293145e-05,6.831085694595781e-05,tokens/kWh,922254.5700142607,,s,630,15.40428860473634,0.02445125175354973,0.0004281711990985916,0.024329071998596193,0.02480334358215332,0.025083141994476314,0.02647872211456299,"[0.024335296630859374, 0.024533056259155275, 0.024349920272827147, 0.024212032318115233, 0.02451068878173828, 0.024208768844604493, 0.024222335815429687, 0.024162303924560546, 0.024139776229858398, 0.024217599868774413, 0.024276992797851563, 0.02467635154724121, 0.02411017608642578, 0.02521999931335449, 0.024410303115844727, 0.024257408142089844, 0.024265663146972656, 0.024383487701416014, 0.024696832656860353, 0.026015775680541992, 0.024395776748657227, 0.02436911964416504, 0.024882463455200194, 0.02453987121582031, 0.024286687850952147, 0.024384031295776366, 0.024383487701416014, 0.027291648864746092, 0.024642911911010743, 0.024470176696777344, 0.02450227165222168, 0.02457804870605469, 0.02433228874206543, 0.024303392410278322, 0.02434623908996582, 0.024631999969482423, 0.02830121612548828, 0.02460585594177246, 0.024445791244506836, 0.024629024505615233, 0.02432793617248535, 0.024920543670654296, 0.02435481643676758, 0.024175840377807616, 0.024875808715820312, 0.02430681610107422, 0.024295743942260743, 0.02430828857421875, 0.024426496505737305, 0.024205087661743164, 0.024407712936401368, 0.024449600219726562, 0.024186880111694335, 0.025133056640625, 0.02426838493347168, 0.02416681671142578, 0.0240611515045166, 0.024311872482299806, 0.02442313575744629, 0.024182783126831055, 0.024162303924560546, 0.024174591064453126, 0.024371200561523438, 0.02557267189025879, 0.024203968048095704, 0.024205631256103515, 0.024169696807861327, 0.024218080520629882, 0.024662015914916992, 0.0257392635345459, 0.024762367248535155, 0.024414207458496092, 0.024262208938598633, 0.024197536468505858, 0.024231744766235352, 0.02432431983947754, 0.024501632690429688, 0.024314495086669923, 0.024241504669189454, 0.024279712677001953, 0.02414918327331543, 0.024205312728881836, 0.024116031646728514, 0.02422127914428711, 0.02434908866882324, 0.02422163200378418, 0.024154176712036134, 0.024424448013305664, 0.024147968292236328, 0.024319999694824217, 0.02461075210571289, 0.02447273635864258, 
0.02435369682312012, 0.025304927825927734, 0.024316064834594725, 0.02435686492919922, 0.02435465621948242, 0.02418636894226074, 0.02453356742858887, 0.024372543334960937, 0.024245023727416992, 0.024426496505737305, 0.02431551933288574, 0.024454944610595702, 0.02428169631958008, 0.02447769546508789, 0.02432204818725586, 0.024327999114990236, 0.024332479476928712, 0.024213504791259766, 0.02444697570800781, 0.024280351638793947, 0.024296159744262694, 0.02421116828918457, 0.02428303909301758, 0.024203647613525392, 0.024229440689086914, 0.024526687622070314, 0.024381824493408203, 0.024398208618164063, 0.024670047760009764, 0.024418272018432618, 0.024376480102539063, 0.02447654342651367, 0.02525526428222656, 0.024973983764648437, 0.02489139175415039, 0.024595712661743162, 0.024406784057617186, 0.02476032066345215, 0.02440950393676758, 0.02445782470703125, 0.024561119079589844, 0.02576361656188965, 0.024677120208740234, 0.0247459831237793, 0.024821632385253905, 0.024973247528076174, 0.02473798370361328, 0.02471731185913086, 0.024677696228027343, 0.028435136795043944, 0.02498150444030762, 0.024713216781616212, 0.024604671478271483, 0.024606719970703125, 0.02485606384277344, 0.024556032180786135, 0.024358495712280274, 0.0244553279876709, 0.02470528030395508, 0.024421407699584962, 0.024262912750244142, 0.024349407196044923, 0.024154111862182616, 0.024303615570068358, 0.024439807891845702, 0.02440083122253418, 0.024507551193237304, 0.025123136520385742, 0.02470694351196289, 0.02448588752746582, 0.02477743911743164, 0.024803327560424804, 0.0246824951171875, 0.0247293758392334, 0.02473187255859375, 0.024688640594482423, 0.024778751373291014, 0.024635391235351564, 0.024762367248535155, 0.02492144012451172, 0.024468128204345702, 0.025327680587768554, 0.024537248611450194, 0.024469440460205077, 0.024364896774291992, 0.024565759658813476, 0.02451456069946289, 0.02414543914794922, 0.02432252883911133, 0.024202367782592774, 0.02433113670349121, 0.024327680587768553, 0.0243023681640625, 0.024254112243652343, 0.024300928115844726, 0.024239904403686525, 0.024286111831665038, 0.02472483253479004, 0.02448044776916504, 0.024252384185791016, 0.025021663665771486, 0.024191776275634767, 0.02426470375061035, 0.024225791931152343, 0.02427903938293457, 0.024204448699951173, 0.024122047424316406, 0.024178848266601563, 0.02439936065673828, 0.024268991470336915, 0.024248159408569336, 0.024280767440795898, 0.024164255142211915, 0.024283903121948242, 0.024191104888916015, 0.024211456298828125, 0.024383071899414063, 0.02430544090270996, 0.024377119064331054, 0.024212032318115233, 0.024322336196899413, 0.025277856826782227, 0.02432195281982422, 0.024197824478149416, 0.024246271133422852, 0.02457708740234375, 0.02425040054321289, 0.02437603187561035, 0.024256704330444336, 0.02434377670288086, 0.024297887802124024, 0.024265087127685547, 0.024242176055908202, 0.02424831962585449, 0.02425651168823242, 0.02427289581298828, 0.024122655868530272, 0.024472288131713867, 0.02475609588623047, 0.024369279861450197, 0.024224895477294922, 0.024271743774414063, 0.024236032485961914, 0.02417990493774414, 0.024178592681884766, 0.0245134391784668, 0.024321983337402344, 0.024324159622192382, 0.02411929512023926, 0.024487295150756837, 0.025303680419921874, 0.024196704864501952, 0.024170047760009767, 0.024150815963745118, 0.02432931137084961, 0.02412544059753418, 0.02417967987060547, 0.024176639556884767, 0.024371200561523438, 0.024262655258178712, 0.024358047485351562, 0.02532643127441406, 0.024162559509277343, 0.02421855926513672, 
0.02480415916442871, 0.02448918342590332, 0.024435232162475586, 0.02465203285217285, 0.024553247451782226, 0.02450444793701172, 0.024332256317138673, 0.024205440521240233, 0.024238079071044923, 0.0248152961730957, 0.02413759994506836, 0.024428991317749022, 0.024219648361206055, 0.024205055236816406, 0.0242193603515625, 0.024293920516967774, 0.024294559478759765, 0.024196992874145506, 0.024249120712280272, 0.02416249656677246, 0.02435686492919922, 0.02432204818725586, 0.024498176574707032, 0.024387840270996095, 0.024612607955932616, 0.02490108871459961, 0.024601119995117188, 0.02474393653869629, 0.024795135498046874, 0.024465408325195313, 0.024541088104248047, 0.02435286331176758, 0.024352767944335937, 0.024375295639038085, 0.02456166458129883, 0.0242872314453125, 0.024389631271362306, 0.02429465675354004, 0.02553891181945801, 0.024344959259033204, 0.024414016723632814, 0.024407424926757813, 0.025303903579711913, 0.024456607818603517, 0.02444758415222168, 0.02452889633178711, 0.02445919990539551, 0.024446271896362306, 0.02424515151977539, 0.02432534408569336, 0.024267295837402343, 0.024307039260864256, 0.02423222351074219, 0.024230367660522462, 0.024373247146606446, 0.024594432830810548, 0.02432953643798828, 0.024304319381713867, 0.024418304443359375, 0.024374847412109376, 0.024222143173217775, 0.02447065544128418, 0.02455567932128906, 0.02420195198059082, 0.024116640090942384, 0.02422649574279785, 0.0242457275390625, 0.0240928955078125, 0.024112768173217773, 0.02401545524597168, 0.024105024337768555, 0.024227136611938475, 0.024173183441162108, 0.024188159942626953, 0.024328832626342774, 0.024375104904174806, 0.024179264068603514, 0.024198911666870118, 0.024597503662109374, 0.0242445125579834, 0.02415279960632324, 0.024225791931152343, 0.02450227165222168, 0.0242457275390625, 0.0242806396484375, 0.024216415405273438, 0.024311935424804688, 0.024206783294677733, 0.02428166389465332, 0.024160287857055665, 0.024450464248657225, 0.024410688400268554, 0.024227840423583984, 0.02427903938293457, 0.02429747200012207, 0.02434377670288086, 0.026665279388427734, 0.024537055969238282, 0.02427136039733887, 0.02425651168823242, 0.024264415740966796, 0.024270559310913088, 0.024308191299438477, 0.024366975784301758, 0.024325759887695312, 0.024363616943359374, 0.024375295639038085, 0.024888736724853516, 0.024244735717773438, 0.024234079360961915, 0.024170495986938476, 0.024169759750366213, 0.024926944732666014, 0.02422684860229492, 0.024219648361206055, 0.024241119384765624, 0.02434169578552246, 0.024202047348022462, 0.02420240020751953, 0.025342687606811524, 0.024272991180419923, 0.02425551986694336, 0.02429974365234375, 0.024338111877441407, 0.024187040328979493, 0.024318111419677733, 0.02415350341796875, 0.02423222351074219, 0.02448953628540039, 0.024339168548583985, 0.02429955291748047, 0.024303455352783204, 0.024190656661987303, 0.024389120101928712, 0.025127904891967773, 0.024526847839355468, 0.02437238311767578, 0.024251136779785156, 0.0243438720703125, 0.02431161689758301, 0.024253536224365234, 0.024323680877685546, 0.02424860763549805, 0.024360960006713867, 0.02429305648803711, 0.02431622314453125, 0.024276992797851563, 0.024623104095458984, 0.024313312530517578, 0.024308256149291992, 0.024270847320556642, 0.02428927993774414, 0.02428108787536621, 0.024272991180419923, 0.024500127792358398, 0.024493440628051758, 0.024241952896118163, 0.024312671661376954, 0.025188352584838865, 0.024395776748657227, 0.024487743377685545, 0.024186912536621093, 0.024160415649414062, 0.027264352798461913, 
0.024944320678710937, 0.024601247787475584, 0.024396064758300782, 0.024363040924072266, 0.024343616485595704, 0.024265056610107423, 0.024359519958496095, 0.024325471878051758, 0.024425119400024415, 0.024868864059448242, 0.024373247146606446, 0.0245166072845459, 0.024334272384643554, 0.02437126350402832, 0.02433228874206543, 0.02448307228088379, 0.02435148811340332, 0.02424831962585449, 0.02591948890686035, 0.024584192276000977, 0.02429132843017578, 0.02447657585144043, 0.024200576782226563, 0.0242346248626709, 0.024129535675048826, 0.02412748718261719, 0.024234048843383788, 0.024327680587768553, 0.024209760665893556, 0.02414806365966797, 0.02453913688659668, 0.024203264236450195, 0.024188928604125977, 0.0241810245513916, 0.024234815597534178, 0.024124319076538087, 0.024233503341674803, 0.024230367660522462, 0.024413440704345705, 0.024322656631469725, 0.02413590431213379, 0.024327104568481445, 0.024302560806274413, 0.02437721633911133, 0.024103071212768553, 0.024190975189208985, 0.024227840423583984, 0.024137184143066405, 0.024270847320556642, 0.024240671157836916, 0.025655296325683592, 0.02467430305480957, 0.0242872314453125, 0.024214752197265627, 0.024363008499145508, 0.024298271179199218, 0.0241778564453125, 0.02431862449645996, 0.024227487564086915, 0.024098304748535155, 0.024215999603271483, 0.02425913619995117, 0.024365055084228517, 0.024553216934204102, 0.02457961654663086, 0.024138015747070314, 0.024688735961914062, 0.02422400093078613, 0.02440755271911621, 0.024123104095458984, 0.024491104125976562, 0.02463417625427246, 0.02510710334777832, 0.024312128067016603, 0.02433945655822754, 0.024300128936767577, 0.024299936294555666, 0.02422524833679199, 0.024311904907226563, 0.024359359741210937, 0.024385311126708983, 0.024231967926025392, 0.02441206359863281, 0.024447263717651366, 0.0249466552734375, 0.0258023681640625, 0.02491644859313965, 0.025419200897216797, 0.02480348777770996, 0.02490131187438965, 0.024811807632446288, 0.02480073547363281, 0.02462175941467285, 0.02475859260559082, 0.024332128524780273, 0.024293535232543944, 0.024356191635131835, 0.02448246383666992, 0.024455039978027344, 0.02431398391723633, 0.024215551376342775, 0.024337823867797852, 0.024521312713623046, 0.025024511337280272, 0.02482348823547363, 0.025467199325561525, 0.02430771255493164, 0.024387584686279298, 0.024172544479370117, 0.02436848068237305, 0.025053855895996093, 0.026398815155029298, 0.02465337562561035, 0.024547679901123047, 0.0242872314453125, 0.024190240859985352, 0.02424275207519531, 0.02421798324584961, 0.024562976837158204, 0.024328096389770508, 0.024357471466064453, 0.02476851272583008, 0.02651136016845703, 0.024284671783447266, 0.02445158386230469, 0.02437446403503418, 0.02478163146972656, 0.024710432052612304, 0.024936479568481447, 0.024817695617675783, 0.025018463134765623, 0.02654982376098633, 0.024695871353149414, 0.02468217658996582, 0.024594240188598633, 0.024533439636230468, 0.024673376083374023, 0.024413087844848632, 0.024382495880126955, 0.02424496078491211, 0.02432758331298828, 0.024453983306884766, 0.024467456817626954, 0.02441334342956543, 0.024395711898803712, 0.02436102485656738, 0.0243986873626709, 0.024561695098876953, 0.024307680130004884, 0.024580127716064454, 0.024856351852416993, 0.0242587833404541, 0.024610815048217775, 0.02465177536010742, 0.02450022315979004, 0.024300735473632814, 0.02417251205444336, 0.024215967178344726, 0.024320159912109375, 0.024277280807495118, 0.02414918327331543, 0.02427587127685547, 0.02426652717590332, 0.024214784622192384, 0.024156864166259766, 
0.024190271377563476, 0.024185728073120118, 0.024203264236450195, 0.024366207122802734, 0.024318527221679688, 0.024262975692749024, 0.024186880111694335, 0.024245664596557616, 0.024238208770751953, 0.024238208770751953, 0.024217504501342774, 0.024221376419067384, 0.02429567909240723, 0.0242359676361084, 0.024339008331298827, 0.02435481643676758, 0.024328384399414062, 0.024479551315307616, 0.024391679763793944, 0.0244715518951416, 0.0243240966796875, 0.024170495986938476, 0.024201215744018553, 0.024387392044067382, 0.024236223220825196, 0.02449612808227539, 0.024887296676635744, 0.024442880630493165, 0.024375295639038085, 0.02430156707763672, 0.024215551376342775, 0.024225791931152343, 0.024242431640625, 0.024082176208496092, 0.024518655776977538, 0.024147968292236328, 0.024311071395874025, 0.024314592361450196, 0.024219648361206055, 0.024176639556884767, 0.02419286346435547, 0.024172704696655275, 0.024143871307373048, 0.024135679244995118, 0.024161632537841798]",tokens/s,40.89770168330234,,, 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1847.76704,2718.892032,0.0,2340.421632,2285.568,s,1,9.1549111328125,9.1549111328125,0.0,9.1549111328125,9.1549111328125,9.1549111328125,9.1549111328125,[9.1549111328125],,kWh,6.062554752914669e-05,6.676919272620522e-06,2.004196047800111e-05,8.734442727976833e-05,,MB,1862.26688,3087.990784,0.0,2680.160256,2578.855936,s,10,0.8394956512451173,0.08394956512451172,0.0005466569905256003,0.08386230087280273,0.08469899291992188,0.0848067756652832,0.08489300186157227,"[0.08491455841064453, 0.08333392333984375, 0.08359808349609375, 0.08373212432861328, 0.08372908782958985, 0.08399247741699219, 0.08308509063720704, 0.08467504119873047, 0.08435814666748047, 0.08407711791992188]",tokens/s,3049.4499836932773,kWh,2.601100544247896e-06,2.8680764648819035e-07,1.7284355027078933e-06,4.616343693443979e-06,tokens/kWh,55455143.074283026,MB,1862.26688,3087.990784,0.0,2680.160256,2578.858496,s,10,16.271067260742186,1.6271067260742185,0.00914318288080362,1.6311510620117189,1.6338862548828126,1.63465595703125,1.6352717187499999,"[1.63228515625, 1.628584228515625, 1.6121324462890625, 1.6354256591796874, 1.606869384765625, 1.6318067626953126, 1.6337152099609376, 1.6274884033203125, 1.630495361328125, 1.6322646484375]",tokens/s,38.71903360144203,kWh,4.652391645158327e-05,5.1313133475030975e-06,2.424697416649375e-05,7.590220396558012e-05,tokens/kWh,830015.4239074405,,s,630,16.268711450576784,0.025823351508852035,0.00041652314541038346,0.025816512107849122,0.026103797721862793,0.026413397789001463,0.02720422792434693,"[0.027130592346191407, 0.02615091133117676, 0.02595430374145508, 0.025999359130859375, 0.025994943618774413, 0.0259116153717041, 0.02613590431213379, 0.025985696792602538, 0.02609699249267578, 0.025886592864990236, 0.02596329689025879, 0.02595840072631836, 0.02592131233215332, 0.026223007202148436, 0.026103647232055664, 0.026085439682006835, 
0.026139743804931642, 0.025955135345458985, 0.02582307243347168, 0.026020000457763672, 0.02592665672302246, 0.025818111419677735, 0.026755071640014647, 0.02574336051940918, 0.025810304641723632, 0.02568780708312988, 0.02572198486328125, 0.025695072174072266, 0.025461376190185545, 0.02553875160217285, 0.02553455924987793, 0.025527839660644532, 0.025780704498291014, 0.025591808319091795, 0.02540652847290039, 0.0254835205078125, 0.025446144104003907, 0.0254486083984375, 0.02541423988342285, 0.02560985565185547, 0.0255534725189209, 0.026009599685668947, 0.025882623672485353, 0.02589286422729492, 0.02591744041442871, 0.02609766387939453, 0.025968639373779297, 0.025862207412719728, 0.025884063720703124, 0.02599171257019043, 0.025982112884521485, 0.026026304244995118, 0.02599103927612305, 0.02594643211364746, 0.025958751678466795, 0.025827199935913085, 0.025922943115234375, 0.025962400436401366, 0.02603094482421875, 0.02588057518005371, 0.025790464401245116, 0.026869760513305665, 0.02595430374145508, 0.026584415435791015, 0.025816736221313478, 0.02659868812561035, 0.025992927551269532, 0.025860095977783205, 0.02594508743286133, 0.025967615127563477, 0.026025983810424806, 0.026017663955688476, 0.026310400009155275, 0.02603046417236328, 0.02614476776123047, 0.026019584655761718, 0.02608768081665039, 0.02616227149963379, 0.026016672134399413, 0.02591744041442871, 0.026275840759277344, 0.02594927978515625, 0.02614159965515137, 0.026858816146850584, 0.02607174491882324, 0.025951744079589844, 0.0260183048248291, 0.026068159103393555, 0.025972864151000977, 0.02591801643371582, 0.026009376525878907, 0.025995616912841798, 0.026032127380371094, 0.026105152130126954, 0.025846464157104492, 0.025802751541137696, 0.02698854446411133, 0.025800703048706054, 0.025829376220703124, 0.025899007797241212, 0.02591334342956543, 0.02571673583984375, 0.02574867248535156, 0.025879360198974608, 0.02625859260559082, 0.025893280029296875, 0.02575814437866211, 0.025540607452392578, 0.0256135368347168, 0.02570524787902832, 0.025540607452392578, 0.02555084800720215, 0.02556662368774414, 0.02546745681762695, 0.025151039123535158, 0.02510483169555664, 0.02505116844177246, 0.025298143386840822, 0.0252271671295166, 0.025381343841552734, 0.02521708869934082, 0.025149791717529298, 0.025333152770996094, 0.025598560333251953, 0.025438047409057616, 0.02524736022949219, 0.026045343399047852, 0.025817087173461914, 0.025727136611938477, 0.025640480041503905, 0.025370752334594727, 0.025132383346557617, 0.026252031326293945, 0.025604192733764648, 0.02525491142272949, 0.025426944732666015, 0.025452192306518556, 0.02529520034790039, 0.025140960693359374, 0.02519798469543457, 0.02533670425415039, 0.025274368286132814, 0.025070911407470704, 0.025174720764160156, 0.025184255599975586, 0.025251840591430662, 0.025167552947998047, 0.025205055236816407, 0.02547427177429199, 0.025376895904541015, 0.025207359313964842, 0.02539299201965332, 0.025374528884887695, 0.025464448928833008, 0.025338687896728516, 0.02531449508666992, 0.02606982421875, 0.02524729537963867, 0.02531372833251953, 0.02554409599304199, 0.025609983444213866, 0.02569664001464844, 0.025520608901977538, 0.02566534423828125, 0.025587327957153322, 0.025835615158081054, 0.025774560928344726, 0.02577177619934082, 0.025796863555908205, 0.026169343948364256, 0.025710687637329102, 0.026701728820800782, 0.025863487243652342, 0.02570515251159668, 0.02565449523925781, 0.025823007583618163, 0.025705472946166992, 0.02573107147216797, 0.025757696151733397, 0.02593756866455078, 0.02565974426269531, 
0.025881599426269532, 0.025712799072265625, 0.02582601547241211, 0.02579849624633789, 0.02560233688354492, 0.025599199295043944, 0.025835552215576173, 0.02581385612487793, 0.026593408584594726, 0.025968639373779297, 0.02602406311035156, 0.025956575393676757, 0.025945375442504883, 0.025853376388549804, 0.02585696029663086, 0.025882976531982422, 0.02589263916015625, 0.025935712814331054, 0.025949888229370117, 0.025846111297607423, 0.02591526412963867, 0.025829183578491212, 0.02594972801208496, 0.025908000946044923, 0.025871679306030272, 0.02589766311645508, 0.027058176040649414, 0.02635366439819336, 0.026094976425170897, 0.026055295944213866, 0.026009599685668947, 0.02586953544616699, 0.025951007843017578, 0.026015743255615235, 0.02578963279724121, 0.025930559158325196, 0.025870336532592773, 0.026048511505126954, 0.025753664016723632, 0.02578201675415039, 0.025714176177978516, 0.025793216705322267, 0.025967775344848634, 0.02596268844604492, 0.025889440536499022, 0.025956352233886718, 0.02603545570373535, 0.026270399093627928, 0.025720895767211913, 0.02582703971862793, 0.025536800384521486, 0.02549760055541992, 0.025618431091308593, 0.025651199340820312, 0.026524927139282225, 0.028223455429077147, 0.025776927947998046, 0.025806848526000976, 0.025748512268066407, 0.02573155212402344, 0.025725439071655275, 0.025640960693359374, 0.02567884826660156, 0.02575667190551758, 0.026731840133666994, 0.025780096054077148, 0.025795391082763672, 0.025792415618896485, 0.02578374481201172, 0.025765695571899415, 0.025817535400390626, 0.026429439544677736, 0.025906463623046876, 0.025955039978027342, 0.02593120002746582, 0.026473215103149414, 0.025994335174560547, 0.02584217643737793, 0.025720800399780273, 0.025725183486938478, 0.02558361625671387, 0.025664735794067382, 0.025588512420654297, 0.025536512374877928, 0.025293855667114257, 0.025068511962890627, 0.024983552932739257, 0.02483344078063965, 0.025037408828735352, 0.024844287872314453, 0.025153535842895508, 0.02489139175415039, 0.02508185577392578, 0.024860607147216798, 0.024853952407836916, 0.02493503952026367, 0.025184192657470704, 0.025089696884155275, 0.02523334312438965, 0.025330144882202147, 0.02622604751586914, 0.0255863037109375, 0.025708351135253906, 0.025469120025634766, 0.02616659164428711, 0.02546963119506836, 0.025517087936401367, 0.025256927490234376, 0.0253986873626709, 0.02538147163391113, 0.02556694412231445, 0.025475360870361327, 0.025556991577148438, 0.02551398468017578, 0.02548284721374512, 0.025500064849853517, 0.02569024085998535, 0.025558464050292967, 0.025868383407592774, 0.025604448318481444, 0.025750688552856445, 0.025752416610717775, 0.025715839385986327, 0.02556403160095215, 0.02569625663757324, 0.025552896499633788, 0.025425312042236328, 0.025354400634765625, 0.025426431655883788, 0.025457632064819335, 0.025504735946655272, 0.025510047912597655, 0.02561756706237793, 0.025304864883422852, 0.026129600524902343, 0.025505887985229493, 0.02648297691345215, 0.025583360671997072, 0.025551519393920898, 0.025404800415039064, 0.02548601531982422, 0.0255446720123291, 0.025665536880493164, 0.02559494400024414, 0.02587487983703613, 0.025646944046020508, 0.025651872634887694, 0.025657344818115234, 0.025673728942871094, 0.02571878433227539, 0.02639379119873047, 0.029094240188598634, 0.025950687408447266, 0.02590444755554199, 0.025733823776245116, 0.02592972755432129, 0.026036224365234374, 0.025994815826416016, 0.02589961624145508, 0.025886560440063478, 0.025802400588989256, 0.025772384643554688, 0.025757728576660158, 0.0259849910736084, 
0.02582476806640625, 0.025778688430786133, 0.028792383193969727, 0.02750035285949707, 0.026251039505004882, 0.02592854309082031, 0.02578553581237793, 0.025801183700561524, 0.02567612838745117, 0.025792512893676758, 0.025753599166870117, 0.02667910385131836, 0.026197471618652345, 0.025895647048950195, 0.02579987144470215, 0.02586092758178711, 0.02561347198486328, 0.025645919799804687, 0.025677919387817383, 0.02576153564453125, 0.025596063613891603, 0.025660640716552736, 0.02557360076904297, 0.025577152252197265, 0.025394144058227538, 0.02536639976501465, 0.0254997444152832, 0.025577407836914062, 0.025572383880615234, 0.02558051109313965, 0.02546227264404297, 0.0258155517578125, 0.025570943832397462, 0.026572799682617186, 0.025797983169555665, 0.02591606330871582, 0.025763423919677734, 0.02592527961730957, 0.025749984741210936, 0.02581737518310547, 0.02588431930541992, 0.025839679718017577, 0.025808895111083984, 0.02602627182006836, 0.025704095840454102, 0.025831647872924805, 0.025835615158081054, 0.027063520431518554, 0.02600796890258789, 0.025956031799316406, 0.026026079177856445, 0.02605072021484375, 0.025820863723754882, 0.025847808837890625, 0.026225408554077147, 0.025978496551513672, 0.026597343444824218, 0.025949760437011717, 0.025858495712280275, 0.02591584014892578, 0.025817087173461914, 0.02590015983581543, 0.025811840057373046, 0.02598636817932129, 0.025864736557006836, 0.02601558494567871, 0.026269535064697265, 0.026018239974975585, 0.025728832244873046, 0.025866239547729493, 0.025839839935302734, 0.025831424713134765, 0.025651199340820312, 0.025654367446899414, 0.025772960662841796, 0.025665536880493164, 0.02560358428955078, 0.02573091125488281, 0.025664159774780274, 0.025680063247680664, 0.02578518486022949, 0.025846271514892577, 0.025811424255371095, 0.025878240585327148, 0.025696544647216796, 0.027234304428100587, 0.026108192443847655, 0.02586515235900879, 0.025692960739135743, 0.025886720657348632, 0.02569606399536133, 0.02591084861755371, 0.026179807662963867, 0.026077600479125978, 0.025707551956176758, 0.025945056915283204, 0.026597408294677733, 0.025973791122436522, 0.025908191680908202, 0.026081279754638673, 0.025804800033569338, 0.02632441520690918, 0.02590287971496582, 0.026030847549438477, 0.025909120559692383, 0.026159263610839843, 0.025944063186645508, 0.025935007095336915, 0.02598179244995117, 0.026005504608154296, 0.02595155143737793, 0.02590585517883301, 0.02584137535095215, 0.026077472686767578, 0.025827327728271485, 0.025881887435913086, 0.02579961585998535, 0.02594803237915039, 0.025865888595581053, 0.02585420799255371, 0.025777952194213867, 0.027056352615356445, 0.02593791961669922, 0.02596611213684082, 0.025778848648071288, 0.02550377655029297, 0.025658336639404297, 0.025586015701293947, 0.026386463165283203, 0.025657791137695313, 0.02553798484802246, 0.025498176574707033, 0.025568479537963866, 0.02552239990234375, 0.02592416000366211, 0.02600134468078613, 0.02545055961608887, 0.02553152084350586, 0.02574425506591797, 0.025380704879760744, 0.025443872451782226, 0.025544704437255858, 0.025641599655151368, 0.025462591171264648, 0.025604223251342772, 0.025540000915527345, 0.02572764778137207, 0.025614336013793947, 0.025818527221679686, 0.02567843246459961, 0.025856000900268555, 0.025579456329345704, 0.025870399475097658, 0.025796192169189453, 0.02574172782897949, 0.02592518424987793, 0.025851743698120117, 0.025805408477783204, 0.02575564765930176, 0.02671615982055664, 0.026029855728149413, 0.02589104080200195, 0.026058752059936522, 0.026259456634521484, 
0.02608332824707031, 0.025845760345458983, 0.025860095977783205, 0.025781343460083008, 0.025907712936401366, 0.025885087966918945, 0.0259051513671875, 0.02581033515930176, 0.025796192169189453, 0.0256409912109375, 0.02576278305053711, 0.02570854377746582, 0.02578563117980957, 0.02575164794921875, 0.025751487731933594, 0.025782976150512695, 0.025847808837890625, 0.02578220748901367, 0.02577004814147949, 0.02578166389465332, 0.02582307243347168, 0.025807167053222658, 0.025850303649902345, 0.02578179168701172, 0.025817440032958983, 0.025770111083984373, 0.025769535064697265, 0.026024383544921877, 0.025845056533813478, 0.025757568359375, 0.02586419105529785, 0.02690336036682129, 0.02608687973022461, 0.025736831665039064, 0.02582819175720215, 0.025916671752929686, 0.025908031463623048, 0.02884351921081543, 0.026077056884765627, 0.02596713638305664, 0.026081375122070313, 0.025884159088134767, 0.025849376678466797, 0.02591638374328613, 0.025790464401245116, 0.025866176605224608, 0.025704000473022463, 0.025772544860839845, 0.025965599060058592, 0.025736160278320312, 0.02570035171508789, 0.025560224533081054, 0.025445215225219725, 0.025221120834350585, 0.02519196891784668, 0.025239744186401368, 0.025332000732421874, 0.025419647216796876, 0.026289888381958008, 0.025775968551635744, 0.025670080184936522, 0.025735040664672852, 0.02572915267944336, 0.02584137535095215, 0.025897247314453125, 0.025886720657348632, 0.026193920135498046, 0.02687385559082031, 0.02580588722229004, 0.025768896102905274, 0.02587001609802246, 0.02567731285095215, 0.02583635139465332, 0.02570854377746582, 0.025913087844848633, 0.025790143966674804, 0.025786943435668945, 0.02573516845703125, 0.02595430374145508, 0.02598684883117676, 0.025984415054321287, 0.025899839401245118, 0.02595235252380371, 0.02555244827270508, 0.025874784469604492, 0.025753599166870117, 0.02607513618469238, 0.025860095977783205, 0.02582271957397461, 0.02582969665527344, 0.025859487533569335, 0.02558380889892578, 0.02570844841003418, 0.02554742431640625, 0.02785487937927246, 0.02584480094909668, 0.02577872085571289, 0.02581670379638672, 0.025979679107666017, 0.02574745559692383, 0.025816320419311523, 0.02575391960144043, 0.02582486343383789, 0.025723743438720702, 0.02597887992858887, 0.026787296295166015, 0.02590508842468262, 0.025729631423950194, 0.025847808837890625, 0.02590243148803711, 0.02595439910888672, 0.025823392868041993, 0.025868703842163086, 0.025856000900268555, 0.025888832092285156, 0.025802688598632814, 0.025831392288208008, 0.026034208297729493, 0.025937471389770508, 0.025848255157470704, 0.025849855422973633]",tokens/s,38.72464035728314,,, 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,791.773184,537.853952,0.0,159.383552,141.760512,s,1,7.665677734375,7.665677734375,0.0,7.665677734375,7.665677734375,7.665677734375,7.665677734375,[7.665677734375],,kWh,1.739781461252126e-05,1.911688780291314e-06,5.643060069976391e-06,2.4952563462788968e-05,,MB,1160.425472,630.12864,0.0,222.298112,184.771584,s,24,0.19141417598724367,0.007975590666135152,6.836737458771063e-05,0.007971375942230224,0.008055766487121582,0.008078140878677368,0.008162211923599243,"[0.007951168060302735, 0.008186271667480469, 0.007896448135375976, 0.007883999824523926, 0.007929376125335693, 0.007905183792114258, 0.007932703971862793, 0.007983871936798096, 0.007885727882385254, 0.00795904016494751, 0.007985151767730713, 0.007983551979064942, 0.0079234881401062, 0.007959199905395507, 0.00805014419555664, 0.008058176040649415, 0.007984896183013917, 0.007949503898620605, 0.007914175987243652, 0.007996895790100097, 0.008027520179748535, 0.007984992027282714, 0.008001024246215821, 0.008081664085388183]",tokens/s,32097.93615499749,kWh,2.3439026773762346e-07,2.5849197541382268e-08,1.3309247595179246e-07,3.933319412307982e-07,tokens/kWh,650849761.1430571,MB,1194.758144,630.12864,0.0,222.298112,184.774144,s,24,9.903674468994138,0.4126531028747558,0.0038970866275595925,0.4117662048339844,0.41466705017089844,0.4213199203491211,0.42564024017333985,"[0.42658013916015625, 0.41120654296875, 0.4101144409179687, 0.41051300048828127, 0.4092403869628906, 0.4224936218261719, 0.41035195922851564, 0.4108104248046875, 0.41123602294921874, 0.4097622375488281, 0.4129798889160156, 0.41197479248046875, 0.4123426208496094, 0.4128636169433594, 0.41315121459960935, 0.4113250427246094, 0.4105936889648438, 0.41165512084960937, 0.4091098937988281, 0.41213641357421876, 0.41466262817382815, 0.4146689453125, 0.4118772888183594, 0.4120245361328125]",tokens/s,152.67060773591496,kWh,1.1901169499595156e-05,1.3124532521006592e-06,4.383788915328133e-06,1.7597411667023947e-05,tokens/kWh,3580071.955585187,,s,1512,9.89292088890076,0.006542937095833834,0.00020275533170362382,0.006507872104644775,0.006608620595932007,0.006682943940162659,0.00737621145725251,"[0.006587135791778565, 0.007636703968048096, 0.00865443229675293, 0.008525823593139649, 0.00865283203125, 0.00781379222869873, 0.006614208221435547, 0.006588223934173584, 0.0065976958274841305, 0.0066061439514160155, 0.007475840091705322, 0.006551551818847656, 0.006547679901123047, 0.006542175769805908, 0.006562911987304688, 0.006509791851043701, 0.006513279914855957, 0.006706975936889649, 0.006613215923309326, 0.006574336051940918, 0.006596352100372314, 0.006586143970489502, 0.006591743946075439, 0.006601471900939942, 0.006570208072662354, 0.006537568092346191, 0.006555168151855468, 0.006514815807342529, 0.006569983959197998, 0.006529248237609863, 0.006581471920013428, 0.006617663860321045, 0.006567935943603515, 0.006555424213409424, 0.006621407985687256, 0.006614143848419189, 0.006610112190246582, 0.006618112087249756, 0.006645472049713135, 0.006672832012176513, 0.006723999977111816, 0.006668416023254395, 0.006645343780517578, 0.0066769919395446775, 0.006643968105316162, 0.006655648231506348, 0.006668288230895996, 0.006762495994567871, 0.006616703987121582, 0.006668831825256348, 0.006622464179992676, 0.006660704135894775, 0.006670239925384521, 0.00668236780166626, 0.006638175964355468, 0.006663936138153076, 0.0066620478630065915, 0.006674752235412597, 0.006706975936889649, 0.006692863941192627, 0.006649856090545654, 0.006696991920471192, 
0.006704927921295166, 0.006633471965789795, 0.0066488637924194335, 0.006638815879821777, 0.006605696201324463, 0.0065606398582458494, 0.006555647850036621, 0.006555647850036621, 0.006714879989624023, 0.006576159954071045, 0.0065428800582885745, 0.006560224056243897, 0.00658457612991333, 0.006541535854339599, 0.006583295822143555, 0.006566559791564942, 0.006558047771453857, 0.0065474557876586915, 0.00656547212600708, 0.006551008224487304, 0.006538176059722901, 0.0065491518974304195, 0.006569632053375244, 0.006580192089080811, 0.00653385591506958, 0.0065474557876586915, 0.006497663974761963, 0.006492800235748291, 0.006463263988494873, 0.006485631942749023, 0.006561984062194824, 0.006480288028717041, 0.0064650559425354, 0.006480127811431885, 0.006452799797058106, 0.006474400043487549, 0.0065146880149841305, 0.006479743957519531, 0.006498688220977783, 0.006490079879760742, 0.006479328155517578, 0.006504447937011719, 0.006455615997314453, 0.006572127819061279, 0.0064774718284606935, 0.006445280075073242, 0.006516416072845459, 0.006479296207427978, 0.0064701118469238286, 0.006468031883239746, 0.006463488101959228, 0.006498655796051025, 0.006487455844879151, 0.006467840194702148, 0.006475776195526123, 0.006476831912994385, 0.006456639766693115, 0.006452896118164062, 0.006494207859039307, 0.006511680126190186, 0.006466495990753174, 0.0064471039772033695, 0.006487840175628662, 0.006457151889801026, 0.006372384071350098, 0.006488863945007324, 0.006446656227111817, 0.006480512142181397, 0.006469632148742676, 0.006444255828857422, 0.006480415821075439, 0.0064924159049987796, 0.006495327949523926, 0.00658735990524292, 0.0064982399940490725, 0.006526976108551025, 0.0065064959526062015, 0.006468607902526856, 0.006537280082702637, 0.006509632110595703, 0.0064691839218139644, 0.006479519844055176, 0.006480000019073486, 0.006488031864166259, 0.006475808143615723, 0.006514944076538086, 0.0064980158805847165, 0.006459455966949463, 0.006476287841796875, 0.006512639999389648, 0.006475776195526123, 0.006516672134399414, 0.006501760005950928, 0.006480576038360596, 0.006524928092956543, 0.006565023899078369, 0.0064951682090759275, 0.006539167881011963, 0.00738483190536499, 0.006539519786834717, 0.006543360233306885, 0.006489471912384033, 0.006501279830932617, 0.00651958417892456, 0.006466752052307129, 0.006552768230438232, 0.0065214080810546875, 0.0064943361282348636, 0.006537087917327881, 0.006551743984222412, 0.006464384078979492, 0.006470848083496094, 0.006444799900054932, 0.0064921917915344235, 0.006457664012908935, 0.006454783916473389, 0.006494592189788819, 0.006427807807922363, 0.006481728076934814, 0.006478623867034912, 0.006478079795837402, 0.006448927879333496, 0.0064633598327636715, 0.006420608043670654, 0.006453536033630371, 0.006419519901275635, 0.006466047763824463, 0.00634611177444458, 0.0071617598533630375, 0.006481632232666016, 0.006487008094787598, 0.0064650559425354, 0.0064774398803710935, 0.006479968070983886, 0.006462240219116211, 0.00647603178024292, 0.006491903781890869, 0.0064778242111206055, 0.006449344158172607, 0.00648524808883667, 0.006486591815948487, 0.006486015796661377, 0.006454304218292236, 0.006448095798492432, 0.006492159843444824, 0.0064737281799316405, 0.006446271896362305, 0.006457695960998535, 0.006458111763000488, 0.006815455913543701, 0.0066847681999206545, 0.0066967358589172365, 0.006506624221801757, 0.006452640056610107, 0.006478432178497314, 0.006448959827423096, 0.006463679790496826, 0.006463488101959228, 0.006464735984802246, 0.006484543800354004, 0.006461311817169189, 
0.00649455976486206, 0.006446623802185058, 0.0064512319564819336, 0.0064926080703735355, 0.006479072093963623, 0.0064560317993164066, 0.0064609599113464354, 0.006478367805480957, 0.006549503803253174, 0.006495456218719483, 0.006460192203521728, 0.006479872226715088, 0.006473919868469238, 0.006739264011383057, 0.006465343952178955, 0.006441664218902588, 0.006481919765472412, 0.006485119819641113, 0.006455679893493652, 0.006530687808990478, 0.00653331184387207, 0.006447455883026123, 0.006463232040405273, 0.006476384162902832, 0.006731776237487793, 0.006544864177703858, 0.006664735794067383, 0.006565887928009034, 0.006465536117553711, 0.0063688640594482425, 0.006464288234710693, 0.00646284818649292, 0.00646998405456543, 0.006483839988708496, 0.0064512319564819336, 0.006480000019073486, 0.006457215785980225, 0.006450911998748779, 0.00644927978515625, 0.006426559925079346, 0.006482336044311523, 0.006551487922668457, 0.0065064959526062015, 0.006582560062408448, 0.006475327968597412, 0.006479135990142822, 0.006468480110168457, 0.0064924159049987796, 0.006479199886322022, 0.006457759857177735, 0.006475776195526123, 0.006483168125152588, 0.006476480007171631, 0.006459487915039063, 0.006529056072235108, 0.006447072029113769, 0.00652288007736206, 0.006498271942138672, 0.0064982399940490725, 0.006578271865844727, 0.006486015796661377, 0.006445055961608887, 0.006477280139923096, 0.0064655041694641115, 0.0065326719284057614, 0.006470335960388184, 0.006455615997314453, 0.006494207859039307, 0.006643455982208252, 0.006457600116729736, 0.006793216228485107, 0.006454368114471435, 0.006598656177520752, 0.00648035192489624, 0.006491648197174072, 0.0064802560806274416, 0.00646611213684082, 0.006464672088623047, 0.006445919990539551, 0.006440959930419922, 0.006467584133148193, 0.0064646081924438475, 0.006448031902313233, 0.006486015796661377, 0.006459392070770263, 0.006465536117553711, 0.006457119941711426, 0.006537439823150634, 0.006510591983795166, 0.0065146880149841305, 0.006510816097259522, 0.006520607948303223, 0.006460383892059326, 0.006473760128021241, 0.00649948787689209, 0.00643507194519043, 0.006522528171539307, 0.006701119899749756, 0.007994080066680908, 0.007450751781463623, 0.007647264003753662, 0.010730976104736329, 0.008782015800476075, 0.006592864036560058, 0.006547103881835938, 0.006536960124969482, 0.006568543910980225, 0.006553599834442139, 0.0065476160049438474, 0.006539103984832764, 0.006522111892700196, 0.00652675199508667, 0.006994847774505615, 0.006544896125793457, 0.006552127838134766, 0.006567935943603515, 0.006490111827850342, 0.00658022403717041, 0.006498303890228272, 0.0064839677810668945, 0.006540512084960938, 0.006541728019714355, 0.006515071868896484, 0.006999807834625244, 0.006545631885528564, 0.006501599788665772, 0.00658457612991333, 0.00651142406463623, 0.00651852798461914, 0.006505856037139893, 0.0065829439163208004, 0.006561376094818115, 0.006494592189788819, 0.006510591983795166, 0.006500351905822754, 0.006496255874633789, 0.006490111827850342, 0.0065268478393554685, 0.00646889591217041, 0.006490975856781006, 0.00653110408782959, 0.00651475191116333, 0.006516416072845459, 0.00648524808883667, 0.006493152141571045, 0.006492159843444824, 0.006502399921417237, 0.0065409598350524905, 0.006649631977081299, 0.0065277438163757326, 0.006528831958770752, 0.006534848213195801, 0.006512351989746094, 0.006510528087615966, 0.006476448059082031, 0.0063788161277770995, 0.006484096050262451, 0.0064579200744628905, 0.006480991840362549, 0.00644598388671875, 0.006457024097442627, 
0.006492479801177979, 0.006561791896820069, 0.006467743873596192, 0.006456960201263428, 0.006453216075897217, 0.0064739837646484374, 0.00655344009399414, 0.006477983951568603, 0.0065064959526062015, 0.006490111827850342, 0.006455103874206543, 0.006709375858306885, 0.006467167854309082, 0.006490560054779053, 0.006469664096832276, 0.0064719681739807125, 0.006522592067718506, 0.0064880638122558594, 0.0065593280792236325, 0.006533472061157226, 0.0065270400047302245, 0.006545407772064209, 0.006551551818847656, 0.006492224216461182, 0.006518720149993896, 0.006503744125366211, 0.006520959854125976, 0.006514976024627686, 0.006508831977844238, 0.006502016067504883, 0.006487711906433106, 0.0064960322380065915, 0.006511551856994629, 0.0065146880149841305, 0.006508543968200684, 0.006526976108551025, 0.006539552211761474, 0.006532832145690918, 0.006516992092132568, 0.00650764799118042, 0.006531424045562744, 0.006501760005950928, 0.006507423877716065, 0.006557695865631104, 0.006508543968200684, 0.006494207859039307, 0.006506303787231446, 0.006488383769989014, 0.006534815788269043, 0.006500576019287109, 0.006492159843444824, 0.0065372161865234375, 0.006533504009246826, 0.0065320320129394534, 0.00652672004699707, 0.006503359794616699, 0.006516736030578613, 0.006410496234893799, 0.006513088226318359, 0.0065426878929138186, 0.0065129919052124025, 0.006489568233489991, 0.006554687976837158, 0.006514815807342529, 0.006500383853912353, 0.006524991989135742, 0.006506207942962646, 0.006543360233306885, 0.0065491838455200195, 0.006543680191040039, 0.0065372161865234375, 0.006512639999389648, 0.006510591983795166, 0.006561600208282471, 0.0065512962341308595, 0.006521279811859131, 0.006547488212585449, 0.006540383815765381, 0.0065831680297851565, 0.006518335819244385, 0.006508992195129394, 0.006619455814361572, 0.006518464088439941, 0.00649129581451416, 0.006521344184875488, 0.006510335922241211, 0.006488671779632568, 0.0065186238288879395, 0.00648198413848877, 0.006542719841003418, 0.006506591796875, 0.006484608173370362, 0.0065064959526062015, 0.006602176189422607, 0.006502975940704345, 0.006499648094177246, 0.006458176136016846, 0.006530943870544433, 0.006520832061767578, 0.006471680164337158, 0.006533120155334473, 0.006526400089263916, 0.0064988799095153805, 0.0065146880149841305, 0.006453248023986816, 0.00650761604309082, 0.00648857593536377, 0.006464128017425537, 0.006518688201904297, 0.006539135932922363, 0.006498303890228272, 0.006507808208465576, 0.006494944095611572, 0.00652288007736206, 0.006492159843444824, 0.00646892786026001, 0.006502079963684082, 0.006501376152038574, 0.006479551792144775, 0.0064618239402770995, 0.006415296077728272, 0.006481887817382813, 0.006477791786193848, 0.006472703933715821, 0.006472320079803467, 0.0064899840354919434, 0.0066052799224853515, 0.00662937593460083, 0.006730016231536865, 0.006542111873626709, 0.006521312236785889, 0.006484447956085205, 0.0065484800338745115, 0.0064702401161193844, 0.006455552101135254, 0.006486112117767334, 0.0064570560455322265, 0.006463935852050781, 0.006459296226501465, 0.006468992233276367, 0.006484608173370362, 0.006508543968200684, 0.006481728076934814, 0.006521344184875488, 0.006471007823944092, 0.006451680183410645, 0.006502272129058838, 0.006475776195526123, 0.006518400192260742, 0.006531455993652344, 0.006483808040618897, 0.006525087833404541, 0.006526976108551025, 0.006782368183135986, 0.00666860818862915, 0.0065716800689697265, 0.0066341118812561035, 0.006496255874633789, 0.006497663974761963, 0.006548096179962158, 
0.006524159908294678, 0.006521599769592285, 0.006653952121734619, 0.006530784130096436, 0.006512864112854004, 0.00660316801071167, 0.006510240077972412, 0.006497695922851563, 0.0065296320915222165, 0.0065372161865234375, 0.00649232006072998, 0.00651584005355835, 0.006505055904388428, 0.006559872150421143, 0.006499328136444092, 0.006506815910339355, 0.006471519947052002, 0.006490880012512207, 0.006542592048645019, 0.006486879825592041, 0.006485824108123779, 0.006516736030578613, 0.006477952003479004, 0.0064011201858520505, 0.00648419189453125, 0.006496607780456543, 0.0065630397796630855, 0.006503071784973145, 0.006477952003479004, 0.006502528190612793, 0.006481855869293213, 0.006494143962860108, 0.0065413122177124024, 0.006504447937011719, 0.006489727973937988, 0.006482367992401123, 0.006464511871337891, 0.0065155520439147945, 0.006495456218719483, 0.00645414400100708, 0.006473855972290039, 0.00650870418548584, 0.006481696128845215, 0.006493951797485352, 0.006457824230194092, 0.006526944160461426, 0.006500351905822754, 0.006489855766296387, 0.006497983932495117, 0.006461215972900391, 0.006470176219940185, 0.006502495765686035, 0.006479775905609131, 0.006591551780700684, 0.006499263763427734, 0.006531136035919189, 0.006500480175018311, 0.006487872123718262, 0.006494431972503662, 0.006516511917114258, 0.006479872226715088, 0.006529024124145508, 0.006492159843444824, 0.006480063915252686, 0.006543168067932129, 0.006500576019287109, 0.006474688053131104, 0.006484511852264404, 0.006465951919555664, 0.006500256061553955, 0.006479616165161133, 0.006668320178985596, 0.006512576103210449, 0.006555935859680176, 0.006445248126983643, 0.006459231853485107, 0.006432735919952392, 0.006483839988708496, 0.0064759039878845214, 0.006458816051483155, 0.0065214080810546875, 0.006511616230010986, 0.006501599788665772, 0.006516448020935059, 0.006502624034881592, 0.006528031826019287, 0.006418752193450928, 0.006481599807739258, 0.006567935943603515, 0.006515903949737549, 0.006474048137664795, 0.006600863933563232, 0.006490464210510254, 0.006463488101959228, 0.006508096218109131, 0.006488096237182617, 0.0064966721534729, 0.006500351905822754, 0.006445216178894043, 0.0065103678703308104, 0.006530144214630127, 0.00649724817276001, 0.006579520225524903, 0.006587071895599365, 0.006534207820892334, 0.006521632194519043, 0.006529024124145508, 0.006531231880187989, 0.006516736030578613, 0.006500351905822754, 0.006504191875457764, 0.006500095844268799, 0.006485631942749023, 0.006525824069976807, 0.0064839677810668945, 0.006498400211334228, 0.006549407958984375, 0.006538911819458008, 0.0065474557876586915, 0.006510655879974365, 0.006498591899871826, 0.006533023834228516, 0.006502624034881592, 0.006555520057678223, 0.006513792037963867, 0.006506944179534912, 0.006553055763244629, 0.006527967929840088, 0.0066293120384216305, 0.0065229439735412596, 0.0065045437812805175, 0.007114848136901855, 0.007272255897521973, 0.006692863941192627, 0.006533120155334473, 0.006524576187133789, 0.006531424045562744, 0.006500351905822754, 0.006543360233306885, 0.006563839912414551, 0.006502399921417237, 0.00652288007736206, 0.006653215885162354, 0.006554624080657959, 0.0065263681411743164, 0.006512800216674805, 0.006625696182250977, 0.006510335922241211, 0.006564000129699707, 0.0064876160621643066, 0.006506847858428955, 0.006535583972930908, 0.006803135871887207, 0.0065651841163635255, 0.006643871784210205, 0.006520639896392822, 0.0065277118682861325, 0.0064973759651184085, 0.006498847961425781, 0.0064802560806274416, 0.00651200008392334, 
0.006486112117767334, 0.006488224029541015, 0.006559296131134033, 0.0066176319122314455, 0.00646563196182251, 0.0064898238182067875, 0.00652345609664917, 0.0064982080459594724, 0.006485151767730713, 0.006500671863555909, 0.0065030078887939454, 0.006676576137542725, 0.006507936000823974, 0.006696608066558838, 0.006544256210327148, 0.006535168170928955, 0.006597631931304931, 0.006500671863555909, 0.006519392013549805, 0.006505472183227539, 0.00714035177230835, 0.0065474557876586915, 0.006548575878143311, 0.006468512058258056, 0.0065147199630737306, 0.0064774398803710935, 0.006567520141601562, 0.006513216018676758, 0.006496448040008545, 0.00653926420211792, 0.0065231361389160155, 0.006445151805877685, 0.0065326719284057614, 0.0065129599571228025, 0.0065491518974304195, 0.006505983829498291, 0.006468095779418945, 0.006460608005523682, 0.0064951682090759275, 0.006449120044708252, 0.006506527900695801, 0.006497632026672364, 0.006542272090911865, 0.006540128231048584, 0.006556447982788086, 0.006459487915039063, 0.006475776195526123, 0.006470816135406494, 0.006505311965942383, 0.006490111827850342, 0.006446815967559814, 0.006398272037506103, 0.006524640083312988, 0.006508543968200684, 0.006465536117553711, 0.006479648113250733, 0.006580448150634766, 0.0065577921867370605, 0.006536672115325928, 0.006601151943206787, 0.006565887928009034, 0.006658048152923584, 0.0065576000213623045, 0.006535615921020508, 0.006498976230621338, 0.006533696174621582, 0.006487648010253907, 0.0064990720748901365, 0.006547552108764648, 0.006481503963470459, 0.0065335359573364254, 0.006582335948944092, 0.006518720149993896, 0.006573056221008301, 0.006534143924713135, 0.0065064959526062015, 0.006490272045135498, 0.006487296104431152, 0.0065296320915222165, 0.006516287803649903, 0.006510079860687256, 0.00655625581741333, 0.006594528198242187, 0.0064843521118164065, 0.006471007823944092, 0.006564544200897217, 0.006570176124572754, 0.0067612800598144535, 0.006540575981140137, 0.006501344203948975, 0.0065071358680725095, 0.006584415912628174, 0.006576128005981445, 0.006494207859039307, 0.006489247798919678, 0.006518879890441895, 0.006488831996917724, 0.006509568214416504, 0.006517471790313721, 0.00652239990234375, 0.006480127811431885, 0.006523392200469971, 0.00653056001663208, 0.006519296169281006, 0.006511871814727783, 0.006477888107299805, 0.00672435188293457, 0.0065322241783142086, 0.006816351890563965, 0.0065963840484619144, 0.006532608032226562, 0.006577087879180908, 0.006512639999389648, 0.0065064959526062015, 0.006429440021514893, 0.006573056221008301, 0.006622208118438721, 0.006577631950378418, 0.006547135829925537, 0.006497407913208008, 0.006586080074310303, 0.00655344009399414, 0.006535071849822998, 0.0064906878471374515, 0.006501183986663818, 0.006617983818054199, 0.006527167797088623, 0.006512383937835694, 0.00652623987197876, 0.00652345609664917, 0.0065099201202392576, 0.006556640148162842, 0.006553599834442139, 0.006541215896606446, 0.0065476479530334475, 0.0065370240211486816, 0.006620319843292236, 0.006499328136444092, 0.006487328052520752, 0.006511168003082275, 0.006516736030578613, 0.006541567802429199, 0.006678175926208496, 0.006650303840637207, 0.006611648082733154, 0.006526944160461426, 0.006503424167633057, 0.006542367935180664, 0.006556863784790039, 0.00652675199508667, 0.006913951873779297, 0.006737599849700928, 0.0064691839218139644, 0.006513792037963867, 0.006493663787841797, 0.006580480098724365, 0.006516064167022705, 0.0064784960746765135, 0.006485055923461914, 0.006480735778808594, 
0.006530655860900879, 0.006506912231445313, 0.006616608142852783, 0.0065214080810546875, 0.006528160095214844, 0.006703968048095703, 0.006561408042907715, 0.006543392181396484, 0.006506912231445313, 0.00649619197845459, 0.006555776119232178, 0.006487936019897461, 0.006575647830963135, 0.0064834880828857425, 0.0064778242111206055, 0.006496799945831299, 0.006515103816986084, 0.006404096126556396, 0.006512639999389648, 0.006701344013214111, 0.006514400005340576, 0.006511839866638183, 0.006482656002044678, 0.0065229439735412596, 0.006502528190612793, 0.006469727993011475, 0.006509471893310547, 0.006491007804870605, 0.006475872039794922, 0.006475679874420166, 0.006465792179107666, 0.006553215980529785, 0.006609055995941162, 0.006461408138275147, 0.006668320178985596, 0.006487167835235595, 0.006441823959350586, 0.006500351905822754, 0.0064596481323242185, 0.006477375984191894, 0.006449183940887451, 0.006471839904785157, 0.006498303890228272, 0.006491807937622071, 0.00650275182723999, 0.0064800319671630855, 0.006467423915863037, 0.006567327976226806, 0.006501279830932617, 0.006481599807739258, 0.006557695865631104, 0.006625279903411865, 0.006768320083618164, 0.007166272163391114, 0.0072765440940856935, 0.007260159969329834, 0.006524928092956543, 0.006563488006591797, 0.006498655796051025, 0.006526976108551025, 0.006550975799560547, 0.006586944103240967, 0.006581952095031738, 0.0065030078887939454, 0.006492959976196289, 0.006582911968231201, 0.00651251220703125, 0.006486112117767334, 0.006518208026885987, 0.006517663955688476, 0.006498303890228272, 0.006520832061767578, 0.00648960018157959, 0.006479455947875977, 0.006501279830932617, 0.006455584049224854, 0.006534880161285401, 0.006550528049468994, 0.006458367824554443, 0.006504320144653321, 0.006478015899658203, 0.0064646401405334475, 0.006574783802032471, 0.006489855766296387, 0.006498688220977783, 0.006492032051086426, 0.006494239807128906, 0.0064759039878845214, 0.006622399806976318, 0.006474624156951905, 0.006502304077148437, 0.006588287830352783, 0.0064832639694213864, 0.006480832099914551, 0.006665984153747559, 0.006513984203338623, 0.006583199977874756, 0.0064633598327636715, 0.006499328136444092, 0.006476704120635986, 0.006479263782501221, 0.006485663890838623, 0.006474688053131104, 0.006498335838317871, 0.006485919952392578, 0.006524831771850586, 0.006601984024047851, 0.0065279040336608885, 0.006494048118591308, 0.006488096237182617, 0.006529151916503906, 0.006549503803253174, 0.00653107213973999, 0.006476831912994385, 0.0065335679054260255, 0.0065049281120300294, 0.006497983932495117, 0.006527359962463379, 0.0065413122177124024, 0.006519999980926514, 0.006558527946472168, 0.006502399921417237, 0.006536896228790283, 0.006514592170715332, 0.0065328960418701175, 0.006541759967803955, 0.006506688117980957, 0.006594111919403076, 0.006523327827453613, 0.0065170879364013674, 0.006546688079833984, 0.006498720169067383, 0.006535168170928955, 0.006540863990783691, 0.00662883186340332, 0.0064921917915344235, 0.006517600059509277, 0.006549600124359131, 0.0065372161865234375, 0.006512447834014893, 0.006465727806091308, 0.006525023937225342, 0.0066128640174865725, 0.006381375789642334, 0.006533664226531982, 0.00648089599609375, 0.0064642238616943355, 0.0064822077751159664, 0.006481919765472412, 0.006454912185668946, 0.00646998405456543, 0.00644649600982666, 0.006515327930450439, 0.00653107213973999, 0.006485568046569824, 0.0065642881393432615, 0.006560031890869141, 0.006473663806915283, 0.006487040042877197, 0.006476736068725586, 
0.0065203838348388676, 0.0065456957817077634, 0.006520991802215576, 0.006526815891265869, 0.00654099178314209, 0.006459712028503418, 0.006469632148742676, 0.006504447937011719, 0.006489439964294434, 0.006490719795227051, 0.006719552040100098, 0.006719456195831299, 0.006551583766937256, 0.006504672050476074, 0.006539040088653565, 0.006482016086578369, 0.0065352959632873535, 0.006475584030151367, 0.00648524808883667, 0.006514400005340576, 0.006493184089660644, 0.006502399921417237, 0.0064737281799316405, 0.006508480072021485, 0.006555136203765869, 0.00647430419921875, 0.0064654722213745115, 0.006508287906646729, 0.006504096031188965, 0.006467807769775391, 0.0065251197814941405, 0.006505824089050293, 0.0064869441986083985, 0.006506048202514649, 0.00648473596572876, 0.006495872020721436, 0.006508736133575439, 0.0064992961883544925, 0.006529695987701416, 0.006533376216888428, 0.006495552062988281, 0.006539968013763428, 0.006608895778656006, 0.006533120155334473, 0.0065168957710266115, 0.006539328098297119, 0.006474080085754394, 0.0065411520004272465, 0.006487520217895508, 0.006522496223449707, 0.006509600162506103, 0.006493311882019043, 0.006552320003509521, 0.006516160011291504, 0.00650707197189331, 0.006516736030578613, 0.0064769601821899414, 0.007024735927581787, 0.006976096153259277, 0.006523039817810059, 0.006496255874633789, 0.006518176078796387, 0.006496960163116455, 0.006514592170715332, 0.006507967948913574, 0.006496831893920898, 0.006553887844085693, 0.006544191837310791, 0.006508639812469482, 0.006490431785583496, 0.006466400146484375, 0.006600351810455323, 0.006479296207427978, 0.006470208168029785, 0.006598368167877198, 0.006471776008605957, 0.006456831932067871, 0.006496960163116455, 0.0064793601036071775, 0.006486527919769287, 0.006487648010253907, 0.006490528106689453, 0.00653107213973999, 0.006496128082275391, 0.006499839782714844, 0.006496895790100097, 0.006622879981994629, 0.006603104114532471, 0.0066706881523132325, 0.006448480129241943, 0.006512256145477295, 0.006541664123535156, 0.00651094388961792, 0.0065259838104248045, 0.00648905611038208, 0.00649126386642456, 0.006615359783172608, 0.006506080150604248, 0.006492991924285888, 0.006498528003692627, 0.006489888191223144, 0.006534687995910645, 0.006482560157775879, 0.006450816154479981, 0.0064757118225097655, 0.006463903903961182, 0.006557695865631104, 0.006482304096221924, 0.006489120006561279, 0.006395008087158203, 0.0065170879364013674, 0.006468128204345703, 0.006479743957519531, 0.006462751865386963, 0.006503232002258301, 0.00647603178024292, 0.006547232151031494, 0.006508063793182373, 0.00647324800491333, 0.006448416233062744, 0.0064498882293701175, 0.006470592021942139, 0.006528927803039551, 0.006456607818603515, 0.006478367805480957, 0.006560031890869141, 0.0065372161865234375, 0.006454432010650634, 0.006451072216033936, 0.006443999767303467, 0.006493216037750244, 0.006488639831542969, 0.006459807872772217, 0.0064880638122558594, 0.006461440086364746, 0.00646943998336792, 0.006525216102600098, 0.006485919952392578, 0.006492159843444824, 0.006485119819641113, 0.00643775987625122, 0.0065372161865234375, 0.006453248023986816, 0.006492159843444824, 0.006469151973724365, 0.0064642238616943355, 0.006563583850860596, 0.006467840194702148, 0.006463232040405273, 0.00649232006072998, 0.006473567962646485, 0.006510591983795166, 0.00648364782333374, 0.006455615997314453, 0.006474815845489502, 0.006468255996704101, 0.006469791889190674, 0.00651043176651001, 0.0065632319450378415, 0.006480671882629394, 0.006504511833190918, 
0.006642047882080078, 0.00652672004699707, 0.006484960079193115, 0.006470592021942139, 0.006495584011077881, 0.006455967903137207, 0.006492159843444824, 0.006479199886322022, 0.006474400043487549, 0.006467872142791748, 0.006483168125152588, 0.006377344131469727, 0.006494495868682861, 0.0064718079566955565, 0.006563168048858642, 0.006488448143005371, 0.00648633623123169, 0.006483007907867431, 0.006468192100524902, 0.0064584641456604, 0.006487167835235595, 0.006593696117401123, 0.006563551902770996, 0.00662390422821045, 0.006465407848358154, 0.006527391910552979, 0.0065146880149841305, 0.00648140811920166, 0.006550015926361084, 0.006520639896392822, 0.006455552101135254, 0.006563776016235352, 0.006472703933715821, 0.006502528190612793, 0.0064828162193298336, 0.006475552082061768, 0.006498528003692627, 0.006482175827026367, 0.006487648010253907, 0.006486464023590088, 0.006466944217681885, 0.006548096179962158, 0.006511583805084229, 0.006742784023284912, 0.00692633581161499, 0.006557727813720703, 0.006520800113677979, 0.006590464115142822, 0.0065326719284057614, 0.006510687828063965, 0.006565983772277832, 0.0065640959739685055, 0.0065493440628051755, 0.006578335762023926, 0.006516928195953369, 0.006566783905029297, 0.006574624061584473, 0.006537568092346191, 0.006584383964538574, 0.0065474557876586915, 0.006496255874633789, 0.006647808074951172, 0.006583327770233154, 0.0065361599922180175, 0.006588128089904785, 0.006517280101776123, 0.006597631931304931, 0.006524799823760986, 0.00649510383605957, 0.0065413122177124024, 0.006518367767333984, 0.0065131840705871585, 0.006536704063415527, 0.006550943851470948, 0.006432384014129639, 0.006534111976623535, 0.00657203197479248, 0.006488287925720215, 0.006635295867919922, 0.006509568214416504, 0.006486976146697998, 0.006524064064025879, 0.006503168106079101, 0.006530816078186035, 0.006517151832580567, 0.006615039825439453, 0.006497824192047119, 0.006474112033843994, 0.006475359916687012, 0.006487743854522705, 0.006470047950744629, 0.006507199764251709, 0.006499616146087647, 0.006476223945617676, 0.006487552165985107, 0.006503136157989502, 0.006483551979064942, 0.006498176097869873, 0.006455615997314453, 0.006498303890228272, 0.006490111827850342, 0.006516736030578613, 0.006526976108551025, 0.006474944114685058, 0.006478655815124512, 0.006684031963348389, 0.006851327896118164, 0.0064795842170715335, 0.006494368076324463, 0.006469632148742676, 0.006498079776763916, 0.006485631942749023, 0.0067142400741577145, 0.006530784130096436, 0.006540448188781738, 0.006471903800964356, 0.006618048191070557, 0.006499519824981689, 0.006494080066680908, 0.006482687950134278, 0.006460927963256836, 0.00655295991897583, 0.006497280120849609, 0.006471680164337158, 0.0064898238182067875, 0.006467872142791748, 0.007306464195251465, 0.007582752227783203, 0.006999807834625244, 0.006547103881835938, 0.00648367977142334, 0.007534719944000244, 0.006704639911651611, 0.006495232105255127, 0.006524479866027832, 0.00651478385925293, 0.00662278413772583, 0.00644265604019165, 0.0064685440063476565, 0.006463808059692383, 0.006510272026062012, 0.006436960220336914, 0.006488096237182617, 0.0064832639694213864, 0.006480095863342285, 0.006484511852264404, 0.006576191902160644, 0.006514431953430176, 0.0067010560035705566, 0.0067338237762451176, 0.006529024124145508, 0.006536928176879883, 0.0065047359466552735, 0.006583456039428711, 0.006539296150207519, 0.006503039836883545, 0.006527359962463379, 0.006509984016418457, 0.006486400127410889, 0.006530079841613769, 0.0065136637687683106, 
0.006512224197387695, 0.006544064044952393, 0.006501535892486573, 0.006511168003082275, 0.006504703998565673, 0.006561312198638916, 0.006552095890045166, 0.006503392219543457, 0.006459680080413819, 0.0064926080703735355, 0.0065272641181945805, 0.006493120193481445, 0.0065359678268432616, 0.0065064959526062015, 0.006526976108551025, 0.006518784046173095, 0.006516799926757812, 0.006507487773895264, 0.006491104125976563, 0.006548736095428467, 0.006598527908325195, 0.0064858880043029785, 0.00651958417892456, 0.006496479988098144, 0.006518655776977539, 0.006541440010070801, 0.006498144149780273, 0.006496416091918945, 0.006477024078369141, 0.006484767913818359, 0.006553599834442139, 0.006864895820617676, 0.008388607978820802, 0.006631423950195312, 0.007474624156951905, 0.006629568099975586, 0.006662528038024903, 0.006506303787231446, 0.00653331184387207, 0.006412288188934326, 0.006497600078582764, 0.006519040107727051, 0.006521279811859131, 0.0064899840354919434, 0.006520959854125976, 0.006508543968200684, 0.006510591983795166, 0.006529376029968261, 0.006596255779266358, 0.006536799907684326, 0.006541952133178711, 0.006520607948303223, 0.006558815956115723, 0.006517663955688476, 0.00657155179977417, 0.006519264221191407, 0.0064737281799316405, 0.006508543968200684, 0.006508800029754638, 0.00651043176651001, 0.006534080028533936, 0.006495200157165528, 0.006494207859039307, 0.006764800071716309, 0.006493951797485352, 0.0071283202171325685, 0.006556000232696533, 0.006523295879364014, 0.0064839677810668945, 0.006493984222412109, 0.006467807769775391, 0.006500351905822754, 0.006493887901306152, 0.006471551895141601, 0.006636288166046143, 0.006514368057250977, 0.006498303890228272, 0.006508543968200684, 0.006461440086364746, 0.006501408100128174, 0.006517983913421631, 0.0064832639694213864, 0.0065006399154663086, 0.006541215896606446, 0.006572127819061279, 0.0064963197708129885, 0.006504447937011719, 0.00650822401046753, 0.006498720169067383, 0.00652288007736206, 0.00649616003036499, 0.0065079998970031736, 0.006484640121459961, 0.006459360122680664, 0.006490176200866699, 0.006601984024047851, 0.0065279040336608885, 0.0064899840354919434, 0.006516704082489014, 0.006494143962860108, 0.006518208026885987, 0.006795839786529541, 0.00643775987625122, 0.00651148796081543, 0.006683648109436035, 0.006484096050262451, 0.0065205440521240235, 0.006572319984436035, 0.007193664073944092, 0.006500383853912353, 0.006564479827880859, 0.006551871776580811, 0.006502079963684082, 0.006493760108947754, 0.0065008001327514645, 0.006526976108551025, 0.006552768230438232, 0.0065482878684997555, 0.006520415782928467, 0.0064966721534729, 0.006504191875457764, 0.006506752014160156, 0.006616799831390381, 0.006625664234161377, 0.006516223907470703, 0.0065335359573364254, 0.006518784046173095, 0.006492159843444824, 0.006486015796661377, 0.006555295944213867, 0.006530528068542481, 0.006518847942352295, 0.006484799861907959, 0.0065268797874450685, 0.00652297592163086, 0.006512639999389648, 0.0065127677917480465, 0.006524799823760986, 0.0066859197616577146, 0.006470431804656982, 0.0065289921760559085, 0.00655679988861084, 0.006588640213012695, 0.006509247779846191, 0.006497759819030762, 0.0064949760437011715, 0.006460319995880127, 0.006508543968200684, 0.006484864234924316, 0.0064880638122558594, 0.006481503963470459, 0.006496416091918945, 0.006452608108520508, 0.006531424045562744, 0.006701600074768066, 0.00647983980178833, 0.006574111938476563, 0.006488255977630615, 0.006486144065856934, 0.006521759986877441, 0.006478464126586914, 
0.006512800216674805, 0.006475071907043457, 0.006468287944793701, 0.006506239891052246]",tokens/s,152.83656030206106,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,1846.919168,2466.11968,0.0,2080.374784,2078.348288,s,1,8.4916494140625,8.4916494140625,0.0,8.4916494140625,8.4916494140625,8.4916494140625,8.4916494140625,[8.4916494140625],,kWh,4.019314637920009e-05,4.426327011270956e-06,1.2508065561994686e-05,5.712753895246573e-05,,MB,1875.222528,2663.251968,0.0,2248.146944,2179.810304,s,10,1.218068229675293,0.12180682296752929,0.00014605318636620596,0.12178745651245118,0.12198533935546875,0.12198662338256837,0.12198765060424806,"[0.12198790740966797, 0.1216759033203125, 0.12156697845458984, 0.12194080352783203, 0.12196002960205078, 0.12171036529541016, 0.12198505401611329, 0.12182527923583984, 0.1217496337890625, 0.12166627502441406]",tokens/s,2101.6885077795955,kWh,3.672207121042371e-06,4.049818816275926e-07,2.4457970955248793e-06,6.522986098194842e-06,tokens/kWh,39245829.463111214,MB,1879.789568,2665.34912,0.0,2248.146944,2179.812864,s,10,16.353136840820316,1.6353136840820313,0.008504743588103372,1.6346669921875,1.64566962890625,1.6481655029296876,1.6501622021484377,"[1.63453173828125, 1.621454345703125, 1.6249727783203125, 1.6344356689453126, 1.62902783203125, 1.635652587890625, 1.645114990234375, 1.63480224609375, 1.6424832763671875, 1.650661376953125]",tokens/s,38.52471890453512,kWh,4.800751871854004e-05,5.294250029125877e-06,2.7238754429874735e-05,8.054052317754066e-05,tokens/kWh,782214.9337312478,,s,630,16.35113552474977,0.025954183372618657,0.0005528077001375713,0.025812623977661132,0.026294195938110353,0.02671730155944824,0.02880752901077272,"[0.027210079193115234, 0.02617692756652832, 0.026505247116088867, 0.026094303131103516, 0.025888479232788086, 0.025657632827758788, 0.025796607971191408, 0.025540191650390624, 0.025602272033691406, 0.025639104843139648, 0.025622175216674804, 0.027178688049316405, 0.025615007400512695, 0.02573516845703125, 0.025705663681030274, 0.02568272018432617, 0.02568147277832031, 0.02563324737548828, 0.025724544525146484, 0.025936256408691405, 0.026008960723876953, 0.026269439697265626, 0.02594291114807129, 0.025806304931640624, 0.026437728881835938, 0.025737024307250975, 0.026909311294555663, 0.02574131202697754, 0.025683040618896483, 0.025811264038085938, 0.025756256103515625, 0.026736640930175783, 0.026189823150634766, 0.02604761505126953, 0.026037120819091798, 0.025767936706542968, 0.02573084831237793, 0.027132127761840822, 0.02590924835205078, 0.025894208908081053, 0.025700416564941406, 0.025737855911254885, 0.02573721694946289, 0.02561177635192871, 0.025671392440795898, 0.025763935089111328, 0.02568671989440918, 0.025855039596557616, 0.02599622344970703, 0.025927839279174806, 0.025950048446655275, 0.025851903915405275, 0.025851903915405275, 0.026090784072875975, 0.026202016830444336, 0.026856256484985352, 
0.025804800033569338, 0.02567740821838379, 0.025603712081909178, 0.02569910430908203, 0.025683967590332032, 0.02556723213195801, 0.025625984191894532, 0.02605801582336426, 0.025729759216308594, 0.02643881607055664, 0.02599817657470703, 0.025868288040161135, 0.025995264053344725, 0.02574745559692383, 0.025614336013793947, 0.025464832305908205, 0.025659296035766603, 0.025784416198730467, 0.02550783920288086, 0.0257126407623291, 0.02556224060058594, 0.026101984024047852, 0.02594063949584961, 0.026150495529174804, 0.02593833541870117, 0.025669792175292968, 0.025495359420776367, 0.02592361640930176, 0.02592060852050781, 0.02559228706359863, 0.026018239974975585, 0.02601372718811035, 0.025756927490234376, 0.025604223251342772, 0.025632736206054687, 0.025731712341308593, 0.025552896499633788, 0.025838943481445314, 0.02557814407348633, 0.02565711975097656, 0.025704000473022463, 0.02576803207397461, 0.025587520599365234, 0.025682687759399414, 0.025664575576782228, 0.025504703521728515, 0.025520128250122072, 0.02552422332763672, 0.026169376373291017, 0.026238271713256836, 0.025590431213378905, 0.02571023941040039, 0.025542783737182616, 0.0257108154296875, 0.025628000259399413, 0.025584287643432617, 0.02565020751953125, 0.025666528701782227, 0.025675775527954102, 0.025651199340820312, 0.026050559997558592, 0.02570403289794922, 0.025700895309448243, 0.025554399490356445, 0.0255512638092041, 0.025552896499633788, 0.02551398468017578, 0.02546588706970215, 0.02559008026123047, 0.025557567596435547, 0.025549856185913086, 0.025494495391845704, 0.025722272872924806, 0.025508384704589843, 0.025556159973144532, 0.025662336349487305, 0.025554975509643554, 0.025647071838378905, 0.0255830078125, 0.02570038414001465, 0.025571903228759765, 0.02562441635131836, 0.026337055206298827, 0.025647455215454102, 0.025749088287353516, 0.025878047943115233, 0.02581324768066406, 0.02594883155822754, 0.02628144073486328, 0.02606057548522949, 0.026284063339233398, 0.02593427276611328, 0.025962783813476564, 0.025955455780029297, 0.025887008666992187, 0.025859935760498047, 0.025723648071289063, 0.025767936706542968, 0.025668672561645508, 0.025645503997802733, 0.025537023544311522, 0.025604095458984375, 0.02553171157836914, 0.02589356803894043, 0.02575119972229004, 0.025639263153076172, 0.025589759826660157, 0.025571327209472656, 0.02552422332763672, 0.025618431091308593, 0.025624576568603515, 0.026023935317993165, 0.026222591400146485, 0.02594611167907715, 0.02617462348937988, 0.025815040588378906, 0.025652063369750976, 0.02573721694946289, 0.025761056900024414, 0.025715423583984376, 0.02569215965270996, 0.02634547233581543, 0.025706304550170898, 0.026067136764526367, 0.026216447830200194, 0.02613657569885254, 0.02586160087585449, 0.025641504287719726, 0.025900896072387696, 0.025593696594238283, 0.025669952392578126, 0.025583295822143554, 0.02584832000732422, 0.02668921661376953, 0.026144447326660155, 0.026422943115234375, 0.02626220893859863, 0.02625974464416504, 0.026080896377563476, 0.02606937599182129, 0.025992319107055663, 0.02589529609680176, 0.025919008255004882, 0.025701343536376955, 0.026387712478637696, 0.025634719848632814, 0.025733663558959962, 0.02563052749633789, 0.025946624755859377, 0.025661439895629884, 0.025732383728027344, 0.025720863342285155, 0.025694303512573242, 0.02568435287475586, 0.025927295684814455, 0.026370143890380858, 0.025604352951049805, 0.02582454490661621, 0.026127328872680666, 0.025699647903442382, 0.026028736114501953, 0.025988704681396486, 0.0260643196105957, 0.029342784881591796, 
0.025941919326782227, 0.02567286491394043, 0.02560630416870117, 0.025629375457763674, 0.026459199905395508, 0.025696735382080078, 0.025755584716796873, 0.025999807357788087, 0.026034271240234375, 0.02598041534423828, 0.025847679138183595, 0.02595395278930664, 0.025734079360961913, 0.02669366455078125, 0.025624799728393554, 0.0257042236328125, 0.025636863708496094, 0.025862079620361328, 0.025882688522338868, 0.025685152053833007, 0.02566352081298828, 0.025756479263305664, 0.025874048233032226, 0.02567206382751465, 0.02557916831970215, 0.025691583633422853, 0.02570719909667969, 0.0255731201171875, 0.02555881690979004, 0.025733535766601562, 0.026200351715087892, 0.02559916877746582, 0.025874431610107423, 0.025573375701904297, 0.02558505630493164, 0.02561084747314453, 0.025708703994750975, 0.025491296768188478, 0.025847808837890625, 0.02570844841003418, 0.02570044708251953, 0.025724128723144533, 0.02561235237121582, 0.02566831970214844, 0.02574745559692383, 0.025769983291625977, 0.0265435848236084, 0.02693788719177246, 0.025753599166870117, 0.025890815734863282, 0.025704383850097656, 0.02577004814147949, 0.025781471252441405, 0.025886560440063478, 0.025682239532470702, 0.025739904403686523, 0.025819007873535155, 0.025632896423339845, 0.025573375701904297, 0.0257126407623291, 0.025632768630981444, 0.02570793533325195, 0.02564678382873535, 0.025732000350952147, 0.026402015686035157, 0.026018592834472658, 0.026056703567504884, 0.025831424713134765, 0.025752864837646484, 0.025729759216308594, 0.025699455261230467, 0.026900384902954103, 0.02593052864074707, 0.025706207275390625, 0.02570204734802246, 0.025920320510864257, 0.02607923126220703, 0.026560352325439452, 0.026183839797973632, 0.026147008895874024, 0.025999168395996093, 0.025966527938842774, 0.025907264709472657, 0.025792512893676758, 0.02616307258605957, 0.025802879333496093, 0.025880416870117186, 0.02565443229675293, 0.025689088821411132, 0.02565670394897461, 0.025775808334350586, 0.02560620880126953, 0.02558451271057129, 0.02614271926879883, 0.025830591201782226, 0.026944288253784178, 0.026202112197875976, 0.026195968627929687, 0.026660863876342773, 0.02587648010253906, 0.025980928421020507, 0.025884000778198243, 0.02575766372680664, 0.02567852783203125, 0.02571254348754883, 0.025707647323608397, 0.026010591506958006, 0.02612224006652832, 0.025863391876220703, 0.025867040634155274, 0.025812255859375, 0.025760480880737305, 0.025939071655273437, 0.025801599502563475, 0.02613609504699707, 0.025843488693237306, 0.025793216705322267, 0.025810400009155274, 0.02570627212524414, 0.025807647705078124, 0.025781856536865235, 0.025718303680419923, 0.02594646453857422, 0.026505727767944336, 0.026894336700439454, 0.026300416946411134, 0.025987071990966795, 0.026019840240478515, 0.02615705680847168, 0.026222591400146485, 0.025738752365112305, 0.025720703125, 0.025760383605957032, 0.025730688095092772, 0.026222976684570312, 0.025680927276611327, 0.026354047775268556, 0.025610496520996093, 0.025679744720458985, 0.025737375259399415, 0.02590070343017578, 0.025672351837158203, 0.025619935989379884, 0.02576438331604004, 0.025554399490356445, 0.025686559677124025, 0.025927616119384767, 0.026097375869750975, 0.02608131217956543, 0.025954879760742188, 0.02579340744018555, 0.026032928466796876, 0.025784095764160155, 0.025852224349975587, 0.027185152053833008, 0.025796672821044923, 0.025907136917114257, 0.026176607131958008, 0.026621631622314453, 0.02594131278991699, 0.025820159912109376, 0.025757696151733397, 0.025812511444091798, 0.02609404754638672, 
0.025823232650756835, 0.025815040588378906, 0.02581657600402832, 0.02581475257873535, 0.0259420166015625, 0.026063648223876953, 0.026826751708984374, 0.026063072204589845, 0.025974464416503907, 0.026044384002685547, 0.025938047409057616, 0.03073023986816406, 0.026644479751586913, 0.026216447830200194, 0.026200063705444337, 0.025968128204345704, 0.025756160736083986, 0.02585955238342285, 0.02557596778869629, 0.025653247833251954, 0.025626623153686523, 0.026024032592773437, 0.025749408721923828, 0.0257205753326416, 0.0256494083404541, 0.025511936187744142, 0.02568351936340332, 0.02581273651123047, 0.02699305534362793, 0.027425056457519532, 0.026200063705444337, 0.025987232208251953, 0.026072927474975586, 0.02589286422729492, 0.0259051513671875, 0.02574950408935547, 0.025747360229492186, 0.025882720947265625, 0.025861631393432616, 0.026136768341064452, 0.02635398483276367, 0.02608332824707031, 0.02582467269897461, 0.025682207107543945, 0.025850175857543945, 0.026472448348999023, 0.025599103927612304, 0.02571504020690918, 0.025730815887451172, 0.02572694396972656, 0.028207935333251954, 0.02726848030090332, 0.026147455215454102, 0.02582294464111328, 0.02584182357788086, 0.0259421443939209, 0.02622591972351074, 0.02679772758483887, 0.026032159805297852, 0.025832511901855468, 0.02629350471496582, 0.025697887420654295, 0.025596832275390623, 0.02558361625671387, 0.02552934455871582, 0.025570304870605468, 0.025552896499633788, 0.025761791229248047, 0.0255283203125, 0.02657014465332031, 0.025634687423706056, 0.025907840728759766, 0.025585407257080077, 0.02582102394104004, 0.026163711547851562, 0.026028032302856444, 0.025977855682373048, 0.02589529609680176, 0.025944320678710938, 0.025909631729125977, 0.0257838077545166, 0.025809375762939454, 0.02572208023071289, 0.025674560546875, 0.025657344818115234, 0.02853264045715332, 0.02803536033630371, 0.026081087112426758, 0.025851903915405275, 0.025761695861816408, 0.025817344665527344, 0.025782112121582032, 0.025784320831298828, 0.025746976852416992, 0.026180063247680664, 0.02596601676940918, 0.02592211151123047, 0.025931776046752928, 0.026011648178100585, 0.02581100845336914, 0.025780160903930663, 0.025943231582641602, 0.02633135986328125, 0.026177759170532226, 0.025936256408691405, 0.026156896591186522, 0.02600476837158203, 0.02589743995666504, 0.025837984085083008, 0.02585759925842285, 0.025761215209960938, 0.025739456176757814, 0.025653823852539063, 0.025635263442993165, 0.02566508865356445, 0.025585920333862304, 0.025628671646118165, 0.026357152938842773, 0.025854015350341798, 0.025733280181884765, 0.029490976333618163, 0.026751392364501952, 0.02607923126220703, 0.0258306884765625, 0.025692895889282228, 0.025648416519165038, 0.025684703826904298, 0.025667583465576172, 0.02614476776123047, 0.02575155258178711, 0.02581283187866211, 0.026015039443969726, 0.026633056640625, 0.025993215560913087, 0.025769983291625977, 0.02555084800720215, 0.025697343826293944, 0.025724863052368162, 0.025654272079467775, 0.025865407943725587, 0.025741216659545898, 0.025777055740356446, 0.02581046485900879, 0.02604182434082031, 0.025928415298461915, 0.02577180862426758, 0.025733631134033205, 0.025765888214111327, 0.026040319442749024, 0.026163200378417968, 0.02601081657409668, 0.02664441680908203, 0.029406080245971678, 0.026185728073120116, 0.025997312545776367, 0.025957727432250978, 0.02576639938354492, 0.02587664031982422, 0.025857919692993163, 0.025949344635009766, 0.02990755271911621, 0.025743743896484376, 0.02569375991821289, 0.02572947120666504, 0.025632768630981444, 
0.0256428165435791, 0.025761951446533204, 0.025937952041625977, 0.025701696395874024, 0.025774784088134765, 0.026071039199829102, 0.02614681625366211, 0.02609891128540039, 0.025940607070922852, 0.025876800537109376, 0.025767328262329102, 0.025665983200073243, 0.02571059226989746, 0.025972736358642577, 0.02569011116027832, 0.025615968704223634, 0.026040735244750975, 0.026261503219604493, 0.026205663681030274, 0.025811487197875977, 0.026394016265869142, 0.025782400131225586, 0.02589129638671875, 0.025790464401245116, 0.025818208694458007, 0.02574550437927246, 0.025920320510864257, 0.025860095977783205, 0.028919807434082033, 0.02635990333557129, 0.030713727951049805, 0.026369504928588867, 0.026054304122924806, 0.026116607666015625, 0.025967008590698244, 0.026220544815063477, 0.02604047966003418, 0.02580463981628418, 0.02572697639465332, 0.02627174377441406, 0.025765087127685545, 0.025602848052978515, 0.025806848526000976, 0.025917343139648438, 0.02571641540527344, 0.02570627212524414, 0.02595484733581543, 0.025696352005004884, 0.025762975692749022, 0.025901920318603517, 0.026060800552368164, 0.02618704032897949, 0.02627452850341797, 0.026284032821655274, 0.026189855575561524, 0.025909215927124023, 0.02589823913574219, 0.026397151947021483, 0.027228448867797853, 0.026076576232910157, 0.025917856216430665, 0.025622720718383788, 0.026064895629882814, 0.026149887084960938, 0.026223615646362306, 0.025892351150512697, 0.02601171112060547, 0.025604415893554687, 0.02571891212463379, 0.027725311279296876, 0.02804128074645996, 0.025799104690551758, 0.025789888381958007, 0.02564476776123047, 0.026104415893554687, 0.025820991516113282, 0.025737823486328124, 0.025798015594482422, 0.02724502372741699, 0.027162208557128906, 0.02624348831176758]",tokens/s,38.52943418188945,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking 
context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4757.827584,6022.889472,0.0,5637.144576,5630.431232,s,1,10.50012890625,10.50012890625,0.0,10.50012890625,10.50012890625,10.50012890625,10.50012890625,[10.50012890625],,kWh,9.738072601247344e-05,1.0734567727351781e-05,3.2436970394011944e-05,0.00014055226413383715,,MB,1777.958912,6402.473984,0.0,5987.36896,5890.097152,s,10,4.491590454101563,0.4491590454101563,0.0013524220186125102,0.44954603576660157,0.4504979553222656,0.45054560852050785,0.4505837310791016,"[0.44571966552734377, 0.4489716491699219, 0.4493219299316406, 0.4486640319824219, 0.4482449645996094, 0.45059326171875, 0.44991082763671875, 0.4499066162109375, 0.4497701416015625, 0.45048736572265624]",tokens/s,569.9540120943791,kWh,1.3168138995104238e-05,1.4517434524554863e-06,8.758787200260306e-06,2.337866964782003e-05,tokens/kWh,10950152.590221103,MB,1791.852544,6404.571136,0.0,5987.36896,5890.099712,s,10,24.35413720703125,2.4354137207031252,0.02174457604674989,2.4268200683593752,2.47755400390625,2.4777497802734376,2.477906401367188,"[2.477510498046875, 2.477945556640625, 2.432479736328125, 2.414346435546875, 2.42366259765625, 2.43176318359375, 2.421041015625, 2.425750244140625, 2.427889892578125, 2.421748046875]",tokens/s,25.86829476423059,kWh,7.058175825864265e-05,7.785789300999727e-06,4.695059070054302e-05,0.00012531813826018541,tokens/kWh,502720.5229397795,,s,630,24.35203998184201,0.03865403171720959,0.0005978179405530489,0.038428991317749024,0.03928697319030762,0.039560321998596194,0.04066831684112549,"[0.039469184875488283, 0.040134654998779294, 0.03933184051513672, 0.03913657760620117, 0.03913715362548828, 0.03892873764038086, 0.03908992004394531, 0.03914595031738281, 0.039260414123535155, 0.03921920013427734, 0.03923763275146484, 0.039327552795410156, 0.03942009735107422, 0.03915494537353516, 0.03925404739379883, 0.039096126556396486, 0.039074718475341795, 0.03908095932006836, 0.039796897888183594, 0.03992259216308594, 0.039161792755126955, 0.039147518157958985, 0.043472671508789064, 0.03916755294799805, 0.039239585876464846, 0.03931795120239258, 0.03955651092529297, 0.03928688049316406, 0.03933212661743164, 0.03926480102539062, 0.03909427261352539, 0.03921075057983398, 0.03927593612670898, 0.038987617492675784, 0.03913420867919922, 0.03935641479492188, 0.038957279205322264, 0.039139102935791016, 
0.039239681243896485, 0.03923353576660156, 0.03898700714111328, 0.03908809661865234, 0.03912729644775391, 0.039275039672851564, 0.039204864501953124, 0.03908607864379883, 0.03912681579589844, 0.03903510284423828, 0.039214622497558596, 0.0389964485168457, 0.03919462585449219, 0.03890176010131836, 0.039221248626708984, 0.03936783981323242, 0.0390863037109375, 0.0388812484741211, 0.038988449096679687, 0.03888054275512695, 0.038865631103515624, 0.03927008056640625, 0.03897375869750976, 0.04265369415283203, 0.03908752059936523, 0.040073535919189454, 0.039265697479248046, 0.03931308746337891, 0.039225662231445316, 0.03927257537841797, 0.03904761505126953, 0.03906768035888672, 0.039179454803466796, 0.039098976135253906, 0.0394692497253418, 0.039185630798339845, 0.040336193084716795, 0.03929087829589844, 0.03902265548706055, 0.03931539154052734, 0.03909632110595703, 0.039231487274169925, 0.039359710693359376, 0.03914012908935547, 0.03928780746459961, 0.03936153411865234, 0.04016537475585937, 0.039221248626708984, 0.038973438262939454, 0.03925971221923828, 0.0390164794921875, 0.03890422439575195, 0.03905535888671875, 0.039006145477294925, 0.039045185089111326, 0.03912704086303711, 0.038981632232666014, 0.039945919036865236, 0.039129409790039066, 0.039188480377197264, 0.04073267364501953, 0.03988838577270508, 0.039757598876953126, 0.03926476669311523, 0.039108257293701175, 0.03917062377929687, 0.03940723037719727, 0.039253791809082034, 0.03907404708862305, 0.039126785278320315, 0.0390181770324707, 0.03965350341796875, 0.04205648040771484, 0.03912287902832031, 0.03920483016967773, 0.03922256088256836, 0.03898969650268555, 0.03902560043334961, 0.03932924652099609, 0.03912144088745117, 0.03930441665649414, 0.03893328094482422, 0.03965507125854492, 0.03905366516113281, 0.039272319793701174, 0.03906572723388672, 0.038981632232666014, 0.0392806396484375, 0.0398480339050293, 0.039423583984375, 0.03896771240234375, 0.03895840072631836, 0.03906403350830078, 0.038957088470458985, 0.0388917121887207, 0.0389939193725586, 0.039774208068847655, 0.0389202880859375, 0.0389508171081543, 0.038843841552734376, 0.03893673706054687, 0.03867279815673828, 0.03856364822387695, 0.038633758544921876, 0.03809641647338867, 0.03809523010253906, 0.038059295654296874, 0.037984416961669924, 0.03850073623657226, 0.038830272674560545, 0.03877846527099609, 0.03857404708862305, 0.03856224060058594, 0.03863347244262695, 0.0386187858581543, 0.038612545013427736, 0.038583072662353515, 0.038653953552246094, 0.03868057632446289, 0.038663200378417965, 0.03859296035766602, 0.038640159606933594, 0.03927449417114258, 0.03892780685424805, 0.038718017578125, 0.038694911956787106, 0.038834175109863284, 0.03866009521484375, 0.038141857147216796, 0.0380656623840332, 0.03812934494018555, 0.03810591888427734, 0.038062175750732424, 0.03813727951049805, 0.03818867111206055, 0.038351806640625, 0.03873996734619141, 0.03825430297851563, 0.038213920593261716, 0.03818636703491211, 0.03842057418823242, 0.038131553649902346, 0.038152416229248046, 0.03817110443115234, 0.03822124862670898, 0.038210113525390624, 0.03819427108764648, 0.038216545104980466, 0.03820870590209961, 0.04089238357543945, 0.03820841598510742, 0.039062145233154294, 0.038422527313232424, 0.038414337158203124, 0.03819712066650391, 0.038365150451660154, 0.03813315200805664, 0.038122238159179686, 0.03809075164794922, 0.0381317138671875, 0.03812524795532227, 0.038247871398925784, 0.03803839874267578, 0.038828033447265625, 0.03809280014038086, 0.03806617736816406, 0.03815999984741211, 
0.03826726531982422, 0.038432830810546874, 0.03824224090576172, 0.03796575927734375, 0.038193214416503904, 0.03825459289550781, 0.03809894561767578, 0.03818937683105469, 0.03816175842285156, 0.03853551864624023, 0.038307838439941407, 0.03845119857788086, 0.038243934631347655, 0.0392564811706543, 0.03899990463256836, 0.03906780624389648, 0.039081375122070314, 0.038895679473876954, 0.03868521499633789, 0.03824185562133789, 0.03833651351928711, 0.0381563835144043, 0.038838623046875, 0.03817788696289062, 0.03807494354248047, 0.03814566421508789, 0.03818953704833984, 0.03801113510131836, 0.037988353729248046, 0.03800259017944336, 0.0380560302734375, 0.03814604949951172, 0.03810271835327148, 0.03810275268554687, 0.03808217620849609, 0.03813270568847656, 0.03872358322143555, 0.0383785285949707, 0.03846640014648438, 0.03818662261962891, 0.03836083221435547, 0.03821353530883789, 0.038151008605957035, 0.03811862564086914, 0.03842937469482422, 0.03815024185180664, 0.03833449554443359, 0.03903551864624023, 0.038328479766845704, 0.03824332809448242, 0.038472064971923826, 0.0381464958190918, 0.03812681579589844, 0.039262462615966796, 0.038453792572021486, 0.03836646270751953, 0.038437633514404296, 0.038200927734375, 0.038443424224853515, 0.03838278579711914, 0.0382861442565918, 0.03842588806152344, 0.038263168334960934, 0.0388488655090332, 0.03839718246459961, 0.03826172637939453, 0.038809375762939455, 0.0384901123046875, 0.04241747283935547, 0.03869356918334961, 0.03870719909667969, 0.038467071533203126, 0.03835311889648438, 0.03827536010742188, 0.03841228866577148, 0.03828307342529297, 0.03829779052734375, 0.03835683059692383, 0.038297630310058596, 0.0386429443359375, 0.03848204803466797, 0.038882049560546875, 0.03857408142089844, 0.03831398391723633, 0.03827507019042969, 0.038235774993896486, 0.03826889419555664, 0.038361503601074216, 0.03831612777709961, 0.03861471939086914, 0.038250720977783204, 0.03824367904663086, 0.038283935546875, 0.03833446502685547, 0.038193153381347655, 0.03821977615356445, 0.03815407943725586, 0.038219871520996096, 0.038162494659423826, 0.038019073486328124, 0.038166526794433595, 0.038182910919189454, 0.038760448455810545, 0.039677345275878906, 0.03812636947631836, 0.03822777557373047, 0.03810649490356445, 0.03837952041625976, 0.03839449691772461, 0.0381399040222168, 0.039970016479492186, 0.039120960235595706, 0.03948166275024414, 0.039240032196044924, 0.03929708862304687, 0.039182334899902346, 0.03889766311645508, 0.03983516693115234, 0.03923811340332031, 0.03870515060424805, 0.03865599822998047, 0.038671905517578126, 0.03901078414916992, 0.03946086502075195, 0.039531967163085935, 0.03895558547973633, 0.03930886459350586, 0.03933843231201172, 0.039106559753417966, 0.039686145782470705, 0.03879455947875977, 0.038390464782714843, 0.03829759979248047, 0.03835289764404297, 0.038168575286865236, 0.03823235321044922, 0.03814985656738281, 0.03833164978027344, 0.038224479675292966, 0.038174144744873045, 0.0387193603515625, 0.03836540985107422, 0.03852659225463867, 0.03817779159545898, 0.038330368041992184, 0.03823206329345703, 0.03838771057128906, 0.03822796630859375, 0.03821529769897461, 0.03835491180419922, 0.03834921646118164, 0.03827452850341797, 0.03825718307495117, 0.03818899154663086, 0.038481151580810544, 0.0390766716003418, 0.03825459289550781, 0.03824639892578125, 0.03827097702026367, 0.038266880035400394, 0.03821315383911133, 0.03821171188354492, 0.03810508728027344, 0.038133312225341796, 0.038136608123779295, 0.03821363067626953, 0.03821977615356445, 
0.03857939147949219, 0.038179649353027346, 0.03813580703735352, 0.03816243362426758, 0.03811123275756836, 0.03813702392578125, 0.04013913726806641, 0.038606815338134766, 0.038438945770263674, 0.03831615829467774, 0.03870297622680664, 0.03875145721435547, 0.03865996932983398, 0.03860572814941406, 0.03864067077636719, 0.039056350708007816, 0.03863142395019531, 0.03849216079711914, 0.03829555130004883, 0.03820483016967773, 0.03825929641723633, 0.038371231079101564, 0.03871753692626953, 0.03876249694824219, 0.038752254486083985, 0.03867443084716797, 0.038389759063720705, 0.03821968078613281, 0.03888079833984375, 0.03845110321044922, 0.03827779388427734, 0.03830121612548828, 0.038365665435791015, 0.03828086471557617, 0.038219711303710935, 0.03825910568237305, 0.03820544052124023, 0.03821670532226563, 0.03829366302490234, 0.03814998245239258, 0.03815055847167969, 0.03825289535522461, 0.038314239501953125, 0.03817798233032227, 0.03825542449951172, 0.03818668746948242, 0.03837164688110352, 0.03821891021728516, 0.03857513427734375, 0.03819091033935547, 0.038172832489013674, 0.03837731170654297, 0.03828326416015625, 0.03824835205078125, 0.03826697540283203, 0.03848806381225586, 0.038225921630859375, 0.038284446716308596, 0.038380382537841796, 0.03839516830444336, 0.038131744384765624, 0.038152286529541016, 0.03861155319213867, 0.03824835205078125, 0.03816396713256836, 0.03838214492797851, 0.03819625473022461, 0.039051361083984375, 0.03847660827636719, 0.040510753631591796, 0.03882447814941406, 0.038504352569580076, 0.03827036666870117, 0.038421504974365236, 0.03822169494628906, 0.03824435043334961, 0.03818646240234375, 0.03837187194824219, 0.0382911376953125, 0.03812287902832031, 0.038300609588623045, 0.03843996810913086, 0.038771007537841795, 0.03933865737915039, 0.038629119873046874, 0.03825075149536133, 0.03844684982299805, 0.038381214141845706, 0.038311710357666014, 0.038435359954833985, 0.0385846061706543, 0.038373374938964845, 0.03832831954956055, 0.038529022216796875, 0.03853107070922852, 0.039138751983642577, 0.038613601684570314, 0.038352481842041014, 0.038512542724609376, 0.038405662536621095, 0.038239166259765624, 0.03829126358032227, 0.03828345489501953, 0.038221023559570313, 0.03818371200561523, 0.03827711868286133, 0.03831398391723633, 0.03843875122070312, 0.03832595062255859, 0.038214111328125, 0.03820544052124023, 0.038176864624023435, 0.03832412719726563, 0.038225921630859375, 0.03816255950927734, 0.038076286315917966, 0.03814748764038086, 0.03811593627929687, 0.03844028854370117, 0.038254913330078126, 0.04022220611572266, 0.03918697738647461, 0.038543296813964845, 0.038604671478271485, 0.03835955047607422, 0.038588062286376956, 0.03894028854370117, 0.04045078277587891, 0.03827916717529297, 0.038300926208496094, 0.0383372802734375, 0.038168575286865236, 0.039562366485595704, 0.03838243103027344, 0.03823126220703125, 0.03905209732055664, 0.038212993621826175, 0.04097622299194336, 0.03830454254150391, 0.03853107070922852, 0.038125568389892575, 0.03810105514526367, 0.03808448028564453, 0.038484031677246094, 0.038134849548339844, 0.03841734313964844, 0.03830108642578125, 0.03845795059204102, 0.038365310668945315, 0.03954793548583985, 0.03819606399536133, 0.03833651351928711, 0.03840809631347656, 0.038193248748779295, 0.03823772811889648, 0.03861065673828125, 0.03827737426757812, 0.038306175231933595, 0.03822604751586914, 0.03849766540527344, 0.03827302551269531, 0.03931308746337891, 0.039508190155029294, 0.03895759963989258, 0.039567169189453126, 0.0390816650390625, 
0.038514911651611326, 0.038324703216552736, 0.03829145431518555, 0.03830137634277344, 0.03856006240844727, 0.038299648284912106, 0.03824995040893555, 0.03865449523925781, 0.03829759979248047, 0.03995852661132813, 0.03843094253540039, 0.03823971176147461, 0.03827948760986328, 0.03836723327636719, 0.03824435043334961, 0.038340606689453126, 0.0383645133972168, 0.0383078727722168, 0.03826300811767578, 0.038435230255126955, 0.038223072052001955, 0.03889430236816406, 0.03848198318481445, 0.03845939254760742, 0.03836665725708008, 0.03866230392456055, 0.038266494750976564, 0.0381360969543457, 0.03819776153564453, 0.039662017822265624, 0.0385780143737793, 0.03827260971069336, 0.03826483154296875, 0.03827974319458008, 0.03885465621948242, 0.038321376800537106, 0.03875305557250976, 0.03833590316772461, 0.03844918441772461, 0.03836156845092773, 0.038383712768554686, 0.03830195236206055, 0.03820518493652344, 0.03847481536865234, 0.038204288482666014, 0.03884172821044922, 0.03827987289428711, 0.0389119987487793, 0.03822147369384766, 0.03842860794067383, 0.03842294311523437, 0.038217727661132815, 0.03869606399536133, 0.03828998565673828, 0.038125598907470706, 0.03813814544677734, 0.03827881622314453, 0.038131614685058594, 0.03827347183227539, 0.038250495910644534, 0.03819935989379883, 0.03865932846069336, 0.03955782318115234, 0.03832217788696289, 0.03866624069213867, 0.03906969451904297, 0.038387168884277345, 0.038316574096679684, 0.03819724655151367, 0.03832361602783203, 0.03822038269042969, 0.03814761734008789, 0.038217247009277346, 0.03830672073364258, 0.03874319839477539, 0.03828211212158203, 0.03822572708129883, 0.03843705749511719, 0.038337665557861327, 0.03827801513671875, 0.03820748901367187, 0.038313278198242186, 0.03832620620727539, 0.03833260726928711, 0.03848659133911133, 0.03855344009399414, 0.0383100471496582, 0.038266880035400394, 0.03899801635742187, 0.038700736999511716, 0.03834022521972656, 0.038597312927246094]",tokens/s,25.870522570994293,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,811.773952,522.059776,0.0,136.31488,125.60896,s,1,7.63188427734375,7.63188427734375,0.0,7.63188427734375,7.63188427734375,7.63188427734375,7.63188427734375,[7.63188427734375],,kWh,1.3297195770822632e-05,1.459585881186852e-06,3.7816696920045523e-06,1.8538451344014034e-05,,MB,1249.222656,641.59744,0.0,226.492416,195.515904,s,14,0.20480502223968505,0.014628930159977504,0.00025382982738715923,0.01450713586807251,0.014932060813903808,0.015058419132232666,0.015232074146270752,"[0.01440131187438965, 0.01490995216369629, 0.014420255661010743, 0.014474207878112794, 0.014508511543273926, 0.014481120109558106, 0.01494153594970703, 0.01443177604675293, 0.014487839698791504, 0.014582719802856445, 0.014506752014160157, 0.015275487899780274, 0.014876031875610351, 
0.014507519721984862]",tokens/s,17499.57086406609,kWh,4.230851130435128e-07,4.665380296783534e-08,2.7624136431012064e-07,7.459802803214688e-07,tokens/kWh,343172610.2594572,MB,1281.826816,656.277504,0.0,241.17248,195.518464,s,14,9.999661865234376,0.7142615618024555,0.002868518246165444,0.7136603393554688,0.7171349731445312,0.719178076171875,0.72154181640625,"[0.7122301635742188, 0.7123153686523438, 0.714058349609375, 0.7121649780273438, 0.7221327514648438, 0.7160800170898437, 0.7137522583007813, 0.7135684204101562, 0.7104891967773438, 0.7175870971679688, 0.7151151733398438, 0.712313720703125, 0.7120822143554687, 0.7157721557617187]",tokens/s,88.20298244947978,kWh,2.0646329884871713e-05,2.276915137802149e-06,7.73116583526191e-06,3.065441085793577e-05,tokens/kWh,2055169.166093781,,s,882,9.991075596809404,0.011327750109761212,0.00027612207451770175,0.011250383853912353,0.011580649948120117,0.011699842929840089,0.01225159895896911,"[0.01074179172515869, 0.011239871978759766, 0.011134976387023926, 0.011235967636108398, 0.011136896133422851, 0.011179167747497559, 0.01112764835357666, 0.011161919593811034, 0.011105312347412109, 0.011100831985473633, 0.011385919570922852, 0.011166144371032714, 0.011119104385375977, 0.011156607627868652, 0.011191488265991211, 0.01119536018371582, 0.011174624443054199, 0.011440128326416015, 0.011511712074279786, 0.011464799880981445, 0.011478528022766114, 0.011376768112182618, 0.01130457592010498, 0.011294848442077636, 0.011764351844787597, 0.011554976463317872, 0.011470399856567382, 0.011321632385253907, 0.011932928085327149, 0.011264767646789551, 0.01122265625, 0.01148761558532715, 0.01124073600769043, 0.011233247756958008, 0.01120911979675293, 0.011716959953308105, 0.01126195240020752, 0.011282143592834472, 0.01124995231628418, 0.011192319869995117, 0.011244959831237793, 0.01119603157043457, 0.01118876838684082, 0.01117638397216797, 0.011187808036804199, 0.011161120414733886, 0.011365056037902832, 0.011204704284667969, 0.011180064201354981, 0.011163935661315919, 0.01116147232055664, 0.01154047966003418, 0.01139027214050293, 0.011151455879211425, 0.011618816375732421, 0.011306528091430665, 0.011276063919067382, 0.011203424453735351, 0.011198304176330566, 0.011163647651672364, 0.011277536392211913, 0.011502367973327637, 0.011889663696289063, 0.011067520141601563, 0.011756959915161133, 0.011555871963500976, 0.011589280128479004, 0.011556863784790039, 0.011388480186462402, 0.011266464233398438, 0.011314240455627442, 0.011232224464416504, 0.01124556827545166, 0.01125376033782959, 0.011177984237670899, 0.01117193603515625, 0.011215968132019043, 0.01116380786895752, 0.01121945571899414, 0.011910335540771484, 0.01131619167327881, 0.011229184150695801, 0.011166943550109863, 0.011130687713623046, 0.011176799774169922, 0.011214464187622071, 0.011219264030456543, 0.011182432174682616, 0.011167584419250488, 0.011145119667053222, 0.01111359977722168, 0.011160544395446777, 0.011110400199890137, 0.011155488014221192, 0.011270432472229005, 0.01113590431213379, 0.011236127853393554, 0.011343680381774902, 0.011536352157592774, 0.01148755168914795, 0.011310527801513672, 0.011542816162109375, 0.01148851203918457, 0.011506303787231446, 0.011646976470947265, 0.01139065647125244, 0.011360896110534669, 0.011210751533508301, 0.011263360023498535, 0.011219231605529786, 0.011244000434875489, 0.011359616279602051, 0.011215359687805175, 0.011232255935668945, 0.011293696403503419, 0.011927552223205566, 0.011293760299682617, 0.011273119926452637, 0.011184224128723145, 0.011282367706298828, 
0.011196415901184082, 0.011130880355834961, 0.011126688003540039, 0.011135071754455566, 0.011152480125427246, 0.011164575576782227, 0.010783807754516601, 0.011118495941162109, 0.011208703994750976, 0.011177951812744141, 0.011153152465820312, 0.011194944381713867, 0.011581472396850586, 0.011369312286376953, 0.011604831695556641, 0.011502880096435547, 0.01144495964050293, 0.011448639869689942, 0.011750399589538574, 0.011590368270874023, 0.011660991668701172, 0.011647232055664062, 0.011446304321289062, 0.01127731227874756, 0.011230112075805664, 0.011289728164672852, 0.011197407722473145, 0.011207903861999512, 0.011207232475280762, 0.01116211223602295, 0.011261664390563964, 0.011126815795898437, 0.011127840042114258, 0.011131839752197266, 0.011122431755065918, 0.011183775901794434, 0.011300448417663575, 0.011073887825012207, 0.011317919731140137, 0.01130083179473877, 0.011853568077087403, 0.011557151794433593, 0.011157504081726074, 0.011173664093017578, 0.01114134407043457, 0.011168767929077148, 0.01112985610961914, 0.011160799980163574, 0.011199263572692872, 0.011157504081726074, 0.011223391532897949, 0.011163104057312011, 0.011210944175720215, 0.011233280181884766, 0.011330975532531738, 0.011648736000061034, 0.011731840133666993, 0.011626111984252929, 0.011410112380981446, 0.011426560401916504, 0.011393152236938476, 0.011615039825439454, 0.011570400238037109, 0.011425600051879883, 0.01130128002166748, 0.011350591659545899, 0.011402688026428222, 0.011258591651916504, 0.011236224174499512, 0.011062944412231445, 0.011287551879882812, 0.011187968254089355, 0.011204863548278808, 0.011300864219665528, 0.011247903823852538, 0.011218655586242675, 0.01124339199066162, 0.011476736068725586, 0.011337887763977051, 0.011253984451293945, 0.011321344375610352, 0.011804608345031739, 0.011350079536437989, 0.011245599746704102, 0.011317215919494628, 0.011257856369018555, 0.011214207649230958, 0.011104991912841797, 0.011316960334777831, 0.011241791725158691, 0.011244640350341797, 0.011254688262939454, 0.011165568351745606, 0.011290623664855956, 0.01123100757598877, 0.011239104270935059, 0.011260512351989747, 0.011511743545532227, 0.01147216033935547, 0.011401439666748047, 0.01142835235595703, 0.011391072273254395, 0.011474847793579102, 0.011553855895996093, 0.011534879684448242, 0.011344287872314453, 0.011645055770874024, 0.011379648208618164, 0.011371359825134278, 0.011352128028869628, 0.011268128395080566, 0.011292991638183593, 0.01123305606842041, 0.011206080436706543, 0.011333344459533691, 0.011211520195007324, 0.011114751815795899, 0.011222944259643555, 0.011700063705444337, 0.011292672157287598, 0.011267711639404297, 0.011256192207336426, 0.011132672309875488, 0.011226816177368164, 0.011164223670959473, 0.01112063980102539, 0.011218912124633789, 0.01112067222595215, 0.011124671936035157, 0.011147520065307618, 0.01116755199432373, 0.011208767890930177, 0.010836799621582032, 0.011364352226257325, 0.011664608001708984, 0.011434335708618164, 0.011426176071166992, 0.01139628791809082, 0.011398112297058106, 0.013692928314208984, 0.01212825584411621, 0.011513664245605468, 0.011329055786132813, 0.011311776161193847, 0.011322879791259765, 0.011370464324951172, 0.011328031539916992, 0.011328767776489258, 0.011244288444519043, 0.011339679718017578, 0.011167231559753419, 0.011317855834960937, 0.011300864219665528, 0.011206656455993653, 0.011218303680419922, 0.011146016120910644, 0.011214688301086425, 0.011194527626037598, 0.011084832191467286, 0.011135807991027832, 0.011240863800048828, 0.011182720184326173, 
0.011151328086853027, 0.011282336235046387, 0.011190367698669433, 0.011152447700500489, 0.011217856407165527, 0.011165696144104004, 0.01111244773864746, 0.011909119606018067, 0.01274675178527832, 0.014002176284790039, 0.012342656135559082, 0.01173481559753418, 0.011594752311706542, 0.011805952072143555, 0.011669695854187012, 0.011661120414733887, 0.01163270378112793, 0.011381343841552734, 0.011323328018188477, 0.011271552085876465, 0.01122374439239502, 0.01125369644165039, 0.011225088119506836, 0.011205792427062988, 0.011254624366760254, 0.011206656455993653, 0.011208224296569824, 0.011184864044189453, 0.01122208023071289, 0.011258560180664063, 0.011174015998840332, 0.01148089599609375, 0.011448287963867187, 0.010834943771362305, 0.011175935745239257, 0.011223039627075194, 0.011163647651672364, 0.0111595516204834, 0.011165375709533692, 0.011133248329162598, 0.01113702392578125, 0.011233280181884766, 0.011175840377807618, 0.011209823608398438, 0.01124687957763672, 0.011648799896240234, 0.011947967529296876, 0.012579839706420898, 0.011572223663330078, 0.011646783828735352, 0.01156934356689453, 0.011505663871765137, 0.011386879920959473, 0.011462656021118164, 0.011662464141845703, 0.011613056182861329, 0.011476991653442382, 0.011422975540161133, 0.011287551879882812, 0.011183744430541992, 0.011194496154785157, 0.011191391944885254, 0.01118015956878662, 0.011270015716552735, 0.011123040199279784, 0.01118892765045166, 0.011132800102233886, 0.011207839965820313, 0.01113145637512207, 0.011206656455993653, 0.011405023574829101, 0.011410176277160644, 0.011493184089660645, 0.011439231872558594, 0.011354944229125977, 0.011327775955200195, 0.011382559776306152, 0.011177984237670899, 0.011171839714050292, 0.011220992088317871, 0.011218976020812989, 0.011218912124633789, 0.011212800025939941, 0.011126784324645997, 0.011130880355834961, 0.011344063758850097, 0.011447967529296875, 0.01132464027404785, 0.011385791778564453, 0.011415103912353515, 0.011372768402099609, 0.011349696159362793, 0.011508416175842286, 0.012230239868164062, 0.011743488311767579, 0.011361632347106933, 0.01119273567199707, 0.011538080215454102, 0.011985024452209473, 0.011394528388977051, 0.011354623794555664, 0.011450176239013671, 0.011326080322265624, 0.01124947166442871, 0.011169792175292969, 0.011227104187011719, 0.011144864082336425, 0.011196800231933594, 0.011229248046875, 0.01119660758972168, 0.011331328392028809, 0.011153311729431152, 0.011170111656188964, 0.01115347194671631, 0.01124937629699707, 0.011182144165039062, 0.011161536216735839, 0.011447903633117675, 0.011419424057006836, 0.011246463775634766, 0.011220735549926758, 0.011200511932373047, 0.011395071983337402, 0.011428928375244141, 0.011395648002624511, 0.011405728340148925, 0.011171008110046387, 0.011186528205871581, 0.011713088035583496, 0.011537728309631347, 0.011731519699096679, 0.01152342414855957, 0.011495295524597169, 0.011365247726440429, 0.011470975875854493, 0.011601119995117188, 0.011596351623535157, 0.011613984107971192, 0.011242976188659668, 0.011219776153564454, 0.01123635196685791, 0.01124448013305664, 0.011216896057128906, 0.011198464393615723, 0.01115340805053711, 0.011341823577880859, 0.011321344375610352, 0.011201663970947266, 0.011240320205688476, 0.011254816055297852, 0.011260895729064942, 0.011241472244262696, 0.011170144081115723, 0.011198111534118652, 0.011210432052612304, 0.011365920066833497, 0.011205504417419434, 0.011190176010131836, 0.01123136043548584, 0.010737567901611329, 0.011274016380310059, 0.011325407981872558, 
0.011413567543029786, 0.01125164794921875, 0.011251551628112793, 0.01147219181060791, 0.011524959564208985, 0.011695648193359375, 0.011440608024597168, 0.011628095626831054, 0.01142204761505127, 0.011598336219787597, 0.011517439842224121, 0.0117042236328125, 0.011399231910705566, 0.011260319709777832, 0.011263999938964844, 0.011153120040893555, 0.011351455688476562, 0.012040800094604492, 0.011206656455993653, 0.011163647651672364, 0.011450207710266114, 0.011200672149658203, 0.011163583755493164, 0.011124799728393555, 0.01111366367340088, 0.011127615928649902, 0.011146335601806641, 0.011140000343322755, 0.011122431755065918, 0.011130975723266602, 0.011143168449401856, 0.011204383850097656, 0.011183839797973633, 0.011176608085632323, 0.011102463722229004, 0.011167840003967285, 0.011134592056274414, 0.01114076805114746, 0.01112940788269043, 0.011256735801696777, 0.011145376205444335, 0.011197183609008789, 0.011118464469909668, 0.011358336448669433, 0.01157254409790039, 0.011571904182434083, 0.011681792259216308, 0.011566207885742188, 0.011496479988098145, 0.01149062442779541, 0.011346464157104491, 0.011560480117797851, 0.011546976089477539, 0.011425919532775879, 0.01130851173400879, 0.011178560256958008, 0.0112293119430542, 0.011296607971191407, 0.011249664306640626, 0.011177056312561036, 0.010784832000732422, 0.011150367736816406, 0.011146080017089844, 0.011140255928039551, 0.011254719734191895, 0.011235360145568848, 0.011222911834716796, 0.011257984161376954, 0.011233535766601563, 0.011259936332702636, 0.011160351753234864, 0.011189184188842773, 0.011124544143676758, 0.011155263900756836, 0.011276384353637696, 0.011180319786071778, 0.011206303596496582, 0.011169247627258301, 0.01117478370666504, 0.011237471580505372, 0.011122943878173829, 0.011156160354614258, 0.011221952438354492, 0.011111552238464356, 0.011236000061035156, 0.01119212818145752, 0.01136070442199707, 0.01147862434387207, 0.01151632022857666, 0.01158460807800293, 0.011457728385925294, 0.011490943908691407, 0.01175766372680664, 0.011565055847167969, 0.011469920158386231, 0.011350624084472656, 0.011256128311157227, 0.01123532772064209, 0.011476351737976074, 0.011327327728271485, 0.011278880119323731, 0.011268287658691407, 0.011286656379699707, 0.01126153564453125, 0.01197651195526123, 0.011300512313842773, 0.011209600448608399, 0.011433600425720215, 0.011228896141052247, 0.01113798427581787, 0.011454239845275879, 0.011249695777893066, 0.011179295539855957, 0.011143808364868163, 0.011192031860351562, 0.011082015991210937, 0.01115881633758545, 0.011133664131164551, 0.011181952476501465, 0.011245152473449708, 0.011176480293273925, 0.011165696144104004, 0.01121827220916748, 0.011375328063964844, 0.011515968322753907, 0.011552703857421876, 0.011415712356567383, 0.011408415794372559, 0.011477824211120605, 0.011583488464355468, 0.011854880332946778, 0.01170531177520752, 0.011421664237976074, 0.011359647750854492, 0.011309120178222656, 0.011471487998962403, 0.01130281639099121, 0.011250720024108887, 0.011414527893066406, 0.01141923236846924, 0.011305376052856446, 0.011298815727233886, 0.011239775657653809, 0.011267904281616212, 0.011124799728393555, 0.0112391996383667, 0.011107872009277344, 0.011149600028991699, 0.01109011173248291, 0.011083776473999024, 0.011093536376953124, 0.011250144004821777, 0.011229215621948242, 0.011120160102844239, 0.0110796480178833, 0.011192768096923829, 0.01115135955810547, 0.011243007659912109, 0.011137568473815918, 0.011216896057128906, 0.011456512451171874, 0.011607839584350585, 0.011573151588439941, 
0.014324031829833984, 0.011511808395385742, 0.01163276767730713, 0.011644800186157227, 0.01154047966003418, 0.011444160461425781, 0.011324992179870605, 0.011286335945129395, 0.01122704029083252, 0.011281120300292968, 0.011206720352172852, 0.011259584426879883, 0.011307647705078125, 0.011400896072387695, 0.011210335731506347, 0.011270560264587403, 0.01146281623840332, 0.01130303955078125, 0.011245280265808106, 0.011251711845397949, 0.011188063621520996, 0.011210911750793457, 0.011370495796203613, 0.010793984413146973, 0.011165535926818848, 0.011216896057128906, 0.01115135955810547, 0.011915264129638671, 0.011232959747314452, 0.0111844482421875, 0.011324511528015137, 0.011187295913696289, 0.011204416275024415, 0.011148415565490722, 0.01118297576904297, 0.011259903907775879, 0.011368512153625488, 0.011587519645690918, 0.011478879928588866, 0.011422176361083985, 0.011274944305419922, 0.011284607887268067, 0.011332192420959473, 0.011672991752624512, 0.011520735740661621, 0.011379199981689453, 0.011285280227661133, 0.011139904022216797, 0.011188159942626954, 0.011224287986755371, 0.01120076847076416, 0.011248319625854493, 0.01114249610900879, 0.011135616302490235, 0.011157504081726074, 0.011519488334655761, 0.011359968185424806, 0.011168543815612792, 0.011231231689453124, 0.011282336235046387, 0.011233599662780761, 0.01133568000793457, 0.011320287704467774, 0.01118291187286377, 0.011157504081726074, 0.011225088119506836, 0.01114691162109375, 0.011124992370605468, 0.01115875244140625, 0.011103103637695313, 0.011193951606750489, 0.011112159729003906, 0.011123392105102539, 0.011156576156616211, 0.011099040031433105, 0.01216921615600586, 0.013357151985168457, 0.012071968078613281, 0.011364352226257325, 0.011289567947387695, 0.011554719924926758, 0.011556032180786133, 0.011553536415100098, 0.011447392463684081, 0.011334431648254395, 0.011403552055358887, 0.01136233615875244, 0.011532447814941406, 0.011445055961608887, 0.011437984466552734, 0.011397120475769042, 0.011364352226257325, 0.01135148811340332, 0.011657919883728027, 0.011364224433898926, 0.011421695709228515, 0.011397120475769042, 0.011419648170471192, 0.011185503959655762, 0.011185952186584473, 0.011170687675476075, 0.011199872016906738, 0.01124009609222412, 0.011110079765319825, 0.011159839630126954, 0.011400927543640136, 0.011261664390563964, 0.0112359037399292, 0.011229408264160157, 0.011156767845153808, 0.011196991920471192, 0.011204383850097656, 0.011179455757141113, 0.011164159774780273, 0.011874527931213378, 0.011351263999938965, 0.01155519962310791, 0.011567520141601563, 0.011462016105651856, 0.011373184204101562, 0.011462431907653808, 0.0113721923828125, 0.011558815956115723, 0.011518400192260742, 0.011433856010437012, 0.011350208282470704, 0.011208864212036132, 0.011122688293457032, 0.011192480087280273, 0.011147104263305663, 0.011198464393615723, 0.011145376205444335, 0.011119647979736328, 0.011139904022216797, 0.011249664306640626, 0.011275520324707031, 0.011186400413513183, 0.011155584335327148, 0.011114944458007812, 0.011120608329772949, 0.011123711585998536, 0.011255840301513671, 0.011131072044372558, 0.011518048286437989, 0.011332287788391113, 0.011257472038269043, 0.01118064022064209, 0.011120415687561036, 0.01111689567565918, 0.010818911552429198, 0.01125062370300293, 0.011359968185424806, 0.011571488380432128, 0.011640640258789062, 0.011476927757263184, 0.011343968391418458, 0.011343135833740234, 0.011499744415283203, 0.011555487632751466, 0.011407487869262696, 0.011384767532348633, 0.011195455551147461, 
0.011260800361633301, 0.011209792137145996, 0.011199423789978027, 0.011220992088317871, 0.011156991958618164, 0.01116982364654541, 0.01118563175201416, 0.011113375663757323, 0.011135071754455566, 0.011051072120666503, 0.011120575904846192, 0.011118592262268067, 0.011077664375305175, 0.011079039573669434, 0.011074175834655762, 0.011149279594421387, 0.011333727836608886, 0.01114128017425537, 0.011040512084960938, 0.011125056266784668, 0.011130271911621093, 0.011129376411437988, 0.011060480117797852, 0.011090271949768067, 0.01116585636138916, 0.011259903907775879, 0.011136032104492188, 0.011260640144348145, 0.011198240280151367, 0.011223520278930664, 0.011499520301818847, 0.011539551734924316, 0.011503904342651368, 0.011680383682250977, 0.011522047996520996, 0.011522047996520996, 0.011526016235351563, 0.011630016326904297, 0.011535039901733398, 0.011531904220581054, 0.01198259162902832, 0.01123145580291748, 0.011665823936462403, 0.01127030372619629, 0.011320927619934081, 0.01126534366607666, 0.011252127647399902, 0.011167840003967285, 0.011157343864440918, 0.011227423667907716, 0.010892895698547364, 0.011176351547241212, 0.011190208435058593, 0.011470080375671387, 0.011234432220458985, 0.011239104270935059, 0.01117411231994629, 0.011202112197875976, 0.011294207572937011, 0.01131174373626709, 0.011221088409423829, 0.011148544311523437, 0.011164416313171386, 0.011157440185546876, 0.01113475227355957, 0.011141407966613769, 0.01112063980102539, 0.011286527633666991, 0.011636927604675293, 0.011273119926452637, 0.011343968391418458, 0.011493632316589356, 0.011680480003356933, 0.01153388786315918, 0.011757856369018554, 0.012961759567260742, 0.011802656173706055, 0.012411999702453613, 0.0116212797164917, 0.011407103538513184, 0.01138319969177246, 0.01131708812713623, 0.011356160163879395, 0.011272192001342773, 0.011573247909545899, 0.011884544372558594, 0.011238975524902344, 0.011235775947570801, 0.011237631797790528, 0.01114857578277588, 0.011180512428283692, 0.011274239540100098, 0.011132927894592285, 0.01119865608215332, 0.011194175720214843, 0.011200511932373047, 0.011149184226989746, 0.011153535842895508, 0.011173279762268066, 0.01117039966583252, 0.01115340805053711, 0.01112883186340332, 0.011121696472167968, 0.011270336151123047, 0.011152159690856934, 0.011124256134033204, 0.011450240135192871, 0.011244447708129882, 0.011491007804870605, 0.011607232093811036, 0.011545408248901367, 0.011493696212768554, 0.011401023864746094]",tokens/s,88.2787835457539,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = 
launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 133, in load_transformers_model_with_no_weights self.load_transformers_model_from_pretrained() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 85, in load_transformers_model_from_pretrained self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 4085, in from_pretrained dispatch_model(model, **device_map_kwargs) File ""/usr/local/lib/python3.10/dist-packages/accelerate/big_modeling.py"", line 494, in dispatch_model model.to(device) File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 2952, in to return super().to(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1174, in to return self._apply(convert) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 780, in _apply module._apply(fn) [Previous line repeated 3 more times] File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 805, in _apply param_applied = fn(param) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1160, in convert return t.to( torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.55 GiB is free. Process 221812 has 13.19 GiB memory in use. Of the allocated memory 13.09 GiB is allocated by PyTorch, and 880.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,923.21792,622.723072,0.0,236.978176,214.714368,s,1,7.86119873046875,7.86119873046875,0.0,7.86119873046875,7.86119873046875,7.86119873046875,7.86119873046875,[7.86119873046875],,kWh,2.071173705415579e-05,2.2772688461625287e-06,6.663616441998643e-06,2.9652622342316963e-05,,MB,1286.889472,752.746496,0.0,337.641472,300.087808,s,10,0.29704732704162595,0.029704732704162594,0.0007060224929661715,0.029473519325256346,0.029859769439697266,0.030834332275390623,0.03161398254394531,"[0.031808895111083986, 0.029472095489501953, 0.029474943161010743, 0.0295199031829834, 0.029419200897216797, 0.02943187141418457, 0.02953798484802246, 0.029325376510620116, 0.029643199920654298, 0.029413856506347657]",tokens/s,8618.15531382062,kWh,8.99496092833406e-07,9.914569087777779e-08,5.942791079015775e-07,1.5929208916127613e-06,tokens/kWh,160711056.86912766,MB,1319.759872,794.689536,0.0,379.584512,300.090368,s,10,10.570111938476563,1.0570111938476563,0.07183341859600346,1.0344741821289063,1.0674691162109373,1.1692772216796872,1.2507237060546874,"[1.2710853271484375, 1.0334930419921875, 1.0194849243164064, 1.035455322265625, 1.0202749633789063, 1.0448450927734374, 1.02953466796875, 1.04273828125, 1.04194921875, 1.0312510986328125]",tokens/s,59.60201780898073,kWh,3.0777510658418966e-05,3.394321230536434e-06,1.2056981013097797e-05,4.622881290205318e-05,tokens/kWh,1362786.4538395265,,s,630,10.564326416969294,0.016768772090427462,0.0014059454077514333,0.01637451171875,0.017065500831604005,0.021667848014831542,0.021929545516967772,"[0.0214300479888916, 0.021561376571655272, 0.0219015998840332, 0.02165350341796875, 0.021763999938964843, 0.021753087997436523, 0.021801727294921875, 0.021700767517089842, 0.021690208435058592, 0.02172313690185547, 0.021444223403930665, 0.021542783737182616, 0.02165155220031738, 0.021598623275756835, 0.0219835205078125, 0.021837535858154296, 0.021987327575683592, 0.021999616622924805, 0.021807327270507812, 0.021769664764404298, 0.0217073917388916, 0.02194095993041992, 0.02183065605163574, 0.022142879486083983, 0.02214227294921875, 0.021703359603881835, 0.02173750305175781, 0.021679584503173827, 0.021567935943603515, 0.021606176376342774, 0.021809535980224608, 0.021792768478393554, 0.021832799911499022, 0.021773216247558593, 0.021770240783691407, 0.021761056900024413, 0.02179408073425293, 0.021862112045288085, 0.021878015518188475, 0.02195529556274414, 0.021647008895874023, 0.02148387145996094, 0.02168524742126465, 0.019840063095092772, 0.0167663688659668, 0.016487167358398436, 0.016654048919677734, 0.016758079528808593, 0.016439584732055663, 0.016412256240844726, 0.01658665657043457, 0.016659423828125, 0.016594944000244142, 0.016727455139160158, 0.01673686408996582, 0.016934463500976563, 0.016714176177978515, 
0.016546911239624023, 0.016563007354736328, 0.016438623428344727, 0.016356096267700196, 0.016367679595947267, 0.016253023147583007, 0.01595411205291748, 0.016442047119140626, 0.016178848266601563, 0.016224607467651368, 0.016117759704589844, 0.01642905616760254, 0.016300031661987305, 0.01630988883972168, 0.017977920532226563, 0.016180032730102538, 0.016202047348022462, 0.016091840744018555, 0.0167956485748291, 0.016187744140625, 0.016158367156982423, 0.01618739128112793, 0.01619753646850586, 0.01651318359375, 0.016504768371582032, 0.016486591339111328, 0.016475391387939454, 0.016452159881591797, 0.01654902458190918, 0.0166298885345459, 0.016679487228393554, 0.016555423736572265, 0.016651008605957033, 0.01635273551940918, 0.016300031661987305, 0.01622684860229492, 0.016329055786132814, 0.01616044807434082, 0.01619366455078125, 0.016268640518188476, 0.01611392021179199, 0.016054527282714844, 0.016005119323730468, 0.016034975051879882, 0.01597721576690674, 0.01597663974761963, 0.015929247856140135, 0.016059392929077147, 0.01596470355987549, 0.01661532783508301, 0.016806720733642578, 0.016760576248168946, 0.016609024047851563, 0.016637216567993163, 0.019542463302612306, 0.016977792739868165, 0.016818399429321288, 0.016368127822875975, 0.01628099250793457, 0.016398880004882814, 0.016250879287719726, 0.01619296073913574, 0.016239168167114258, 0.016099327087402342, 0.01600307273864746, 0.016144384384155275, 0.016182432174682616, 0.016171775817871093, 0.016461311340332033, 0.01624496078491211, 0.016848735809326172, 0.016654495239257813, 0.0166167049407959, 0.016505599975585938, 0.016438623428344727, 0.016271488189697265, 0.016418432235717772, 0.016289695739746094, 0.016514047622680664, 0.016289567947387694, 0.016435104370117186, 0.016285280227661132, 0.016200416564941405, 0.016158720016479493, 0.016172607421875, 0.0159270076751709, 0.016104160308837892, 0.016289791107177733, 0.01639628791809082, 0.01599897575378418, 0.016028768539428712, 0.016019872665405274, 0.01604991912841797, 0.015984479904174804, 0.01599555206298828, 0.01594803237915039, 0.015951711654663085, 0.0158887996673584, 0.016095104217529296, 0.01645916748046875, 0.016237375259399413, 0.016191328048706054, 0.016132160186767577, 0.016105215072631837, 0.01603993606567383, 0.016263200759887696, 0.016047519683837892, 0.015974047660827637, 0.01617398452758789, 0.016286752700805665, 0.01635183906555176, 0.016004959106445314, 0.015999232292175293, 0.01595571231842041, 0.016008960723876954, 0.01594425582885742, 0.01604159927368164, 0.016048095703125, 0.016213951110839845, 0.01600160026550293, 0.015984352111816407, 0.01602992057800293, 0.016212160110473633, 0.016102752685546874, 0.016046079635620117, 0.016013984680175782, 0.016136512756347657, 0.01615839958190918, 0.01637990379333496, 0.01620806312561035, 0.016149471282958985, 0.01603055953979492, 0.016187328338623047, 0.016400480270385744, 0.018460575103759765, 0.016570592880249025, 0.016450592041015625, 0.016530111312866212, 0.016574592590332032, 0.01652943992614746, 0.01659459114074707, 0.01657891273498535, 0.016474111557006836, 0.01656012725830078, 0.016444799423217773, 0.016390783309936523, 0.016277439117431642, 0.016261184692382812, 0.016559616088867187, 0.01638041687011719, 0.01641062355041504, 0.016369983673095702, 0.016364383697509765, 0.016354143142700197, 0.016236543655395508, 0.016016735076904296, 0.016124319076538086, 0.016072479248046875, 0.01610745620727539, 0.01709916877746582, 0.01676095962524414, 0.016969728469848632, 0.016870655059814454, 0.016265983581542968, 
0.016250783920288087, 0.016152671813964844, 0.01616281509399414, 0.01606252861022949, 0.016306016921997072, 0.016334943771362305, 0.016348640441894532, 0.016656160354614258, 0.016603519439697265, 0.016499423980712892, 0.016548736572265625, 0.01666441535949707, 0.016581567764282226, 0.01639836883544922, 0.016412864685058592, 0.016502559661865233, 0.016289567947387694, 0.016173280715942383, 0.01633647918701172, 0.01625948715209961, 0.016178783416748048, 0.01621388816833496, 0.016163328170776366, 0.016459808349609376, 0.016117120742797853, 0.016130207061767578, 0.015988384246826172, 0.016559200286865236, 0.016375167846679688, 0.016449888229370116, 0.016419008255004884, 0.016145376205444335, 0.01639958381652832, 0.016491296768188477, 0.016273408889770507, 0.01604748725891113, 0.016044672012329102, 0.015985664367675782, 0.01595516777038574, 0.016021280288696288, 0.0160086727142334, 0.016099872589111327, 0.0160883846282959, 0.015960063934326172, 0.016210304260253907, 0.016053695678710938, 0.016039039611816405, 0.016125919342041016, 0.016012191772460938, 0.016063135147094728, 0.01602911949157715, 0.015993215560913085, 0.016157087326049806, 0.016164512634277345, 0.016271711349487305, 0.016023296356201172, 0.01613849639892578, 0.016240640640258788, 0.016506879806518555, 0.016629152297973633, 0.01669327926635742, 0.016618175506591795, 0.016623008728027345, 0.01691823959350586, 0.016863359451293945, 0.016585344314575194, 0.016189279556274413, 0.016076959609985352, 0.016099327087402342, 0.016002208709716796, 0.015932255744934083, 0.015996864318847656, 0.015878208160400392, 0.015955743789672853, 0.016009376525878905, 0.01596332836151123, 0.015956031799316407, 0.015985119819641114, 0.015996512413024903, 0.01604707145690918, 0.01586678409576416, 0.016155519485473634, 0.016013311386108398, 0.01619353675842285, 0.01624892807006836, 0.016159711837768556, 0.016200639724731444, 0.016329023361206056, 0.01635296058654785, 0.016362720489501954, 0.016228607177734375, 0.016797279357910155, 0.016134912490844727, 0.01612816047668457, 0.01589401626586914, 0.01630463981628418, 0.016261119842529297, 0.01628191947937012, 0.016344736099243164, 0.016234207153320312, 0.016222879409790038, 0.01635513687133789, 0.01620806312561035, 0.01625052833557129, 0.016545791625976563, 0.016441247940063478, 0.016373855590820312, 0.0164771842956543, 0.016407072067260744, 0.016418432235717772, 0.016548959732055665, 0.016652223587036132, 0.01655583953857422, 0.01658880043029785, 0.01642086410522461, 0.016363519668579102, 0.01706175994873047, 0.016859264373779298, 0.0164881591796875, 0.01649488067626953, 0.016827871322631836, 0.016906784057617186, 0.01717452812194824, 0.017817024230957032, 0.017728063583374025, 0.01665433692932129, 0.016664575576782227, 0.016476160049438478, 0.016660480499267577, 0.016562143325805665, 0.016761951446533203, 0.01701478385925293, 0.01664614486694336, 0.016669631958007813, 0.0166495361328125, 0.01641865539550781, 0.01645244789123535, 0.016160064697265625, 0.016110559463500977, 0.016148096084594728, 0.016208192825317384, 0.016280607223510744, 0.01760326385498047, 0.016615264892578124, 0.016705984115600585, 0.016744319915771484, 0.016641664505004882, 0.016554271697998047, 0.01672332763671875, 0.01646860885620117, 0.01653772735595703, 0.016394176483154298, 0.016572351455688476, 0.01710089683532715, 0.01651862335205078, 0.016442144393920898, 0.01651417541503906, 0.016283647537231445, 0.01654374313354492, 0.016373760223388673, 0.01629529571533203, 0.01633113670349121, 0.016271648406982423, 0.016320480346679687, 
0.016160768508911134, 0.016297183990478515, 0.01658537673950195, 0.01644761657714844, 0.016533504486083983, 0.016594944000244142, 0.01647212791442871, 0.016394208908081055, 0.01641878318786621, 0.016246112823486328, 0.01618751907348633, 0.016306751251220702, 0.016328447341918944, 0.016123743057250978, 0.015996416091918944, 0.01598524761199951, 0.01623878479003906, 0.01628169631958008, 0.01635327911376953, 0.016267263412475585, 0.01601535987854004, 0.015986207962036134, 0.015976544380187988, 0.01611392021179199, 0.016087167739868163, 0.016038143157958984, 0.016103263854980468, 0.01632633590698242, 0.01616281509399414, 0.016294111251831056, 0.016483583450317384, 0.016392383575439453, 0.01637228775024414, 0.016482431411743163, 0.01654368019104004, 0.016547264099121092, 0.016517471313476563, 0.016906400680541993, 0.016536863327026367, 0.016251232147216795, 0.016242656707763672, 0.016232255935668946, 0.016099456787109376, 0.016154624938964843, 0.015937024116516114, 0.016643232345581054, 0.01916089630126953, 0.016379520416259764, 0.01643561553955078, 0.0166177921295166, 0.016138175964355468, 0.015982144355773924, 0.015967679977416993, 0.016030527114868163, 0.01614633560180664, 0.016072864532470702, 0.015909791946411133, 0.01623164749145508, 0.016336704254150392, 0.016489215850830078, 0.016554143905639647, 0.016486591339111328, 0.016414976119995116, 0.01655881690979004, 0.016624319076538087, 0.016600671768188476, 0.016311071395874024, 0.016555904388427734, 0.016332799911499024, 0.016359424591064452, 0.01739571189880371, 0.01737113571166992, 0.016909343719482422, 0.01624700736999512, 0.015987456321716307, 0.015994943618774415, 0.016084640502929688, 0.01670207977294922, 0.016426271438598632, 0.016523679733276366, 0.016455039978027344, 0.016227935791015623, 0.016425600051879884, 0.016762304306030273, 0.016712383270263673, 0.01665177536010742, 0.01738528060913086, 0.016696416854858398, 0.016565824508666994, 0.016566560745239257, 0.016414047241210938, 0.016499231338500977, 0.01648975944519043, 0.01646281623840332, 0.016693248748779296, 0.016514944076538085, 0.016510976791381835, 0.016041791915893555, 0.01600326347351074, 0.01600819206237793, 0.01617308807373047, 0.015979488372802733, 0.01606483268737793, 0.015947168350219726, 0.016268991470336915, 0.016686847686767578, 0.016704416275024413, 0.017155775070190428, 0.016778623580932617, 0.016776063919067382, 0.01668252754211426, 0.016875455856323242, 0.016773887634277344, 0.017162015914916992, 0.017016063690185546, 0.017044384002685545, 0.016829568862915038, 0.01675132751464844, 0.01688175964355469, 0.017280704498291017, 0.0170034236907959, 0.01692448043823242, 0.017305856704711915, 0.01700761604309082, 0.01697430419921875, 0.017039840698242187, 0.017535295486450195, 0.017040319442749023, 0.01725484848022461, 0.01695737648010254, 0.016882047653198243, 0.01684105682373047, 0.016762527465820312, 0.016664575576782227, 0.016594240188598633, 0.016726879119873046, 0.017020767211914062, 0.016680959701538087, 0.016610336303710938, 0.016784351348876955, 0.016936960220336913, 0.016821920394897463, 0.01663164710998535, 0.016510528564453127, 0.016382911682128905, 0.01631161689758301, 0.016185760498046875, 0.016167135238647462, 0.01607481575012207, 0.016027872085571288, 0.015972096443176268, 0.016181280136108398, 0.016336223602294923, 0.01619980812072754, 0.016277759552001954, 0.01607504081726074, 0.016042240142822267, 0.01608710479736328, 0.016051807403564454, 0.015998208045959472, 0.016018016815185547, 0.015943776130676268, 0.0159518404006958, 0.015960000038146974, 
0.01622163200378418, 0.01594655990600586, 0.01612710380554199, 0.01646384048461914, 0.016499616622924804, 0.01665007972717285, 0.016666688919067384, 0.01682032012939453, 0.016702783584594726, 0.016658464431762696, 0.01659123229980469, 0.016638368606567384, 0.016604671478271483, 0.01649087905883789, 0.016339967727661133, 0.016240928649902345, 0.01627209663391113, 0.01640185546875, 0.015913344383239745, 0.01610563278198242, 0.01603318405151367, 0.01613871955871582, 0.016031648635864256, 0.015953248023986816, 0.015944064140319823, 0.016022016525268554, 0.01593958377838135, 0.016121856689453123, 0.016116863250732423, 0.01617395210266113, 0.016563711166381837, 0.016882240295410158, 0.016961248397827148, 0.01662918472290039, 0.01664886474609375, 0.016606592178344728, 0.016558080673217773, 0.016777984619140623, 0.016706783294677733, 0.016675615310668947, 0.016506784439086913, 0.01636751937866211, 0.01631417655944824, 0.016656576156616212, 0.016218496322631837, 0.016076000213623046, 0.01619209671020508, 0.01617638397216797, 0.01624937629699707, 0.016162975311279297, 0.016457439422607422, 0.016240991592407227, 0.01632592010498047, 0.01630454444885254, 0.016350528717041016, 0.016315391540527344, 0.016330816268920897, 0.01635526466369629, 0.01661238479614258, 0.016789791107177734, 0.01665836715698242, 0.01684940719604492, 0.016519392013549804, 0.01645891189575195, 0.016404863357543945, 0.01650124740600586, 0.016618560791015625, 0.016290752410888672, 0.01639200019836426, 0.01644985580444336, 0.01644940757751465, 0.01643107223510742, 0.016369184494018554, 0.01630873680114746, 0.016234495162963866, 0.01625935935974121, 0.016124671936035156, 0.016102367401123047, 0.01619913673400879, 0.016269920349121093, 0.016273344039916992]",tokens/s,59.63465867431373,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking 
context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,7393.939456,7808.679936,0.0,7430.209536,7414.23104,s,1,11.698818359375,11.698818359375,0.0,11.698818359375,11.698818359375,11.698818359375,11.698818359375,[11.698818359375],,kWh,0.00013635895708749785,1.503068300448296e-05,4.451781339201433e-05,0.00019590745348399514,,MB,1706.82368,8584.626176,0.0,8176.795648,8052.041728,s,10,7.289791870117187,0.7289791870117187,0.0011240875603674401,0.7291243591308594,0.7299213500976562,0.7300348937988281,0.7301257287597657,"[0.7261265258789062, 0.7281160888671875, 0.7288939819335938, 0.7288358154296875, 0.7288203125, 0.729354736328125, 0.729755859375, 0.729843994140625, 0.7301484375, 0.7298961181640625]",tokens/s,351.1760068890481,kWh,2.1263119047915778e-05,2.3449535359086695e-06,1.421509073714296e-05,3.782316332096741e-05,tokens/kWh,6768339.227144586,MB,1711.788032,8731.426816,0.0,8323.596288,8263.496192,s,10,33.483260986328126,3.3483260986328127,0.006480873101779639,3.347747680664063,3.3554736572265624,3.356179089355469,3.356743435058594,"[3.334534912109375, 3.34156396484375, 3.347009765625, 3.3464755859375, 3.348485595703125, 3.346180419921875, 3.353955078125, 3.352854248046875, 3.35531689453125, 3.356884521484375]",tokens/s,18.81537166458313,kWh,9.774259868624956e-05,1.0781551038561054e-05,6.486063919005557e-05,0.0001733847889148662,tokens/kWh,363353.6736082061,,s,630,33.47922916412354,0.05314163359384689,0.0006883491317176238,0.05311716842651368,0.053848936462402346,0.05402839469909668,0.05579953453063965,"[0.05493145751953125, 0.052621311187744144, 0.052209663391113284, 0.05240342330932617, 0.05208067321777344, 0.05200563049316406, 0.05291337585449219, 0.052299808502197266, 0.05230259323120117, 0.052393985748291017, 0.05225471878051758, 0.052217281341552735, 0.05221638488769531, 0.05261062240600586, 0.052351425170898434, 0.052332542419433595, 0.05280767822265625, 0.0530882568359375, 0.05320908737182617, 0.05304729461669922, 0.052792415618896485, 0.05274307250976563, 0.05238784027099609, 0.05242630386352539, 0.0529117431640625, 0.05258528137207031, 0.052744190216064454, 0.05286038589477539, 0.05258607864379883, 0.05267516708374023, 0.05284694290161133, 0.05271142578125, 0.05303615951538086, 0.05292428970336914, 0.05300735855102539, 0.0530873908996582, 0.05343318557739258, 0.05355724716186523, 
0.05349903869628906, 0.05301129531860352, 0.05314121627807617, 0.052979999542236325, 0.05258966445922852, 0.05317110443115235, 0.052940479278564455, 0.05301484680175781, 0.05323129653930664, 0.052940128326416015, 0.05268278503417969, 0.05318137741088867, 0.05358732986450195, 0.05342643356323242, 0.053371265411376954, 0.05345894241333008, 0.053269985198974606, 0.05354038238525391, 0.05348659133911133, 0.05336265563964844, 0.05361600112915039, 0.05357635116577148, 0.05342771148681641, 0.05302479934692383, 0.052967166900634764, 0.0562625617980957, 0.05240182495117188, 0.05183139038085938, 0.052199104309082034, 0.05209542465209961, 0.05272937774658203, 0.052341217041015624, 0.05176115036010742, 0.052086784362792966, 0.051943103790283204, 0.05230326461791992, 0.055758750915527344, 0.051555328369140625, 0.052628894805908204, 0.05269952011108398, 0.052386016845703126, 0.052703231811523435, 0.05356710433959961, 0.054382080078125, 0.05368921661376953, 0.05243699264526367, 0.052754432678222656, 0.05243494415283203, 0.052531200408935545, 0.05286656188964844, 0.05230339050292969, 0.05253014373779297, 0.05227695846557617, 0.05305168151855469, 0.05287859344482422, 0.05251148986816406, 0.05288755035400391, 0.05275852966308594, 0.05283020782470703, 0.05355708694458008, 0.053591873168945314, 0.05387299346923828, 0.05370169448852539, 0.053125438690185545, 0.053336704254150394, 0.052746238708496096, 0.053067649841308594, 0.053213310241699216, 0.052915679931640626, 0.05280188751220703, 0.05282835388183594, 0.05291417694091797, 0.05292639923095703, 0.05269916915893555, 0.05342006301879883, 0.053284191131591795, 0.053448768615722654, 0.05364582443237305, 0.05393139266967773, 0.05372726440429688, 0.05354905700683594, 0.05347398376464844, 0.05366579055786133, 0.05342950439453125, 0.05333606338500976, 0.05310950469970703, 0.053362144470214846, 0.05407798385620117, 0.05581619262695312, 0.0528238410949707, 0.05232662582397461, 0.05263683319091797, 0.052087646484375, 0.053483455657958985, 0.051933025360107424, 0.051953887939453124, 0.0524758415222168, 0.05196294403076172, 0.05230640029907226, 0.05290835189819336, 0.05311510467529297, 0.052923969268798825, 0.05240467071533203, 0.052836353302001954, 0.052822017669677736, 0.053766143798828124, 0.05369651031494141, 0.053356544494628906, 0.052910079956054686, 0.05284828948974609, 0.05255936050415039, 0.05259964752197266, 0.05247795104980469, 0.05276662445068359, 0.05279862213134766, 0.052813793182373045, 0.05274652862548828, 0.052811870574951174, 0.052934558868408206, 0.05315449523925781, 0.05305702209472656, 0.05329318237304687, 0.053556961059570314, 0.053501758575439456, 0.053547103881835936, 0.05353478240966797, 0.05322412872314453, 0.05323980712890625, 0.05323763275146484, 0.05356294250488281, 0.052996673583984376, 0.05302272033691406, 0.05289574432373047, 0.05362278366088867, 0.05313740921020508, 0.05314559936523437, 0.053207038879394535, 0.05348556900024414, 0.05316182327270508, 0.05363916778564453, 0.0538658561706543, 0.053555999755859375, 0.0539607048034668, 0.05378867340087891, 0.05361395263671875, 0.05360403060913086, 0.05344268798828125, 0.05324473571777344, 0.05320435333251953, 0.05347734451293945, 0.05374224090576172, 0.055185375213623045, 0.05253283309936523, 0.05254019165039062, 0.05204592132568359, 0.052381694793701174, 0.052514816284179686, 0.05263504028320312, 0.05229529571533203, 0.052253662109375, 0.052744190216064454, 0.05252710342407227, 0.05256508636474609, 0.05278543853759766, 0.05269977569580078, 0.05282598495483398, 0.05220512008666992, 
0.052716064453125, 0.053847358703613284, 0.05342281723022461, 0.053556705474853514, 0.053015071868896486, 0.05296870422363281, 0.05281049728393555, 0.05262905502319336, 0.05266476821899414, 0.05292998504638672, 0.052528831481933595, 0.052888446807861325, 0.05273948669433594, 0.053224033355712894, 0.05314252853393555, 0.052980735778808595, 0.05342313766479492, 0.05330425643920898, 0.05301046371459961, 0.053233665466308595, 0.05353267288208008, 0.05370675277709961, 0.05351833724975586, 0.05379398345947266, 0.05338390350341797, 0.052787296295166014, 0.05303091049194336, 0.05342172622680664, 0.05324137496948242, 0.053050174713134765, 0.05305753707885742, 0.053579776763916016, 0.053515743255615235, 0.0535700798034668, 0.0530780143737793, 0.05344870376586914, 0.05387468719482422, 0.054368255615234375, 0.05363622283935547, 0.05344659042358398, 0.05354927825927734, 0.05397699356079102, 0.05351507186889649, 0.05328486251831055, 0.05285007858276367, 0.05295513534545898, 0.05321788787841797, 0.055826431274414064, 0.05284454345703125, 0.052217857360839844, 0.052176895141601565, 0.052258815765380856, 0.05227315139770508, 0.05202329635620117, 0.0524901123046875, 0.05209715270996094, 0.052373504638671874, 0.05297478485107422, 0.052902366638183596, 0.05253968048095703, 0.05296511840820312, 0.053399871826171875, 0.05318608093261719, 0.05256035232543945, 0.053464927673339845, 0.053674144744873045, 0.05337427139282227, 0.05307577514648437, 0.05361958312988281, 0.05301763153076172, 0.052783649444580076, 0.05261151885986328, 0.05239603042602539, 0.053348350524902347, 0.05244432067871094, 0.052654945373535156, 0.052950687408447265, 0.05270767974853516, 0.05285859298706055, 0.0534879035949707, 0.053079967498779294, 0.05400092697143555, 0.053256801605224606, 0.053722686767578125, 0.053652065277099606, 0.05321852874755859, 0.05324812698364258, 0.053535457611083984, 0.05325209426879883, 0.05333724975585938, 0.05340041732788086, 0.052888961791992185, 0.0528752326965332, 0.05268239974975586, 0.05306412887573242, 0.053123550415039064, 0.05314704132080078, 0.053451038360595705, 0.05346063995361328, 0.0532732162475586, 0.05395059204101563, 0.05392793655395508, 0.05399715042114258, 0.0539785270690918, 0.05428022384643555, 0.05346607971191406, 0.05337811279296875, 0.05321209716796875, 0.0534950065612793, 0.053153633117675785, 0.05600652694702148, 0.05279414367675781, 0.052055999755859374, 0.05234899139404297, 0.05182793426513672, 0.052577056884765626, 0.052359169006347656, 0.0521965446472168, 0.0523702392578125, 0.05218918228149414, 0.05254492950439453, 0.05206249618530273, 0.05236905670166016, 0.053332382202148435, 0.05291443252563476, 0.05314355087280274, 0.053352481842041014, 0.053528224945068356, 0.053825408935546874, 0.05348191833496094, 0.05333401489257812, 0.053059585571289064, 0.052319263458251955, 0.052759521484375, 0.05257839965820312, 0.05220547103881836, 0.05246297454833984, 0.05235980987548828, 0.05268473434448242, 0.05243283081054687, 0.0524453125, 0.05306777572631836, 0.05288140869140625, 0.053411838531494144, 0.05330124664306641, 0.05346057510375977, 0.053770561218261716, 0.05398537445068359, 0.053524673461914064, 0.053200382232666016, 0.05339123153686524, 0.05335065460205078, 0.05334364700317383, 0.0531033935546875, 0.053364704132080075, 0.05305142211914062, 0.05300656127929688, 0.0526616325378418, 0.05293641662597656, 0.052678913116455076, 0.05339123153686524, 0.053567264556884764, 0.05351299285888672, 0.05355705642700195, 0.05402848052978516, 0.05429043197631836, 0.05393923187255859, 
0.05455971145629883, 0.05292851257324219, 0.053553150177001956, 0.0533831672668457, 0.05315379333496094, 0.05447782516479492, 0.055576545715332035, 0.05284864044189453, 0.05203510284423828, 0.05224905776977539, 0.05198361587524414, 0.05374233627319336, 0.052342784881591796, 0.05249407958984375, 0.05284048080444336, 0.052525279998779296, 0.052555553436279295, 0.05289299011230469, 0.052894622802734374, 0.05281689453125, 0.052782081604003904, 0.053579776763916016, 0.053184513092041016, 0.053818527221679686, 0.0537237434387207, 0.053403903961181644, 0.0526798095703125, 0.052806526184082034, 0.052711071014404295, 0.0525142707824707, 0.05263449478149414, 0.05306777572631836, 0.052813278198242185, 0.05298640060424804, 0.052653247833251954, 0.05239072036743164, 0.05313494491577148, 0.053057758331298825, 0.05281811141967773, 0.05361385726928711, 0.0538631362915039, 0.05439827346801758, 0.05378937530517578, 0.05356748962402344, 0.05382291030883789, 0.05374585723876953, 0.05305996704101563, 0.05279286575317383, 0.053105056762695314, 0.05342585754394531, 0.05337945556640625, 0.0531467514038086, 0.0529826545715332, 0.0527215690612793, 0.053195999145507815, 0.05324185562133789, 0.05323798370361328, 0.0540228157043457, 0.05395455932617187, 0.05390713500976563, 0.053948734283447264, 0.054072479248046874, 0.053911582946777344, 0.05361721420288086, 0.05346739196777344, 0.0532209587097168, 0.05400822448730469, 0.05402828979492187, 0.05370598220825195, 0.05594275283813477, 0.05277942276000976, 0.05242675018310547, 0.052160511016845705, 0.05207606506347656, 0.052744670867919924, 0.052621311187744144, 0.052787200927734375, 0.051950817108154294, 0.05259107208251953, 0.052674880981445314, 0.052176895141601565, 0.05203353500366211, 0.05243084716796875, 0.05292572784423828, 0.053243934631347654, 0.052932544708251955, 0.05363759994506836, 0.054151233673095704, 0.05362646484375, 0.05333260726928711, 0.053225345611572265, 0.052922431945800784, 0.05281523132324219, 0.052615135192871094, 0.052982494354248046, 0.05284249496459961, 0.05249001693725586, 0.05311923217773438, 0.05274803161621094, 0.05269712066650391, 0.05246380615234375, 0.05294720077514648, 0.053577022552490236, 0.05373110580444336, 0.0536910400390625, 0.05415935897827148, 0.054075199127197264, 0.053712257385253905, 0.05348406219482422, 0.05341136169433594, 0.05282643127441406, 0.053563232421875, 0.053486175537109375, 0.052833633422851564, 0.052929183959960935, 0.05337001419067383, 0.05312182235717773, 0.0533966064453125, 0.05364012908935547, 0.05350137710571289, 0.05349228668212891, 0.05347865676879883, 0.053615360260009765, 0.05383913421630859, 0.05374003219604492, 0.05371023941040039, 0.05404345703125, 0.05364940643310547, 0.05397401428222656, 0.05393920135498047, 0.05357068634033203, 0.05372198486328125, 0.05515468978881836, 0.05246566390991211, 0.05217279815673828, 0.051975872039794924, 0.051951648712158204, 0.052652320861816405, 0.052530719757080076, 0.052273632049560544, 0.05228300857543945, 0.05311222457885742, 0.052810718536376956, 0.05252703857421875, 0.05240019226074219, 0.0533072624206543, 0.05325222396850586, 0.053300960540771485, 0.05331372833251953, 0.05382092666625977, 0.05386710357666016, 0.053354496002197264, 0.05282592010498047, 0.05412268829345703, 0.053215232849121094, 0.0526250228881836, 0.052578113555908204, 0.05295468902587891, 0.05300089645385742, 0.05315001678466797, 0.053526527404785154, 0.0530513916015625, 0.05293260955810547, 0.053473281860351565, 0.053333248138427734, 0.053252864837646484, 0.05345024108886719, 
0.05319631958007812, 0.05397808074951172, 0.053544960021972655, 0.053528575897216796, 0.05331558227539063, 0.05355110549926758, 0.05299814224243164, 0.05299135971069336, 0.05271590423583984, 0.053474559783935546, 0.05366886520385742, 0.05406515121459961, 0.053872638702392575, 0.053815105438232425, 0.05305567932128906, 0.05336064147949219, 0.053520126342773436, 0.05347081756591797, 0.05386921691894531, 0.05418393707275391, 0.054013950347900394, 0.05402624130249024, 0.05353993606567383, 0.05343939208984375, 0.05345183944702148, 0.05323052978515625, 0.05346918487548828, 0.05356748962402344, 0.05700185775756836, 0.058417919158935544, 0.051490943908691404, 0.052127742767333986, 0.052142078399658204, 0.05259190368652344, 0.05243363189697266, 0.05227724838256836, 0.052028446197509765, 0.05258134460449219, 0.05239622497558594, 0.05247078323364258, 0.052564449310302734, 0.05321295928955078, 0.05318531036376953, 0.05319795227050781, 0.052966239929199216, 0.05363481521606445, 0.054828094482421874, 0.05377705764770508, 0.05297187042236328, 0.05234044647216797, 0.052399391174316405, 0.0529332160949707, 0.053010143280029294, 0.05254419326782227, 0.05277692794799805, 0.0529815673828125, 0.052977535247802736, 0.05285209655761719, 0.05329983901977539, 0.053364128112792966, 0.0541357421875, 0.05361372756958008, 0.053412704467773436, 0.05355110549926758, 0.05398070526123047, 0.05361916732788086, 0.053319679260253904, 0.053431903839111325, 0.0534224967956543, 0.05310873413085938, 0.05303209686279297, 0.053220062255859374, 0.05318870544433594, 0.052921409606933596, 0.05282096099853516, 0.05281382369995117, 0.05282137680053711, 0.05352640151977539, 0.05353958511352539, 0.05324556732177734, 0.05349619293212891, 0.053661697387695315, 0.05348873519897461, 0.053690784454345705, 0.0541308479309082, 0.05346953582763672, 0.05354086303710937, 0.053477375030517575, 0.053460990905761716, 0.0535654411315918, 0.0539986572265625]",tokens/s,18.817637554066216,,, 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 67, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 103, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", 
line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 178, in run_model_loading_tracking context_stack.enter_context(energy_tracker.track()) File ""/usr/lib/python3.10/contextlib.py"", line 492, in enter_context result = _cm_type.__enter__(cm) File ""/usr/lib/python3.10/contextlib.py"", line 135, in __enter__ return next(self.gen) File ""/workspace/optimum_benchmark/trackers/energy.py"", line 173, in track self.emission_tracker.start_task() File ""/usr/local/lib/python3.10/dist-packages/codecarbon/emissions_tracker.py"", line 547, in start_task if self._scheduler: AttributeError: 'EmissionsTracker' object has no attribute '_scheduler' " 4bit-awq-gemv-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,MB,4144.49664,4758.306816,0.0,4372.56192,4292.521984,s,1,10.4293017578125,10.4293017578125,0.0,10.4293017578125,10.4293017578125,10.4293017578125,10.4293017578125,[10.4293017578125],,kWh,8.12598212416636e-05,8.956433150981957e-06,2.740918859399366e-05,0.00011762544298663922,,MB,1448.513536,5045.61664,0.0,4628.414464,4562.7008,s,10,3.536725067138672,0.3536725067138672,0.0010365985162686803,0.3536701354980469,0.3547701751708984,0.3550686050415039,0.35530734893798827,"[0.354580810546875, 0.35204742431640623, 0.3529721374511719, 0.3529495849609375, 0.35349517822265625, 0.35235699462890624, 0.35536703491210936, 0.3544069519042969, 0.3538450927734375, 0.354703857421875]",tokens/s,723.8334762818092,kWh,1.0543396892709704e-05,1.1627320151976334e-06,7.039271504428913e-06,1.874540041233625e-05,tokens/kWh,13656683.472684197,MB,1475.223552,5045.61664,0.0,4628.414464,4562.70336,s,10,33.19560693359375,3.3195606933593753,0.02389785773210965,3.3302711181640623,3.3352509521484377,3.3359737426757814,3.3365519750976564,"[3.3291591796875, 3.33509033203125, 3.329411865234375, 3.33113037109375, 3.333496337890625, 3.320712646484375, 3.336696533203125, 3.274879150390625, 3.270264892578125, 3.334765625]",tokens/s,18.97841486255351,kWh,9.39194101339596e-05,1.035939476667848e-05,5.7941252702170644e-05,0.00016222005760280873,tokens/kWh,388361.34650040464,,s,630,33.192855087280286,0.052687071567111554,0.0006219954933963221,0.05272731208801269,0.053151857376098635,0.05354354648590088,0.05452835025787354,"[0.05453004837036133, 0.05418598556518555, 0.052546974182128905, 0.052777217864990233, 0.05270364761352539, 0.05271955108642578, 0.05258444976806641, 0.05268479919433594, 0.0531385612487793, 0.05264064025878906, 0.0531701774597168, 0.0526376953125, 0.05305344009399414, 0.05290518569946289, 0.052625694274902345, 0.052635585784912106, 0.05266080093383789, 0.05272383880615234, 0.05256201553344726, 0.05262102508544922, 0.052643905639648436, 0.052606464385986325, 0.05373936080932617, 0.05269136047363281, 0.052469600677490236, 0.05267635345458985, 0.05256572723388672, 0.05272886276245117, 0.052671905517578124, 
0.05309286499023438, 0.05294307327270508, 0.052730945587158205, 0.0531808967590332, 0.05265843200683594, 0.05266592025756836, 0.052978111267089845, 0.05301145553588867, 0.052673534393310545, 0.0526295051574707, 0.05261651229858398, 0.05244163131713867, 0.05299625778198242, 0.052803585052490234, 0.052615169525146485, 0.052845600128173825, 0.05251785659790039, 0.05261283111572266, 0.05281977462768555, 0.052688640594482423, 0.052668704986572265, 0.052541889190673825, 0.052555774688720705, 0.052631553649902345, 0.05255987167358398, 0.052872928619384765, 0.054524192810058596, 0.053155296325683596, 0.053508384704589844, 0.052794750213623044, 0.05266255950927735, 0.052595294952392575, 0.05272102355957031, 0.05268339157104492, 0.053532512664794925, 0.052717601776123044, 0.05274848175048828, 0.05283407974243164, 0.05281763076782227, 0.05297574234008789, 0.052668094635009766, 0.05264432144165039, 0.05286921691894531, 0.05276457595825195, 0.0526929931640625, 0.05264556884765625, 0.05272403335571289, 0.053018718719482424, 0.05281548690795899, 0.0529268798828125, 0.05448076629638672, 0.052770816802978515, 0.0527050895690918, 0.05269318389892578, 0.05267007827758789, 0.052580352783203124, 0.05270566558837891, 0.05287705612182617, 0.052902145385742186, 0.05324390411376953, 0.05289123153686524, 0.052566368103027346, 0.05315180969238281, 0.05279948806762695, 0.05245542526245117, 0.052565502166748046, 0.05277084732055664, 0.05272150421142578, 0.05277145767211914, 0.05302828979492188, 0.05283878326416016, 0.05291641616821289, 0.052482048034667966, 0.052602081298828124, 0.052662784576416016, 0.05281766510009766, 0.05280412673950195, 0.05289075088500977, 0.05283638381958008, 0.05277302551269531, 0.052697792053222656, 0.05279129409790039, 0.05309030532836914, 0.05279344177246094, 0.05274342346191406, 0.05299881744384766, 0.05295119857788086, 0.05444707107543945, 0.05342031860351563, 0.05320355224609375, 0.05293670272827149, 0.05306351852416992, 0.05292252731323242, 0.05286297607421875, 0.05294079971313476, 0.05556592178344726, 0.053011009216308594, 0.05408883285522461, 0.05301456069946289, 0.052961727142333985, 0.05310092926025391, 0.05268867111206055, 0.05296876907348633, 0.052496734619140624, 0.05275680160522461, 0.05286115264892578, 0.05309427261352539, 0.05285424041748047, 0.053115615844726564, 0.05263974380493164, 0.05272931289672852, 0.052571712493896486, 0.052628448486328125, 0.05273180770874023, 0.052639007568359375, 0.052466495513916016, 0.05285273742675781, 0.05258444976806641, 0.05247087860107422, 0.05233977508544922, 0.05263475036621094, 0.052439777374267575, 0.05261116790771484, 0.0525577278137207, 0.05254467010498047, 0.05273820877075195, 0.052690750122070314, 0.05289459228515625, 0.05282729721069336, 0.05254435348510742, 0.05250457763671875, 0.05255500793457031, 0.0526317138671875, 0.05273846435546875, 0.05250678253173828, 0.053088542938232425, 0.05267388916015625, 0.052401920318603516, 0.052985824584960935, 0.052597438812255856, 0.052780193328857423, 0.05297443389892578, 0.05283430480957031, 0.052698623657226565, 0.052942367553710935, 0.05257731246948242, 0.05291206359863281, 0.052795391082763675, 0.0551978874206543, 0.05291161727905273, 0.052822334289550785, 0.05250457763671875, 0.05254150390625, 0.05241644668579101, 0.05293056106567383, 0.055887264251708986, 0.052818527221679686, 0.053000190734863284, 0.052467456817626955, 0.053284385681152346, 0.0538004150390625, 0.05298640060424804, 0.052703231811523435, 0.052641376495361325, 0.05263776016235352, 0.052528705596923825, 0.05305382537841797, 
0.052486560821533204, 0.05257961654663086, 0.05284499359130859, 0.052762718200683595, 0.0525080337524414, 0.05271366500854492, 0.052634239196777344, 0.0529257926940918, 0.05301724624633789, 0.05273564910888672, 0.05253977584838867, 0.053012447357177736, 0.0527749137878418, 0.05276467132568359, 0.05275852966308594, 0.052757598876953124, 0.0527184944152832, 0.05295308685302735, 0.05334348678588867, 0.05277948760986328, 0.0529810562133789, 0.052593631744384764, 0.05286297607421875, 0.052646942138671875, 0.05283871841430664, 0.05268547058105469, 0.05295663833618164, 0.05330998229980469, 0.054046718597412106, 0.05310464096069336, 0.053746910095214845, 0.05305014419555664, 0.05304220962524414, 0.05292707061767578, 0.05304780960083008, 0.05313113784790039, 0.05300051116943359, 0.053282207489013675, 0.052719295501708986, 0.05282057571411133, 0.05292031860351563, 0.05259161758422851, 0.052824222564697265, 0.05285769653320312, 0.052762622833251956, 0.05278310394287109, 0.05319680023193359, 0.05266022491455078, 0.05272371292114258, 0.05294899368286133, 0.0531104621887207, 0.05262921524047852, 0.052521568298339844, 0.0524810562133789, 0.0525948486328125, 0.052480831146240234, 0.05432944107055664, 0.05279689788818359, 0.052668800354003904, 0.05308428955078125, 0.05260083389282227, 0.05284566497802735, 0.05253007888793945, 0.05285798263549805, 0.05247884750366211, 0.05243904113769531, 0.052539390563964845, 0.05244723129272461, 0.052719776153564456, 0.05272576141357422, 0.05270460891723633, 0.05304147338867188, 0.05341817474365235, 0.052848831176757816, 0.05278822326660156, 0.05275872039794922, 0.05298604965209961, 0.05333446502685547, 0.05309439849853516, 0.052817214965820314, 0.05275513458251953, 0.052961280822753906, 0.052992000579833984, 0.052805503845214846, 0.05299417495727539, 0.05268844985961914, 0.05305286407470703, 0.053203968048095705, 0.05273324966430664, 0.055652606964111326, 0.05308691024780274, 0.052606719970703125, 0.052442848205566404, 0.05245775985717773, 0.05261452865600586, 0.05266672134399414, 0.05248259353637695, 0.05446630477905273, 0.052948928833007815, 0.05287145614624023, 0.05293142318725586, 0.053009342193603516, 0.05288460922241211, 0.052697982788085934, 0.0525968017578125, 0.05282550430297851, 0.053152286529541015, 0.05260697555541992, 0.05246361541748047, 0.052964702606201175, 0.05300428771972656, 0.052560543060302736, 0.05280464172363281, 0.052646881103515626, 0.05317158508300781, 0.053056129455566405, 0.05304905700683594, 0.052717857360839844, 0.05274009704589844, 0.05421321487426758, 0.05280752182006836, 0.05259280014038086, 0.05243036651611328, 0.05255392074584961, 0.052635326385498046, 0.052779617309570315, 0.052811775207519535, 0.053077728271484374, 0.052933952331542966, 0.05284070587158203, 0.0527408332824707, 0.052899166107177736, 0.05290460968017578, 0.052534366607666014, 0.052738014221191405, 0.05276768112182617, 0.052633663177490235, 0.05261875152587891, 0.05266044616699219, 0.05260515213012695, 0.052580352783203124, 0.05251686477661133, 0.05247520065307617, 0.05260358428955078, 0.05260083389282227, 0.05263292694091797, 0.05277299118041992, 0.052595199584960936, 0.05250051116943359, 0.05250048065185547, 0.05257011032104492, 0.054056961059570315, 0.05277286529541016, 0.052647937774658204, 0.05260902404785156, 0.052602817535400394, 0.05253740692138672, 0.05250559997558594, 0.05251379013061523, 0.052490238189697266, 0.0523403205871582, 0.05223571014404297, 0.05246620941162109, 0.05251321411132812, 0.05266409683227539, 0.0526231689453125, 0.05260124969482422, 
0.0525926399230957, 0.05262335968017578, 0.052701087951660154, 0.05305708694458008, 0.05262294387817383, 0.05267075347900391, 0.05248681640625, 0.052682849884033205, 0.05280758285522461, 0.05292816162109375, 0.052648288726806644, 0.05324998474121094, 0.05260483169555664, 0.05282819366455078, 0.05263391876220703, 0.05406403350830078, 0.05313324737548828, 0.0527883186340332, 0.05279759979248047, 0.05262745666503906, 0.052755264282226565, 0.05298499298095703, 0.05279401779174805, 0.05259443283081055, 0.05233299255371094, 0.05262540817260742, 0.05244313430786133, 0.05257567977905273, 0.052509246826171876, 0.05269504165649414, 0.05454828643798828, 0.05287923049926758, 0.053518497467041015, 0.05249235153198242, 0.052492385864257814, 0.052774368286132814, 0.05260902404785156, 0.0524683837890625, 0.05248396682739258, 0.05282815933227539, 0.052779006958007815, 0.05280502319335938, 0.05368278503417969, 0.053028865814208986, 0.05269424057006836, 0.05286377716064453, 0.0529153938293457, 0.05289446258544922, 0.05357164764404297, 0.05287510299682617, 0.052717727661132814, 0.05292348861694336, 0.05281603240966797, 0.05288560104370117, 0.0528636474609375, 0.053249534606933595, 0.05293107223510742, 0.05485097503662109, 0.05357833480834961, 0.05303091049194336, 0.05276467132568359, 0.05412995147705078, 0.052597087860107423, 0.05283878326416016, 0.052686847686767575, 0.05300428771972656, 0.052707073211669925, 0.052746017456054686, 0.05346140670776367, 0.05289990234375, 0.05271756744384765, 0.05293260955810547, 0.053065727233886716, 0.05303718566894531, 0.053028736114501956, 0.052951038360595705, 0.05311283111572265, 0.05297350311279297, 0.05380764770507813, 0.05323209762573242, 0.05303910446166992, 0.052340736389160154, 0.05210892868041992, 0.05210524749755859, 0.05234473419189453, 0.052732353210449216, 0.05226208114624024, 0.05229347229003906, 0.05166761779785156, 0.05355257415771485, 0.05407628631591797, 0.05377347183227539, 0.05298854446411133, 0.052343006134033206, 0.051537631988525394, 0.051456287384033204, 0.05226092910766601, 0.05196918487548828, 0.0521940803527832, 0.05341996765136719, 0.05383375930786133, 0.052176673889160155, 0.05199398422241211, 0.051967937469482424, 0.052332481384277346, 0.052478977203369144, 0.052258815765380856, 0.05236838531494141, 0.0523581428527832, 0.05151881790161133, 0.051333984375, 0.05190361785888672, 0.051495616912841796, 0.05128953552246094, 0.05123328018188476, 0.05122259140014648, 0.05107475280761719, 0.05129167938232422, 0.051628353118896485, 0.051261310577392576, 0.051321502685546874, 0.0511262092590332, 0.05130649566650391, 0.05123481750488281, 0.05158911895751953, 0.051961856842041014, 0.05115315246582031, 0.051805950164794924, 0.05187583923339844, 0.051418880462646484, 0.05141904067993164, 0.051405120849609375, 0.05138025665283203, 0.05124915313720703, 0.05146214294433594, 0.05139772796630859, 0.051667743682861325, 0.05158515167236328, 0.051138687133789065, 0.051230239868164065, 0.05134985733032227, 0.05277536010742188, 0.0514150390625, 0.05171993637084961, 0.05141113662719726, 0.05149651336669922, 0.05137868881225586, 0.05137311935424805, 0.05140544128417969, 0.05131705474853516, 0.05210105514526367, 0.051413055419921874, 0.05142323303222656, 0.051330303192138674, 0.051518016815185544, 0.05350624084472656, 0.05156454467773437, 0.0513015022277832, 0.051358592987060546, 0.05144985580444336, 0.05224556732177734, 0.052009567260742184, 0.051808609008789065, 0.05149900817871094, 0.051490814208984374, 0.051509246826171876, 0.05136553573608398, 0.051466590881347654, 
0.05129951858520508, 0.05142406463623047, 0.05142252731323242, 0.05142105484008789, 0.0515910415649414, 0.05237609481811523, 0.05122294235229492, 0.05188608169555664, 0.05153561782836914, 0.05142556762695313, 0.05130851364135742, 0.05143331146240234, 0.051415199279785155, 0.05136716842651367, 0.05157759857177734, 0.051453792572021484, 0.051350849151611325, 0.05178249740600586, 0.051947265625, 0.05214575958251953, 0.052968032836914064, 0.05299347305297852, 0.05293734359741211, 0.052588638305664064, 0.052559326171875, 0.052692638397216794, 0.05270342254638672, 0.05279190444946289, 0.05271148681640625, 0.0527891845703125, 0.05288755035400391, 0.0526640625, 0.05280380630493164, 0.05271091079711914, 0.052578910827636716, 0.05255731201171875, 0.053936065673828124, 0.052799774169921876, 0.05259273529052735, 0.052836353302001954, 0.05264998245239258, 0.05267577743530273, 0.05256070327758789, 0.05310166549682617, 0.05266524887084961, 0.05296460723876953, 0.05291481781005859, 0.05247398376464844, 0.05258406448364258, 0.052654464721679686, 0.05260012817382813, 0.053009086608886716, 0.053362335205078125, 0.05300361633300781, 0.05325516891479492, 0.05274118423461914, 0.05288159942626953, 0.052701953887939454, 0.05289494323730469, 0.052955936431884766, 0.05294079971313476, 0.05273350524902344, 0.05307027053833008, 0.053265472412109376, 0.05395142364501953, 0.05330944061279297, 0.05306281661987305, 0.05279779052734375, 0.05274211120605469, 0.052740638732910156, 0.0528502082824707, 0.052761054992675784, 0.052819969177246094, 0.053120574951171874, 0.05282243347167969, 0.052834335327148436, 0.052764896392822266, 0.05299792098999023, 0.05308006286621094, 0.05279084777832031, 0.052789695739746095, 0.05281382369995117, 0.052876350402832034, 0.05297836685180664, 0.053125343322753905, 0.052908065795898435, 0.05288880157470703, 0.052816673278808596, 0.05271376037597656, 0.05318179321289063, 0.05330163192749023, 0.0530964469909668, 0.052994049072265625, 0.052908031463623044, 0.05296332931518555, 0.05309628677368164, 0.05284854507446289, 0.05303526306152344, 0.052882656097412106]",tokens/s,18.979988263842365,,, 4bit-awq-gemv-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,awq,4,gemv,,,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,MB,1041.743872,867.106816,0.0,488.636416,482.553856,s,1,7.8583193359375,7.8583193359375,0.0,7.8583193359375,7.8583193359375,7.8583193359375,7.8583193359375,[7.8583193359375],,kWh,2.8190545075002166e-05,3.1021099707537287e-06,9.49389648402521e-06,4.078655152978111e-05,,MB,1243.959296,1024.393216,0.0,616.562688,581.925888,s,10,0.33909443283081053,0.033909443283081056,0.00020581722807567733,0.033835599899291996,0.03415680732727051,0.03424669094085693,0.034318597831726076,"[0.03413683319091797, 0.03406169509887695, 0.03382361602783203, 0.0337279052734375, 0.034006336212158206, 0.033847583770751956, 0.033692222595214844, 0.03433657455444336, 0.03370121765136719, 
0.03376044845581055]",tokens/s,7549.519402688923,kWh,1.0397370668743825e-06,1.1466459175257054e-07,6.882419503558293e-07,1.8426436089827824e-06,tokens/kWh,138930826.74914163,MB,1278.291968,1039.07328,0.0,631.242752,597.192192,s,10,14.378984497070313,1.4378984497070313,0.010146728865944542,1.4366253051757814,1.4495190185546873,1.4552125732421874,1.4597674169921875,"[1.4361473388671875, 1.4241981201171876, 1.4270159912109375, 1.43397021484375, 1.4609061279296875, 1.4414090576171874, 1.4482537841796874, 1.437103271484375, 1.4393765869140625, 1.43060400390625]",tokens/s,43.81394250257111,kWh,4.168290228937644e-05,4.597195103610237e-06,1.646704955164374e-05,6.27471469446304e-05,tokens/kWh,1004029.7139819396,,s,630,14.373176967620855,0.02281456661527119,0.00047591966537088664,0.02269748783111572,0.023278319358825687,0.023458812618255612,0.024824110603332533,"[0.02253209686279297, 0.022571136474609375, 0.022504703521728515, 0.022395135879516602, 0.022505216598510742, 0.022462560653686525, 0.022640960693359375, 0.022789791107177736, 0.02298633575439453, 0.022700992584228516, 0.022674688339233397, 0.02308790397644043, 0.023357440948486328, 0.02303385543823242, 0.023007232666015624, 0.02279648017883301, 0.022486751556396484, 0.02263868713378906, 0.022525888442993164, 0.022524192810058595, 0.022453023910522462, 0.02255558395385742, 0.022415008544921875, 0.02253660774230957, 0.022458368301391602, 0.022411136627197265, 0.02248716735839844, 0.022421503067016603, 0.022444032669067384, 0.02247865676879883, 0.02266111946105957, 0.02279020881652832, 0.02285126495361328, 0.02298854446411133, 0.023042144775390624, 0.023126016616821288, 0.023278175354003908, 0.022971935272216797, 0.023128543853759766, 0.023219392776489257, 0.023202367782592773, 0.023128383636474608, 0.023088703155517577, 0.022982463836669922, 0.02269254493713379, 0.022658111572265625, 0.02259836769104004, 0.02254364776611328, 0.02278892707824707, 0.02273904037475586, 0.022889663696289062, 0.022905088424682616, 0.023042272567749024, 0.023061887741088867, 0.022924160003662108, 0.023072063446044924, 0.022760128021240233, 0.022607744216918944, 0.022668800354003905, 0.022481695175170898, 0.022887775421142578, 0.023280384063720704, 0.023572223663330078, 0.02255900764465332, 0.022659679412841797, 0.022520959854125975, 0.02251046371459961, 0.022785728454589843, 0.02328607940673828, 0.022579200744628908, 0.022503776550292967, 0.02247977638244629, 0.022552799224853516, 0.02245644760131836, 0.02234204864501953, 0.022548479080200197, 0.022411264419555665, 0.02253209686279297, 0.022506624221801757, 0.022459264755249023, 0.02253968048095703, 0.022581855773925782, 0.022728544235229492, 0.022872032165527342, 0.02268899154663086, 0.022406112670898436, 0.022495231628417968, 0.022370271682739258, 0.022411296844482422, 0.022312959671020507, 0.022366207122802736, 0.022466976165771483, 0.02246348762512207, 0.022503103256225586, 0.022456735610961915, 0.02253059196472168, 0.022494367599487305, 0.022452224731445314, 0.022745920181274415, 0.022886688232421876, 0.023256799697875977, 0.0229171199798584, 0.022747135162353514, 0.0226296329498291, 0.02270899200439453, 0.02254643249511719, 0.02250268745422363, 0.022528064727783202, 0.022614688873291017, 0.022552576065063477, 0.022410816192626953, 0.022423999786376953, 0.022461631774902343, 0.022417312622070314, 0.022436800003051757, 0.02232111930847168, 0.022386688232421875, 0.022853631973266602, 0.022730688095092773, 0.02296022415161133, 0.02296214485168457, 0.023000736236572266, 0.022878559112548828, 0.022832223892211914, 
0.022645599365234376, 0.022503711700439452, 0.022335487365722655, 0.022606912612915038, 0.02259552001953125, 0.02268083190917969, 0.022586111068725587, 0.022429792404174805, 0.022559648513793946, 0.022479040145874023, 0.02260767936706543, 0.02240716743469238, 0.02253343963623047, 0.022441791534423827, 0.022532991409301758, 0.022576160430908203, 0.022704799652099608, 0.022663679122924805, 0.022820671081542968, 0.022769792556762695, 0.022677312850952147, 0.022723648071289064, 0.022590688705444336, 0.024086591720581054, 0.022619359970092772, 0.022620672225952147, 0.022511199951171876, 0.02318377685546875, 0.022728672027587892, 0.022558656692504883, 0.022487136840820314, 0.022525951385498046, 0.022747360229492187, 0.022470048904418945, 0.022394559860229493, 0.022520095825195312, 0.022554975509643554, 0.02256697654724121, 0.022466560363769532, 0.02249113655090332, 0.022571008682250978, 0.022634496688842775, 0.02251692771911621, 0.022463296890258787, 0.02256220817565918, 0.023134687423706054, 0.022909055709838866, 0.022804479598999023, 0.022593856811523438, 0.02264031982421875, 0.022521184921264648, 0.02250819206237793, 0.022814495086669922, 0.022807872772216797, 0.02303580856323242, 0.022659328460693358, 0.02256358337402344, 0.022508768081665038, 0.022494144439697265, 0.022667007446289064, 0.022421600341796875, 0.022708223342895507, 0.022616064071655274, 0.022849248886108398, 0.022632736206054688, 0.022496992111206055, 0.022515552520751953, 0.02247657585144043, 0.022542303085327148, 0.0225020809173584, 0.02267068862915039, 0.022643360137939453, 0.02263654327392578, 0.02271379280090332, 0.0227128963470459, 0.022867231369018554, 0.02264860725402832, 0.022884416580200195, 0.02268864059448242, 0.022787872314453124, 0.022675872802734375, 0.022648639678955078, 0.022543392181396484, 0.02258367919921875, 0.02258515167236328, 0.022735616683959962, 0.022603519439697267, 0.023009056091308593, 0.02338991928100586, 0.02309622383117676, 0.02267862319946289, 0.022614112854003908, 0.022547136306762694, 0.022470016479492188, 0.02261030387878418, 0.022982912063598634, 0.022542335510253905, 0.022623584747314452, 0.022589567184448243, 0.022545087814331056, 0.02253603172302246, 0.022687871932983397, 0.022665088653564452, 0.02287820816040039, 0.02266111946105957, 0.022668800354003905, 0.02304025650024414, 0.022907232284545897, 0.02307708740234375, 0.023418624877929686, 0.02373801612854004, 0.022910335540771484, 0.02279091262817383, 0.02261417579650879, 0.022849536895751952, 0.022675455093383787, 0.022697984695434572, 0.022589439392089843, 0.02255801582336426, 0.022560543060302734, 0.022424480438232423, 0.022425792694091798, 0.02343916893005371, 0.022912384033203125, 0.022742752075195313, 0.02295814323425293, 0.023075679779052734, 0.022988800048828126, 0.022623359680175783, 0.02258390426635742, 0.02265033531188965, 0.022457151412963866, 0.02245827293395996, 0.022437343597412108, 0.022360191345214844, 0.02253379249572754, 0.02262915229797363, 0.02294790458679199, 0.027543552398681642, 0.027435327529907228, 0.02443846321105957, 0.023088672637939452, 0.022986591339111326, 0.02313075256347656, 0.023051488876342775, 0.02341894340515137, 0.023089887619018555, 0.023178272247314453, 0.023107967376708984, 0.022948448181152343, 0.022916255950927736, 0.022879072189331055, 0.02300022315979004, 0.023018560409545898, 0.023120800018310548, 0.02314249610900879, 0.02289039993286133, 0.022956928253173827, 0.02284339141845703, 0.02299014472961426, 0.022833280563354492, 0.022747200012207033, 0.022764127731323244, 0.02280790328979492, 
0.022901311874389648, 0.022923263549804687, 0.02339798355102539, 0.02314179229736328, 0.02330316734313965, 0.023355392456054686, 0.023783647537231445, 0.023586591720581054, 0.023373823165893554, 0.02375657653808594, 0.0235317440032959, 0.023475648880004883, 0.024932928085327148, 0.02328166389465332, 0.023473312377929687, 0.023229087829589844, 0.023314624786376952, 0.02296188735961914, 0.0229399356842041, 0.022746976852416993, 0.022724927902221678, 0.022839136123657226, 0.02284707260131836, 0.02270047950744629, 0.022810592651367187, 0.022480480194091795, 0.022573471069335938, 0.022275007247924805, 0.022550207138061523, 0.023072223663330078, 0.022670175552368162, 0.02262118339538574, 0.022590463638305663, 0.022828159332275392, 0.022921472549438476, 0.022975072860717774, 0.022733055114746093, 0.02256780815124512, 0.0224736328125, 0.022700159072875977, 0.02238217544555664, 0.022325023651123047, 0.022366079330444336, 0.02232588768005371, 0.02250752067565918, 0.02231283187866211, 0.022482208251953125, 0.022401887893676756, 0.022553695678710937, 0.022485023498535157, 0.022449024200439455, 0.022762847900390626, 0.022978656768798827, 0.02325356864929199, 0.023191871643066405, 0.023211103439331054, 0.023171680450439453, 0.02324684715270996, 0.023142240524291993, 0.023004703521728516, 0.023118463516235352, 0.022982175827026368, 0.02270252799987793, 0.022683679580688478, 0.022787328720092773, 0.02303164863586426, 0.02325187110900879, 0.02328153610229492, 0.023283615112304687, 0.02316444778442383, 0.02311974334716797, 0.023156991958618166, 0.02307164764404297, 0.023154144287109376, 0.023031999588012695, 0.02313612747192383, 0.023058528900146483, 0.022868000030517577, 0.022728607177734374, 0.02264192008972168, 0.02271126365661621, 0.022875776290893556, 0.022951295852661133, 0.022604671478271485, 0.02263859176635742, 0.022697023391723633, 0.022683744430541993, 0.02311404800415039, 0.02517251205444336, 0.023605247497558594, 0.022695903778076173, 0.022631839752197267, 0.022593151092529296, 0.024963455200195314, 0.024557695388793946, 0.023130815505981447, 0.02283091163635254, 0.022644704818725585, 0.022396799087524413, 0.02256912040710449, 0.02250060844421387, 0.022430463790893553, 0.022656063079833984, 0.02263520050048828, 0.022655231475830078, 0.022896223068237305, 0.025541023254394533, 0.023058496475219726, 0.022882144927978517, 0.023152767181396486, 0.02269795227050781, 0.022664512634277344, 0.02299942398071289, 0.022602048873901368, 0.022550527572631835, 0.02353971290588379, 0.022547967910766603, 0.02260633659362793, 0.022477951049804688, 0.022475648880004882, 0.022347776412963868, 0.022452224731445314, 0.022355968475341798, 0.022343679428100584, 0.022386304855346678, 0.02242188835144043, 0.022546016693115234, 0.02253455924987793, 0.02263859176635742, 0.022800384521484376, 0.022819936752319334, 0.02270262336730957, 0.022761856079101563, 0.022674911499023436, 0.022979103088378906, 0.022734848022460938, 0.02271433639526367, 0.023029792785644532, 0.023012928009033203, 0.023357759475708006, 0.02343270492553711, 0.023718528747558594, 0.023398399353027344, 0.023396352767944335, 0.023318111419677736, 0.02335545539855957, 0.023492223739624025, 0.023737056732177735, 0.023434528350830076, 0.023372512817382812, 0.023592575073242188, 0.023507232666015624, 0.02350908851623535, 0.02331148719787598, 0.023626623153686525, 0.02336288070678711, 0.023481216430664063, 0.023465120315551757, 0.023558752059936523, 0.02342470359802246, 0.023451103210449218, 0.02366304016113281, 0.022987071990966796, 0.022794208526611327, 
0.02259884834289551, 0.022854656219482423, 0.022953216552734374, 0.022876895904541016, 0.022968351364135744, 0.022791488647460938, 0.02270057678222656, 0.02254579162597656, 0.022659328460693358, 0.02253878402709961, 0.022589439392089843, 0.022490655899047852, 0.02284579277038574, 0.022740800857543944, 0.02275974464416504, 0.02272051239013672, 0.02307811164855957, 0.023063199996948242, 0.02305241584777832, 0.02265648078918457, 0.022612512588500975, 0.022622207641601562, 0.02252716827392578, 0.022515520095825196, 0.022395519256591796, 0.022452863693237304, 0.022428800582885742, 0.022450815200805663, 0.02239811134338379, 0.022465471267700196, 0.022514591217041014, 0.023055423736572267, 0.022764575958251952, 0.023169952392578123, 0.02262361526489258, 0.022487680435180665, 0.022583232879638673, 0.023009344100952147, 0.022540288925170897, 0.02260518455505371, 0.022556800842285157, 0.022884864807128907, 0.022584800720214845, 0.022546367645263674, 0.022684255599975587, 0.022601760864257813, 0.02268742370605469, 0.022495519638061522, 0.022413055419921876, 0.022508064270019532, 0.022920032501220704, 0.022791072845458983, 0.0223600959777832, 0.022822240829467773, 0.023083648681640624, 0.02327961540222168, 0.02332057571411133, 0.023275615692138672, 0.023216032028198243, 0.02305023956298828, 0.02309529685974121, 0.023136415481567384, 0.023130239486694334, 0.023096960067749025, 0.022997087478637695, 0.023050207138061524, 0.022556703567504884, 0.022537696838378907, 0.022464607238769533, 0.022485631942749024, 0.02264473533630371, 0.02281657600402832, 0.022736160278320313, 0.022897247314453126, 0.02279987144470215, 0.022714879989624022, 0.022788543701171875, 0.022972095489501954, 0.022732608795166014, 0.022902271270751954, 0.022819520950317383, 0.023005504608154297, 0.023239360809326173, 0.02314886474609375, 0.02313279914855957, 0.023248672485351562, 0.023398271560668947, 0.023087520599365235, 0.022845439910888672, 0.022899999618530273, 0.022694623947143555, 0.02279327964782715, 0.02295395278930664, 0.022744159698486328, 0.022841472625732422, 0.023104511260986327, 0.02281484794616699, 0.022760063171386718, 0.022873344421386718, 0.02270694351196289, 0.022724607467651366, 0.022705215454101563, 0.022786592483520506, 0.022674976348876955, 0.022604415893554688, 0.02278153610229492, 0.02263311958312988, 0.02249456024169922, 0.022629024505615235, 0.022482048034667967, 0.022395679473876953, 0.022394432067871093, 0.02253059196472168, 0.022468191146850586, 0.022440351486206055, 0.022271999359130858, 0.02294902420043945, 0.022653024673461915, 0.022651647567749022, 0.02256812858581543, 0.022698816299438478, 0.022757375717163086, 0.023590080261230467, 0.022707008361816407, 0.022749183654785156, 0.022571008682250978, 0.02252390480041504, 0.022525440216064452, 0.022467071533203126, 0.02299212837219238, 0.022504064559936525, 0.02247897529602051, 0.02245193672180176, 0.022513952255249024, 0.022435840606689454, 0.022550527572631835, 0.022468608856201173, 0.022576704025268554, 0.022503040313720704, 0.022424383163452147, 0.022435136795043945, 0.022532800674438476, 0.02249942398071289, 0.022589311599731446, 0.022517791748046877, 0.022435392379760742, 0.022630367279052734, 0.022534368515014648, 0.022542591094970702, 0.022498432159423827, 0.022468799591064452, 0.022489215850830076, 0.022528575897216796, 0.02272972869873047, 0.022797056198120117, 0.022689088821411133, 0.02247881507873535, 0.022490079879760743, 0.022587392807006838, 0.02243174362182617, 0.022784000396728517, 0.0226627197265625, 0.02252025604248047, 
0.022603776931762694, 0.022591487884521484, 0.02271027183532715, 0.02302070426940918, 0.02343612861633301, 0.02594108772277832, 0.02303011131286621, 0.022900447845458985, 0.02263859176635742, 0.022807392120361328, 0.022773759841918945, 0.02286809539794922, 0.02288150405883789, 0.02274086380004883, 0.02267580795288086]",tokens/s,43.83164567021136,,,